Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-01-28 22:26:36 +08:00)

Compare commits (661 commits)
[Commit list: 661 commits, identified in the capture only by SHA1 (the author and date columns were empty), beginning with 47e55ab324 and ending with 1c06ec39ca. The file-level changes follow.]
.github/copilot-instructions.md (vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
+# Project instructions for Copilot
+
+## How to run (minimum)
+- Install:
+  - python -m venv .venv && source .venv/bin/activate
+  - pip install -r requirements.txt
+- Run:
+  - (fill) e.g. uvicorn app.main:app --reload
+- Verify:
+  - (fill) curl http://127.0.0.1:8000/health
+
+## Project layout (what matters)
+- app/: API entrypoints + routers
+- services/: business logic
+- configs/: config loading (.env)
+- docs/: documents
+- tests/: pytest
+
+## Conventions
+- Prefer small, incremental changes.
+- Add logging for new flows.
+- Add/adjust tests for behavior changes.
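Note: the `(fill)` placeholders above are deliberately left for each project to complete. A minimal sketch of a completed Run/Verify pair, assuming a hypothetical FastAPI-style app in `app/main.py` that serves a `/health` route (this is not RAGFlow's actual entrypoint):

```bash
# Hypothetical fill-in for the "(fill)" lines; app.main:app and /health are
# taken from the template's own examples, not from RAGFlow.
source .venv/bin/activate
uvicorn app.main:app --reload &     # Run: start the dev server in the background
sleep 2                             # give it a moment to bind the port
curl -f http://127.0.0.1:8000/health && echo "service is up"   # Verify
```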
.github/workflows/release.yml (vendored, 41 lines changed)

@@ -3,11 +3,18 @@ name: release
 on:
   schedule:
     - cron: '0 13 * * *' # This schedule runs every 13:00:00Z(21:00:00+08:00)
-  create:
+  # https://github.com/orgs/community/discussions/26286?utm_source=chatgpt.com#discussioncomment-3251208
+  # "The create event does not support branch filter and tag filter."
+  # The "create tags" trigger is specifically focused on the creation of new tags, while the "push tags" trigger is activated when tags are pushed, including both new tag creations and updates to existing tags.
   push:
     tags:
       - "v*.*.*" # normal release
       - "nightly" # the only one mutable tag
 
+permissions:
+  contents: write
+  actions: read
+  checks: read
+  statuses: read
+
 # https://docs.github.com/en/actions/using-jobs/using-concurrency
 concurrency:

@@ -21,9 +28,9 @@ jobs:
       - name: Ensure workspace ownership
         run: echo "chown -R ${USER} ${GITHUB_WORKSPACE}" && sudo chown -R ${USER} ${GITHUB_WORKSPACE}
 
-      # https://github.com/actions/checkout/blob/v3/README.md
+      # https://github.com/actions/checkout/blob/v6/README.md
       - name: Check out code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           token: ${{ secrets.GITHUB_TOKEN }} # Use the secret as an environment variable
           fetch-depth: 0

@@ -31,12 +38,12 @@
 
       - name: Prepare release body
         run: |
-          if [[ ${GITHUB_EVENT_NAME} == "create" ]]; then
+          if [[ ${GITHUB_EVENT_NAME} != "schedule" ]]; then
            RELEASE_TAG=${GITHUB_REF#refs/tags/}
-            if [[ ${RELEASE_TAG} == "nightly" ]]; then
-              PRERELEASE=true
-            else
+            if [[ ${RELEASE_TAG} == v* ]]; then
              PRERELEASE=false
+            else
+              PRERELEASE=true
            fi
            echo "Workflow triggered by create tag: ${RELEASE_TAG}"
           else

@@ -55,7 +62,7 @@
           git fetch --tags
           if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
            # Determine if a given tag exists and matches a specific Git commit.
-            # actions/checkout@v4 fetch-tags doesn't work when triggered by schedule
+            # actions/checkout@v6 fetch-tags doesn't work when triggered by schedule
            if [ "$(git rev-parse -q --verify "refs/tags/${RELEASE_TAG}")" = "${GITHUB_SHA}" ]; then
              echo "mutable tag ${RELEASE_TAG} exists and matches ${GITHUB_SHA}"
            else

@@ -75,6 +82,14 @@
           # The body field does not support environment variable substitution directly.
           body_path: release_body.md
 
+      - name: Build and push image
+        run: |
+          sudo docker login --username infiniflow --password-stdin <<< ${{ secrets.DOCKERHUB_TOKEN }}
+          sudo docker build --build-arg NEED_MIRROR=1 --build-arg HTTPS_PROXY=${HTTPS_PROXY} --build-arg HTTP_PROXY=${HTTP_PROXY} -t infiniflow/ragflow:${RELEASE_TAG} -f Dockerfile .
+          sudo docker tag infiniflow/ragflow:${RELEASE_TAG} infiniflow/ragflow:latest
+          sudo docker push infiniflow/ragflow:${RELEASE_TAG}
+          sudo docker push infiniflow/ragflow:latest
+
       - name: Build and push ragflow-sdk
         if: startsWith(github.ref, 'refs/tags/v')
         run: |

@@ -84,11 +99,3 @@
         if: startsWith(github.ref, 'refs/tags/v')
         run: |
           cd admin/client && uv build && uv publish --token ${{ secrets.PYPI_API_TOKEN }}
-
-      - name: Build and push image
-        run: |
-          sudo docker login --username infiniflow --password-stdin <<< ${{ secrets.DOCKERHUB_TOKEN }}
-          sudo docker build --build-arg NEED_MIRROR=1 -t infiniflow/ragflow:${RELEASE_TAG} -f Dockerfile .
-          sudo docker tag infiniflow/ragflow:${RELEASE_TAG} infiniflow/ragflow:latest
-          sudo docker push infiniflow/ragflow:${RELEASE_TAG}
-          sudo docker push infiniflow/ragflow:latest
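Net effect of this change: a release is now cut by pushing a tag (the `create` event supports neither branch nor tag filters), the workflow declares explicit permissions, and the image build runs before the tag-gated SDK publish steps instead of after them. An illustrative trigger, with a made-up version number:

```bash
# Pushing a tag matching "v*.*.*" starts the workflow as a stable release;
# the version below is only an example.
git tag v9.9.9
git push origin v9.9.9

# "nightly" is the one mutable tag; force-pushing moves it and yields a prerelease.
git tag -f nightly
git push -f origin nightly
```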
.github/workflows/tests.yml (vendored, 77 lines changed)

@@ -1,4 +1,6 @@
 name: tests
+permissions:
+  contents: read
 
 on:
   push:

@@ -12,7 +14,7 @@ on:
   # The only difference between pull_request and pull_request_target is the context in which the workflow runs:
   # — pull_request_target workflows use the workflow files from the default branch, and secrets are available.
   # — pull_request workflows use the workflow files from the pull request branch, and secrets are unavailable.
-  pull_request_target:
+  pull_request:
     types: [ synchronize, ready_for_review ]
     paths-ignore:
       - 'docs/**'

@@ -31,12 +33,9 @@ jobs:
     name: ragflow_tests
     # https://docs.github.com/en/actions/using-jobs/using-conditions-to-control-job-execution
     # https://github.com/orgs/community/discussions/26261
-    if: ${{ github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'ci') }}
+    if: ${{ github.event_name != 'pull_request' || (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'ci')) }}
     runs-on: [ "self-hosted", "ragflow-test" ]
     steps:
-      # https://github.com/hmarr/debug-action
-      #- uses: hmarr/debug-action@v2
-
       - name: Ensure workspace ownership
         run: |
           echo "Workflow triggered by ${{ github.event_name }}"

@@ -44,7 +43,7 @@
 
       # https://github.com/actions/checkout/issues/1781
       - name: Check out code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           ref: ${{ (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') && format('refs/pull/{0}/merge', github.event.pull_request.number) || github.sha }}
           fetch-depth: 0

@@ -53,7 +52,7 @@
       - name: Check workflow duplication
         if: ${{ !cancelled() && !failure() }}
         run: |
-          if [[ ${GITHUB_EVENT_NAME} != "pull_request_target" && ${GITHUB_EVENT_NAME} != "schedule" ]]; then
+          if [[ ${GITHUB_EVENT_NAME} != "pull_request" && ${GITHUB_EVENT_NAME} != "schedule" ]]; then
            HEAD=$(git rev-parse HEAD)
            # Find a PR that introduced a given commit
            gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"

@@ -78,7 +77,7 @@
              fi
            fi
           fi
-          elif [[ ${GITHUB_EVENT_NAME} == "pull_request_target" ]]; then
+          elif [[ ${GITHUB_EVENT_NAME} == "pull_request" ]]; then
            PR_NUMBER=${{ github.event.pull_request.number }}
            PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
            # Calculate the hash of the current workspace content

@@ -87,6 +86,9 @@
            mkdir -p ${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}
            echo "${PR_SHA} ${GITHUB_RUN_ID}" > ${PR_SHA_FP}
           fi
+          ARTIFACTS_DIR=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/${GITHUB_RUN_ID}
+          echo "ARTIFACTS_DIR=${ARTIFACTS_DIR}" >> ${GITHUB_ENV}
+          rm -rf ${ARTIFACTS_DIR} && mkdir -p ${ARTIFACTS_DIR}
 
       # https://github.com/astral-sh/ruff-action
       - name: Static check with Ruff

@@ -98,7 +100,7 @@
       - name: Check comments of changed Python files
         if: ${{ false }}
         run: |
-          if [[ ${{ github.event_name }} == 'pull_request_target' ]]; then
+          if [[ ${{ github.event_name }} == 'pull_request' || ${{ github.event_name }} == 'pull_request_target' ]]; then
            CHANGED_FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha }}...${{ github.event.pull_request.head.sha }} \
              | grep -E '\.(py)$' || true)
 

@@ -127,13 +129,21 @@
            fi
           fi
 
+      - name: Run unit test
+        run: |
+          uv sync --python 3.12 --group test --frozen
+          source .venv/bin/activate
+          which pytest || echo "pytest not in PATH"
+          echo "Start to run unit test"
+          python3 run_tests.py
+
       - name: Build ragflow:nightly
         run: |
           RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-${HOME}}
           RAGFLOW_IMAGE=infiniflow/ragflow:${GITHUB_RUN_ID}
           echo "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> ${GITHUB_ENV}
           sudo docker pull ubuntu:22.04
-          sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t ${RAGFLOW_IMAGE} .
+          sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 --build-arg HTTPS_PROXY=${HTTPS_PROXY} --build-arg HTTP_PROXY=${HTTP_PROXY} -f Dockerfile -t ${RAGFLOW_IMAGE} .
           if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
            export HTTP_API_TEST_LEVEL=p3
           else

@@ -154,7 +164,7 @@
           INFINITY_THRIFT_PORT=$((23817 + RUNNER_NUM * 10))
           INFINITY_HTTP_PORT=$((23820 + RUNNER_NUM * 10))
           INFINITY_PSQL_PORT=$((5432 + RUNNER_NUM * 10))
-          MYSQL_PORT=$((5455 + RUNNER_NUM * 10))
+          EXPOSE_MYSQL_PORT=$((5455 + RUNNER_NUM * 10))
           MINIO_PORT=$((9000 + RUNNER_NUM * 10))
           MINIO_CONSOLE_PORT=$((9001 + RUNNER_NUM * 10))
           REDIS_PORT=$((6379 + RUNNER_NUM * 10))

@@ -174,7 +184,7 @@
           echo -e "INFINITY_THRIFT_PORT=${INFINITY_THRIFT_PORT}" >> docker/.env
           echo -e "INFINITY_HTTP_PORT=${INFINITY_HTTP_PORT}" >> docker/.env
           echo -e "INFINITY_PSQL_PORT=${INFINITY_PSQL_PORT}" >> docker/.env
-          echo -e "MYSQL_PORT=${MYSQL_PORT}" >> docker/.env
+          echo -e "EXPOSE_MYSQL_PORT=${EXPOSE_MYSQL_PORT}" >> docker/.env
           echo -e "MINIO_PORT=${MINIO_PORT}" >> docker/.env
           echo -e "MINIO_CONSOLE_PORT=${MINIO_CONSOLE_PORT}" >> docker/.env
           echo -e "REDIS_PORT=${REDIS_PORT}" >> docker/.env
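The offsets in the two hunks above exist so several runners on one self-hosted machine can bring up their own service stacks without port collisions: every base port is shifted by `RUNNER_NUM * 10`. Checking the arithmetic for a hypothetical runner index of 2:

```bash
# Derived ports for RUNNER_NUM=2 (the index itself is illustrative).
RUNNER_NUM=2
echo $((23817 + RUNNER_NUM * 10))   # INFINITY_THRIFT_PORT -> 23837
echo $((5455 + RUNNER_NUM * 10))    # EXPOSE_MYSQL_PORT    -> 5475
echo $((9000 + RUNNER_NUM * 10))    # MINIO_PORT           -> 9020
echo $((6379 + RUNNER_NUM * 10))    # REDIS_PORT           -> 6399
```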
@@ -193,34 +203,41 @@
           echo "HOST_ADDRESS=http://host.docker.internal:${SVR_HTTP_PORT}" >> ${GITHUB_ENV}
 
           sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} up -d
-          uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python
+          uv sync --python 3.12 --only-group test --no-default-groups --frozen && uv pip install sdk/python --group test
 
       - name: Run sdk tests against Elasticsearch
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
+          source .venv/bin/activate && set -o pipefail; pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api 2>&1 | tee es_sdk_test.log
 
-      - name: Run frontend api tests against Elasticsearch
+      - name: Run web api tests against Elasticsearch
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py
+          source .venv/bin/activate && set -o pipefail; pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_web_api/ 2>&1 | tee es_web_api_test.log
 
       - name: Run http api tests against Elasticsearch
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
+          source .venv/bin/activate && set -o pipefail; pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api 2>&1 | tee es_http_api_test.log
 
+      - name: Collect ragflow log
+        if: ${{ !cancelled() }}
+        run: |
+          cp -r docker/ragflow-logs ${ARTIFACTS_DIR}/ragflow-logs-es
+          echo "ragflow log" && tail -n 200 docker/ragflow-logs/ragflow_server.log
+          sudo rm -rf docker/ragflow-logs
+
       - name: Stop ragflow:nightly
         if: always() # always run this step even if previous steps failed

@@ -236,30 +253,36 @@
       - name: Run sdk tests against Infinity
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
+          source .venv/bin/activate && set -o pipefail; DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api 2>&1 | tee infinity_sdk_test.log
 
-      - name: Run frontend api tests against Infinity
+      - name: Run web api tests against Infinity
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py
+          source .venv/bin/activate && set -o pipefail; DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_web_api/ 2>&1 | tee infinity_web_api_test.log
 
       - name: Run http api tests against Infinity
         run: |
           export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
-          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS}/v1/system/ping > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
           done
-          source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
+          source .venv/bin/activate && set -o pipefail; DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api 2>&1 | tee infinity_http_api_test.log
 
+      - name: Collect ragflow log
+        if: ${{ !cancelled() }}
+        run: |
+          cp -r docker/ragflow-logs ${ARTIFACTS_DIR}/ragflow-logs-infinity
+          echo "ragflow log" && tail -n 200 docker/ragflow-logs/ragflow_server.log
+          sudo rm -rf docker/ragflow-logs
       - name: Stop ragflow:nightly
         if: always() # always run this step even if previous steps failed
         run: |
 
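Two patterns recur throughout these test steps. First, readiness polling now targets `/v1/system/ping` instead of accepting any response from the root URL. Second, each pytest invocation is wrapped in `set -o pipefail ... | tee`, so output is captured to a log file without masking the test exit status. A standalone sketch of both, assuming the server is reachable on localhost at port 9380 (the default `SVR_HTTP_PORT` in the stock docker/.env; adjust for your deployment):

```bash
# Wait until the API answers its health endpoint, then run tests while
# teeing output; pipefail keeps pytest's exit code as the step's status.
until curl -s --connect-timeout 5 http://127.0.0.1:9380/v1/system/ping > /dev/null; do
  echo "Waiting for service to be available..."
  sleep 5
done
set -o pipefail
pytest -s --tb=short test/testcases/test_http_api 2>&1 | tee http_api_test.log
```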
.gitignore (vendored, 16 lines changed)

@@ -44,6 +44,7 @@ cl100k_base.tiktoken
 chrome*
 huggingface.co/
 nltk_data/
+uv-x86_64*.tar.gz
 
 # Exclude hash-like temporary files like 9b5ad71b2ce5302211f9c61530b329a4922fc6a4
 *[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]*

@@ -51,6 +52,13 @@ nltk_data/
 .venv
 docker/data
 
+# OceanBase data and conf
+docker/oceanbase/conf
+docker/oceanbase/data
+
+# SeekDB data and conf
+docker/seekdb
+
 
 #--------------------------------------------------#
 # The following was generated with gitignore.nvim: #

@@ -195,3 +203,11 @@ ragflow_cli.egg-info
 
 # Default backup dir
 backup
+
+
+.hypothesis
+
+
+# Added by cargo
+
+/target
AGENTS.md (new file, 110 lines)

@@ -0,0 +1,110 @@
+# RAGFlow Project Instructions for GitHub Copilot
+
+This file provides context, build instructions, and coding standards for the RAGFlow project.
+It is structured to follow GitHub Copilot's [customization guidelines](https://docs.github.com/en/copilot/concepts/prompting/response-customization).
+
+## 1. Project Overview
+RAGFlow is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding. It is a full-stack application with a Python backend and a React/TypeScript frontend.
+
+- **Backend**: Python 3.10+ (Flask/Quart)
+- **Frontend**: TypeScript, React, UmiJS
+- **Architecture**: Microservices based on Docker.
+  - `api/`: Backend API server.
+  - `rag/`: Core RAG logic (indexing, retrieval).
+  - `deepdoc/`: Document parsing and OCR.
+  - `web/`: Frontend application.
+
+## 2. Directory Structure
+- `api/`: Backend API server (Flask/Quart).
+  - `apps/`: API Blueprints (Knowledge Base, Chat, etc.).
+  - `db/`: Database models and services.
+- `rag/`: Core RAG logic.
+  - `llm/`: LLM, Embedding, and Rerank model abstractions.
+- `deepdoc/`: Document parsing and OCR modules.
+- `agent/`: Agentic reasoning components.
+- `web/`: Frontend application (React + UmiJS).
+- `docker/`: Docker deployment configurations.
+- `sdk/`: Python SDK.
+- `test/`: Backend tests.
+
+## 3. Build Instructions
+
+### Backend (Python)
+The project uses **uv** for dependency management.
+
+1. **Setup Environment**:
+   ```bash
+   uv sync --python 3.12 --all-extras
+   uv run download_deps.py
+   ```
+
+2. **Run Server**:
+   - **Pre-requisite**: Start dependent services (MySQL, ES/Infinity, Redis, MinIO).
+     ```bash
+     docker compose -f docker/docker-compose-base.yml up -d
+     ```
+   - **Launch**:
+     ```bash
+     source .venv/bin/activate
+     export PYTHONPATH=$(pwd)
+     bash docker/launch_backend_service.sh
+     ```
+
+### Frontend (TypeScript/React)
+Located in `web/`.
+
+1. **Install Dependencies**:
+   ```bash
+   cd web
+   npm install
+   ```
+
+2. **Run Dev Server**:
+   ```bash
+   npm run dev
+   ```
+   Runs on port 8000 by default.
+
+### Docker Deployment
+To run the full stack using Docker:
+```bash
+cd docker
+docker compose -f docker-compose.yml up -d
+```
+
+## 4. Testing Instructions
+
+### Backend Tests
+- **Run All Tests**:
+  ```bash
+  uv run pytest
+  ```
+- **Run Specific Test**:
+  ```bash
+  uv run pytest test/test_api.py
+  ```
+
+### Frontend Tests
+- **Run Tests**:
+  ```bash
+  cd web
+  npm run test
+  ```
+
+## 5. Coding Standards & Guidelines
+- **Python Formatting**: Use `ruff` for linting and formatting.
+  ```bash
+  ruff check
+  ruff format
+  ```
+- **Frontend Linting**:
+  ```bash
+  cd web
+  npm run lint
+  ```
+- **Pre-commit**: Ensure pre-commit hooks are installed.
+  ```bash
+  pre-commit install
+  pre-commit run --all-files
+  ```
(file name not captured in the scrape)

@@ -45,7 +45,7 @@ RAGFlow is an open-source RAG (Retrieval-Augmented Generation) engine based on d
 ### Backend Development
 ```bash
 # Install Python dependencies
-uv sync --python 3.10 --all-extras
+uv sync --python 3.12 --all-extras
 uv run download_deps.py
 pre-commit install
 
Dockerfile (58 lines changed)

@@ -1,5 +1,5 @@
 # base stage
-FROM ubuntu:22.04 AS base
+FROM ubuntu:24.04 AS base
 USER root
 SHELL ["/bin/bash", "-c"]
 

@@ -10,60 +10,67 @@ WORKDIR /ragflow
 # Copy models downloaded via download_deps.py
 RUN mkdir -p /ragflow/rag/res/deepdoc /root/.ragflow
 RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/huggingface.co,target=/huggingface.co \
     cp /huggingface.co/InfiniFlow/huqie/huqie.txt.trie /ragflow/rag/res/ && \
     tar --exclude='.*' -cf - \
         /huggingface.co/InfiniFlow/text_concat_xgb_v1.0 \
         /huggingface.co/InfiniFlow/deepdoc \
     | tar -xf - --strip-components=3 -C /ragflow/rag/res/deepdoc
 
 # https://github.com/chrismattmann/tika-python
 # This is the only way to run python-tika without internet access. Without this set, the default is to check the tika version and pull latest every time from Apache.
 RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \
     cp -r /deps/nltk_data /root/ && \
-    cp /deps/tika-server-standard-3.0.0.jar /deps/tika-server-standard-3.0.0.jar.md5 /ragflow/ && \
+    cp /deps/tika-server-standard-3.2.3.jar /deps/tika-server-standard-3.2.3.jar.md5 /ragflow/ && \
     cp /deps/cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
 
-ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.0.0.jar"
+ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.2.3.jar"
 ENV DEBIAN_FRONTEND=noninteractive
 
 # Setup apt
 # Python package and implicit dependencies:
 # opencv-python: libglib2.0-0 libglx-mesa0 libgl1
 # aspose-slides: pkg-config libicu-dev libgdiplus libssl1.1_1.1.1f-1ubuntu2_amd64.deb
-# python-pptx: default-jdk tika-server-standard-3.0.0.jar
+# python-pptx: default-jdk tika-server-standard-3.2.3.jar
 # selenium: libatk-bridge2.0-0 chrome-linux64-121-0-6167-85
 # Building C extensions: libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev
 RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
+    apt update && \
+    apt --no-install-recommends install -y ca-certificates; \
     if [ "$NEED_MIRROR" == "1" ]; then \
-        sed -i 's|http://ports.ubuntu.com|http://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
-        sed -i 's|http://archive.ubuntu.com|http://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
+        sed -i 's|http://archive.ubuntu.com/ubuntu|https://mirrors.tuna.tsinghua.edu.cn/ubuntu|g' /etc/apt/sources.list.d/ubuntu.sources; \
+        sed -i 's|http://security.ubuntu.com/ubuntu|https://mirrors.tuna.tsinghua.edu.cn/ubuntu|g' /etc/apt/sources.list.d/ubuntu.sources; \
     fi; \
     rm -f /etc/apt/apt.conf.d/docker-clean && \
     echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache && \
     chmod 1777 /tmp && \
     apt update && \
-    apt --no-install-recommends install -y ca-certificates && \
-    apt update && \
     apt install -y libglib2.0-0 libglx-mesa0 libgl1 && \
     apt install -y pkg-config libicu-dev libgdiplus && \
     apt install -y default-jdk && \
     apt install -y libatk-bridge2.0-0 && \
     apt install -y libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev && \
     apt install -y libjemalloc-dev && \
-    apt install -y python3-pip pipx nginx unzip curl wget git vim less && \
+    apt install -y nginx unzip curl wget git vim less && \
     apt install -y ghostscript && \
     apt install -y pandoc && \
-    apt install -y texlive
+    apt install -y texlive && \
+    apt install -y fonts-freefont-ttf fonts-noto-cjk && \
+    apt install -y postgresql-client
 
-RUN if [ "$NEED_MIRROR" == "1" ]; then \
-    pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
-    pip3 config set global.trusted-host pypi.tuna.tsinghua.edu.cn; \
+# Install uv
+RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \
+    if [ "$NEED_MIRROR" == "1" ]; then \
        mkdir -p /etc/uv && \
-        echo "[[index]]" > /etc/uv/uv.toml && \
+        echo 'python-install-mirror = "https://registry.npmmirror.com/-/binary/python-build-standalone/"' > /etc/uv/uv.toml && \
+        echo '[[index]]' >> /etc/uv/uv.toml && \
        echo 'url = "https://pypi.tuna.tsinghua.edu.cn/simple"' >> /etc/uv/uv.toml && \
-        echo "default = true" >> /etc/uv/uv.toml; \
+        echo 'default = true' >> /etc/uv/uv.toml; \
    fi; \
-    pipx install uv
+    arch="$(uname -m)"; \
+    if [ "$arch" = "x86_64" ]; then uv_arch="x86_64"; else uv_arch="aarch64"; fi; \
+    tar xzf "/deps/uv-${uv_arch}-unknown-linux-gnu.tar.gz" \
+    && cp "uv-${uv_arch}-unknown-linux-gnu/"* /usr/local/bin/ \
+    && rm -rf "uv-${uv_arch}-unknown-linux-gnu" \
+    && uv python install 3.12
 
 ENV PYTHONDONTWRITEBYTECODE=1 DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
 ENV PATH=/root/.local/bin:$PATH
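One non-obvious bit in the model-copy step above is `--strip-components=3`: an archive entry such as `huggingface.co/InfiniFlow/deepdoc/<model file>` loses its first three path components on extraction, so the files land directly in `/ragflow/rag/res/deepdoc`. A standalone illustration with throwaway, hypothetical paths:

```bash
# Recreate the tar-pipe trick outside the Dockerfile.
mkdir -p /tmp/x/huggingface.co/InfiniFlow/deepdoc /tmp/out
echo demo > /tmp/x/huggingface.co/InfiniFlow/deepdoc/det.onnx
cd /tmp/x
tar -cf - huggingface.co | tar -xf - --strip-components=3 -C /tmp/out
ls /tmp/out   # det.onnx (the three leading directories are gone)
```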
@@ -79,12 +86,12 @@ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
 # A modern version of cargo is needed for the latest version of the Rust compiler.
 RUN apt update && apt install -y curl build-essential \
     && if [ "$NEED_MIRROR" == "1" ]; then \
-        # Use TUNA mirrors for rustup/rust dist files
+        # Use TUNA mirrors for rustup/rust dist files \
        export RUSTUP_DIST_SERVER="https://mirrors.tuna.tsinghua.edu.cn/rustup"; \
        export RUSTUP_UPDATE_ROOT="https://mirrors.tuna.tsinghua.edu.cn/rustup/rustup"; \
        echo "Using TUNA mirrors for Rustup."; \
    fi; \
-    # Force curl to use HTTP/1.1
+    # Force curl to use HTTP/1.1 \
    curl --proto '=https' --tlsv1.2 --http1.1 -sSf https://sh.rustup.rs | bash -s -- -y --profile minimal \
    && echo 'export PATH="/root/.cargo/bin:${PATH}"' >> /root/.bashrc

@@ -101,10 +108,10 @@
     apt update && \
     arch="$(uname -m)"; \
     if [ "$arch" = "arm64" ] || [ "$arch" = "aarch64" ]; then \
-        # ARM64 (macOS/Apple Silicon or Linux aarch64)
+        # ARM64 (macOS/Apple Silicon or Linux aarch64) \
        ACCEPT_EULA=Y apt install -y unixodbc-dev msodbcsql18; \
     else \
-        # x86_64 or others
+        # x86_64 or others \
        ACCEPT_EULA=Y apt install -y unixodbc-dev msodbcsql17; \
     fi || \
     { echo "Failed to install ODBC driver"; exit 1; }

@@ -148,11 +155,14 @@ RUN --mount=type=cache,id=ragflow_uv,target=/root/.cache/uv,sharing=locked \
     else \
        sed -i 's|pypi.tuna.tsinghua.edu.cn|pypi.org|g' uv.lock; \
     fi; \
-    uv sync --python 3.10 --frozen
+    uv sync --python 3.12 --frozen && \
+    # Ensure pip is available in the venv for runtime package installation (fixes #12651)
+    .venv/bin/python3 -m ensurepip --upgrade
 
 COPY web web
 COPY docs docs
 RUN --mount=type=cache,id=ragflow_npm,target=/root/.npm,sharing=locked \
     export NODE_OPTIONS="--max-old-space-size=4096" && \
     cd web && npm install && npm run build
 
 COPY .git /ragflow/.git

@@ -183,11 +193,11 @@ COPY deepdoc deepdoc
 COPY rag rag
 COPY agent agent
 COPY graphrag graphrag
-COPY agentic_reasoning agentic_reasoning
 COPY pyproject.toml uv.lock ./
 COPY mcp mcp
 COPY plugin plugin
 COPY common common
+COPY memory memory
 
 COPY docker/service_conf.yaml.template ./conf/service_conf.yaml.template
 COPY docker/entrypoint.sh ./
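A note on the `ensurepip` line above: `uv sync` creates a virtualenv without pip, so anything that later shells out to pip inside the image would fail; reinstating pip lets packages be added at runtime (the diff attributes this to #12651). A quick smoke test against a built image, where the tag and the `/ragflow/.venv` path are assumptions based on the defaults above:

```bash
# Confirm pip was baked into the venv by the ensurepip step.
docker run --rm infiniflow/ragflow:nightly /ragflow/.venv/bin/python3 -m pip --version
```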
(file name not captured in the scrape)

@@ -3,7 +3,7 @@
 FROM scratch
 
 # Copy resources downloaded via download_deps.py
-COPY chromedriver-linux64-121-0-6167-85 chrome-linux64-121-0-6167-85 cl100k_base.tiktoken libssl1.1_1.1.1f-1ubuntu2_amd64.deb libssl1.1_1.1.1f-1ubuntu2_arm64.deb tika-server-standard-3.0.0.jar tika-server-standard-3.0.0.jar.md5 libssl*.deb /
+COPY chromedriver-linux64-121-0-6167-85 chrome-linux64-121-0-6167-85 cl100k_base.tiktoken libssl1.1_1.1.1f-1ubuntu2_amd64.deb libssl1.1_1.1.1f-1ubuntu2_arm64.deb tika-server-standard-3.2.3.jar tika-server-standard-3.2.3.jar.md5 libssl*.deb uv-x86_64-unknown-linux-gnu.tar.gz uv-aarch64-unknown-linux-gnu.tar.gz /
 
 COPY nltk_data /nltk_data
README.md (42 lines changed)

@@ -22,7 +22,7 @@
 <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
 </a>
 <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
+<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
 </a>
 <a href="https://github.com/infiniflow/ragflow/releases/latest">
 <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -37,7 +37,7 @@
 <h4 align="center">
   <a href="https://ragflow.io/docs/dev/">Document</a> |
-  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
+  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
   <a href="https://twitter.com/infiniflowai">Twitter</a> |
   <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
   <a href="https://demo.ragflow.io">Demo</a>

@@ -72,7 +72,7 @@
 ## 💡 What is RAGFlow?
 
-[RAGFlow](https://ragflow.io/) is a leading open-source Retrieval-Augmented Generation (RAG) engine that fuses cutting-edge RAG with Agent capabilities to create a superior context layer for LLMs. It offers a streamlined RAG workflow adaptable to enterprises of any scale. Powered by a converged context engine and pre-built agent templates, RAGFlow enables developers to transform complex data into high-fidelity, production-ready AI systems with exceptional efficiency and precision.
+[RAGFlow](https://ragflow.io/) is a leading open-source Retrieval-Augmented Generation ([RAG](https://ragflow.io/basics/what-is-rag)) engine that fuses cutting-edge RAG with Agent capabilities to create a superior context layer for LLMs. It offers a streamlined RAG workflow adaptable to enterprises of any scale. Powered by a converged [context engine](https://ragflow.io/basics/what-is-agent-context-engine) and pre-built agent templates, RAGFlow enables developers to transform complex data into high-fidelity, production-ready AI systems with exceptional efficiency and precision.
 
 ## 🎮 Demo

@@ -85,8 +85,9 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
 ## 🔥 Latest Updates
 
+- 2025-12-26 Supports 'Memory' for AI agent.
 - 2025-11-19 Supports Gemini 3 Pro.
-- 2025-11-12 Supports data synchronization from Confluence, AWS S3, Discord, Google Drive.
+- 2025-11-12 Supports data synchronization from Confluence, S3, Notion, Discord, Google Drive.
 - 2025-10-23 Supports MinerU & Docling as document parsing methods.
 - 2025-10-15 Supports orchestrable ingestion pipeline.
 - 2025-08-08 Supports OpenAI's latest GPT-5 series models.

@@ -187,15 +188,15 @@ releases! 🌟
 > All Docker images are built for x86 platforms. We don't currently offer Docker images for ARM64.
 > If you are on an ARM64 platform, follow [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.
 
-> The command below downloads the `v0.22.1` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.22.1`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.
+> The command below downloads the `v0.23.1` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.23.1`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.
 
 ```bash
 $ cd ragflow/docker
 
-# git checkout v0.22.1
-
-# This steps ensures the **entrypoint.sh** file in the code matches the Docker image version.
+# git checkout v0.23.1
+# Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases)
+# This step ensures the **entrypoint.sh** file in the code matches the Docker image version.
 
 # Use CPU for DeepDoc tasks:
 $ docker compose -f docker-compose.yml up -d

@@ -206,10 +207,10 @@
 
 > Note: Prior to `v0.22.0`, we provided both images with embedding models and slim images without embedding models. Details as follows:
 
-| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
-| ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.21.1           | ≈9              | ✔️                     | Stable release           |
-| v0.21.1-slim      | ≈2              | ❌                     | Stable release           |
+| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?        |
+|-------------------|-----------------|-----------------------|----------------|
+| v0.21.1           | ≈9              | ✔️                     | Stable release |
+| v0.21.1-slim      | ≈2              | ❌                     | Stable release |
 
 > Starting with `v0.22.0`, we ship only the slim edition and no longer append the **-slim** suffix to the image tag.

@@ -232,7 +233,7 @@
 * Running on all addresses (0.0.0.0)
 ```
 
-> If you skip this confirmation step and directly log in to RAGFlow, your browser may prompt a `network anormal`
+> If you skip this confirmation step and directly log in to RAGFlow, your browser may prompt a `network abnormal`
 > error because, at that moment, your RAGFlow may not be fully initialized.
 >
 5. In your web browser, enter the IP address of your server and log in to RAGFlow.

@@ -302,6 +303,15 @@ cd ragflow/
 docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
 ```
 
+Or if you are behind a proxy, you can pass proxy arguments:
+
+```bash
+docker build --platform linux/amd64 \
+  --build-arg http_proxy=http://YOUR_PROXY:PORT \
+  --build-arg https_proxy=http://YOUR_PROXY:PORT \
+  -f Dockerfile -t infiniflow/ragflow:nightly .
+```
+
 ## 🔨 Launch service from source for development
 
 1. Install `uv` and `pre-commit`, or skip this step if they are already installed:

@@ -314,7 +324,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly
 ```bash
 git clone https://github.com/infiniflow/ragflow.git
 cd ragflow/
-uv sync --python 3.10 # install RAGFlow dependent python modules
+uv sync --python 3.12 # install RAGFlow dependent python modules
 uv run download_deps.py
 pre-commit install
 ```

@@ -386,7 +396,7 @@
 ## 📜 Roadmap
 
-See the [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214)
+See the [RAGFlow Roadmap 2026](https://github.com/infiniflow/ragflow/issues/12241)
 
 ## 🏄 Community
 
README_id.md (38 lines changed)

@@ -22,7 +22,7 @@
 <img alt="Lencana Daring" src="https://img.shields.io/badge/Online-Demo-4e6b99">
 </a>
 <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
+<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
 </a>
 <a href="https://github.com/infiniflow/ragflow/releases/latest">
 <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Rilis Terbaru">

@@ -37,7 +37,7 @@
 <h4 align="center">
   <a href="https://ragflow.io/docs/dev/">Dokumentasi</a> |
-  <a href="https://github.com/infiniflow/ragflow/issues/4214">Peta Jalan</a> |
+  <a href="https://github.com/infiniflow/ragflow/issues/12241">Peta Jalan</a> |
   <a href="https://twitter.com/infiniflowai">Twitter</a> |
   <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
   <a href="https://demo.ragflow.io">Demo</a>

@@ -72,7 +72,7 @@
 ## 💡 Apa Itu RAGFlow?
 
-[RAGFlow](https://ragflow.io/) adalah mesin RAG (Retrieval-Augmented Generation) open-source terkemuka yang mengintegrasikan teknologi RAG mutakhir dengan kemampuan Agent untuk menciptakan lapisan kontekstual superior bagi LLM. Menyediakan alur kerja RAG yang efisien dan dapat diadaptasi untuk perusahaan segala skala. Didukung oleh mesin konteks terkonvergensi dan template Agent yang telah dipra-bangun, RAGFlow memungkinkan pengembang mengubah data kompleks menjadi sistem AI kesetiaan-tinggi dan siap-produksi dengan efisiensi dan presisi yang luar biasa.
+[RAGFlow](https://ragflow.io/) adalah mesin [RAG](https://ragflow.io/basics/what-is-rag) (Retrieval-Augmented Generation) open-source terkemuka yang mengintegrasikan teknologi RAG mutakhir dengan kemampuan Agent untuk menciptakan lapisan kontekstual superior bagi LLM. Menyediakan alur kerja RAG yang efisien dan dapat diadaptasi untuk perusahaan segala skala. Didukung oleh mesin konteks terkonvergensi dan template Agent yang telah dipra-bangun, RAGFlow memungkinkan pengembang mengubah data kompleks menjadi sistem AI kesetiaan-tinggi dan siap-produksi dengan efisiensi dan presisi yang luar biasa.
 
 ## 🎮 Demo

@@ -85,8 +85,9 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
 ## 🔥 Pembaruan Terbaru
 
+- 2025-12-26 Mendukung 'Memori' untuk agen AI.
 - 2025-11-19 Mendukung Gemini 3 Pro.
-- 2025-11-12 Mendukung sinkronisasi data dari Confluence, AWS S3, Discord, Google Drive.
+- 2025-11-12 Mendukung sinkronisasi data dari Confluence, S3, Notion, Discord, Google Drive.
 - 2025-10-23 Mendukung MinerU & Docling sebagai metode penguraian dokumen.
 - 2025-10-15 Dukungan untuk jalur data yang terorkestrasi.
 - 2025-08-08 Mendukung model seri GPT-5 terbaru dari OpenAI.

@@ -187,12 +188,12 @@
 > Semua gambar Docker dibangun untuk platform x86. Saat ini, kami tidak menawarkan gambar Docker untuk ARM64.
 > Jika Anda menggunakan platform ARM64, [silakan gunakan panduan ini untuk membangun gambar Docker yang kompatibel dengan sistem Anda](https://ragflow.io/docs/dev/build_docker_image).
 
-> Perintah di bawah ini mengunduh edisi v0.22.1 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.22.1, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.
+> Perintah di bawah ini mengunduh edisi v0.23.1 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.23.1, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.
 
 ```bash
 $ cd ragflow/docker
 
-# git checkout v0.22.1
-
+# git checkout v0.23.1
+# Opsional: gunakan tag stabil (lihat releases: https://github.com/infiniflow/ragflow/releases)
 # This steps ensures the **entrypoint.sh** file in the code matches the Docker image version.
 

@@ -206,10 +207,10 @@
 
 > Catatan: Sebelum `v0.22.0`, kami menyediakan image dengan model embedding dan image slim tanpa model embedding. Detailnya sebagai berikut:
 
-| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
-| ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.21.1           | ≈9              | ✔️                     | Stable release           |
-| v0.21.1-slim      | ≈2              | ❌                     | Stable release           |
+| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?        |
+|-------------------|-----------------|-----------------------|----------------|
+| v0.21.1           | ≈9              | ✔️                     | Stable release |
+| v0.21.1-slim      | ≈2              | ❌                     | Stable release |
 
 > Mulai dari `v0.22.0`, kami hanya menyediakan edisi slim dan tidak lagi menambahkan akhiran **-slim** pada tag image.

@@ -232,7 +233,7 @@
 * Running on all addresses (0.0.0.0)
 ```
 
-> Jika Anda melewatkan langkah ini dan langsung login ke RAGFlow, browser Anda mungkin menampilkan error `network anormal`
+> Jika Anda melewatkan langkah ini dan langsung login ke RAGFlow, browser Anda mungkin menampilkan error `network abnormal`
 > karena RAGFlow mungkin belum sepenuhnya siap.
 >
 2. Buka browser web Anda, masukkan alamat IP server Anda, dan login ke RAGFlow.

@@ -276,6 +277,15 @@ cd ragflow/
 docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
 ```
 
+Jika berada di belakang proxy, Anda dapat melewatkan argumen proxy:
+
+```bash
+docker build --platform linux/amd64 \
+  --build-arg http_proxy=http://YOUR_PROXY:PORT \
+  --build-arg https_proxy=http://YOUR_PROXY:PORT \
+  -f Dockerfile -t infiniflow/ragflow:nightly .
+```
+
 ## 🔨 Menjalankan Aplikasi dari untuk Pengembangan
 
 1. Instal `uv` dan `pre-commit`, atau lewati langkah ini jika sudah terinstal:

@@ -288,7 +298,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly
 ```bash
 git clone https://github.com/infiniflow/ragflow.git
 cd ragflow/
-uv sync --python 3.10 # install RAGFlow dependent python modules
+uv sync --python 3.12 # install RAGFlow dependent python modules
 uv run download_deps.py
 pre-commit install
 ```

@@ -358,7 +368,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly
 ## 📜 Roadmap
 
-Lihat [Roadmap RAGFlow 2025](https://github.com/infiniflow/ragflow/issues/4214)
+Lihat [Roadmap RAGFlow 2026](https://github.com/infiniflow/ragflow/issues/12241)
 
 ## 🏄 Komunitas
 
README_ja.md
@@ -22,7 +22,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
    <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -37,7 +37,7 @@

<h4 align="center">
  <a href="https://ragflow.io/docs/dev/">Document</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
  <a href="https://twitter.com/infiniflowai">Twitter</a> |
  <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
  <a href="https://demo.ragflow.io">Demo</a>

@@ -53,7 +53,7 @@

## 💡 RAGFlow とは?

[RAGFlow](https://ragflow.io/) は、先進的なRAG(Retrieval-Augmented Generation)技術と Agent 機能を融合し、大規模言語モデル(LLM)に優れたコンテキスト層を構築する最先端のオープンソース RAG エンジンです。あらゆる規模の企業に対応可能な合理化された RAG ワークフローを提供し、統合型コンテキストエンジンと事前構築されたAgentテンプレートにより、開発者が複雑なデータを驚異的な効率性と精度で高精細なプロダクションレディAIシステムへ変換することを可能にします。
[RAGFlow](https://ragflow.io/) は、先進的な[RAG](https://ragflow.io/basics/what-is-rag)(Retrieval-Augmented Generation)技術と Agent 機能を融合し、大規模言語モデル(LLM)に優れたコンテキスト層を構築する最先端のオープンソース RAG エンジンです。あらゆる規模の企業に対応可能な合理化された RAG ワークフローを提供し、統合型[コンテキストエンジン](https://ragflow.io/basics/what-is-agent-context-engine)と事前構築されたAgentテンプレートにより、開発者が複雑なデータを驚異的な効率性と精度で高精細なプロダクションレディAIシステムへ変換することを可能にします。

## 🎮 Demo

@@ -66,8 +66,9 @@

## 🔥 最新情報

- 2025-11-19 Gemini 3 Proをサポートしています
- 2025-11-12 Confluence、AWS S3、Discord、Google Drive からのデータ同期をサポートします。
- 2025-12-26 AIエージェントの「メモリ」機能をサポート。
- 2025-11-19 Gemini 3 Proをサポートしています。
- 2025-11-12 Confluence、S3、Notion、Discord、Google Drive からのデータ同期をサポートします。
- 2025-10-23 ドキュメント解析方法として MinerU と Docling をサポートします。
- 2025-10-15 オーケストレーションされたデータパイプラインのサポート。
- 2025-08-08 OpenAI の最新 GPT-5 シリーズモデルをサポートします。

@@ -167,12 +168,12 @@

> 現在、公式に提供されているすべての Docker イメージは x86 アーキテクチャ向けにビルドされており、ARM64 用の Docker イメージは提供されていません。
> ARM64 アーキテクチャのオペレーティングシステムを使用している場合は、[このドキュメント](https://ragflow.io/docs/dev/build_docker_image)を参照して Docker イメージを自分でビルドしてください。

> 以下のコマンドは、RAGFlow Docker イメージの v0.22.1 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.22.1 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。
> 以下のコマンドは、RAGFlow Docker イメージの v0.23.1 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.23.1 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。

```bash
$ cd ragflow/docker

# git checkout v0.22.1
# git checkout v0.23.1
# 任意: 安定版タグを利用 (一覧: https://github.com/infiniflow/ragflow/releases)
# この手順は、コード内の entrypoint.sh ファイルが Docker イメージのバージョンと一致していることを確認します。

@@ -186,15 +187,15 @@

> 注意:`v0.22.0` より前のバージョンでは、embedding モデルを含むイメージと、embedding モデルを含まない slim イメージの両方を提供していました。詳細は以下の通りです:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
|-------------------|-----------------|-----------------------|----------------|
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> `v0.22.0` 以降、当プロジェクトでは slim エディションのみを提供し、イメージタグに **-slim** サフィックスを付けなくなりました。

1. サーバーを立ち上げた後、サーバーの状態を確認する:

```bash
$ docker logs -f docker-ragflow-cpu-1
```

@@ -276,6 +277,15 @@ cd ragflow/
docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
```

プロキシ環境下にいる場合は、プロキシ引数を指定できます:

```bash
docker build --platform linux/amd64 \
  --build-arg http_proxy=http://YOUR_PROXY:PORT \
  --build-arg https_proxy=http://YOUR_PROXY:PORT \
  -f Dockerfile -t infiniflow/ragflow:nightly .
```

## 🔨 ソースコードからサービスを起動する方法

1. `uv` と `pre-commit` をインストールする。すでにインストールされている場合は、このステップをスキップしてください:

@@ -288,7 +298,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

```bash
git clone https://github.com/infiniflow/ragflow.git
cd ragflow/
uv sync --python 3.10 # install RAGFlow dependent python modules
uv sync --python 3.12 # install RAGFlow dependent python modules
uv run download_deps.py
pre-commit install
```

@@ -358,7 +368,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

## 📜 ロードマップ

[RAGFlow ロードマップ 2025](https://github.com/infiniflow/ragflow/issues/4214) を参照
[RAGFlow ロードマップ 2026](https://github.com/infiniflow/ragflow/issues/12241) を参照

## 🏄 コミュニティ

README_ko.md
@@ -22,7 +22,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
    <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -37,7 +37,7 @@

<h4 align="center">
  <a href="https://ragflow.io/docs/dev/">Document</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
  <a href="https://twitter.com/infiniflowai">Twitter</a> |
  <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
  <a href="https://demo.ragflow.io">Demo</a>

@@ -54,7 +54,7 @@

## 💡 RAGFlow란?

[RAGFlow](https://ragflow.io/) 는 최첨단 RAG(Retrieval-Augmented Generation)와 Agent 기능을 융합하여 대규모 언어 모델(LLM)을 위한 우수한 컨텍스트 계층을 생성하는 선도적인 오픈소스 RAG 엔진입니다. 모든 규모의 기업에 적용 가능한 효율적인 RAG 워크플로를 제공하며, 통합 컨텍스트 엔진과 사전 구축된 Agent 템플릿을 통해 개발자들이 복잡한 데이터를 예외적인 효율성과 정밀도로 고급 구현도의 프로덕션 준비 완료 AI 시스템으로 변환할 수 있도록 지원합니다.
[RAGFlow](https://ragflow.io/) 는 최첨단 [RAG](https://ragflow.io/basics/what-is-rag)(Retrieval-Augmented Generation)와 Agent 기능을 융합하여 대규모 언어 모델(LLM)을 위한 우수한 컨텍스트 계층을 생성하는 선도적인 오픈소스 RAG 엔진입니다. 모든 규모의 기업에 적용 가능한 효율적인 RAG 워크플로를 제공하며, 통합 [컨텍스트 엔진](https://ragflow.io/basics/what-is-agent-context-engine)과 사전 구축된 Agent 템플릿을 통해 개발자들이 복잡한 데이터를 예외적인 효율성과 정밀도로 고급 구현도의 프로덕션 준비 완료 AI 시스템으로 변환할 수 있도록 지원합니다.

## 🎮 데모

@@ -67,8 +67,9 @@

## 🔥 업데이트

- 2025-12-26 AI 에이전트의 '메모리' 기능 지원.
- 2025-11-19 Gemini 3 Pro를 지원합니다.
- 2025-11-12 Confluence, AWS S3, Discord, Google Drive에서 데이터 동기화를 지원합니다.
- 2025-11-12 Confluence, S3, Notion, Discord, Google Drive에서 데이터 동기화를 지원합니다.
- 2025-10-23 문서 파싱 방법으로 MinerU 및 Docling을 지원합니다.
- 2025-10-15 조정된 데이터 파이프라인 지원.
- 2025-08-08 OpenAI의 최신 GPT-5 시리즈 모델을 지원합니다.

@@ -169,12 +170,12 @@

> 모든 Docker 이미지는 x86 플랫폼을 위해 빌드되었습니다. 우리는 현재 ARM64 플랫폼을 위한 Docker 이미지를 제공하지 않습니다.
> ARM64 플랫폼을 사용 중이라면, [시스템과 호환되는 Docker 이미지를 빌드하려면 이 가이드를 사용해 주세요](https://ragflow.io/docs/dev/build_docker_image).

> 아래 명령어는 RAGFlow Docker 이미지의 v0.22.1 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.22.1과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.
> 아래 명령어는 RAGFlow Docker 이미지의 v0.23.1 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.23.1과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.

```bash
$ cd ragflow/docker

# git checkout v0.22.1

# git checkout v0.23.1
# Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases)
# 이 단계는 코드의 entrypoint.sh 파일이 Docker 이미지 버전과 일치하도록 보장합니다.

@@ -188,10 +189,10 @@

> 참고: `v0.22.0` 이전 버전에서는 embedding 모델이 포함된 이미지와 embedding 모델이 포함되지 않은 slim 이미지를 모두 제공했습니다. 자세한 내용은 다음과 같습니다:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
|-------------------|-----------------|-----------------------|----------------|
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> `v0.22.0`부터는 slim 에디션만 배포하며 이미지 태그에 **-slim** 접미사를 더 이상 붙이지 않습니다.

@@ -213,7 +214,7 @@

* Running on all addresses (0.0.0.0)
```

> 만약 확인 단계를 건너뛰고 바로 RAGFlow에 로그인하면, RAGFlow가 완전히 초기화되지 않았기 때문에 브라우저에서 `network anormal` 오류가 발생할 수 있습니다.
> 만약 확인 단계를 건너뛰고 바로 RAGFlow에 로그인하면, RAGFlow가 완전히 초기화되지 않았기 때문에 브라우저에서 `network abnormal` 오류가 발생할 수 있습니다.

2. 웹 브라우저에 서버의 IP 주소를 입력하고 RAGFlow에 로그인하세요.
> 기본 설정을 사용할 경우, `http://IP_OF_YOUR_MACHINE`만 입력하면 됩니다 (포트 번호는 제외). 기본 HTTP 서비스 포트 `80`은 기본 구성으로 사용할 때 생략할 수 있습니다.

@@ -270,6 +271,15 @@ cd ragflow/
docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
```

프록시 환경인 경우, 프록시 인수를 전달할 수 있습니다:

```bash
docker build --platform linux/amd64 \
  --build-arg http_proxy=http://YOUR_PROXY:PORT \
  --build-arg https_proxy=http://YOUR_PROXY:PORT \
  -f Dockerfile -t infiniflow/ragflow:nightly .
```

## 🔨 소스 코드로 서비스를 시작합니다.

1. `uv` 와 `pre-commit` 을 설치하거나, 이미 설치된 경우 이 단계를 건너뜁니다:

@@ -283,7 +293,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

```bash
git clone https://github.com/infiniflow/ragflow.git
cd ragflow/
uv sync --python 3.10 # install RAGFlow dependent python modules
uv sync --python 3.12 # install RAGFlow dependent python modules
uv run download_deps.py
pre-commit install
```

@@ -362,7 +372,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

## 📜 로드맵

[RAGFlow 로드맵 2025](https://github.com/infiniflow/ragflow/issues/4214)을 확인하세요.
[RAGFlow 로드맵 2026](https://github.com/infiniflow/ragflow/issues/12241)을 확인하세요.

## 🏄 커뮤니티

README_pt_br.md

@@ -22,7 +22,7 @@
    <img alt="Badge Estático" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
    <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Última Versão">

@@ -37,7 +37,7 @@

<h4 align="center">
  <a href="https://ragflow.io/docs/dev/">Documentação</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
  <a href="https://twitter.com/infiniflowai">Twitter</a> |
  <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
  <a href="https://demo.ragflow.io">Demo</a>

@@ -73,7 +73,7 @@

## 💡 O que é o RAGFlow?

[RAGFlow](https://ragflow.io/) é um mecanismo de RAG (Retrieval-Augmented Generation) open-source líder que fusiona tecnologias RAG de ponta com funcionalidades Agent para criar uma camada contextual superior para LLMs. Oferece um fluxo de trabalho RAG otimizado adaptável a empresas de qualquer escala. Alimentado por um motor de contexto convergente e modelos Agent pré-construídos, o RAGFlow permite que desenvolvedores transformem dados complexos em sistemas de IA de alta fidelidade e pronto para produção com excepcional eficiência e precisão.
[RAGFlow](https://ragflow.io/) é um mecanismo de [RAG](https://ragflow.io/basics/what-is-rag) (Retrieval-Augmented Generation) open-source líder que fusiona tecnologias RAG de ponta com funcionalidades Agent para criar uma camada contextual superior para LLMs. Oferece um fluxo de trabalho RAG otimizado adaptável a empresas de qualquer escala. Alimentado por [um motor de contexto](https://ragflow.io/basics/what-is-agent-context-engine) convergente e modelos Agent pré-construídos, o RAGFlow permite que desenvolvedores transformem dados complexos em sistemas de IA de alta fidelidade e pronto para produção com excepcional eficiência e precisão.

## 🎮 Demo

@@ -86,8 +86,9 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Últimas Atualizações

- 26-12-2025 Suporte à função 'Memória' para agentes de IA.
- 19-11-2025 Suporta Gemini 3 Pro.
- 12-11-2025 Suporta a sincronização de dados do Confluence, AWS S3, Discord e Google Drive.
- 12-11-2025 Suporta a sincronização de dados do Confluence, S3, Notion, Discord e Google Drive.
- 23-10-2025 Suporta MinerU e Docling como métodos de análise de documentos.
- 15-10-2025 Suporte para pipelines de dados orquestrados.
- 08-08-2025 Suporta a mais recente série GPT-5 da OpenAI.

@@ -187,12 +188,12 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).

> Todas as imagens Docker são construídas para plataformas x86. Atualmente, não oferecemos imagens Docker para ARM64.
> Se você estiver usando uma plataforma ARM64, por favor, utilize [este guia](https://ragflow.io/docs/dev/build_docker_image) para construir uma imagem Docker compatível com o seu sistema.

> O comando abaixo baixa a edição `v0.22.1` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.22.1`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.
> O comando abaixo baixa a edição `v0.23.1` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.23.1`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.

```bash
$ cd ragflow/docker

# git checkout v0.22.1

# git checkout v0.23.1
# Opcional: use uma tag estável (veja releases: https://github.com/infiniflow/ragflow/releases)
# Esta etapa garante que o arquivo entrypoint.sh no código corresponda à versão da imagem do Docker.

@@ -206,10 +207,10 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).

> Nota: Antes da `v0.22.0`, fornecíamos imagens com modelos de embedding e imagens slim sem modelos de embedding. Detalhes a seguir:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
|-------------------|-----------------|-----------------------|----------------|
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> A partir da `v0.22.0`, distribuímos apenas a edição slim e não adicionamos mais o sufixo **-slim** às tags das imagens.

@@ -231,7 +232,7 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).

* Rodando em todos os endereços (0.0.0.0)
```

> Se você pular essa etapa de confirmação e acessar diretamente o RAGFlow, seu navegador pode exibir um erro `network anormal`, pois, nesse momento, seu RAGFlow pode não estar totalmente inicializado.
> Se você pular essa etapa de confirmação e acessar diretamente o RAGFlow, seu navegador pode exibir um erro `network abnormal`, pois, nesse momento, seu RAGFlow pode não estar totalmente inicializado.
>

5. No seu navegador, insira o endereço IP do seu servidor e faça login no RAGFlow.

@@ -293,6 +294,15 @@ cd ragflow/
docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
```

Se você estiver atrás de um proxy, pode passar argumentos de proxy:

```bash
docker build --platform linux/amd64 \
  --build-arg http_proxy=http://YOUR_PROXY:PORT \
  --build-arg https_proxy=http://YOUR_PROXY:PORT \
  -f Dockerfile -t infiniflow/ragflow:nightly .
```

## 🔨 Lançar o serviço a partir do código-fonte para desenvolvimento

1. Instale o `uv` e o `pre-commit`, ou pule esta etapa se eles já estiverem instalados:

@@ -305,7 +315,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

```bash
git clone https://github.com/infiniflow/ragflow.git
cd ragflow/
uv sync --python 3.10 # instala os módulos Python dependentes do RAGFlow
uv sync --python 3.12 # instala os módulos Python dependentes do RAGFlow
uv run download_deps.py
pre-commit install
```

@@ -375,7 +385,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

## 📜 Roadmap

Veja o [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214)
Veja o [RAGFlow Roadmap 2026](https://github.com/infiniflow/ragflow/issues/12241)

## 🏄 Comunidade

README_tzh.md

@@ -22,7 +22,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
    <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -37,7 +37,7 @@

<h4 align="center">
  <a href="https://ragflow.io/docs/dev/">Document</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
  <a href="https://twitter.com/infiniflowai">Twitter</a> |
  <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
  <a href="https://demo.ragflow.io">Demo</a>

@@ -72,7 +72,7 @@

## 💡 RAGFlow 是什麼?

[RAGFlow](https://ragflow.io/) 是一款領先的開源 RAG(Retrieval-Augmented Generation)引擎,通過融合前沿的 RAG 技術與 Agent 能力,為大型語言模型提供卓越的上下文層。它提供可適配任意規模企業的端到端 RAG 工作流,憑藉融合式上下文引擎與預置的 Agent 模板,助力開發者以極致效率與精度將複雜數據轉化為高可信、生產級的人工智能系統。
[RAGFlow](https://ragflow.io/) 是一款領先的開源 [RAG](https://ragflow.io/basics/what-is-rag)(Retrieval-Augmented Generation)引擎,通過融合前沿的 RAG 技術與 Agent 能力,為大型語言模型提供卓越的上下文層。它提供可適配任意規模企業的端到端 RAG 工作流,憑藉融合式[上下文引擎](https://ragflow.io/basics/what-is-agent-context-engine)與預置的 Agent 模板,助力開發者以極致效率與精度將複雜數據轉化為高可信、生產級的人工智能系統。

## 🎮 Demo 試用

@@ -85,15 +85,16 @@

## 🔥 近期更新

- 2025-11-19 支援 Gemini 3 Pro.
- 2025-11-12 支援從 Confluence、AWS S3、Discord、Google Drive 進行資料同步。
- 2025-12-26 支援AI代理的「記憶」功能。
- 2025-11-19 支援 Gemini 3 Pro。
- 2025-11-12 支援從 Confluence、S3、Notion、Discord、Google Drive 進行資料同步。
- 2025-10-23 支援 MinerU 和 Docling 作為文件解析方法。
- 2025-10-15 支援可編排的資料管道。
- 2025-08-08 支援 OpenAI 最新的 GPT-5 系列模型。
- 2025-08-01 支援 agentic workflow 和 MCP
- 2025-08-01 支援 agentic workflow 和 MCP。
- 2025-05-23 為 Agent 新增 Python/JS 程式碼執行器元件。
- 2025-05-05 支援跨語言查詢。
- 2025-03-19 PDF和DOCX中的圖支持用多模態大模型去解析得到描述.
- 2025-03-19 PDF和DOCX中的圖支持用多模態大模型去解析得到描述。
- 2024-12-18 升級了 DeepDoc 的文檔佈局分析模型。
- 2024-08-22 支援用 RAG 技術實現從自然語言到 SQL 語句的轉換。

@@ -124,7 +125,7 @@

### 🍔 **相容各類異質資料來源**

- 支援豐富的文件類型,包括 Word 文件、PPT、excel 表格、txt 檔案、圖片、PDF、影印件、影印件、結構化資料、網頁等。
- 支援豐富的文件類型,包括 Word 文件、PPT、excel 表格、txt 檔案、圖片、PDF、影印件、複印件、結構化資料、網頁等。

### 🛀 **全程無憂、自動化的 RAG 工作流程**

@@ -186,12 +187,12 @@

> 所有 Docker 映像檔都是為 x86 平台建置的。目前,我們不提供 ARM64 平台的 Docker 映像檔。
> 如果您使用的是 ARM64 平台,請使用 [這份指南](https://ragflow.io/docs/dev/build_docker_image) 來建置適合您系統的 Docker 映像檔。

> 執行以下指令會自動下載 RAGFlow Docker 映像 `v0.22.1`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.22.1` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。
> 執行以下指令會自動下載 RAGFlow Docker 映像 `v0.23.1`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.23.1` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。

```bash
$ cd ragflow/docker

# git checkout v0.22.1

# git checkout v0.23.1
# 可選:使用穩定版標籤(查看發佈:https://github.com/infiniflow/ragflow/releases)
# 此步驟確保程式碼中的 entrypoint.sh 檔案與 Docker 映像版本一致。

@@ -205,10 +206,10 @@

> 注意:在 `v0.22.0` 之前的版本,我們會同時提供包含 embedding 模型的映像和不含 embedding 模型的 slim 映像。具體如下:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
|-------------------|-----------------|-----------------------|----------------|
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> 從 `v0.22.0` 開始,我們只發佈 slim 版本,並且不再在映像標籤後附加 **-slim** 後綴。

@@ -236,7 +237,7 @@

* Running on all addresses (0.0.0.0)
```

> 如果您跳過這一步驟系統確認步驟就登入 RAGFlow,你的瀏覽器有可能會提示 `network anormal` 或 `網路異常`,因為 RAGFlow 可能並未完全啟動成功。
> 如果您跳過這一步驟系統確認步驟就登入 RAGFlow,你的瀏覽器有可能會提示 `network abnormal` 或 `網路異常`,因為 RAGFlow 可能並未完全啟動成功。
>

5. 在你的瀏覽器中輸入你的伺服器對應的 IP 位址並登入 RAGFlow。

@@ -302,6 +303,15 @@ cd ragflow/
docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
```

若您位於代理環境,可傳遞代理參數:

```bash
docker build --platform linux/amd64 \
  --build-arg http_proxy=http://YOUR_PROXY:PORT \
  --build-arg https_proxy=http://YOUR_PROXY:PORT \
  -f Dockerfile -t infiniflow/ragflow:nightly .
```

## 🔨 以原始碼啟動服務

1. 安裝 `uv` 和 `pre-commit`。如已安裝,可跳過此步驟:

@@ -315,7 +325,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

```bash
git clone https://github.com/infiniflow/ragflow.git
cd ragflow/
uv sync --python 3.10 # install RAGFlow dependent python modules
uv sync --python 3.12 # install RAGFlow dependent python modules
uv run download_deps.py
pre-commit install
```

@@ -389,7 +399,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

## 📜 路線圖

詳見 [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214) 。
詳見 [RAGFlow Roadmap 2026](https://github.com/infiniflow/ragflow/issues/12241) 。

## 🏄 開源社群

README_zh.md
@@ -22,7 +22,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.1">
    <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.23.1">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
    <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -37,7 +37,7 @@

<h4 align="center">
  <a href="https://ragflow.io/docs/dev/">Document</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/4214">Roadmap</a> |
  <a href="https://github.com/infiniflow/ragflow/issues/12241">Roadmap</a> |
  <a href="https://twitter.com/infiniflowai">Twitter</a> |
  <a href="https://discord.gg/NjYzJD3GM3">Discord</a> |
  <a href="https://demo.ragflow.io">Demo</a>

@@ -72,7 +72,7 @@

## 💡 RAGFlow 是什么?

[RAGFlow](https://ragflow.io/) 是一款领先的开源检索增强生成(RAG)引擎,通过融合前沿的 RAG 技术与 Agent 能力,为大型语言模型提供卓越的上下文层。它提供可适配任意规模企业的端到端 RAG 工作流,凭借融合式上下文引擎与预置的 Agent 模板,助力开发者以极致效率与精度将复杂数据转化为高可信、生产级的人工智能系统。
[RAGFlow](https://ragflow.io/) 是一款领先的开源检索增强生成([RAG](https://ragflow.io/basics/what-is-rag))引擎,通过融合前沿的 RAG 技术与 Agent 能力,为大型语言模型提供卓越的上下文层。它提供可适配任意规模企业的端到端 RAG 工作流,凭借融合式[上下文引擎](https://ragflow.io/basics/what-is-agent-context-engine)与预置的 Agent 模板,助力开发者以极致效率与精度将复杂数据转化为高可信、生产级的人工智能系统。

## 🎮 Demo 试用

@@ -85,15 +85,16 @@

## 🔥 近期更新

- 2025-11-19 支持 Gemini 3 Pro.
- 2025-11-12 支持从 Confluence、AWS S3、Discord、Google Drive 进行数据同步。
- 2025-12-26 支持AI代理的“记忆”功能。
- 2025-11-19 支持 Gemini 3 Pro。
- 2025-11-12 支持从 Confluence、S3、Notion、Discord、Google Drive 进行数据同步。
- 2025-10-23 支持 MinerU 和 Docling 作为文档解析方法。
- 2025-10-15 支持可编排的数据管道。
- 2025-08-08 支持 OpenAI 最新的 GPT-5 系列模型。
- 2025-08-01 支持 agentic workflow 和 MCP。
- 2025-05-23 Agent 新增 Python/JS 代码执行器组件。
- 2025-05-05 支持跨语言查询。
- 2025-03-19 PDF 和 DOCX 中的图支持用多模态大模型去解析得到描述.
- 2025-03-19 PDF 和 DOCX 中的图支持用多模态大模型去解析得到描述。
- 2024-12-18 升级了 DeepDoc 的文档布局分析模型。
- 2024-08-22 支持用 RAG 技术实现从自然语言到 SQL 语句的转换。

@@ -187,12 +188,12 @@

> 请注意,目前官方提供的所有 Docker 镜像均基于 x86 架构构建,并不提供基于 ARM64 的 Docker 镜像。
> 如果你的操作系统是 ARM64 架构,请参考[这篇文档](https://ragflow.io/docs/dev/build_docker_image)自行构建 Docker 镜像。

> 运行以下命令会自动下载 RAGFlow Docker 镜像 `v0.22.1`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.22.1` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。
> 运行以下命令会自动下载 RAGFlow Docker 镜像 `v0.23.1`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.23.1` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。

```bash
$ cd ragflow/docker

# git checkout v0.22.1

# git checkout v0.23.1
# 可选:使用稳定版本标签(查看发布:https://github.com/infiniflow/ragflow/releases)
# 这一步确保代码中的 entrypoint.sh 文件与 Docker 镜像的版本保持一致。

@@ -203,13 +204,13 @@

# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

> 注意:在 `v0.22.0` 之前的版本,我们会同时提供包含 embedding 模型的镜像和不含 embedding 模型的 slim 镜像。具体如下:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
|-------------------|-----------------|-----------------------|----------------|
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> 从 `v0.22.0` 开始,我们只发布 slim 版本,并且不再在镜像标签后附加 **-slim** 后缀。

@@ -237,7 +238,7 @@

* Running on all addresses (0.0.0.0)
```

> 如果您在没有看到上面的提示信息出来之前,就尝试登录 RAGFlow,你的浏览器有可能会提示 `network anormal` 或 `网络异常`。
> 如果您在没有看到上面的提示信息出来之前,就尝试登录 RAGFlow,你的浏览器有可能会提示 `network abnormal` 或 `网络异常`。

5. 在你的浏览器中输入你的服务器对应的 IP 地址并登录 RAGFlow。
> 上面这个例子中,您只需输入 http://IP_OF_YOUR_MACHINE 即可:未改动过配置则无需输入端口(默认的 HTTP 服务端口 80)。

@@ -301,6 +302,15 @@ cd ragflow/
docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly .
```

如果您处在代理环境下,可以传递代理参数:

```bash
docker build --platform linux/amd64 \
  --build-arg http_proxy=http://YOUR_PROXY:PORT \
  --build-arg https_proxy=http://YOUR_PROXY:PORT \
  -f Dockerfile -t infiniflow/ragflow:nightly .
```

## 🔨 以源代码启动服务

1. 安装 `uv` 和 `pre-commit`。如已经安装,可跳过本步骤:

@@ -315,7 +325,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

```bash
git clone https://github.com/infiniflow/ragflow.git
cd ragflow/
uv sync --python 3.10 # install RAGFlow dependent python modules
uv sync --python 3.12 # install RAGFlow dependent python modules
uv run download_deps.py
pre-commit install
```

@@ -392,7 +402,7 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly

## 📜 路线图

详见 [RAGFlow Roadmap 2025](https://github.com/infiniflow/ragflow/issues/4214) 。
详见 [RAGFlow Roadmap 2026](https://github.com/infiniflow/ragflow/issues/12241) 。

## 🏄 开源社区

SECURITY.md

@@ -6,8 +6,8 @@ Use this section to tell people about which versions of your project are
currently being supported with security updates.

| Version | Supported |
| ------- | ------------------ |
| <=0.7.0 | :white_check_mark: |
|---------|--------------------|
| <=0.7.0 | :white_check_mark: |

## Reporting a Vulnerability

@@ -21,7 +21,7 @@ cp pyproject.toml release/$PROJECT_NAME/pyproject.toml
cp README.md release/$PROJECT_NAME/README.md

mkdir release/$PROJECT_NAME/$SOURCE_DIR/$PACKAGE_DIR -p
cp admin_client.py release/$PROJECT_NAME/$SOURCE_DIR/$PACKAGE_DIR/admin_client.py
cp ragflow_cli.py release/$PROJECT_NAME/$SOURCE_DIR/$PACKAGE_DIR/ragflow_cli.py

if [ -d "release/$PROJECT_NAME/$SOURCE_DIR" ]; then
    echo "✅ source dir: release/$PROJECT_NAME/$SOURCE_DIR"

@@ -48,7 +48,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
1. Ensure the Admin Service is running.
2. Install ragflow-cli.
```bash
pip install ragflow-cli==0.22.1
pip install ragflow-cli==0.23.1
```
3. Launch the CLI client:
```bash
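# The original launch command is truncated in this diff; what follows is an
# editorial sketch, assuming the pip package installs a `ragflow-cli` entry
# point and using the defaults from the client code below (host localhost,
# port 9381, superuser password passed with -w or prompted interactively):
ragflow-cli -h 127.0.0.1 -p 9381
```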

@@ -1,978 +0,0 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import argparse
import base64
from cmd import Cmd

from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
from typing import Dict, List, Any
from lark import Lark, Transformer, Tree
import requests
import getpass

GRAMMAR = r"""
start: command

command: sql_command | meta_command

sql_command: list_services
    | show_service
    | startup_service
    | shutdown_service
    | restart_service
    | list_users
    | show_user
    | drop_user
    | alter_user
    | create_user
    | activate_user
    | list_datasets
    | list_agents
    | create_role
    | drop_role
    | alter_role
    | list_roles
    | show_role
    | grant_permission
    | revoke_permission
    | alter_user_role
    | show_user_permission
    | show_version

// meta command definition
meta_command: "\\" meta_command_name [meta_args]

meta_command_name: /[a-zA-Z?]+/
meta_args: (meta_arg)+

meta_arg: /[^\\s"']+/ | quoted_string

// command definition

LIST: "LIST"i
SERVICES: "SERVICES"i
SHOW: "SHOW"i
CREATE: "CREATE"i
SERVICE: "SERVICE"i
SHUTDOWN: "SHUTDOWN"i
STARTUP: "STARTUP"i
RESTART: "RESTART"i
USERS: "USERS"i
DROP: "DROP"i
USER: "USER"i
ALTER: "ALTER"i
ACTIVE: "ACTIVE"i
PASSWORD: "PASSWORD"i
DATASETS: "DATASETS"i
OF: "OF"i
AGENTS: "AGENTS"i
ROLE: "ROLE"i
ROLES: "ROLES"i
DESCRIPTION: "DESCRIPTION"i
GRANT: "GRANT"i
REVOKE: "REVOKE"i
ALL: "ALL"i
PERMISSION: "PERMISSION"i
TO: "TO"i
FROM: "FROM"i
FOR: "FOR"i
RESOURCES: "RESOURCES"i
ON: "ON"i
SET: "SET"i
VERSION: "VERSION"i

list_services: LIST SERVICES ";"
show_service: SHOW SERVICE NUMBER ";"
startup_service: STARTUP SERVICE NUMBER ";"
shutdown_service: SHUTDOWN SERVICE NUMBER ";"
restart_service: RESTART SERVICE NUMBER ";"

list_users: LIST USERS ";"
drop_user: DROP USER quoted_string ";"
alter_user: ALTER USER PASSWORD quoted_string quoted_string ";"
show_user: SHOW USER quoted_string ";"
create_user: CREATE USER quoted_string quoted_string ";"
activate_user: ALTER USER ACTIVE quoted_string status ";"

list_datasets: LIST DATASETS OF quoted_string ";"
list_agents: LIST AGENTS OF quoted_string ";"

create_role: CREATE ROLE identifier [DESCRIPTION quoted_string] ";"
drop_role: DROP ROLE identifier ";"
alter_role: ALTER ROLE identifier SET DESCRIPTION quoted_string ";"
list_roles: LIST ROLES ";"
show_role: SHOW ROLE identifier ";"

grant_permission: GRANT action_list ON identifier TO ROLE identifier ";"
revoke_permission: REVOKE action_list ON identifier FROM ROLE identifier ";"
alter_user_role: ALTER USER quoted_string SET ROLE identifier ";"
show_user_permission: SHOW USER PERMISSION quoted_string ";"

show_version: SHOW VERSION ";"

action_list: identifier ("," identifier)*

identifier: WORD
quoted_string: QUOTED_STRING
status: WORD

QUOTED_STRING: /'[^']+'/ | /"[^"]+"/
WORD: /[a-zA-Z0-9_\-\.]+/
NUMBER: /[0-9]+/

%import common.WS
%ignore WS
"""

class AdminTransformer(Transformer):

    def start(self, items):
        return items[0]

    def command(self, items):
        return items[0]

    def list_services(self, items):
        result = {'type': 'list_services'}
        return result

    def show_service(self, items):
        service_id = int(items[2])
        return {"type": "show_service", "number": service_id}

    def startup_service(self, items):
        service_id = int(items[2])
        return {"type": "startup_service", "number": service_id}

    def shutdown_service(self, items):
        service_id = int(items[2])
        return {"type": "shutdown_service", "number": service_id}

    def restart_service(self, items):
        service_id = int(items[2])
        return {"type": "restart_service", "number": service_id}

    def list_users(self, items):
        return {"type": "list_users"}

    def show_user(self, items):
        user_name = items[2]
        return {"type": "show_user", "user_name": user_name}

    def drop_user(self, items):
        user_name = items[2]
        return {"type": "drop_user", "user_name": user_name}

    def alter_user(self, items):
        user_name = items[3]
        new_password = items[4]
        return {"type": "alter_user", "user_name": user_name, "password": new_password}

    def create_user(self, items):
        user_name = items[2]
        password = items[3]
        return {"type": "create_user", "user_name": user_name, "password": password, "role": "user"}

    def activate_user(self, items):
        user_name = items[3]
        activate_status = items[4]
        return {"type": "activate_user", "activate_status": activate_status, "user_name": user_name}

    def list_datasets(self, items):
        user_name = items[3]
        return {"type": "list_datasets", "user_name": user_name}

    def list_agents(self, items):
        user_name = items[3]
        return {"type": "list_agents", "user_name": user_name}

    def create_role(self, items):
        role_name = items[2]
        if len(items) > 4:
            description = items[4]
            return {"type": "create_role", "role_name": role_name, "description": description}
        else:
            return {"type": "create_role", "role_name": role_name}

    def drop_role(self, items):
        role_name = items[2]
        return {"type": "drop_role", "role_name": role_name}

    def alter_role(self, items):
        role_name = items[2]
        description = items[5]
        return {"type": "alter_role", "role_name": role_name, "description": description}

    def list_roles(self, items):
        return {"type": "list_roles"}

    def show_role(self, items):
        role_name = items[2]
        return {"type": "show_role", "role_name": role_name}

    def grant_permission(self, items):
        action_list = items[1]
        resource = items[3]
        role_name = items[6]
        return {"type": "grant_permission", "role_name": role_name, "resource": resource, "actions": action_list}

    def revoke_permission(self, items):
        action_list = items[1]
        resource = items[3]
        role_name = items[6]
        return {
            "type": "revoke_permission",
            "role_name": role_name,
            "resource": resource, "actions": action_list
        }

    def alter_user_role(self, items):
        user_name = items[2]
        role_name = items[5]
        return {"type": "alter_user_role", "user_name": user_name, "role_name": role_name}

    def show_user_permission(self, items):
        user_name = items[3]
        return {"type": "show_user_permission", "user_name": user_name}

    def show_version(self, items):
        return {"type": "show_version"}

    def action_list(self, items):
        return items

    def meta_command(self, items):
        command_name = str(items[0]).lower()
        args = items[1:] if len(items) > 1 else []

        # handle quoted parameter
        parsed_args = []
        for arg in args:
            if hasattr(arg, 'value'):
                parsed_args.append(arg.value)
            else:
                parsed_args.append(str(arg))

        return {'type': 'meta', 'command': command_name, 'args': parsed_args}

    def meta_command_name(self, items):
        return items[0]

    def meta_args(self, items):
        return items


def encrypt(input_string):
    pub = '-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArq9XTUSeYr2+N1h3Afl/z8Dse/2yD0ZGrKwx+EEEcdsBLca9Ynmx3nIB5obmLlSfmskLpBo0UACBmB5rEjBp2Q2f3AG3Hjd4B+gNCG6BDaawuDlgANIhGnaTLrIqWrrcm4EMzJOnAOI1fgzJRsOOUEfaS318Eq9OVO3apEyCCt0lOQK6PuksduOjVxtltDav+guVAA068NrPYmRNabVKRNLJpL8w4D44sfth5RvZ3q9t+6RTArpEtc5sh5ChzvqPOzKGMXW83C95TxmXqpbK6olN4RevSfVjEAgCydH6HN6OhtOQEcnrU97r9H0iZOWwbw3pVrZiUkuRD1R56Wzs2wIDAQAB\n-----END PUBLIC KEY-----'
    pub_key = RSA.importKey(pub)
    cipher = Cipher_pkcs1_v1_5.new(pub_key)
    cipher_text = cipher.encrypt(base64.b64encode(input_string.encode('utf-8')))
    return base64.b64encode(cipher_text).decode("utf-8")


def encode_to_base64(input_string):
    base64_encoded = base64.b64encode(input_string.encode('utf-8'))
    return base64_encoded.decode('utf-8')
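

# [Editorial note, not in the original file] encrypt() base64-encodes the
# plaintext before RSA/PKCS#1 v1.5 encryption with the hardcoded RAGFlow public
# key, and the result is what the login endpoint expects in the password field,
# as verify_admin below shows. A sketch of that round trip:
#   payload = {'email': 'admin@ragflow.io', 'password': encrypt('admin')}
#   requests.Session().post(f'http://{host}:{port}/api/v1/admin/login', json=payload)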


class AdminCLI(Cmd):
    def __init__(self):
        super().__init__()
        self.parser = Lark(GRAMMAR, start='start', parser='lalr', transformer=AdminTransformer())
        self.command_history = []
        self.is_interactive = False
        self.admin_account = "admin@ragflow.io"
        self.admin_password: str = "admin"
        self.session = requests.Session()
        self.access_token: str = ""
        self.host: str = ""
        self.port: int = 0

    intro = r"""Type "\h" for help."""
    prompt = "admin> "

    def onecmd(self, command: str) -> bool:
        try:
            result = self.parse_command(command)

            if isinstance(result, dict):
                if 'type' in result and result.get('type') == 'empty':
                    return False

            self.execute_command(result)

            if isinstance(result, Tree):
                return False

            if result.get('type') == 'meta' and result.get('command') in ['q', 'quit', 'exit']:
                return True

        except KeyboardInterrupt:
            print("\nUse '\\q' to quit")
        except EOFError:
            print("\nGoodbye!")
            return True
        return False

    def emptyline(self) -> bool:
        return False

    def default(self, line: str) -> bool:
        return self.onecmd(line)

    def parse_command(self, command_str: str) -> dict[str, str]:
        if not command_str.strip():
            return {'type': 'empty'}

        self.command_history.append(command_str)

        try:
            result = self.parser.parse(command_str)
            return result
        except Exception as e:
            return {'type': 'error', 'message': f'Parse error: {str(e)}'}

    def verify_admin(self, arguments: dict, single_command: bool):
        self.host = arguments['host']
        self.port = arguments['port']
        print(f"Attempt to access ip: {self.host}, port: {self.port}")
        url = f"http://{self.host}:{self.port}/api/v1/admin/login"

        attempt_count = 3
        if single_command:
            attempt_count = 1

        try_count = 0
        while True:
            try_count += 1
            if try_count > attempt_count:
                return False

            if single_command:
                admin_passwd = arguments['password']
            else:
                admin_passwd = getpass.getpass(f"password for {self.admin_account}: ").strip()
            try:
                self.admin_password = encrypt(admin_passwd)
                response = self.session.post(url, json={'email': self.admin_account, 'password': self.admin_password})
                if response.status_code == 200:
                    res_json = response.json()
                    error_code = res_json.get('code', -1)
                    if error_code == 0:
                        self.session.headers.update({
                            'Content-Type': 'application/json',
                            'Authorization': response.headers['Authorization'],
                            'User-Agent': 'RAGFlow-CLI/0.22.1'
                        })
                        print("Authentication successful.")
                        return True
                    else:
                        error_message = res_json.get('message', 'Unknown error')
                        print(f"Authentication failed: {error_message}, try again")
                        continue
                else:
                    print(f"Bad response, status: {response.status_code}, password is wrong")
            except Exception as e:
                print(str(e))
                print(f"Can't access {self.host}, port: {self.port}")

    def _format_service_detail_table(self, data):
        if isinstance(data, list):
            return data
        if not all([isinstance(v, list) for v in data.values()]):
            # normal table
            return data
        # handle task_executor heartbeats map, for example {'name': [{'done': 2, 'now': timestamp1}, {'done': 3, 'now': timestamp2}]
        task_executor_list = []
        for k, v in data.items():
            # display latest status
            heartbeats = sorted(v, key=lambda x: x["now"], reverse=True)
            task_executor_list.append({
                "task_executor_name": k,
                **heartbeats[0],
            } if heartbeats else {"task_executor_name": k})
        return task_executor_list
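
    # [Editorial example, not in the original file] A heartbeat map with two
    # samples flattens to one row per executor, keeping only the newest sample:
    #   data = {'exec-1': [{'done': 2, 'now': 100}, {'done': 3, 'now': 200}]}
    #   -> [{'task_executor_name': 'exec-1', 'done': 3, 'now': 200}]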

    def _print_table_simple(self, data):
        if not data:
            print("No data to print")
            return
        if isinstance(data, dict):
            # handle single row data
            data = [data]

        columns = list(set().union(*(d.keys() for d in data)))
        columns.sort()
        col_widths = {}

        def get_string_width(text):
            half_width_chars = (
                " !\"#$%&'()*+,-./0123456789:;<=>?@"
                "ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`"
                "abcdefghijklmnopqrstuvwxyz{|}~"
                "\t\n\r"
            )
            width = 0
            for char in text:
                if char in half_width_chars:
                    width += 1
                else:
                    width += 2
            return width

        for col in columns:
            max_width = get_string_width(str(col))
            for item in data:
                value_len = get_string_width(str(item.get(col, '')))
                if value_len > max_width:
                    max_width = value_len
            col_widths[col] = max(2, max_width)

        # Generate delimiter
        separator = "+" + "+".join(["-" * (col_widths[col] + 2) for col in columns]) + "+"

        # Print header
        print(separator)
        header = "|" + "|".join([f" {col:<{col_widths[col]}} " for col in columns]) + "|"
        print(header)
        print(separator)

        # Print data
        for item in data:
            row = "|"
            for col in columns:
                value = str(item.get(col, ''))
                if get_string_width(value) > col_widths[col]:
                    value = value[:col_widths[col] - 3] + "..."
                row += f" {value:<{col_widths[col] - (get_string_width(value) - len(value))}} |"
            print(row)

        print(separator)
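
    # [Editorial example, not in the original file] CJK-aware padding aside,
    # _print_table_simple([{'name': 'ragflow_server', 'status': 'alive'}]) prints:
    #   +----------------+--------+
    #   | name           | status |
    #   +----------------+--------+
    #   | ragflow_server | alive  |
    #   +----------------+--------+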

    def run_interactive(self):

        self.is_interactive = True
        print("RAGFlow Admin command line interface - Type '\\?' for help, '\\q' to quit")

        while True:
            try:
                command = input("admin> ").strip()
                if not command:
                    continue

                print(f"command: {command}")
                result = self.parse_command(command)
                self.execute_command(result)

                if isinstance(result, Tree):
                    continue

                if result.get('type') == 'meta' and result.get('command') in ['q', 'quit', 'exit']:
                    break

            except KeyboardInterrupt:
                print("\nUse '\\q' to quit")
            except EOFError:
                print("\nGoodbye!")
                break

    def run_single_command(self, command: str):
        result = self.parse_command(command)
        self.execute_command(result)

    def parse_connection_args(self, args: List[str]) -> Dict[str, Any]:
        parser = argparse.ArgumentParser(description='Admin CLI Client', add_help=False)
        parser.add_argument('-h', '--host', default='localhost', help='Admin service host')
        parser.add_argument('-p', '--port', type=int, default=9381, help='Admin service port')
        parser.add_argument('-w', '--password', default='admin', type=str, help='Superuser password')
        parser.add_argument('command', nargs='?', help='Single command')
        try:
            parsed_args, remaining_args = parser.parse_known_args(args)
            if remaining_args:
                command = remaining_args[0]
                return {
                    'host': parsed_args.host,
                    'port': parsed_args.port,
                    'password': parsed_args.password,
                    'command': command
                }
            else:
                return {
                    'host': parsed_args.host,
                    'port': parsed_args.port,
                }
        except SystemExit:
            return {'error': 'Invalid connection arguments'}
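
    # [Editorial note, not in the original file] Two observations on the parser
    # above: -h is repurposed for --host (hence add_help=False), and the
    # single-command branch only fires when parse_known_args leaves tokens over,
    # because the optional positional swallows the first extra token. A sketch:
    #   AdminCLI().parse_connection_args(['-h', '10.0.0.5'])
    #   # -> {'host': '10.0.0.5', 'port': 9381}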

    def execute_command(self, parsed_command: Dict[str, Any]):

        command_dict: dict
        if isinstance(parsed_command, Tree):
            command_dict = parsed_command.children[0]
        else:
            if parsed_command['type'] == 'error':
                print(f"Error: {parsed_command['message']}")
                return
            else:
                command_dict = parsed_command

        # print(f"Parsed command: {command_dict}")

        command_type = command_dict['type']

        match command_type:
            case 'list_services':
                self._handle_list_services(command_dict)
            case 'show_service':
                self._handle_show_service(command_dict)
            case 'restart_service':
                self._handle_restart_service(command_dict)
            case 'shutdown_service':
                self._handle_shutdown_service(command_dict)
            case 'startup_service':
                self._handle_startup_service(command_dict)
            case 'list_users':
                self._handle_list_users(command_dict)
            case 'show_user':
                self._handle_show_user(command_dict)
            case 'drop_user':
                self._handle_drop_user(command_dict)
            case 'alter_user':
                self._handle_alter_user(command_dict)
            case 'create_user':
                self._handle_create_user(command_dict)
            case 'activate_user':
                self._handle_activate_user(command_dict)
            case 'list_datasets':
                self._handle_list_datasets(command_dict)
            case 'list_agents':
                self._handle_list_agents(command_dict)
            case 'create_role':
                self._create_role(command_dict)
            case 'drop_role':
                self._drop_role(command_dict)
            case 'alter_role':
                self._alter_role(command_dict)
            case 'list_roles':
                self._list_roles(command_dict)
            case 'show_role':
                self._show_role(command_dict)
            case 'grant_permission':
                self._grant_permission(command_dict)
            case 'revoke_permission':
                self._revoke_permission(command_dict)
            case 'alter_user_role':
                self._alter_user_role(command_dict)
            case 'show_user_permission':
                self._show_user_permission(command_dict)
            case 'show_version':
                self._show_version(command_dict)
            case 'meta':
                self._handle_meta_command(command_dict)
            case _:
                print(f"Command '{command_type}' would be executed with API")

    def _handle_list_services(self, command):
        print("Listing all services")

        url = f'http://{self.host}:{self.port}/api/v1/admin/services'
        response = self.session.get(url)
        res_json = response.json()
        if response.status_code == 200:
            self._print_table_simple(res_json['data'])
        else:
            print(f"Fail to get all services, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_show_service(self, command):
        service_id: int = command['number']
        print(f"Showing service: {service_id}")

        url = f'http://{self.host}:{self.port}/api/v1/admin/services/{service_id}'
        response = self.session.get(url)
        res_json = response.json()
        if response.status_code == 200:
            res_data = res_json['data']
            if 'status' in res_data and res_data['status'] == 'alive':
                print(f"Service {res_data['service_name']} is alive, ")
                if isinstance(res_data['message'], str):
                    print(res_data['message'])
                else:
                    data = self._format_service_detail_table(res_data['message'])
                    self._print_table_simple(data)
            else:
                print(f"Service {res_data['service_name']} is down, {res_data['message']}")
        else:
            print(f"Fail to show service, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_restart_service(self, command):
        service_id: int = command['number']
        print(f"Restart service {service_id}")

    def _handle_shutdown_service(self, command):
        service_id: int = command['number']
        print(f"Shutdown service {service_id}")

    def _handle_startup_service(self, command):
        service_id: int = command['number']
        print(f"Startup service {service_id}")

    def _handle_list_users(self, command):
        print("Listing all users")

        url = f'http://{self.host}:{self.port}/api/v1/admin/users'
        response = self.session.get(url)
        res_json = response.json()
        if response.status_code == 200:
            self._print_table_simple(res_json['data'])
        else:
            print(f"Fail to get all users, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_show_user(self, command):
        username_tree: Tree = command['user_name']
        user_name: str = username_tree.children[0].strip("'\"")
        print(f"Showing user: {user_name}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}'
        response = self.session.get(url)
        res_json = response.json()
        if response.status_code == 200:
            table_data = res_json['data']
            table_data.pop('avatar')
            self._print_table_simple(table_data)
        else:
            print(f"Fail to get user {user_name}, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_drop_user(self, command):
        username_tree: Tree = command['user_name']
        user_name: str = username_tree.children[0].strip("'\"")
        print(f"Drop user: {user_name}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}'
        response = self.session.delete(url)
        res_json = response.json()
        if response.status_code == 200:
            print(res_json["message"])
        else:
            print(f"Fail to drop user, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_alter_user(self, command):
        user_name_tree: Tree = command['user_name']
        user_name: str = user_name_tree.children[0].strip("'\"")
        password_tree: Tree = command['password']
        password: str = password_tree.children[0].strip("'\"")
        print(f"Alter user: {user_name}, password: {password}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}/password'
        response = self.session.put(url, json={'new_password': encrypt(password)})
        res_json = response.json()
        if response.status_code == 200:
            print(res_json["message"])
        else:
            print(f"Fail to alter password, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_create_user(self, command):
        user_name_tree: Tree = command['user_name']
        user_name: str = user_name_tree.children[0].strip("'\"")
        password_tree: Tree = command['password']
        password: str = password_tree.children[0].strip("'\"")
        role: str = command['role']
        print(f"Create user: {user_name}, password: {password}, role: {role}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users'
        response = self.session.post(
            url,
            json={'user_name': user_name, 'password': encrypt(password), 'role': role}
        )
        res_json = response.json()
        if response.status_code == 200:
            self._print_table_simple(res_json['data'])
        else:
            print(f"Fail to create user {user_name}, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_activate_user(self, command):
        user_name_tree: Tree = command['user_name']
        user_name: str = user_name_tree.children[0].strip("'\"")
        activate_tree: Tree = command['activate_status']
        activate_status: str = activate_tree.children[0].strip("'\"")
        if activate_status.lower() in ['on', 'off']:
            print(f"Alter user {user_name} activate status, turn {activate_status.lower()}.")
            url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}/activate'
            response = self.session.put(url, json={'activate_status': activate_status})
            res_json = response.json()
            if response.status_code == 200:
                print(res_json["message"])
            else:
                print(f"Fail to alter activate status, code: {res_json['code']}, message: {res_json['message']}")
        else:
            print(f"Unknown activate status: {activate_status}.")

    def _handle_list_datasets(self, command):
        username_tree: Tree = command['user_name']
        user_name: str = username_tree.children[0].strip("'\"")
        print(f"Listing all datasets of user: {user_name}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}/datasets'
        response = self.session.get(url)
        res_json = response.json()
        if response.status_code == 200:
            table_data = res_json['data']
            for t in table_data:
                t.pop('avatar')
            self._print_table_simple(table_data)
        else:
            print(f"Fail to get all datasets of {user_name}, code: {res_json['code']}, message: {res_json['message']}")

    def _handle_list_agents(self, command):
        username_tree: Tree = command['user_name']
        user_name: str = username_tree.children[0].strip("'\"")
        print(f"Listing all agents of user: {user_name}")
        url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name}/agents'
|
||||
response = self.session.get(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
table_data = res_json['data']
|
||||
for t in table_data:
|
||||
t.pop('avatar')
|
||||
self._print_table_simple(table_data)
|
||||
else:
|
||||
print(f"Fail to get all agents of {user_name}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _create_role(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name: str = role_name_tree.children[0].strip("'\"")
|
||||
desc_str: str = ''
|
||||
if 'description' in command:
|
||||
desc_tree: Tree = command['description']
|
||||
desc_str = desc_tree.children[0].strip("'\"")
|
||||
|
||||
print(f"create role name: {role_name}, description: {desc_str}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles'
|
||||
response = self.session.post(
|
||||
url,
|
||||
json={'role_name': role_name, 'description': desc_str}
|
||||
)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(f"Fail to create role {role_name}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _drop_role(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name: str = role_name_tree.children[0].strip("'\"")
|
||||
print(f"drop role name: {role_name}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles/{role_name}'
|
||||
response = self.session.delete(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(f"Fail to drop role {role_name}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _alter_role(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name: str = role_name_tree.children[0].strip("'\"")
|
||||
desc_tree: Tree = command['description']
|
||||
desc_str: str = desc_tree.children[0].strip("'\"")
|
||||
|
||||
print(f"alter role name: {role_name}, description: {desc_str}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles/{role_name}'
|
||||
response = self.session.put(
|
||||
url,
|
||||
json={'description': desc_str}
|
||||
)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(
|
||||
f"Fail to update role {role_name} with description: {desc_str}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _list_roles(self, command):
|
||||
print("Listing all roles")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles'
|
||||
response = self.session.get(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(f"Fail to list roles, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _show_role(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name: str = role_name_tree.children[0].strip("'\"")
|
||||
print(f"show role: {role_name}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles/{role_name}/permission'
|
||||
response = self.session.get(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(f"Fail to list roles, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _grant_permission(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name_str: str = role_name_tree.children[0].strip("'\"")
|
||||
resource_tree: Tree = command['resource']
|
||||
resource_str: str = resource_tree.children[0].strip("'\"")
|
||||
action_tree_list: list = command['actions']
|
||||
actions: list = []
|
||||
for action_tree in action_tree_list:
|
||||
action_str: str = action_tree.children[0].strip("'\"")
|
||||
actions.append(action_str)
|
||||
print(f"grant role_name: {role_name_str}, resource: {resource_str}, actions: {actions}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles/{role_name_str}/permission'
|
||||
response = self.session.post(
|
||||
url,
|
||||
json={'actions': actions, 'resource': resource_str}
|
||||
)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(
|
||||
f"Fail to grant role {role_name_str} with {actions} on {resource_str}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _revoke_permission(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name_str: str = role_name_tree.children[0].strip("'\"")
|
||||
resource_tree: Tree = command['resource']
|
||||
resource_str: str = resource_tree.children[0].strip("'\"")
|
||||
action_tree_list: list = command['actions']
|
||||
actions: list = []
|
||||
for action_tree in action_tree_list:
|
||||
action_str: str = action_tree.children[0].strip("'\"")
|
||||
actions.append(action_str)
|
||||
print(f"revoke role_name: {role_name_str}, resource: {resource_str}, actions: {actions}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/roles/{role_name_str}/permission'
|
||||
response = self.session.delete(
|
||||
url,
|
||||
json={'actions': actions, 'resource': resource_str}
|
||||
)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(
|
||||
f"Fail to revoke role {role_name_str} with {actions} on {resource_str}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _alter_user_role(self, command):
|
||||
role_name_tree: Tree = command['role_name']
|
||||
role_name_str: str = role_name_tree.children[0].strip("'\"")
|
||||
user_name_tree: Tree = command['user_name']
|
||||
user_name_str: str = user_name_tree.children[0].strip("'\"")
|
||||
print(f"alter_user_role user_name: {user_name_str}, role_name: {role_name_str}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name_str}/role'
|
||||
response = self.session.put(
|
||||
url,
|
||||
json={'role_name': role_name_str}
|
||||
)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(
|
||||
f"Fail to alter user: {user_name_str} to role {role_name_str}, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _show_user_permission(self, command):
|
||||
user_name_tree: Tree = command['user_name']
|
||||
user_name_str: str = user_name_tree.children[0].strip("'\"")
|
||||
print(f"show_user_permission user_name: {user_name_str}")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/users/{user_name_str}/permission'
|
||||
response = self.session.get(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(
|
||||
f"Fail to show user: {user_name_str} permission, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _show_version(self, command):
|
||||
print("show_version")
|
||||
url = f'http://{self.host}:{self.port}/api/v1/admin/version'
|
||||
response = self.session.get(url)
|
||||
res_json = response.json()
|
||||
if response.status_code == 200:
|
||||
self._print_table_simple(res_json['data'])
|
||||
else:
|
||||
print(f"Fail to show version, code: {res_json['code']}, message: {res_json['message']}")
|
||||
|
||||
def _handle_meta_command(self, command):
|
||||
meta_command = command['command']
|
||||
args = command.get('args', [])
|
||||
|
||||
if meta_command in ['?', 'h', 'help']:
|
||||
self.show_help()
|
||||
elif meta_command in ['q', 'quit', 'exit']:
|
||||
print("Goodbye!")
|
||||
else:
|
||||
print(f"Meta command '{meta_command}' with args {args}")
|
||||
|
||||
def show_help(self):
|
||||
"""Help info"""
|
||||
help_text = """
|
||||
Commands:
|
||||
LIST SERVICES
|
||||
SHOW SERVICE <service>
|
||||
STARTUP SERVICE <service>
|
||||
SHUTDOWN SERVICE <service>
|
||||
RESTART SERVICE <service>
|
||||
LIST USERS
|
||||
SHOW USER <user>
|
||||
DROP USER <user>
|
||||
CREATE USER <user> <password>
|
||||
ALTER USER PASSWORD <user> <new_password>
|
||||
ALTER USER ACTIVE <user> <on/off>
|
||||
LIST DATASETS OF <user>
|
||||
LIST AGENTS OF <user>
CREATE ROLE <role> [DESCRIPTION <desc>]
DROP ROLE <role>
ALTER ROLE <role> SET DESCRIPTION <desc>
LIST ROLES
SHOW ROLE <role>
GRANT <actions> ON <resource> TO ROLE <role>
REVOKE <actions> ON <resource> FROM ROLE <role>
ALTER USER <user> SET ROLE <role>
SHOW USER PERMISSION <user>
SHOW VERSION
|
||||
|
||||
Meta Commands:
|
||||
\\?, \\h, \\help Show this help
|
||||
\\q, \\quit, \\exit Quit the CLI
|
||||
"""
|
||||
print(help_text)
|
||||
|
||||
|
||||
def main():
|
||||
import sys
|
||||
|
||||
cli = AdminCLI()
|
||||
|
||||
args = cli.parse_connection_args(sys.argv)
|
||||
if 'error' in args:
|
||||
print(f"Error: {args['error']}")
|
||||
return
|
||||
|
||||
if 'command' in args:
|
||||
if 'password' not in args:
|
||||
print("Error: password is missing")
|
||||
return
|
||||
if cli.verify_admin(args, single_command=True):
|
||||
command: str = args['command']
|
||||
print(f"Run single command: {command}")
|
||||
cli.run_single_command(command)
|
||||
else:
|
||||
if cli.verify_admin(args, single_command=False):
|
||||
print(r"""
|
||||
____ ___ ______________ ___ __ _
|
||||
/ __ \/ | / ____/ ____/ /___ _ __ / | ____/ /___ ___ (_)___
|
||||
/ /_/ / /| |/ / __/ /_ / / __ \ | /| / / / /| |/ __ / __ `__ \/ / __ \
|
||||
/ _, _/ ___ / /_/ / __/ / / /_/ / |/ |/ / / ___ / /_/ / / / / / / / / / /
|
||||
/_/ |_/_/ |_\____/_/ /_/\____/|__/|__/ /_/ |_\__,_/_/ /_/ /_/_/_/ /_/
|
||||
""")
|
||||
cli.cmdloop()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
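Every handler above repeats the same request -> JSON -> print-or-error envelope. A minimal consolidation sketch (a hypothetical helper, not part of the committed file; the names admin_get and on_ok are illustrative):

import requests

def admin_get(session: requests.Session, base_url: str, path: str, on_ok, action: str):
    # GET an admin endpoint; render the data on success, else print the error envelope.
    response = session.get(f"{base_url}/{path.lstrip('/')}")
    res_json = response.json()
    if response.status_code == 200:
        on_ok(res_json['data'])
    else:
        print(f"Fail to {action}, code: {res_json['code']}, message: {res_json['message']}")

# e.g. _handle_list_services would reduce to:
#   admin_get(self.session, f"http://{self.host}:{self.port}/api/v1/admin",
#             "services", self._print_table_simple, "get all services")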
admin/client/http_client.py (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
#
|
||||
# Copyright 2026 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import time
|
||||
import json
|
||||
import typing
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import requests
|
||||
# from requests.sessions import HTTPAdapter
|
||||
|
||||
|
||||
class HttpClient:
|
||||
def __init__(
|
||||
self,
|
||||
host: str = "127.0.0.1",
|
||||
port: int = 9381,
|
||||
api_version: str = "v1",
|
||||
api_key: Optional[str] = None,
|
||||
connect_timeout: float = 5.0,
|
||||
read_timeout: float = 60.0,
|
||||
verify_ssl: bool = False,
|
||||
) -> None:
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.api_version = api_version
|
||||
self.api_key = api_key
|
||||
self.login_token: str | None = None
|
||||
self.connect_timeout = connect_timeout
|
||||
self.read_timeout = read_timeout
|
||||
self.verify_ssl = verify_ssl
|
||||
|
||||
def api_base(self) -> str:
|
||||
return f"{self.host}:{self.port}/api/{self.api_version}"
|
||||
|
||||
def non_api_base(self) -> str:
|
||||
return f"{self.host}:{self.port}/{self.api_version}"
|
||||
|
||||
def build_url(self, path: str, use_api_base: bool = True) -> str:
|
||||
base = self.api_base() if use_api_base else self.non_api_base()
|
||||
if self.verify_ssl:
|
||||
return f"https://{base}/{path.lstrip('/')}"
|
||||
else:
|
||||
return f"http://{base}/{path.lstrip('/')}"
|
||||
|
||||
def _headers(self, auth_kind: Optional[str], extra: Optional[Dict[str, str]]) -> Dict[str, str]:
|
||||
headers = {}
|
||||
if auth_kind == "api" and self.api_key:
|
||||
headers["Authorization"] = f"Bearer {self.api_key}"
|
||||
elif auth_kind == "web" and self.login_token:
|
||||
headers["Authorization"] = self.login_token
|
||||
elif auth_kind == "admin" and self.login_token:
|
||||
headers["Authorization"] = self.login_token
|
||||
else:
|
||||
pass
|
||||
if extra:
|
||||
headers.update(extra)
|
||||
return headers
|
||||
|
||||
def request(
|
||||
self,
|
||||
method: str,
|
||||
path: str,
|
||||
*,
|
||||
use_api_base: bool = True,
|
||||
auth_kind: Optional[str] = "api",
|
||||
headers: Optional[Dict[str, str]] = None,
|
||||
json_body: Optional[Dict[str, Any]] = None,
|
||||
data: Any = None,
|
||||
files: Any = None,
|
||||
params: Optional[Dict[str, Any]] = None,
|
||||
stream: bool = False,
|
||||
iterations: int = 1,
|
||||
) -> requests.Response | dict:
|
||||
url = self.build_url(path, use_api_base=use_api_base)
|
||||
merged_headers = self._headers(auth_kind, headers)
|
||||
# timeout: Tuple[float, float] = (self.connect_timeout, self.read_timeout)
|
||||
session = requests.Session()
|
||||
# adapter = HTTPAdapter(pool_connections=100, pool_maxsize=100)
|
||||
# session.mount("http://", adapter)
|
||||
http_function: typing.Callable
|
||||
match method:
|
||||
case "GET":
|
||||
http_function = session.get
|
||||
case "POST":
|
||||
http_function = session.post
|
||||
case "PUT":
|
||||
http_function = session.put
|
||||
case "DELETE":
|
||||
http_function = session.delete
|
||||
case "PATCH":
|
||||
http_function = session.patch
|
||||
case _:
|
||||
raise ValueError(f"Invalid HTTP method: {method}")
|
||||
|
||||
if iterations > 1:
|
||||
response_list = []
|
||||
total_duration = 0.0
|
||||
for _ in range(iterations):
|
||||
start_time = time.perf_counter()
|
||||
response = http_function(url, headers=merged_headers, json=json_body, data=data, files=files, params=params, stream=stream)
|
||||
# response = session.get(url, headers=merged_headers, json=json_body, data=data, stream=stream)
|
||||
# response = requests.request(
|
||||
# method=method,
|
||||
# url=url,
|
||||
# headers=merged_headers,
|
||||
# json=json_body,
|
||||
# data=data,
|
||||
# files=files,
|
||||
# params=params,
|
||||
# stream=stream,
|
||||
# verify=self.verify_ssl,
|
||||
# )
|
||||
end_time = time.perf_counter()
|
||||
total_duration += end_time - start_time
|
||||
response_list.append(response)
|
||||
return {"duration": total_duration, "response_list": response_list}
|
||||
else:
|
||||
return http_function(url, headers=merged_headers, json=json_body, data=data, files=files, params=params, stream=stream)
|
||||
# return session.get(url, headers=merged_headers, json=json_body, data=data, stream=stream)
|
||||
# return requests.request(
|
||||
# method=method,
|
||||
# url=url,
|
||||
# headers=merged_headers,
|
||||
# json=json_body,
|
||||
# data=data,
|
||||
# files=files,
|
||||
# params=params,
|
||||
# stream=stream,
|
||||
# verify=self.verify_ssl,
|
||||
# )
|
||||
|
||||
def request_json(
|
||||
self,
|
||||
method: str,
|
||||
path: str,
|
||||
*,
|
||||
use_api_base: bool = True,
|
||||
auth_kind: Optional[str] = "api",
|
||||
headers: Optional[Dict[str, str]] = None,
|
||||
json_body: Optional[Dict[str, Any]] = None,
|
||||
data: Any = None,
|
||||
files: Any = None,
|
||||
params: Optional[Dict[str, Any]] = None,
|
||||
stream: bool = False,
|
||||
) -> Dict[str, Any]:
|
||||
response = self.request(
|
||||
method,
|
||||
path,
|
||||
use_api_base=use_api_base,
|
||||
auth_kind=auth_kind,
|
||||
headers=headers,
|
||||
json_body=json_body,
|
||||
data=data,
|
||||
files=files,
|
||||
params=params,
|
||||
stream=stream,
|
||||
)
|
||||
try:
|
||||
return response.json()
|
||||
except Exception as exc:
|
||||
raise ValueError(f"Non-JSON response from {path}: {exc}") from exc
|
||||
|
||||
@staticmethod
|
||||
def parse_json_bytes(raw: bytes) -> Dict[str, Any]:
|
||||
try:
|
||||
return json.loads(raw.decode("utf-8"))
|
||||
except Exception as exc:
|
||||
raise ValueError(f"Invalid JSON payload: {exc}") from exc
|
||||
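A usage sketch for HttpClient, assuming the admin endpoint seen earlier (/api/v1/admin/version) and a token captured by the login flow in user.py below; the values are placeholders:

client = HttpClient(host="127.0.0.1", port=9381)
client.login_token = "<Authorization token from login>"  # placeholder; normally set by login_user

# Single call, decoded as JSON:
version = client.request_json("GET", "admin/version", auth_kind="admin")
print(version.get("data"))

# Repeated calls return aggregate timing instead of a single Response:
bench = client.request("GET", "admin/version", auth_kind="admin", iterations=10)
print(bench["duration"], len(bench["response_list"]))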
admin/client/parser.py (new file, 623 lines)
@@ -0,0 +1,623 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
from lark import Transformer
|
||||
|
||||
GRAMMAR = r"""
|
||||
start: command
|
||||
|
||||
command: sql_command | meta_command
|
||||
|
||||
sql_command: login_user
|
||||
| ping_server
|
||||
| list_services
|
||||
| show_service
|
||||
| startup_service
|
||||
| shutdown_service
|
||||
| restart_service
|
||||
| register_user
|
||||
| list_users
|
||||
| show_user
|
||||
| drop_user
|
||||
| alter_user
|
||||
| create_user
|
||||
| activate_user
|
||||
| list_datasets
|
||||
| list_agents
|
||||
| create_role
|
||||
| drop_role
|
||||
| alter_role
|
||||
| list_roles
|
||||
| show_role
|
||||
| grant_permission
|
||||
| revoke_permission
|
||||
| alter_user_role
|
||||
| show_user_permission
|
||||
| show_version
|
||||
| grant_admin
|
||||
| revoke_admin
|
||||
| set_variable
|
||||
| show_variable
|
||||
| list_variables
|
||||
| list_configs
|
||||
| list_environments
|
||||
| generate_key
|
||||
| list_keys
|
||||
| drop_key
|
||||
| show_current_user
|
||||
| set_default_llm
|
||||
| set_default_vlm
|
||||
| set_default_embedding
|
||||
| set_default_reranker
|
||||
| set_default_asr
|
||||
| set_default_tts
|
||||
| reset_default_llm
|
||||
| reset_default_vlm
|
||||
| reset_default_embedding
|
||||
| reset_default_reranker
|
||||
| reset_default_asr
|
||||
| reset_default_tts
|
||||
| create_model_provider
|
||||
| drop_model_provider
|
||||
| create_user_dataset_with_parser
|
||||
| create_user_dataset_with_pipeline
|
||||
| drop_user_dataset
|
||||
| list_user_datasets
|
||||
| list_user_dataset_files
|
||||
| list_user_agents
|
||||
| list_user_chats
|
||||
| create_user_chat
|
||||
| drop_user_chat
|
||||
| list_user_model_providers
|
||||
| list_user_default_models
|
||||
| parse_dataset_docs
|
||||
| parse_dataset_sync
|
||||
| parse_dataset_async
|
||||
| import_docs_into_dataset
|
||||
| search_on_datasets
|
||||
| benchmark
|
||||
|
||||
// meta command definition
|
||||
meta_command: "\\" meta_command_name [meta_args]
|
||||
|
||||
meta_command_name: /[a-zA-Z?]+/
|
||||
meta_args: (meta_arg)+
|
||||
|
||||
meta_arg: /[^\s\\"']+/ | quoted_string
|
||||
|
||||
// command definition
|
||||
|
||||
LOGIN: "LOGIN"i
|
||||
REGISTER: "REGISTER"i
|
||||
LIST: "LIST"i
|
||||
SERVICES: "SERVICES"i
|
||||
SHOW: "SHOW"i
|
||||
CREATE: "CREATE"i
|
||||
SERVICE: "SERVICE"i
|
||||
SHUTDOWN: "SHUTDOWN"i
|
||||
STARTUP: "STARTUP"i
|
||||
RESTART: "RESTART"i
|
||||
USERS: "USERS"i
|
||||
DROP: "DROP"i
|
||||
USER: "USER"i
|
||||
ALTER: "ALTER"i
|
||||
ACTIVE: "ACTIVE"i
|
||||
ADMIN: "ADMIN"i
|
||||
PASSWORD: "PASSWORD"i
|
||||
DATASET: "DATASET"i
|
||||
DATASETS: "DATASETS"i
|
||||
OF: "OF"i
|
||||
AGENTS: "AGENTS"i
|
||||
ROLE: "ROLE"i
|
||||
ROLES: "ROLES"i
|
||||
DESCRIPTION: "DESCRIPTION"i
|
||||
GRANT: "GRANT"i
|
||||
REVOKE: "REVOKE"i
|
||||
ALL: "ALL"i
|
||||
PERMISSION: "PERMISSION"i
|
||||
TO: "TO"i
|
||||
FROM: "FROM"i
|
||||
FOR: "FOR"i
|
||||
RESOURCES: "RESOURCES"i
|
||||
ON: "ON"i
|
||||
SET: "SET"i
|
||||
RESET: "RESET"i
|
||||
VERSION: "VERSION"i
|
||||
VAR: "VAR"i
|
||||
VARS: "VARS"i
|
||||
CONFIGS: "CONFIGS"i
|
||||
ENVS: "ENVS"i
|
||||
KEY: "KEY"i
|
||||
KEYS: "KEYS"i
|
||||
GENERATE: "GENERATE"i
|
||||
MODEL: "MODEL"i
|
||||
MODELS: "MODELS"i
|
||||
PROVIDER: "PROVIDER"i
|
||||
PROVIDERS: "PROVIDERS"i
|
||||
DEFAULT: "DEFAULT"i
|
||||
CHATS: "CHATS"i
|
||||
CHAT: "CHAT"i
|
||||
FILES: "FILES"i
|
||||
AS: "AS"i
|
||||
PARSE: "PARSE"i
|
||||
IMPORT: "IMPORT"i
|
||||
INTO: "INTO"i
|
||||
WITH: "WITH"i
|
||||
PARSER: "PARSER"i
|
||||
PIPELINE: "PIPELINE"i
|
||||
SEARCH: "SEARCH"i
|
||||
CURRENT: "CURRENT"i
|
||||
LLM: "LLM"i
|
||||
VLM: "VLM"i
|
||||
EMBEDDING: "EMBEDDING"i
|
||||
RERANKER: "RERANKER"i
|
||||
ASR: "ASR"i
|
||||
TTS: "TTS"i
|
||||
ASYNC: "ASYNC"i
|
||||
SYNC: "SYNC"i
|
||||
BENCHMARK: "BENCHMARK"i
|
||||
PING: "PING"i
|
||||
|
||||
login_user: LOGIN USER quoted_string ";"
|
||||
list_services: LIST SERVICES ";"
|
||||
show_service: SHOW SERVICE NUMBER ";"
|
||||
startup_service: STARTUP SERVICE NUMBER ";"
|
||||
shutdown_service: SHUTDOWN SERVICE NUMBER ";"
|
||||
restart_service: RESTART SERVICE NUMBER ";"
|
||||
|
||||
register_user: REGISTER USER quoted_string AS quoted_string PASSWORD quoted_string ";"
|
||||
list_users: LIST USERS ";"
|
||||
drop_user: DROP USER quoted_string ";"
|
||||
alter_user: ALTER USER PASSWORD quoted_string quoted_string ";"
|
||||
show_user: SHOW USER quoted_string ";"
|
||||
create_user: CREATE USER quoted_string quoted_string ";"
|
||||
activate_user: ALTER USER ACTIVE quoted_string status ";"
|
||||
|
||||
list_datasets: LIST DATASETS OF quoted_string ";"
|
||||
list_agents: LIST AGENTS OF quoted_string ";"
|
||||
|
||||
create_role: CREATE ROLE identifier [DESCRIPTION quoted_string] ";"
|
||||
drop_role: DROP ROLE identifier ";"
|
||||
alter_role: ALTER ROLE identifier SET DESCRIPTION quoted_string ";"
|
||||
list_roles: LIST ROLES ";"
|
||||
show_role: SHOW ROLE identifier ";"
|
||||
|
||||
grant_permission: GRANT identifier_list ON identifier TO ROLE identifier ";"
|
||||
revoke_permission: REVOKE identifier_list ON identifier FROM ROLE identifier ";"
|
||||
alter_user_role: ALTER USER quoted_string SET ROLE identifier ";"
|
||||
show_user_permission: SHOW USER PERMISSION quoted_string ";"
|
||||
|
||||
show_version: SHOW VERSION ";"
|
||||
|
||||
grant_admin: GRANT ADMIN quoted_string ";"
|
||||
revoke_admin: REVOKE ADMIN quoted_string ";"
|
||||
|
||||
generate_key: GENERATE KEY FOR USER quoted_string ";"
|
||||
list_keys: LIST KEYS OF quoted_string ";"
|
||||
drop_key: DROP KEY quoted_string OF quoted_string ";"
|
||||
|
||||
set_variable: SET VAR identifier identifier ";"
|
||||
show_variable: SHOW VAR identifier ";"
|
||||
list_variables: LIST VARS ";"
|
||||
list_configs: LIST CONFIGS ";"
|
||||
list_environments: LIST ENVS ";"
|
||||
|
||||
benchmark: BENCHMARK NUMBER NUMBER user_statement
|
||||
|
||||
user_statement: ping_server
|
||||
| show_current_user
|
||||
| create_model_provider
|
||||
| drop_model_provider
|
||||
| set_default_llm
|
||||
| set_default_vlm
|
||||
| set_default_embedding
|
||||
| set_default_reranker
|
||||
| set_default_asr
|
||||
| set_default_tts
|
||||
| reset_default_llm
|
||||
| reset_default_vlm
|
||||
| reset_default_embedding
|
||||
| reset_default_reranker
|
||||
| reset_default_asr
|
||||
| reset_default_tts
|
||||
| create_user_dataset_with_parser
|
||||
| create_user_dataset_with_pipeline
|
||||
| drop_user_dataset
|
||||
| list_user_datasets
|
||||
| list_user_dataset_files
|
||||
| list_user_agents
|
||||
| list_user_chats
|
||||
| create_user_chat
|
||||
| drop_user_chat
|
||||
| list_user_model_providers
|
||||
| list_user_default_models
|
||||
| import_docs_into_dataset
|
||||
| search_on_datasets
|
||||
|
||||
ping_server: PING ";"
|
||||
show_current_user: SHOW CURRENT USER ";"
|
||||
create_model_provider: CREATE MODEL PROVIDER quoted_string quoted_string ";"
|
||||
drop_model_provider: DROP MODEL PROVIDER quoted_string ";"
|
||||
set_default_llm: SET DEFAULT LLM quoted_string ";"
|
||||
set_default_vlm: SET DEFAULT VLM quoted_string ";"
|
||||
set_default_embedding: SET DEFAULT EMBEDDING quoted_string ";"
|
||||
set_default_reranker: SET DEFAULT RERANKER quoted_string ";"
|
||||
set_default_asr: SET DEFAULT ASR quoted_string ";"
|
||||
set_default_tts: SET DEFAULT TTS quoted_string ";"
|
||||
|
||||
reset_default_llm: RESET DEFAULT LLM ";"
|
||||
reset_default_vlm: RESET DEFAULT VLM ";"
|
||||
reset_default_embedding: RESET DEFAULT EMBEDDING ";"
|
||||
reset_default_reranker: RESET DEFAULT RERANKER ";"
|
||||
reset_default_asr: RESET DEFAULT ASR ";"
|
||||
reset_default_tts: RESET DEFAULT TTS ";"
|
||||
|
||||
list_user_datasets: LIST DATASETS ";"
|
||||
create_user_dataset_with_parser: CREATE DATASET quoted_string WITH EMBEDDING quoted_string PARSER quoted_string ";"
|
||||
create_user_dataset_with_pipeline: CREATE DATASET quoted_string WITH EMBEDDING quoted_string PIPELINE quoted_string ";"
|
||||
drop_user_dataset: DROP DATASET quoted_string ";"
|
||||
list_user_dataset_files: LIST FILES OF DATASET quoted_string ";"
|
||||
list_user_agents: LIST AGENTS ";"
|
||||
list_user_chats: LIST CHATS ";"
|
||||
create_user_chat: CREATE CHAT quoted_string ";"
|
||||
drop_user_chat: DROP CHAT quoted_string ";"
|
||||
list_user_model_providers: LIST MODEL PROVIDERS ";"
|
||||
list_user_default_models: LIST DEFAULT MODELS ";"
|
||||
import_docs_into_dataset: IMPORT quoted_string INTO DATASET quoted_string ";"
|
||||
search_on_datasets: SEARCH quoted_string ON DATASETS quoted_string ";"
|
||||
|
||||
parse_dataset_docs: PARSE quoted_string OF DATASET quoted_string ";"
|
||||
parse_dataset_sync: PARSE DATASET quoted_string SYNC ";"
|
||||
parse_dataset_async: PARSE DATASET quoted_string ASYNC ";"
|
||||
|
||||
identifier_list: identifier ("," identifier)*
|
||||
|
||||
identifier: WORD
|
||||
quoted_string: QUOTED_STRING
|
||||
status: WORD
|
||||
|
||||
QUOTED_STRING: /'[^']+'/ | /"[^"]+"/
|
||||
WORD: /[a-zA-Z0-9_\-\.]+/
|
||||
NUMBER: /[0-9]+/
|
||||
|
||||
%import common.WS
|
||||
%ignore WS
|
||||
"""
|
||||
|
||||
|
||||
class RAGFlowCLITransformer(Transformer):
|
||||
def start(self, items):
|
||||
return items[0]
|
||||
|
||||
def command(self, items):
|
||||
return items[0]
|
||||
|
||||
def login_user(self, items):
|
||||
email = items[2].children[0].strip("'\"")
|
||||
return {"type": "login_user", "email": email}
|
||||
|
||||
def ping_server(self, items):
|
||||
return {"type": "ping_server"}
|
||||
|
||||
def list_services(self, items):
|
||||
result = {"type": "list_services"}
|
||||
return result
|
||||
|
||||
def show_service(self, items):
|
||||
service_id = int(items[2])
|
||||
return {"type": "show_service", "number": service_id}
|
||||
|
||||
def startup_service(self, items):
|
||||
service_id = int(items[2])
|
||||
return {"type": "startup_service", "number": service_id}
|
||||
|
||||
def shutdown_service(self, items):
|
||||
service_id = int(items[2])
|
||||
return {"type": "shutdown_service", "number": service_id}
|
||||
|
||||
def restart_service(self, items):
|
||||
service_id = int(items[2])
|
||||
return {"type": "restart_service", "number": service_id}
|
||||
|
||||
def register_user(self, items):
|
||||
user_name: str = items[2].children[0].strip("'\"")
|
||||
nickname: str = items[4].children[0].strip("'\"")
|
||||
password: str = items[6].children[0].strip("'\"")
|
||||
return {"type": "register_user", "user_name": user_name, "nickname": nickname, "password": password}
|
||||
|
||||
def list_users(self, items):
|
||||
return {"type": "list_users"}
|
||||
|
||||
def show_user(self, items):
|
||||
user_name = items[2]
|
||||
return {"type": "show_user", "user_name": user_name}
|
||||
|
||||
def drop_user(self, items):
|
||||
user_name = items[2]
|
||||
return {"type": "drop_user", "user_name": user_name}
|
||||
|
||||
def alter_user(self, items):
|
||||
user_name = items[3]
|
||||
new_password = items[4]
|
||||
return {"type": "alter_user", "user_name": user_name, "password": new_password}
|
||||
|
||||
def create_user(self, items):
|
||||
user_name = items[2]
|
||||
password = items[3]
|
||||
return {"type": "create_user", "user_name": user_name, "password": password, "role": "user"}
|
||||
|
||||
def activate_user(self, items):
|
||||
user_name = items[3]
|
||||
activate_status = items[4]
|
||||
return {"type": "activate_user", "activate_status": activate_status, "user_name": user_name}
|
||||
|
||||
def list_datasets(self, items):
|
||||
user_name = items[3]
|
||||
return {"type": "list_datasets", "user_name": user_name}
|
||||
|
||||
def list_agents(self, items):
|
||||
user_name = items[3]
|
||||
return {"type": "list_agents", "user_name": user_name}
|
||||
|
||||
def create_role(self, items):
|
||||
role_name = items[2]
|
||||
if len(items) > 4:
|
||||
description = items[4]
|
||||
return {"type": "create_role", "role_name": role_name, "description": description}
|
||||
else:
|
||||
return {"type": "create_role", "role_name": role_name}
|
||||
|
||||
def drop_role(self, items):
|
||||
role_name = items[2]
|
||||
return {"type": "drop_role", "role_name": role_name}
|
||||
|
||||
def alter_role(self, items):
|
||||
role_name = items[2]
|
||||
description = items[5]
|
||||
return {"type": "alter_role", "role_name": role_name, "description": description}
|
||||
|
||||
def list_roles(self, items):
|
||||
return {"type": "list_roles"}
|
||||
|
||||
def show_role(self, items):
|
||||
role_name = items[2]
|
||||
return {"type": "show_role", "role_name": role_name}
|
||||
|
||||
def grant_permission(self, items):
|
||||
action_list = items[1]
|
||||
resource = items[3]
|
||||
role_name = items[6]
|
||||
return {"type": "grant_permission", "role_name": role_name, "resource": resource, "actions": action_list}
|
||||
|
||||
def revoke_permission(self, items):
|
||||
action_list = items[1]
|
||||
resource = items[3]
|
||||
role_name = items[6]
|
||||
return {"type": "revoke_permission", "role_name": role_name, "resource": resource, "actions": action_list}
|
||||
|
||||
def alter_user_role(self, items):
|
||||
user_name = items[2]
|
||||
role_name = items[5]
|
||||
return {"type": "alter_user_role", "user_name": user_name, "role_name": role_name}
|
||||
|
||||
def show_user_permission(self, items):
|
||||
user_name = items[3]
|
||||
return {"type": "show_user_permission", "user_name": user_name}
|
||||
|
||||
def show_version(self, items):
|
||||
return {"type": "show_version"}
|
||||
|
||||
def grant_admin(self, items):
|
||||
user_name = items[2]
|
||||
return {"type": "grant_admin", "user_name": user_name}
|
||||
|
||||
def revoke_admin(self, items):
|
||||
user_name = items[2]
|
||||
return {"type": "revoke_admin", "user_name": user_name}
|
||||
|
||||
def generate_key(self, items):
|
||||
user_name = items[4]
|
||||
return {"type": "generate_key", "user_name": user_name}
|
||||
|
||||
def list_keys(self, items):
|
||||
user_name = items[3]
|
||||
return {"type": "list_keys", "user_name": user_name}
|
||||
|
||||
def drop_key(self, items):
|
||||
key = items[2]
|
||||
user_name = items[4]
|
||||
return {"type": "drop_key", "key": key, "user_name": user_name}
|
||||
|
||||
def set_variable(self, items):
|
||||
var_name = items[2]
|
||||
var_value = items[3]
|
||||
return {"type": "set_variable", "var_name": var_name, "var_value": var_value}
|
||||
|
||||
def show_variable(self, items):
|
||||
var_name = items[2]
|
||||
return {"type": "show_variable", "var_name": var_name}
|
||||
|
||||
def list_variables(self, items):
|
||||
return {"type": "list_variables"}
|
||||
|
||||
def list_configs(self, items):
|
||||
return {"type": "list_configs"}
|
||||
|
||||
def list_environments(self, items):
|
||||
return {"type": "list_environments"}
|
||||
|
||||
def create_model_provider(self, items):
|
||||
provider_name = items[3].children[0].strip("'\"")
|
||||
provider_key = items[4].children[0].strip("'\"")
|
||||
return {"type": "create_model_provider", "provider_name": provider_name, "provider_key": provider_key}
|
||||
|
||||
def drop_model_provider(self, items):
|
||||
provider_name = items[3].children[0].strip("'\"")
|
||||
return {"type": "drop_model_provider", "provider_name": provider_name}
|
||||
|
||||
def show_current_user(self, items):
|
||||
return {"type": "show_current_user"}
|
||||
|
||||
def set_default_llm(self, items):
|
||||
llm_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "llm_id", "model_id": llm_id}
|
||||
|
||||
def set_default_vlm(self, items):
|
||||
vlm_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "img2txt_id", "model_id": vlm_id}
|
||||
|
||||
def set_default_embedding(self, items):
|
||||
embedding_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "embd_id", "model_id": embedding_id}
|
||||
|
||||
def set_default_reranker(self, items):
|
||||
reranker_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "reranker_id", "model_id": reranker_id}
|
||||
|
||||
def set_default_asr(self, items):
|
||||
asr_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "asr_id", "model_id": asr_id}
|
||||
|
||||
def set_default_tts(self, items):
|
||||
tts_id = items[3].children[0].strip("'\"")
|
||||
return {"type": "set_default_model", "model_type": "tts_id", "model_id": tts_id}
|
||||
|
||||
def reset_default_llm(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "llm_id"}
|
||||
|
||||
def reset_default_vlm(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "img2txt_id"}
|
||||
|
||||
def reset_default_embedding(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "embd_id"}
|
||||
|
||||
def reset_default_reranker(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "reranker_id"}
|
||||
|
||||
def reset_default_asr(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "asr_id"}
|
||||
|
||||
def reset_default_tts(self, items):
|
||||
return {"type": "reset_default_model", "model_type": "tts_id"}
|
||||
|
||||
def list_user_datasets(self, items):
|
||||
return {"type": "list_user_datasets"}
|
||||
|
||||
def create_user_dataset_with_parser(self, items):
|
||||
dataset_name = items[2].children[0].strip("'\"")
|
||||
embedding = items[5].children[0].strip("'\"")
|
||||
parser_type = items[7].children[0].strip("'\"")
|
||||
return {"type": "create_user_dataset", "dataset_name": dataset_name, "embedding": embedding,
|
||||
"parser_type": parser_type}
|
||||
|
||||
def create_user_dataset_with_pipeline(self, items):
|
||||
dataset_name = items[2].children[0].strip("'\"")
|
||||
embedding = items[5].children[0].strip("'\"")
|
||||
pipeline = items[7].children[0].strip("'\"")
|
||||
return {"type": "create_user_dataset", "dataset_name": dataset_name, "embedding": embedding,
|
||||
"pipeline": pipeline}
|
||||
|
||||
def drop_user_dataset(self, items):
|
||||
dataset_name = items[2].children[0].strip("'\"")
|
||||
return {"type": "drop_user_dataset", "dataset_name": dataset_name}
|
||||
|
||||
def list_user_dataset_files(self, items):
|
||||
dataset_name = items[4].children[0].strip("'\"")
|
||||
return {"type": "list_user_dataset_files", "dataset_name": dataset_name}
|
||||
|
||||
def list_user_agents(self, items):
|
||||
return {"type": "list_user_agents"}
|
||||
|
||||
def list_user_chats(self, items):
|
||||
return {"type": "list_user_chats"}
|
||||
|
||||
def create_user_chat(self, items):
|
||||
chat_name = items[2].children[0].strip("'\"")
|
||||
return {"type": "create_user_chat", "chat_name": chat_name}
|
||||
|
||||
def drop_user_chat(self, items):
|
||||
chat_name = items[2].children[0].strip("'\"")
|
||||
return {"type": "drop_user_chat", "chat_name": chat_name}
|
||||
|
||||
def list_user_model_providers(self, items):
|
||||
return {"type": "list_user_model_providers"}
|
||||
|
||||
def list_user_default_models(self, items):
|
||||
return {"type": "list_user_default_models"}
|
||||
|
||||
def parse_dataset_docs(self, items):
|
||||
document_list_str = items[1].children[0].strip("'\"")
|
||||
document_names = document_list_str.split(",")
|
||||
if len(document_names) == 1:
|
||||
document_names = document_names[0]
|
||||
document_names = document_names.split(" ")
|
||||
dataset_name = items[4].children[0].strip("'\"")
|
||||
return {"type": "parse_dataset_docs", "dataset_name": dataset_name, "document_names": document_names}
|
||||
|
||||
def parse_dataset_sync(self, items):
|
||||
dataset_name = items[2].children[0].strip("'\"")
|
||||
return {"type": "parse_dataset", "dataset_name": dataset_name, "method": "sync"}
|
||||
|
||||
def parse_dataset_async(self, items):
|
||||
dataset_name = items[2].children[0].strip("'\"")
|
||||
return {"type": "parse_dataset", "dataset_name": dataset_name, "method": "async"}
|
||||
|
||||
def import_docs_into_dataset(self, items):
|
||||
document_list_str = items[1].children[0].strip("'\"")
|
||||
document_paths = document_list_str.split(",")
|
||||
if len(document_paths) == 1:
|
||||
document_paths = document_paths[0]
|
||||
document_paths = document_paths.split(" ")
|
||||
dataset_name = items[4].children[0].strip("'\"")
|
||||
return {"type": "import_docs_into_dataset", "dataset_name": dataset_name, "document_paths": document_paths}
|
||||
|
||||
def search_on_datasets(self, items):
|
||||
question = items[1].children[0].strip("'\"")
|
||||
datasets_str = items[4].children[0].strip("'\"")
|
||||
datasets = datasets_str.split(",")
|
||||
if len(datasets) == 1:
|
||||
datasets = datasets[0]
|
||||
datasets = datasets.split(" ")
|
||||
return {"type": "search_on_datasets", "datasets": datasets, "question": question}
|
||||
|
||||
def benchmark(self, items):
|
||||
concurrency: int = int(items[1])
|
||||
iterations: int = int(items[2])
|
||||
command = items[3].children[0]
|
||||
return {"type": "benchmark", "concurrency": concurrency, "iterations": iterations, "command": command}
|
||||
|
||||
def identifier_list(self, items):
|
||||
return items
|
||||
|
||||
def meta_command(self, items):
|
||||
command_name = str(items[0]).lower()
|
||||
args = items[1:] if len(items) > 1 else []
|
||||
|
||||
# handle quoted parameter
|
||||
parsed_args = []
|
||||
for arg in args:
|
||||
if hasattr(arg, "value"):
|
||||
parsed_args.append(arg.value)
|
||||
else:
|
||||
parsed_args.append(str(arg))
|
||||
|
||||
return {"type": "meta", "command": command_name, "args": parsed_args}
|
||||
|
||||
def meta_command_name(self, items):
|
||||
return items[0]
|
||||
|
||||
def meta_args(self, items):
|
||||
return items
|
||||
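A minimal sketch of the grammar and transformer in combination, using the same Lark configuration as ragflow_cli.py below:

from lark import Lark

parser = Lark(GRAMMAR, start="start", parser="lalr", transformer=RAGFlowCLITransformer())
print(parser.parse("LIST USERS;"))      # -> {'type': 'list_users'}
print(parser.parse("SHOW SERVICE 1;"))  # -> {'type': 'show_service', 'number': 1}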
admin/client/pyproject.toml
@@ -1,14 +1,14 @@
 [project]
 name = "ragflow-cli"
-version = "0.22.1"
+version = "0.23.1"
 description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring. "
 authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
 license = { text = "Apache License, Version 2.0" }
 readme = "README.md"
-requires-python = ">=3.10,<3.13"
+requires-python = ">=3.12,<3.15"
 dependencies = [
     "requests>=2.30.0,<3.0.0",
-    "beartype>=0.18.5,<0.19.0",
+    "beartype>=0.20.0,<1.0.0",
     "pycryptodomex>=3.10.0",
     "lark>=1.1.0",
 ]
@@ -20,5 +20,8 @@ test = [
     "requests-toolbelt>=1.0.0",
 ]
 
+[tool.setuptools]
+py-modules = ["ragflow_cli", "parser"]
+
 [project.scripts]
-ragflow-cli = "admin_client:main"
+ragflow-cli = "ragflow_cli:main"
|
||||
admin/client/ragflow_cli.py (new file, 322 lines)
@@ -0,0 +1,322 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import base64
|
||||
import getpass
|
||||
from cmd import Cmd
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import requests
|
||||
import warnings
|
||||
from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
|
||||
from Cryptodome.PublicKey import RSA
|
||||
from lark import Lark, Tree
|
||||
from parser import GRAMMAR, RAGFlowCLITransformer
|
||||
from http_client import HttpClient
|
||||
from ragflow_client import RAGFlowClient, run_command
|
||||
from user import login_user
|
||||
|
||||
warnings.filterwarnings("ignore", category=getpass.GetPassWarning)
|
||||
|
||||
def encrypt(input_string):
|
||||
pub = "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArq9XTUSeYr2+N1h3Afl/z8Dse/2yD0ZGrKwx+EEEcdsBLca9Ynmx3nIB5obmLlSfmskLpBo0UACBmB5rEjBp2Q2f3AG3Hjd4B+gNCG6BDaawuDlgANIhGnaTLrIqWrrcm4EMzJOnAOI1fgzJRsOOUEfaS318Eq9OVO3apEyCCt0lOQK6PuksduOjVxtltDav+guVAA068NrPYmRNabVKRNLJpL8w4D44sfth5RvZ3q9t+6RTArpEtc5sh5ChzvqPOzKGMXW83C95TxmXqpbK6olN4RevSfVjEAgCydH6HN6OhtOQEcnrU97r9H0iZOWwbw3pVrZiUkuRD1R56Wzs2wIDAQAB\n-----END PUBLIC KEY-----"
|
||||
pub_key = RSA.importKey(pub)
|
||||
cipher = Cipher_pkcs1_v1_5.new(pub_key)
|
||||
cipher_text = cipher.encrypt(base64.b64encode(input_string.encode("utf-8")))
|
||||
return base64.b64encode(cipher_text).decode("utf-8")
|
||||
|
||||
|
||||
def encode_to_base64(input_string):
|
||||
base64_encoded = base64.b64encode(input_string.encode("utf-8"))
|
||||
return base64_encoded.decode("utf-8")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class RAGFlowCLI(Cmd):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.parser = Lark(GRAMMAR, start="start", parser="lalr", transformer=RAGFlowCLITransformer())
|
||||
self.command_history = []
|
||||
self.account = "admin@ragflow.io"
|
||||
self.account_password: str = "admin"
|
||||
self.session = requests.Session()
|
||||
self.host: str = ""
|
||||
self.port: int = 0
|
||||
self.mode: str = "admin"
|
||||
self.ragflow_client = None
|
||||
|
||||
intro = r"""Type "\h" for help."""
|
||||
prompt = "ragflow> "
|
||||
|
||||
def onecmd(self, command: str) -> bool:
|
||||
try:
|
||||
result = self.parse_command(command)
|
||||
|
||||
if isinstance(result, dict):
|
||||
if "type" in result and result.get("type") == "empty":
|
||||
return False
|
||||
|
||||
self.execute_command(result)
|
||||
|
||||
if isinstance(result, Tree):
|
||||
return False
|
||||
|
||||
if result.get("type") == "meta" and result.get("command") in ["q", "quit", "exit"]:
|
||||
return True
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print("\nUse '\\q' to quit")
|
||||
except EOFError:
|
||||
print("\nGoodbye!")
|
||||
return True
|
||||
return False
|
||||
|
||||
def emptyline(self) -> bool:
|
||||
return False
|
||||
|
||||
def default(self, line: str) -> bool:
|
||||
return self.onecmd(line)
|
||||
|
||||
def parse_command(self, command_str: str) -> Dict[str, Any] | Tree:
|
||||
if not command_str.strip():
|
||||
return {"type": "empty"}
|
||||
|
||||
self.command_history.append(command_str)
|
||||
|
||||
try:
|
||||
result = self.parser.parse(command_str)
|
||||
return result
|
||||
except Exception as e:
|
||||
return {"type": "error", "message": f"Parse error: {str(e)}"}
|
||||
|
||||
def verify_auth(self, arguments: dict, single_command: bool, auth: bool):
|
||||
server_type = arguments.get("type", "admin")
|
||||
http_client = HttpClient(arguments["host"], arguments["port"])
|
||||
if not auth:
|
||||
self.ragflow_client = RAGFlowClient(http_client, server_type)
|
||||
return True
|
||||
|
||||
user_name = arguments["username"]
|
||||
attempt_count = 3
|
||||
if single_command:
|
||||
attempt_count = 1
|
||||
|
||||
try_count = 0
|
||||
while True:
|
||||
try_count += 1
|
||||
if try_count > attempt_count:
|
||||
return False
|
||||
|
||||
if single_command:
|
||||
user_password = arguments["password"]
|
||||
else:
|
||||
user_password = getpass.getpass(f"password for {user_name}: ").strip()
|
||||
|
||||
try:
|
||||
token = login_user(http_client, server_type, user_name, user_password)
|
||||
http_client.login_token = token
|
||||
self.ragflow_client = RAGFlowClient(http_client, server_type)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
print("Can't access server for login (connection failed)")
|
||||
|
||||
def _format_service_detail_table(self, data):
|
||||
if isinstance(data, list):
|
||||
return data
|
||||
if not all([isinstance(v, list) for v in data.values()]):
|
||||
# normal table
|
||||
return data
|
||||
# handle task_executor heartbeats map, for example {'name': [{'done': 2, 'now': timestamp1}, {'done': 3, 'now': timestamp2}]
|
||||
task_executor_list = []
|
||||
for k, v in data.items():
|
||||
# display latest status
|
||||
heartbeats = sorted(v, key=lambda x: x["now"], reverse=True)
|
||||
task_executor_list.append(
|
||||
{
|
||||
"task_executor_name": k,
|
||||
**heartbeats[0],
|
||||
}
|
||||
if heartbeats
|
||||
else {"task_executor_name": k}
|
||||
)
|
||||
return task_executor_list
|
||||
|
||||
def _print_table_simple(self, data):
|
||||
if not data:
|
||||
print("No data to print")
|
||||
return
|
||||
if isinstance(data, dict):
|
||||
# handle single row data
|
||||
data = [data]
|
||||
|
||||
columns = list(set().union(*(d.keys() for d in data)))
|
||||
columns.sort()
|
||||
col_widths = {}
|
||||
|
||||
def get_string_width(text):
|
||||
half_width_chars = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\t\n\r"
|
||||
width = 0
|
||||
for char in text:
|
||||
if char in half_width_chars:
|
||||
width += 1
|
||||
else:
|
||||
width += 2
|
||||
return width
|
||||
|
||||
for col in columns:
|
||||
max_width = get_string_width(str(col))
|
||||
for item in data:
|
||||
value_len = get_string_width(str(item.get(col, "")))
|
||||
if value_len > max_width:
|
||||
max_width = value_len
|
||||
col_widths[col] = max(2, max_width)
|
||||
|
||||
# Generate delimiter
|
||||
separator = "+" + "+".join(["-" * (col_widths[col] + 2) for col in columns]) + "+"
|
||||
|
||||
# Print header
|
||||
print(separator)
|
||||
header = "|" + "|".join([f" {col:<{col_widths[col]}} " for col in columns]) + "|"
|
||||
print(header)
|
||||
print(separator)
|
||||
|
||||
# Print data
|
||||
for item in data:
|
||||
row = "|"
|
||||
for col in columns:
|
||||
value = str(item.get(col, ""))
|
||||
if get_string_width(value) > col_widths[col]:
|
||||
value = value[: col_widths[col] - 3] + "..."
|
||||
row += f" {value:<{col_widths[col] - (get_string_width(value) - len(value))}} |"
|
||||
print(row)
|
||||
|
||||
print(separator)
|
||||
|
||||
def run_interactive(self, args):
|
||||
if self.verify_auth(args, single_command=False, auth=args["auth"]):
|
||||
print(r"""
|
||||
____ ___ ______________ ________ ____
|
||||
/ __ \/ | / ____/ ____/ /___ _ __ / ____/ / / _/
|
||||
/ /_/ / /| |/ / __/ /_ / / __ \ | /| / / / / / / / /
|
||||
/ _, _/ ___ / /_/ / __/ / / /_/ / |/ |/ / / /___/ /____/ /
|
||||
/_/ |_/_/ |_\____/_/ /_/\____/|__/|__/ \____/_____/___/
|
||||
""")
|
||||
self.cmdloop()
|
||||
|
||||
print("RAGFlow command line interface - Type '\\?' for help, '\\q' to quit")
|
||||
|
||||
def run_single_command(self, args):
|
||||
if self.verify_auth(args, single_command=True, auth=args["auth"]):
|
||||
command = args["command"]
|
||||
result = self.parse_command(command)
|
||||
self.execute_command(result)
|
||||
|
||||
|
||||
def parse_connection_args(self, args: List[str]) -> Dict[str, Any]:
|
||||
parser = argparse.ArgumentParser(description="RAGFlow CLI Client", add_help=False)
|
||||
parser.add_argument("-h", "--host", default="127.0.0.1", help="Admin or RAGFlow service host")
|
||||
parser.add_argument("-p", "--port", type=int, default=9381, help="Admin or RAGFlow service port")
|
||||
parser.add_argument("-w", "--password", default="admin", type=str, help="Superuser password")
|
||||
parser.add_argument("-t", "--type", default="admin", type=str, help="CLI mode, admin or user")
|
||||
parser.add_argument("-u", "--username", default=None,
|
||||
help="Username (email). In admin mode defaults to admin@ragflow.io, in user mode required.")
|
||||
parser.add_argument("command", nargs="?", help="Single command")
|
||||
try:
|
||||
parsed_args, remaining_args = parser.parse_known_args(args)
|
||||
# Determine username based on mode
|
||||
username = parsed_args.username
|
||||
if parsed_args.type == "admin":
|
||||
if username is None:
|
||||
username = "admin@ragflow.io"
|
||||
|
||||
if remaining_args:
|
||||
if remaining_args[0] == "command":
|
||||
command_str = ' '.join(remaining_args[1:]) + ';'
|
||||
auth = True
|
||||
if len(remaining_args) > 1 and remaining_args[1] == "register":
|
||||
auth = False
|
||||
else:
|
||||
if username is None:
|
||||
print("Error: username (-u) is required in user mode")
|
||||
return {"error": "Username required"}
|
||||
return {
|
||||
"host": parsed_args.host,
|
||||
"port": parsed_args.port,
|
||||
"password": parsed_args.password,
|
||||
"type": parsed_args.type,
|
||||
"username": username,
|
||||
"command": command_str,
|
||||
"auth": auth
|
||||
}
|
||||
else:
|
||||
return {"error": "Invalid command"}
|
||||
else:
|
||||
auth = True
|
||||
if username is None:
|
||||
auth = False
|
||||
return {
|
||||
"host": parsed_args.host,
|
||||
"port": parsed_args.port,
|
||||
"type": parsed_args.type,
|
||||
"username": username,
|
||||
"auth": auth
|
||||
}
|
||||
except SystemExit:
|
||||
return {"error": "Invalid connection arguments"}
|
||||
|
||||
def execute_command(self, parsed_command: Dict[str, Any]):
|
||||
command_dict: dict
|
||||
if isinstance(parsed_command, Tree):
|
||||
command_dict = parsed_command.children[0]
|
||||
else:
|
||||
if parsed_command["type"] == "error":
|
||||
print(f"Error: {parsed_command['message']}")
|
||||
return
|
||||
else:
|
||||
command_dict = parsed_command
|
||||
|
||||
# print(f"Parsed command: {command_dict}")
|
||||
run_command(self.ragflow_client, command_dict)
|
||||
|
||||
def main():
|
||||
|
||||
cli = RAGFlowCLI()
|
||||
|
||||
args = cli.parse_connection_args(sys.argv)
|
||||
if "error" in args:
|
||||
print("Error: Invalid connection arguments")
|
||||
return
|
||||
|
||||
if "command" in args:
|
||||
# single command mode
|
||||
# for user mode, api key or password is ok
|
||||
# for admin mode, only password
|
||||
if "password" not in args:
|
||||
print("Error: password is missing")
|
||||
return
|
||||
|
||||
cli.run_single_command(args)
|
||||
else:
|
||||
cli.run_interactive(args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
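A small sketch of the table helpers in isolation; the heartbeat payload is invented to mirror the shape _format_service_detail_table expects:

cli = RAGFlowCLI()
heartbeats = {
    "task_executor_0": [
        {"done": 2, "now": 1700000000.0},
        {"done": 5, "now": 1700000060.0},
    ],
}
rows = cli._format_service_detail_table(heartbeats)  # keeps only the latest heartbeat per executor
cli._print_table_simple(rows)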
admin/client/ragflow_client.py (new file, 1508 lines; diff suppressed because it is too large)
admin/client/user.py (new file, 65 lines)
@@ -0,0 +1,65 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from http_client import HttpClient


class AuthException(Exception):
    def __init__(self, message, code=401):
        super().__init__(message)
        self.code = code
        self.message = message


def encrypt_password(password_plain: str) -> str:
    try:
        from api.utils.crypt import crypt
    except Exception as exc:
        raise AuthException(
            "Password encryption unavailable; install pycryptodomex (uv sync --python 3.12 --group test)."
        ) from exc
    return crypt(password_plain)


def register_user(client: HttpClient, email: str, nickname: str, password: str) -> None:
    password_enc = encrypt_password(password)
    payload = {"email": email, "nickname": nickname, "password": password_enc}
    res = client.request_json("POST", "/user/register", use_api_base=False, auth_kind=None, json_body=payload)
    if res.get("code") == 0:
        return
    msg = res.get("message", "")
    if "has already registered" in msg:
        return
    raise AuthException(f"Register failed: {msg}")


def login_user(client: HttpClient, server_type: str, email: str, password: str) -> str:
    password_enc = encrypt_password(password)
    payload = {"email": email, "password": password_enc}
    if server_type == "admin":
        response = client.request("POST", "/admin/login", use_api_base=True, auth_kind=None, json_body=payload)
    else:
        response = client.request("POST", "/user/login", use_api_base=False, auth_kind=None, json_body=payload)
    try:
        res = response.json()
    except Exception as exc:
        raise AuthException(f"Login failed: invalid JSON response ({exc})") from exc
    if res.get("code") != 0:
        raise AuthException(f"Login failed: {res.get('message')}")
    token = response.headers.get("Authorization")
    if not token:
        raise AuthException("Login failed: missing Authorization header")
    return token
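A hedged usage sketch for the helpers above; HttpClient's constructor arguments here are illustrative assumptions, not its actual signature:

from http_client import HttpClient
from user import AuthException, login_user, register_user

client = HttpClient(base_url="http://127.0.0.1:9380")  # hypothetical constructor args

try:
    # register_user tolerates "has already registered", so this call is idempotent
    register_user(client, "admin@example.com", "admin", "admin123")
    token = login_user(client, "admin", "admin@example.com", "admin123")
    print(f"Authorization token: {token}")
except AuthException as e:
    print(f"auth error ({e.code}): {e.message}")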
admin/client/uv.lock (new generated file, 298 lines)
@@ -0,0 +1,298 @@
version = 1
revision = 3
requires-python = ">=3.10, <3.13"

[all packages resolve against the registry https://pypi.tuna.tsinghua.edu.cn/simple; the generated sdist/wheel URL and hash entries are omitted here]

[[package]]
name = "beartype"
version = "0.22.6"

[[package]]
name = "certifi"
version = "2025.11.12"

[[package]]
name = "charset-normalizer"
version = "3.4.4"

[[package]]
name = "colorama"
version = "0.4.6"

[[package]]
name = "exceptiongroup"
version = "1.3.1"
dependencies = [
    { name = "typing-extensions" },
]

[[package]]
name = "idna"
version = "3.11"

[[package]]
name = "iniconfig"
version = "2.3.0"

[[package]]
name = "lark"
version = "1.3.1"

[[package]]
name = "packaging"
version = "25.0"

[[package]]
name = "pluggy"
version = "1.6.0"

[[package]]
name = "pycryptodomex"
version = "3.23.0"

[[package]]
name = "pygments"
version = "2.19.2"

[[package]]
name = "pytest"
version = "9.0.1"
dependencies = [
    { name = "colorama", marker = "sys_platform == 'win32'" },
    { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
    { name = "iniconfig" },
    { name = "packaging" },
    { name = "pluggy" },
    { name = "pygments" },
    { name = "tomli", marker = "python_full_version < '3.11'" },
]

[[package]]
name = "ragflow-cli"
version = "0.23.1"
source = { virtual = "." }
dependencies = [
    { name = "beartype" },
    { name = "lark" },
    { name = "pycryptodomex" },
    { name = "requests" },
]

[package.dev-dependencies]
test = [
    { name = "pytest" },
    { name = "requests" },
    { name = "requests-toolbelt" },
]

[package.metadata]
requires-dist = [
    { name = "beartype", specifier = ">=0.20.0,<1.0.0" },
    { name = "lark", specifier = ">=1.1.0" },
    { name = "pycryptodomex", specifier = ">=3.10.0" },
    { name = "requests", specifier = ">=2.30.0,<3.0.0" },
]

[package.metadata.requires-dev]
test = [
    { name = "pytest", specifier = ">=8.3.5" },
    { name = "requests", specifier = ">=2.32.3" },
    { name = "requests-toolbelt", specifier = ">=1.0.0" },
]

[[package]]
name = "requests"
version = "2.32.5"
dependencies = [
    { name = "certifi" },
    { name = "charset-normalizer" },
    { name = "idna" },
    { name = "urllib3" },
]

[[package]]
name = "requests-toolbelt"
version = "1.0.0"
dependencies = [
    { name = "requests" },
]

[[package]]
name = "tomli"
version = "2.3.0"

[[package]]
name = "typing-extensions"
version = "4.15.0"

[[package]]
name = "urllib3"
version = "2.5.0"
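uv.lock is plain TOML, so the pinned versions can be listed without uv itself; a small sketch using only the standard library (tomllib needs Python 3.11+):

import tomllib

with open("admin/client/uv.lock", "rb") as f:
    lock = tomllib.load(f)

for pkg in lock["package"]:
    print(f'{pkg["name"]}=={pkg["version"]}')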
@@ -14,12 +14,15 @@
 # limitations under the License.
 #
+import time
+start_ts = time.time()
+
 import os
 import signal
 import logging
 import time
 import threading
 import traceback
 import faulthandler

 from flask import Flask
 from flask_login import LoginManager
@@ -37,6 +40,7 @@ from common.versions import get_ragflow_version
 stop_event = threading.Event()

 if __name__ == '__main__':
+    faulthandler.enable()
     init_root_logger("admin_service")
     logging.info(r"""
     ____  ___  ______________                 ___       __          _
@@ -64,7 +68,7 @@ if __name__ == '__main__':
     SERVICE_CONFIGS.configs = load_configurations(SERVICE_CONF)

     try:
-        logging.info("RAGFlow Admin service start...")
+        logging.info(f"RAGFlow admin is ready after {time.time() - start_ts}s initialization.")
         run_simple(
             hostname="0.0.0.0",
             port=9381,
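The hunk above captures start_ts before the heavy imports, so the ready-time log covers import cost as well as initialization. A minimal sketch of the same pattern (names illustrative, not the service's code):

import time

start_ts = time.time()  # taken before the expensive imports below

import json  # stand-in for the service's heavy framework imports


def main():
    # ... load configuration, register blueprints, start the server ...
    print(f"service is ready after {time.time() - start_ts:.2f}s initialization.")


if __name__ == "__main__":
    main()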
@@ -176,11 +176,11 @@ def login_verify(f):
                 "message": "Access denied",
                 "data": None
             }), 200
-        except Exception as e:
-            error_msg = str(e)
+        except Exception:
+            logging.exception("An error occurred during admin login verification.")
             return jsonify({
                 "code": 500,
-                "message": error_msg
+                "message": "An internal server error occurred."
             }), 200

         return f(*args, **kwargs)
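The login_verify change swaps str(e) in the response for logging.exception plus a generic message, so stack traces stay in the server log instead of leaking into the HTTP reply. A self-contained sketch of that pattern (hypothetical decorator, not the project's auth module):

import logging
from functools import wraps

from flask import Flask, jsonify

app = Flask(__name__)


def safe_endpoint(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception:
            # the full traceback goes to the log; the client sees only a generic message
            logging.exception("Unhandled error in %s", f.__name__)
            return jsonify({"code": 500, "message": "An internal server error occurred."}), 200
    return wrapper


@app.route("/boom")
@safe_endpoint
def boom():
    raise RuntimeError("secret details")  # never reaches the client verbatim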
@ -15,24 +15,34 @@
|
||||
#
|
||||
|
||||
import secrets
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from flask import Blueprint, request
|
||||
from common.time_utils import current_timestamp, datetime_format
|
||||
from datetime import datetime
|
||||
from flask import Blueprint, Response, request
|
||||
from flask_login import current_user, login_required, logout_user
|
||||
|
||||
from auth import login_verify, login_admin, check_admin_auth
|
||||
from responses import success_response, error_response
|
||||
from services import UserMgr, ServiceMgr, UserServiceMgr
|
||||
from services import UserMgr, ServiceMgr, UserServiceMgr, SettingsMgr, ConfigMgr, EnvironmentsMgr, SandboxMgr
|
||||
from roles import RoleMgr
|
||||
from api.common.exceptions import AdminException
|
||||
from common.versions import get_ragflow_version
|
||||
from api.utils.api_utils import generate_confirmation_token
|
||||
|
||||
admin_bp = Blueprint('admin', __name__, url_prefix='/api/v1/admin')
|
||||
admin_bp = Blueprint("admin", __name__, url_prefix="/api/v1/admin")
|
||||
|
||||
|
||||
@admin_bp.route('/login', methods=['POST'])
|
||||
@admin_bp.route("/ping", methods=["GET"])
|
||||
def ping():
|
||||
return success_response("PONG")
|
||||
|
||||
|
||||
@admin_bp.route("/login", methods=["POST"])
|
||||
def login():
|
||||
if not request.json:
|
||||
return error_response('Authorize admin failed.' ,400)
|
||||
return error_response("Authorize admin failed.", 400)
|
||||
try:
|
||||
email = request.json.get("email", "")
|
||||
password = request.json.get("password", "")
|
||||
@ -41,7 +51,7 @@ def login():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/logout', methods=['GET'])
|
||||
@admin_bp.route("/logout", methods=["GET"])
|
||||
@login_required
|
||||
def logout():
|
||||
try:
|
||||
@ -53,7 +63,7 @@ def logout():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/auth', methods=['GET'])
|
||||
@admin_bp.route("/auth", methods=["GET"])
|
||||
@login_verify
|
||||
def auth_admin():
|
||||
try:
|
||||
@ -62,7 +72,7 @@ def auth_admin():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users', methods=['GET'])
|
||||
@admin_bp.route("/users", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def list_users():
|
||||
@ -73,18 +83,18 @@ def list_users():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users', methods=['POST'])
|
||||
@admin_bp.route("/users", methods=["POST"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def create_user():
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data or 'username' not in data or 'password' not in data:
|
||||
if not data or "username" not in data or "password" not in data:
|
||||
return error_response("Username and password are required", 400)
|
||||
|
||||
username = data['username']
|
||||
password = data['password']
|
||||
role = data.get('role', 'user')
|
||||
username = data["username"]
|
||||
password = data["password"]
|
||||
role = data.get("role", "user")
|
||||
|
||||
res = UserMgr.create_user(username, password, role)
|
||||
if res["success"]:
|
||||
@ -100,7 +110,7 @@ def create_user():
|
||||
return error_response(str(e))
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>', methods=['DELETE'])
|
||||
@admin_bp.route("/users/<username>", methods=["DELETE"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def delete_user(username):
|
||||
@ -117,16 +127,16 @@ def delete_user(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>/password', methods=['PUT'])
|
||||
@admin_bp.route("/users/<username>/password", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def change_password(username):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data or 'new_password' not in data:
|
||||
if not data or "new_password" not in data:
|
||||
return error_response("New password is required", 400)
|
||||
|
||||
new_password = data['new_password']
|
||||
new_password = data["new_password"]
|
||||
msg = UserMgr.update_user_password(username, new_password)
|
||||
return success_response(None, msg)
|
||||
|
||||
@ -136,15 +146,15 @@ def change_password(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>/activate', methods=['PUT'])
|
||||
@admin_bp.route("/users/<username>/activate", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def alter_user_activate_status(username):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data or 'activate_status' not in data:
|
||||
if not data or "activate_status" not in data:
|
||||
return error_response("Activation status is required", 400)
|
||||
activate_status = data['activate_status']
|
||||
activate_status = data["activate_status"]
|
||||
msg = UserMgr.update_user_activate_status(username, activate_status)
|
||||
return success_response(None, msg)
|
||||
except AdminException as e:
|
||||
@ -153,7 +163,39 @@ def alter_user_activate_status(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>', methods=['GET'])
|
||||
@admin_bp.route("/users/<username>/admin", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def grant_admin(username):
|
||||
try:
|
||||
if current_user.email == username:
|
||||
return error_response(f"can't grant current user: {username}", 409)
|
||||
msg = UserMgr.grant_admin(username)
|
||||
return success_response(None, msg)
|
||||
|
||||
except AdminException as e:
|
||||
return error_response(e.message, e.code)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/users/<username>/admin", methods=["DELETE"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def revoke_admin(username):
|
||||
try:
|
||||
if current_user.email == username:
|
||||
return error_response(f"can't grant current user: {username}", 409)
|
||||
msg = UserMgr.revoke_admin(username)
|
||||
return success_response(None, msg)
|
||||
|
||||
except AdminException as e:
|
||||
return error_response(e.message, e.code)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/users/<username>", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_user_details(username):
|
||||
@ -167,7 +209,7 @@ def get_user_details(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>/datasets', methods=['GET'])
|
||||
@admin_bp.route("/users/<username>/datasets", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_user_datasets(username):
|
||||
@ -181,7 +223,7 @@ def get_user_datasets(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/users/<username>/agents', methods=['GET'])
|
||||
@admin_bp.route("/users/<username>/agents", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_user_agents(username):
|
||||
@ -195,7 +237,7 @@ def get_user_agents(username):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/services', methods=['GET'])
|
||||
@admin_bp.route("/services", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_services():
|
||||
@ -206,7 +248,7 @@ def get_services():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/service_types/<service_type>', methods=['GET'])
|
||||
@admin_bp.route("/service_types/<service_type>", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_services_by_type(service_type_str):
|
||||
@ -217,7 +259,7 @@ def get_services_by_type(service_type_str):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/services/<service_id>', methods=['GET'])
|
||||
@admin_bp.route("/services/<service_id>", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_service(service_id):
|
||||
@ -228,7 +270,7 @@ def get_service(service_id):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/services/<service_id>', methods=['DELETE'])
|
||||
@admin_bp.route("/services/<service_id>", methods=["DELETE"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def shutdown_service(service_id):
|
||||
@ -239,7 +281,7 @@ def shutdown_service(service_id):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/services/<service_id>', methods=['PUT'])
|
||||
@admin_bp.route("/services/<service_id>", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def restart_service(service_id):
|
||||
@ -250,38 +292,38 @@ def restart_service(service_id):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/roles', methods=['POST'])
|
||||
@admin_bp.route("/roles", methods=["POST"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def create_role():
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data or 'role_name' not in data:
|
||||
if not data or "role_name" not in data:
|
||||
return error_response("Role name is required", 400)
|
||||
role_name: str = data['role_name']
|
||||
description: str = data['description']
|
||||
role_name: str = data["role_name"]
|
||||
description: str = data["description"]
|
||||
res = RoleMgr.create_role(role_name, description)
|
||||
return success_response(res)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/roles/<role_name>', methods=['PUT'])
|
||||
@admin_bp.route("/roles/<role_name>", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def update_role(role_name: str):
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data or 'description' not in data:
|
||||
if not data or "description" not in data:
|
||||
return error_response("Role description is required", 400)
|
||||
description: str = data['description']
|
||||
description: str = data["description"]
|
||||
res = RoleMgr.update_role_description(role_name, description)
|
||||
return success_response(res)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/roles/<role_name>', methods=['DELETE'])
|
||||
@admin_bp.route("/roles/<role_name>", methods=["DELETE"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def delete_role(role_name: str):
|
||||
@ -292,7 +334,7 @@ def delete_role(role_name: str):
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route('/roles', methods=['GET'])
|
||||
@admin_bp.route("/roles", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def list_roles():
|
||||
@ -303,7 +345,7 @@ def list_roles():
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/roles/<role_name>/permission", methods=["GET"])
@login_required
@check_admin_auth
def get_role_permission(role_name: str):
@ -314,54 +356,54 @@ def get_role_permission(role_name: str):
        return error_response(str(e), 500)


@admin_bp.route("/roles/<role_name>/permission", methods=["POST"])
@login_required
@check_admin_auth
def grant_role_permission(role_name: str):
    try:
        data = request.get_json()
        if not data or "actions" not in data or "resource" not in data:
            return error_response("Permission is required", 400)
        actions: list = data["actions"]
        resource: str = data["resource"]
        res = RoleMgr.grant_role_permission(role_name, actions, resource)
        return success_response(res)
    except Exception as e:
        return error_response(str(e), 500)

@admin_bp.route("/roles/<role_name>/permission", methods=["DELETE"])
@login_required
@check_admin_auth
def revoke_role_permission(role_name: str):
    try:
        data = request.get_json()
        if not data or "actions" not in data or "resource" not in data:
            return error_response("Permission is required", 400)
        actions: list = data["actions"]
        resource: str = data["resource"]
        res = RoleMgr.revoke_role_permission(role_name, actions, resource)
        return success_response(res)
    except Exception as e:
        return error_response(str(e), 500)

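The grant/revoke pair above takes the same JSON body: an `actions` list and a `resource` string. A minimal client-side sketch, assuming the admin service listens at `http://localhost:9380/admin` and that authentication is handled by the header shown (both the URL and the auth header are illustrative, not part of this diff):

# Illustrative client sketch; URL, header, role, and resource names are assumptions.
import requests

BASE = "http://localhost:9380/admin"
HEADERS = {"Authorization": "Bearer <token>"}

# Grant read/write on a hypothetical "dataset" resource to role "editor".
payload = {"actions": ["read", "write"], "resource": "dataset"}
print(requests.post(f"{BASE}/roles/editor/permission", json=payload, headers=HEADERS).json())

# Revoking sends the same body to the DELETE route.
print(requests.delete(f"{BASE}/roles/editor/permission", json=payload, headers=HEADERS).json())
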
@admin_bp.route("/users/<user_name>/role", methods=["PUT"])
@login_required
@check_admin_auth
def update_user_role(user_name: str):
    try:
        data = request.get_json()
        if not data or "role_name" not in data:
            return error_response("Role name is required", 400)
        role_name: str = data["role_name"]
        res = RoleMgr.update_user_role(user_name, role_name)
        return success_response(res)
    except Exception as e:
        return error_response(str(e), 500)


@admin_bp.route("/users/<user_name>/permission", methods=["GET"])
@login_required
@check_admin_auth
def get_user_permission(user_name: str):
@ -371,7 +413,140 @@ def get_user_permission(user_name: str):
    except Exception as e:
        return error_response(str(e), 500)

@admin_bp.route("/variables", methods=["PUT"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def set_variable():
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data and "var_name" not in data:
|
||||
return error_response("Var name is required", 400)
|
||||
|
||||
if "var_value" not in data:
|
||||
return error_response("Var value is required", 400)
|
||||
var_name: str = data["var_name"]
|
||||
var_value: str = data["var_value"]
|
||||
|
||||
SettingsMgr.update_by_name(var_name, var_value)
|
||||
return success_response(None, "Set variable successfully")
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/variables", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_variable():
|
||||
try:
|
||||
if request.content_length is None or request.content_length == 0:
|
||||
# list variables
|
||||
res = list(SettingsMgr.get_all())
|
||||
return success_response(res)
|
||||
|
||||
# get var
|
||||
data = request.get_json()
|
||||
if not data and "var_name" not in data:
|
||||
return error_response("Var name is required", 400)
|
||||
var_name: str = data["var_name"]
|
||||
res = SettingsMgr.get_by_name(var_name)
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
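A GET with an empty body lists every variable, while a GET with a JSON body fetches one by name. A hedged sketch of the round trip, under the same assumed base URL and auth header as above:

# Illustrative client sketch; the variable name is made up.
import requests

BASE = "http://localhost:9380/admin"
HEADERS = {"Authorization": "Bearer <token>"}

# Create or update a variable.
requests.put(f"{BASE}/variables", json={"var_name": "smtp.enabled", "var_value": "true"}, headers=HEADERS)

# An empty-body GET lists all variables ...
print(requests.get(f"{BASE}/variables", headers=HEADERS).json())

# ... while a JSON body selects a single one by name.
print(requests.get(f"{BASE}/variables", json={"var_name": "smtp.enabled"}, headers=HEADERS).json())
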
@admin_bp.route("/configs", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_config():
|
||||
try:
|
||||
res = list(ConfigMgr.get_all())
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/environments", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_environments():
|
||||
try:
|
||||
res = list(EnvironmentsMgr.get_all())
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/users/<username>/keys", methods=["POST"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def generate_user_api_key(username: str) -> tuple[Response, int]:
|
||||
try:
|
||||
user_details: list[dict[str, Any]] = UserMgr.get_user_details(username)
|
||||
if not user_details:
|
||||
return error_response("User not found!", 404)
|
||||
tenants: list[dict[str, Any]] = UserServiceMgr.get_user_tenants(username)
|
||||
if not tenants:
|
||||
return error_response("Tenant not found!", 404)
|
||||
tenant_id: str = tenants[0]["tenant_id"]
|
||||
key: str = generate_confirmation_token()
|
||||
obj: dict[str, Any] = {
|
||||
"tenant_id": tenant_id,
|
||||
"token": key,
|
||||
"beta": generate_confirmation_token().replace("ragflow-", "")[:32],
|
||||
"create_time": current_timestamp(),
|
||||
"create_date": datetime_format(datetime.now()),
|
||||
"update_time": None,
|
||||
"update_date": None,
|
||||
}
|
||||
|
||||
if not UserMgr.save_api_key(obj):
|
||||
return error_response("Failed to generate API key!", 500)
|
||||
return success_response(obj, "API key generated successfully")
|
||||
except AdminException as e:
|
||||
return error_response(e.message, e.code)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
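The three key routes form a small lifecycle: POST mints a token against the user's first tenant, GET lists tokens, DELETE revokes one. A sketch of the full round trip under the same assumed base URL and auth:

# Illustrative client sketch; the response shape ("data" wrapper) is an assumption
# about success_response(), not something this diff shows.
import requests

BASE = "http://localhost:9380/admin"
HEADERS = {"Authorization": "Bearer <token>"}
USER = "admin@ragflow.io"

# Mint a key; the response echoes the stored record, including the token.
created = requests.post(f"{BASE}/users/{USER}/keys", headers=HEADERS).json()
token = created["data"]["token"]

# List, then revoke the key we just made.
print(requests.get(f"{BASE}/users/{USER}/keys", headers=HEADERS).json())
requests.delete(f"{BASE}/users/{USER}/keys/{token}", headers=HEADERS)
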
@admin_bp.route("/users/<username>/keys", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_user_api_keys(username: str) -> tuple[Response, int]:
|
||||
try:
|
||||
api_keys: list[dict[str, Any]] = UserMgr.get_user_api_key(username)
|
||||
return success_response(api_keys, "Get user API keys")
|
||||
except AdminException as e:
|
||||
return error_response(e.message, e.code)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/users/<username>/keys/<key>", methods=["DELETE"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def delete_user_api_key(username: str, key: str) -> tuple[Response, int]:
|
||||
try:
|
||||
deleted = UserMgr.delete_api_key(username, key)
|
||||
if deleted:
|
||||
return success_response(None, "API key deleted successfully")
|
||||
else:
|
||||
return error_response("API key not found or could not be deleted", 404)
|
||||
except AdminException as e:
|
||||
return error_response(e.message, e.code)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/version", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def show_version():
|
||||
@ -380,3 +555,100 @@ def show_version():
|
||||
return success_response(res)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/sandbox/providers", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def list_sandbox_providers():
|
||||
"""List all available sandbox providers."""
|
||||
try:
|
||||
res = SandboxMgr.list_providers()
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/sandbox/providers/<provider_id>/schema", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_sandbox_provider_schema(provider_id: str):
|
||||
"""Get configuration schema for a specific provider."""
|
||||
try:
|
||||
res = SandboxMgr.get_provider_config_schema(provider_id)
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/sandbox/config", methods=["GET"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def get_sandbox_config():
|
||||
"""Get current sandbox configuration."""
|
||||
try:
|
||||
res = SandboxMgr.get_config()
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
@admin_bp.route("/sandbox/config", methods=["POST"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def set_sandbox_config():
|
||||
"""Set sandbox provider configuration."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
logging.error("set_sandbox_config: Request body is required")
|
||||
return error_response("Request body is required", 400)
|
||||
|
||||
provider_type = data.get("provider_type")
|
||||
if not provider_type:
|
||||
logging.error("set_sandbox_config: provider_type is required")
|
||||
return error_response("provider_type is required", 400)
|
||||
|
||||
config = data.get("config", {})
|
||||
set_active = data.get("set_active", True) # Default to True for backward compatibility
|
||||
|
||||
logging.info(f"set_sandbox_config: provider_type={provider_type}, set_active={set_active}")
|
||||
logging.info(f"set_sandbox_config: config keys={list(config.keys())}")
|
||||
|
||||
res = SandboxMgr.set_config(provider_type, config, set_active)
|
||||
return success_response(res, "Sandbox configuration updated successfully")
|
||||
except AdminException as e:
|
||||
logging.exception("set_sandbox_config AdminException")
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
logging.exception("set_sandbox_config unexpected error")
|
||||
return error_response(str(e), 500)
|
||||
|
||||
|
||||
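The request body carries the provider id, its config, and an optional `set_active` switch. A plausible payload for the e2b provider follows; the `api_key` and `timeout` field names are illustrative, since the authoritative field set comes back from `/sandbox/providers/e2b/schema`:

# Illustrative client sketch; config field names are assumptions.
import requests

BASE = "http://localhost:9380/admin"
HEADERS = {"Authorization": "Bearer <token>"}

payload = {
    "provider_type": "e2b",
    "config": {
        "api_key": "e2b_...",  # check the provider schema for real field names
        "timeout": 30,
    },
    "set_active": False,  # store the config without switching the active provider
}
print(requests.post(f"{BASE}/sandbox/config", json=payload, headers=HEADERS).json())
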
@admin_bp.route("/sandbox/test", methods=["POST"])
|
||||
@login_required
|
||||
@check_admin_auth
|
||||
def test_sandbox_connection():
|
||||
"""Test connection to sandbox provider."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return error_response("Request body is required", 400)
|
||||
|
||||
provider_type = data.get("provider_type")
|
||||
if not provider_type:
|
||||
return error_response("provider_type is required", 400)
|
||||
|
||||
config = data.get("config", {})
|
||||
res = SandboxMgr.test_connection(provider_type, config)
|
||||
return success_response(res)
|
||||
except AdminException as e:
|
||||
return error_response(str(e), 400)
|
||||
except Exception as e:
|
||||
return error_response(str(e), 500)
|
||||
|
||||
@ -13,15 +13,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

import json
import os
import logging
import re
from typing import Any

from werkzeug.security import check_password_hash
from common.constants import ActiveEnum
from api.db.services import UserService
from api.db.joint_services.user_account_service import create_new_user, delete_user_data
from api.db.services.canvas_service import UserCanvasService
from api.db.services.user_service import TenantService, UserTenantService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.system_settings_service import SystemSettingsService
from api.db.services.api_service import APITokenService
from api.db.db_models import APIToken
from api.utils.crypt import decrypt
from api.utils import health_utils

@ -35,13 +43,15 @@ class UserMgr:
        users = UserService.get_all_users()
        result = []
        for user in users:
            result.append(
                {
                    "email": user.email,
                    "nickname": user.nickname,
                    "create_date": user.create_date,
                    "is_active": user.is_active,
                    "is_superuser": user.is_superuser,
                }
            )
        return result

    @staticmethod
@ -50,19 +60,21 @@ class UserMgr:
        users = UserService.query_user_by_email(username)
        result = []
        for user in users:
            result.append(
                {
                    "avatar": user.avatar,
                    "email": user.email,
                    "language": user.language,
                    "last_login_time": user.last_login_time,
                    "is_active": user.is_active,
                    "is_anonymous": user.is_anonymous,
                    "login_channel": user.login_channel,
                    "status": user.status,
                    "is_superuser": user.is_superuser,
                    "create_date": user.create_date,
                    "update_date": user.update_date,
                }
            )
        return result

    @staticmethod
@ -124,8 +136,8 @@ class UserMgr:
        # format activate_status before handle
        _activate_status = activate_status.lower()
        target_status = {
            "on": ActiveEnum.ACTIVE.value,
            "off": ActiveEnum.INACTIVE.value,
        }.get(_activate_status)
        if not target_status:
            raise AdminException(f"Invalid activate_status: {activate_status}")
@ -135,9 +147,84 @@ class UserMgr:
        UserService.update_user(usr.id, {"is_active": target_status})
        return f"Turn {_activate_status} user activate status successfully!"

    @staticmethod
    def get_user_api_key(username: str) -> list[dict[str, Any]]:
        # use email to find user. check exist and unique.
        user_list: list[Any] = UserService.query_user_by_email(username)
        if not user_list:
            raise UserNotFoundError(username)
        elif len(user_list) > 1:
            raise AdminException(f"More than one user with username '{username}' found!")

        usr: Any = user_list[0]
        # tenant_id is typically the same as user_id for the owner tenant
        tenant_id: str = usr.id

        # Query all API keys for this tenant
        api_keys: Any = APITokenService.query(tenant_id=tenant_id)

        result: list[dict[str, Any]] = []
        for key in api_keys:
            result.append(key.to_dict())

        return result

    @staticmethod
    def save_api_key(api_key: dict[str, Any]) -> bool:
        return APITokenService.save(**api_key)

    @staticmethod
    def delete_api_key(username: str, key: str) -> bool:
        # use email to find user. check exist and unique.
        user_list: list[Any] = UserService.query_user_by_email(username)
        if not user_list:
            raise UserNotFoundError(username)
        elif len(user_list) > 1:
            raise AdminException(f"Exist more than 1 user: {username}!")

        usr: Any = user_list[0]
        # tenant_id is typically the same as user_id for the owner tenant
        tenant_id: str = usr.id

        # Delete the API key
        deleted_count: int = APITokenService.filter_delete([APIToken.tenant_id == tenant_id, APIToken.token == key])
        return deleted_count > 0

    @staticmethod
    def grant_admin(username: str):
        # use email to find user. check exist and unique.
        user_list = UserService.query_user_by_email(username)
        if not user_list:
            raise UserNotFoundError(username)
        elif len(user_list) > 1:
            raise AdminException(f"Exist more than 1 user: {username}!")

        # check superuser status before updating
        usr = user_list[0]
        if usr.is_superuser:
            return f"{usr} is already superuser!"
        # update is_superuser
        UserService.update_user(usr.id, {"is_superuser": True})
        return "Grant successfully!"

    @staticmethod
    def revoke_admin(username: str):
        # use email to find user. check exist and unique.
        user_list = UserService.query_user_by_email(username)
        if not user_list:
            raise UserNotFoundError(username)
        elif len(user_list) > 1:
            raise AdminException(f"Exist more than 1 user: {username}!")
        # check superuser status before updating
        usr = user_list[0]
        if not usr.is_superuser:
            return f"{usr} isn't superuser, yet!"
        # update is_superuser
        UserService.update_user(usr.id, {"is_superuser": False})
        return "Revoke successfully!"


class UserServiceMgr:

    @staticmethod
    def get_user_datasets(username):
        # use email to find user.
@ -167,35 +254,43 @@ class UserServiceMgr:
        tenant_ids = [m["tenant_id"] for m in tenants]
        # filter permitted agents and owned agents
        res = UserCanvasService.get_all_agents_by_tenant_ids(tenant_ids, usr.id)
        return [{"title": r["title"], "permission": r["permission"], "canvas_category": r["canvas_category"].split("_")[0], "avatar": r["avatar"]} for r in res]

    @staticmethod
    def get_user_tenants(email: str) -> list[dict[str, Any]]:
        users: list[Any] = UserService.query_user_by_email(email)
        if not users:
            raise UserNotFoundError(email)
        user: Any = users[0]

        tenants: list[dict[str, Any]] = UserTenantService.get_tenants_by_user_id(user.id)
        return tenants


class ServiceMgr:

    @staticmethod
    def get_all_services():
        doc_engine = os.getenv("DOC_ENGINE", "elasticsearch")
        result = []
        configs = SERVICE_CONFIGS.configs
        for service_id, config in enumerate(configs):
            config_dict = config.to_dict()
            if config_dict["service_type"] == "retrieval":
                if config_dict["extra"]["retrieval_type"] != doc_engine:
                    continue
            try:
                service_detail = ServiceMgr.get_service_details(service_id)
                if "status" in service_detail:
                    config_dict["status"] = service_detail["status"]
                else:
                    config_dict["status"] = "timeout"
            except Exception as e:
                logging.warning(f"Can't get service details, error: {e}")
                config_dict["status"] = "timeout"
            if not config_dict["host"]:
                config_dict["host"] = "-"
            if not config_dict["port"]:
                config_dict["port"] = "-"
            result.append(config_dict)
        return result

@ -211,11 +306,18 @@ class ServiceMgr:
            raise AdminException(f"invalid service_index: {service_idx}")

        service_config = configs[service_idx]
        # exclude retrieval service if retrieval_type is not matched
        doc_engine = os.getenv("DOC_ENGINE", "elasticsearch")
        if service_config.service_type == "retrieval":
            if service_config.retrieval_type != doc_engine:
                raise AdminException(f"invalid service_index: {service_idx}")

        service_info = {"name": service_config.name, "detail_func_name": service_config.detail_func_name}

        detail_func = getattr(health_utils, service_info.get("detail_func_name"))
        res = detail_func()
        res.update({"service_name": service_info.get("name")})
        return res

    @staticmethod
@ -225,3 +327,397 @@ class ServiceMgr:
    @staticmethod
    def restart_service(service_id: int):
        raise AdminException("restart_service: not implemented")


class SettingsMgr:
    @staticmethod
    def get_all():
        settings = SystemSettingsService.get_all()
        result = []
        for setting in settings:
            result.append(
                {
                    "name": setting.name,
                    "source": setting.source,
                    "data_type": setting.data_type,
                    "value": setting.value,
                }
            )
        return result

    @staticmethod
    def get_by_name(name: str):
        settings = SystemSettingsService.get_by_name(name)
        if len(settings) == 0:
            raise AdminException(f"Can't get setting: {name}")
        result = []
        for setting in settings:
            result.append(
                {
                    "name": setting.name,
                    "source": setting.source,
                    "data_type": setting.data_type,
                    "value": setting.value,
                }
            )
        return result

    @staticmethod
    def update_by_name(name: str, value: str):
        settings = SystemSettingsService.get_by_name(name)
        if len(settings) == 1:
            setting = settings[0]
            setting.value = value
            setting_dict = setting.to_dict()
            SystemSettingsService.update_by_name(name, setting_dict)
        elif len(settings) > 1:
            raise AdminException(f"Can't update more than 1 setting: {name}")
        else:
            # Create new setting if it doesn't exist

            # Determine data_type based on name and value
            if name.startswith("sandbox."):
                data_type = "json"
            elif name.endswith(".enabled"):
                data_type = "boolean"
            else:
                data_type = "string"

            new_setting = {
                "name": name,
                "value": str(value),
                "source": "admin",
                "data_type": data_type,
            }
            SystemSettingsService.save(**new_setting)

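The else-branch types first-time settings purely by name. A standalone restatement of that naming rule, useful when deciding what to call a new variable (this mirrors the branch above; it is not an API of the repo):

# Illustrative sketch of the data_type inference in update_by_name().
def infer_data_type(name: str) -> str:
    if name.startswith("sandbox."):   # sandbox configs are stored as JSON blobs
        return "json"
    if name.endswith(".enabled"):     # feature toggles are stored as booleans
        return "boolean"
    return "string"                   # everything else defaults to a string

assert infer_data_type("sandbox.e2b") == "json"
assert infer_data_type("smtp.enabled") == "boolean"
assert infer_data_type("brand.title") == "string"
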
class ConfigMgr:
    @staticmethod
    def get_all():
        result = []
        configs = SERVICE_CONFIGS.configs
        for config in configs:
            config_dict = config.to_dict()
            result.append(config_dict)
        return result


class EnvironmentsMgr:
    @staticmethod
    def get_all():
        result = []

        env_kv = {"env": "DOC_ENGINE", "value": os.getenv("DOC_ENGINE")}
        result.append(env_kv)

        env_kv = {"env": "DEFAULT_SUPERUSER_EMAIL", "value": os.getenv("DEFAULT_SUPERUSER_EMAIL", "admin@ragflow.io")}
        result.append(env_kv)

        env_kv = {"env": "DB_TYPE", "value": os.getenv("DB_TYPE", "mysql")}
        result.append(env_kv)

        env_kv = {"env": "DEVICE", "value": os.getenv("DEVICE", "cpu")}
        result.append(env_kv)

        env_kv = {"env": "STORAGE_IMPL", "value": os.getenv("STORAGE_IMPL", "MINIO")}
        result.append(env_kv)

        return result


class SandboxMgr:
    """Manager for sandbox provider configuration and operations."""

    # Provider registry with metadata
    PROVIDER_REGISTRY = {
        "self_managed": {
            "name": "Self-Managed",
            "description": "On-premise deployment using Daytona/Docker",
            "tags": ["self-hosted", "low-latency", "secure"],
        },
        "aliyun_codeinterpreter": {
            "name": "Aliyun Code Interpreter",
            "description": "Aliyun Function Compute Code Interpreter - Code execution in serverless microVMs",
            "tags": ["saas", "cloud", "scalable", "aliyun"],
        },
        "e2b": {
            "name": "E2B",
            "description": "E2B Cloud - Code Execution Sandboxes",
            "tags": ["saas", "fast", "global"],
        },
    }

    @staticmethod
    def list_providers():
        """List all available sandbox providers."""
        result = []
        for provider_id, metadata in SandboxMgr.PROVIDER_REGISTRY.items():
            result.append({"id": provider_id, **metadata})
        return result

    @staticmethod
    def get_provider_config_schema(provider_id: str):
        """Get configuration schema for a specific provider."""
        from agent.sandbox.providers import (
            SelfManagedProvider,
            AliyunCodeInterpreterProvider,
            E2BProvider,
        )

        schemas = {
            "self_managed": SelfManagedProvider.get_config_schema(),
            "aliyun_codeinterpreter": AliyunCodeInterpreterProvider.get_config_schema(),
            "e2b": E2BProvider.get_config_schema(),
        }

        if provider_id not in schemas:
            raise AdminException(f"Unknown provider: {provider_id}")

        return schemas.get(provider_id, {})

    @staticmethod
    def get_config():
        """Get current sandbox configuration."""
        try:
            # Get active provider type
            provider_type_settings = SystemSettingsService.get_by_name("sandbox.provider_type")
            if not provider_type_settings:
                # Return default config if not set
                provider_type = "self_managed"
            else:
                provider_type = provider_type_settings[0].value

            # Get provider-specific config
            provider_config_settings = SystemSettingsService.get_by_name(f"sandbox.{provider_type}")
            if not provider_config_settings:
                provider_config = {}
            else:
                try:
                    provider_config = json.loads(provider_config_settings[0].value)
                except json.JSONDecodeError:
                    provider_config = {}

            return {
                "provider_type": provider_type,
                "config": provider_config,
            }
        except Exception as e:
            raise AdminException(f"Failed to get sandbox config: {str(e)}")

    @staticmethod
    def set_config(provider_type: str, config: dict, set_active: bool = True):
        """
        Set sandbox provider configuration.

        Args:
            provider_type: Provider identifier (e.g., "self_managed", "e2b")
            config: Provider configuration dictionary
            set_active: If True, also update the active provider. If False,
                only update the configuration without switching providers.
                Default: True

        Returns:
            Dictionary with updated provider_type and config
        """
        from agent.sandbox.providers import (
            SelfManagedProvider,
            AliyunCodeInterpreterProvider,
            E2BProvider,
        )

        try:
            # Validate provider type
            if provider_type not in SandboxMgr.PROVIDER_REGISTRY:
                raise AdminException(f"Unknown provider type: {provider_type}")

            # Get provider schema for validation
            schema = SandboxMgr.get_provider_config_schema(provider_type)

            # Validate config against schema
            for field_name, field_schema in schema.items():
                if field_schema.get("required", False) and field_name not in config:
                    raise AdminException(f"Required field '{field_name}' is missing")

                # Type validation
                if field_name in config:
                    field_type = field_schema.get("type")
                    if field_type == "integer":
                        if not isinstance(config[field_name], int):
                            raise AdminException(f"Field '{field_name}' must be an integer")
                    elif field_type == "string":
                        if not isinstance(config[field_name], str):
                            raise AdminException(f"Field '{field_name}' must be a string")
                    elif field_type == "bool":
                        if not isinstance(config[field_name], bool):
                            raise AdminException(f"Field '{field_name}' must be a boolean")

                    # Range validation for integers
                    if field_type == "integer" and field_name in config:
                        min_val = field_schema.get("min")
                        max_val = field_schema.get("max")
                        if min_val is not None and config[field_name] < min_val:
                            raise AdminException(f"Field '{field_name}' must be >= {min_val}")
                        if max_val is not None and config[field_name] > max_val:
                            raise AdminException(f"Field '{field_name}' must be <= {max_val}")

            # Provider-specific custom validation
            provider_classes = {
                "self_managed": SelfManagedProvider,
                "aliyun_codeinterpreter": AliyunCodeInterpreterProvider,
                "e2b": E2BProvider,
            }
            provider = provider_classes[provider_type]()
            is_valid, error_msg = provider.validate_config(config)
            if not is_valid:
                raise AdminException(f"Provider validation failed: {error_msg}")

            # Update provider_type only if set_active is True
            if set_active:
                SettingsMgr.update_by_name("sandbox.provider_type", provider_type)

            # Always update the provider config
            config_json = json.dumps(config)
            SettingsMgr.update_by_name(f"sandbox.{provider_type}", config_json)

            return {"provider_type": provider_type, "config": config}
        except AdminException:
            raise
        except Exception as e:
            raise AdminException(f"Failed to set sandbox config: {str(e)}")

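The validation loop above is a small hand-rolled schema checker: required, then type, then integer range. A stripped-down standalone version of the same pattern, handy for exercising a schema in isolation (the field name is made up for illustration):

# Illustrative sketch of the required/type/range checks set_config() performs.
def validate(config: dict, schema: dict) -> None:
    type_map = {"integer": int, "string": str, "bool": bool}
    for field, rules in schema.items():
        if rules.get("required") and field not in config:
            raise ValueError(f"Required field '{field}' is missing")
        if field not in config:
            continue
        expected = type_map.get(rules.get("type"))
        if expected and not isinstance(config[field], expected):
            raise ValueError(f"Field '{field}' must be {rules['type']}")
        if rules.get("type") == "integer":
            if rules.get("min") is not None and config[field] < rules["min"]:
                raise ValueError(f"Field '{field}' must be >= {rules['min']}")
            if rules.get("max") is not None and config[field] > rules["max"]:
                raise ValueError(f"Field '{field}' must be <= {rules['max']}")

# Hypothetical schema entry, mirroring the structure the providers return.
validate({"timeout": 30}, {"timeout": {"type": "integer", "required": True, "min": 1, "max": 300}})
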
    @staticmethod
    def test_connection(provider_type: str, config: dict):
        """
        Test connection to sandbox provider by executing a simple Python script.

        This creates a temporary sandbox instance and runs a test code to verify:
        - Connection credentials are valid
        - Sandbox can be created
        - Code execution works correctly

        Args:
            provider_type: Provider identifier
            config: Provider configuration dictionary

        Returns:
            dict with test results including stdout, stderr, exit_code, execution_time
        """
        try:
            from agent.sandbox.providers import (
                SelfManagedProvider,
                AliyunCodeInterpreterProvider,
                E2BProvider,
            )

            # Instantiate provider based on type
            provider_classes = {
                "self_managed": SelfManagedProvider,
                "aliyun_codeinterpreter": AliyunCodeInterpreterProvider,
                "e2b": E2BProvider,
            }

            if provider_type not in provider_classes:
                raise AdminException(f"Unknown provider type: {provider_type}")

            provider = provider_classes[provider_type]()

            # Initialize with config
            if not provider.initialize(config):
                raise AdminException(f"Failed to initialize provider '{provider_type}'")

            # Create a temporary sandbox instance for testing
            instance = provider.create_instance(template="python")

            if not instance or instance.status != "READY":
                raise AdminException(f"Failed to create sandbox instance. Status: {instance.status if instance else 'None'}")

            # Simple test code that exercises basic Python functionality
            test_code = """
# Test basic Python functionality
import sys
import json
import math

print("Python version:", sys.version)
print("Platform:", sys.platform)

# Test basic calculations
result = 2 + 2
print(f"2 + 2 = {result}")

# Test JSON operations
data = {"test": "data", "value": 123}
print(f"JSON dump: {json.dumps(data)}")

# Test math operations
print(f"Math.sqrt(16) = {math.sqrt(16)}")

# Test error handling
try:
    x = 1 / 1
    print("Division test: OK")
except Exception as e:
    print(f"Error: {e}")

# Return success indicator
print("TEST_PASSED")
"""

            # Execute test code with timeout
            execution_result = provider.execute_code(
                instance_id=instance.instance_id,
                code=test_code,
                language="python",
                timeout=10,  # 10 seconds timeout
            )

            # Clean up the test instance (if provider supports it)
            try:
                if hasattr(provider, "terminate_instance"):
                    provider.terminate_instance(instance.instance_id)
                    logging.info(f"Cleaned up test instance {instance.instance_id}")
                else:
                    logging.warning(f"Provider {provider_type} does not support terminate_instance, test instance may leak")
            except Exception as cleanup_error:
                logging.warning(f"Failed to cleanup test instance {instance.instance_id}: {cleanup_error}")

            # Build detailed result message
            success = execution_result.exit_code == 0 and "TEST_PASSED" in execution_result.stdout

            message_parts = [
                f"Test {success and 'PASSED' or 'FAILED'}",
                f"Exit code: {execution_result.exit_code}",
                f"Execution time: {execution_result.execution_time:.2f}s",
            ]

            if execution_result.stdout.strip():
                stdout_preview = execution_result.stdout.strip()[:200]
                message_parts.append(f"Output: {stdout_preview}...")

            if execution_result.stderr.strip():
                stderr_preview = execution_result.stderr.strip()[:200]
                message_parts.append(f"Errors: {stderr_preview}...")

            message = " | ".join(message_parts)

            return {
                "success": success,
                "message": message,
                "details": {
                    "exit_code": execution_result.exit_code,
                    "execution_time": execution_result.execution_time,
                    "stdout": execution_result.stdout,
                    "stderr": execution_result.stderr,
                },
            }

        except AdminException:
            raise
        except Exception as e:
            import traceback
            error_details = traceback.format_exc()
            raise AdminException(f"Connection test failed: {str(e)}\n\nStack trace:\n{error_details}")

@ -13,6 +13,3 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

from beartype.claw import beartype_this_package
beartype_this_package()


agent/canvas.py
@ -13,7 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import base64
import inspect
import binascii
import json
import logging
import re
@ -25,7 +28,10 @@ from typing import Any, Union, Tuple

from agent.component import component_class
from agent.component.base import ComponentBase
from api.db.services.file_service import FileService
from api.db.services.llm_service import LLMBundle
from api.db.services.task_service import has_canceled
from common.constants import LLMType
from common.misc_utils import get_uuid, hash_str2int
from common.exceptions import TaskCanceledException
from rag.prompts.generator import chunks_format
@ -79,14 +85,12 @@ class Graph:
        self.dsl = json.loads(dsl)
        self._tenant_id = tenant_id
        self.task_id = task_id if task_id else get_uuid()
        self._thread_pool = ThreadPoolExecutor(max_workers=5)
        self.load()

    def load(self):
        self.components = self.dsl["components"]
        cpn_nms = set([])
        for k, cpn in self.components.items():
            cpn_nms.add(cpn["obj"]["component_name"])
            param = component_class(cpn["obj"]["component_name"] + "Param")()
@ -156,7 +160,7 @@ class Graph:
        return self._tenant_id

    def get_value_with_variable(self, value: str) -> Any:
        pat = re.compile(r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.-]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*")
        out_parts = []
        last = 0

@ -206,17 +210,28 @@ class Graph:
        for key in path.split('.'):
            if cur is None:
                return None

            if isinstance(cur, str):
                try:
                    cur = json.loads(cur)
                except Exception:
                    return None

            if isinstance(cur, dict):
                cur = cur.get(key)
                continue

            if isinstance(cur, (list, tuple)):
                try:
                    idx = int(key)
                    cur = cur[idx]
                except Exception:
                    return None
                continue

            cur = getattr(cur, key, None)
        return cur

    def set_variable_value(self, exp: str, value):
        exp = exp.strip("{").strip("}").strip(" ").strip("{").strip("}")
        if exp.find("@") < 0:
@ -263,27 +278,37 @@ class Graph:

class Canvas(Graph):

    def __init__(self, dsl: str, tenant_id=None, task_id=None, canvas_id=None):
        self.globals = {
            "sys.query": "",
            "sys.user_id": tenant_id,
            "sys.conversation_turns": 0,
            "sys.files": [],
            "sys.history": []
        }
        self.variables = {}
        super().__init__(dsl, tenant_id, task_id)
        self._id = canvas_id

    def load(self):
        super().load()
        self.history = self.dsl["history"]
        if "globals" in self.dsl:
            self.globals = self.dsl["globals"]
            if "sys.history" not in self.globals:
                self.globals["sys.history"] = []
        else:
            self.globals = {
                "sys.query": "",
                "sys.user_id": "",
                "sys.conversation_turns": 0,
                "sys.files": [],
                "sys.history": []
            }
        if "variables" in self.dsl:
            self.variables = self.dsl["variables"]
        else:
            self.variables = {}

        self.retrieval = self.dsl["retrieval"]
        self.memory = self.dsl.get("memory", [])
@ -300,8 +325,9 @@ class Canvas(Graph):
        self.history = []
        self.retrieval = []
        self.memory = []
        for k in self.globals.keys():
            if k.startswith("sys."):
                if isinstance(self.globals[k], str):
                    self.globals[k] = ""
                elif isinstance(self.globals[k], int):
@ -314,9 +340,33 @@ class Canvas(Graph):
                    self.globals[k] = {}
                else:
                    self.globals[k] = None
            if k.startswith("env."):
                key = k[4:]
                if key in self.variables:
                    variable = self.variables[key]
                    if variable["type"] == "string":
                        self.globals[k] = ""
                        variable["value"] = ""
                    elif variable["type"] == "number":
                        self.globals[k] = 0
                        variable["value"] = 0
                    elif variable["type"] == "boolean":
                        self.globals[k] = False
                        variable["value"] = False
                    elif variable["type"] == "object":
                        self.globals[k] = {}
                        variable["value"] = {}
                    elif variable["type"].startswith("array"):
                        self.globals[k] = []
                        variable["value"] = []
                    else:
                        self.globals[k] = ""
                else:
                    self.globals[k] = ""

    async def run(self, **kwargs):
        st = time.perf_counter()
        self._loop = asyncio.get_running_loop()
        self.message_id = get_uuid()
        created_at = int(time.time())
        self.add_user_input(kwargs.get("query"))
@ -325,14 +375,19 @@ class Canvas(Graph):

        if kwargs.get("webhook_payload"):
            for k, cpn in self.components.items():
                if self.components[k]["obj"].component_name.lower() == "begin" and self.components[k]["obj"]._param.mode == "Webhook":
                    payload = kwargs.get("webhook_payload", {})
                    if "input" in payload:
                        self.components[k]["obj"].set_input_value("request", payload["input"])
                    for kk, vv in payload.items():
                        if kk == "input":
                            continue
                        self.components[k]["obj"].set_output(kk, vv)

        for k in kwargs.keys():
            if k in ["query", "user_id", "files"] and kwargs[k]:
                if k == "files":
                    self.globals[f"sys.{k}"] = await self.get_files_async(kwargs[k])
                else:
                    self.globals[f"sys.{k}"] = kwargs[k]
        if not self.globals["sys.conversation_turns"]:
@ -362,31 +417,50 @@ class Canvas(Graph):
        yield decorate("workflow_started", {"inputs": kwargs.get("inputs")})
        self.retrieval.append({"chunks": {}, "doc_aggs": {}})

        async def _run_batch(f, t):
            if self.is_canceled():
                msg = f"Task {self.task_id} has been canceled during batch execution."
                logging.info(msg)
                raise TaskCanceledException(msg)

            loop = asyncio.get_running_loop()
            tasks = []

            def _run_async_in_thread(coro_func, **call_kwargs):
                return asyncio.run(coro_func(**call_kwargs))

            i = f
            while i < t:
                cpn = self.get_component_obj(self.path[i])
                task_fn = None
                call_kwargs = None

                if cpn.component_name.lower() in ["begin", "userfillup"]:
                    call_kwargs = {"inputs": kwargs.get("inputs", {})}
                    task_fn = cpn.invoke
                    i += 1
                else:
                    for _, ele in cpn.get_input_elements().items():
                        if isinstance(ele, dict) and ele.get("_cpn_id") and ele.get("_cpn_id") not in self.path[:i] and self.path[0].lower().find("userfillup") < 0:
                            self.path.pop(i)
                            t -= 1
                            break
                    else:
                        call_kwargs = cpn.get_input()
                        task_fn = cpn.invoke
                        i += 1

                if task_fn is None:
                    continue

                invoke_async = getattr(cpn, "invoke_async", None)
                if invoke_async and asyncio.iscoroutinefunction(invoke_async):
                    tasks.append(loop.run_in_executor(self._thread_pool, partial(_run_async_in_thread, invoke_async, **(call_kwargs or {}))))
                else:
                    tasks.append(loop.run_in_executor(self._thread_pool, partial(task_fn, **(call_kwargs or {}))))

            if tasks:
                await asyncio.gather(*tasks)
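_run_batch fans a batch of component invocations out onto the canvas thread pool and awaits them together; coroutine-based components get their own short-lived event loop inside the worker thread via asyncio.run. A self-contained sketch of that dispatch pattern (function names here are generic stand-ins, not the canvas API):

# Illustrative sketch of the sync/async fan-out used by _run_batch above.
import asyncio
from concurrent.futures import ThreadPoolExecutor
from functools import partial

pool = ThreadPoolExecutor(max_workers=5)

def sync_job(x):
    return x * 2            # stands in for a blocking component.invoke

async def async_job(x):
    await asyncio.sleep(0)  # stands in for a coroutine component.invoke_async
    return x * 3

def run_async_in_thread(coro_func, **kw):
    # Each worker thread hosts its own short-lived event loop.
    return asyncio.run(coro_func(**kw))

async def run_batch(jobs):
    loop = asyncio.get_running_loop()
    tasks = []
    for fn, kw in jobs:
        if asyncio.iscoroutinefunction(fn):
            tasks.append(loop.run_in_executor(pool, partial(run_async_in_thread, fn, **kw)))
        else:
            tasks.append(loop.run_in_executor(pool, partial(fn, **kw)))
    return await asyncio.gather(*tasks)

print(asyncio.run(run_batch([(sync_job, {"x": 1}), (async_job, {"x": 2})])))  # [2, 6]
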
        def _node_finished(cpn_obj):
            return decorate("node_finished", {
@ -403,6 +477,7 @@ class Canvas(Graph):
        self.error = ""
        idx = len(self.path) - 1
        partials = []
        tts_mdl = None
        while idx < len(self.path):
            to = len(self.path)
            for i in range(idx, to):
@ -413,35 +488,72 @@ class Canvas(Graph):
                    "component_type": self.get_component_type(self.path[i]),
                    "thoughts": self.get_component_thoughts(self.path[i])
                })
            await _run_batch(idx, to)
            to = len(self.path)
            # post-processing of components invocation
            for i in range(idx, to):
                cpn = self.get_component(self.path[i])
                cpn_obj = self.get_component_obj(self.path[i])
                if cpn_obj.component_name.lower() == "message":
                    if cpn_obj.get_param("auto_play"):
                        tts_mdl = LLMBundle(self._tenant_id, LLMType.TTS)
                    if isinstance(cpn_obj.output("content"), partial):
                        _m = ""
                        buff_m = ""
                        stream = cpn_obj.output("content")()

                        async def _process_stream(m):
                            nonlocal buff_m, _m, tts_mdl
                            if not m:
                                return
                            if m == "<think>":
                                return decorate("message", {"content": "", "start_to_think": True})
                            elif m == "</think>":
                                return decorate("message", {"content": "", "end_to_think": True})

                            buff_m += m
                            _m += m

                            if len(buff_m) > 16:
                                ev = decorate(
                                    "message",
                                    {
                                        "content": m,
                                        "audio_binary": self.tts(tts_mdl, buff_m)
                                    }
                                )
                                buff_m = ""
                                return ev

                            return decorate("message", {"content": m})

                        if inspect.isasyncgen(stream):
                            async for m in stream:
                                ev = await _process_stream(m)
                                if ev:
                                    yield ev
                        else:
                            for m in stream:
                                ev = await _process_stream(m)
                                if ev:
                                    yield ev
                        if buff_m:
                            yield decorate("message", {"content": "", "audio_binary": self.tts(tts_mdl, buff_m)})
                            buff_m = ""
                        cpn_obj.set_output("content", _m)
                        cite = re.search(r"\[ID:[ 0-9]+\]", _m)
                    else:
                        yield decorate("message", {"content": cpn_obj.output("content")})
                        cite = re.search(r"\[ID:[ 0-9]+\]", cpn_obj.output("content"))

                    if isinstance(cpn_obj.output("attachment"), tuple):
                        yield decorate("message", {"attachment": cpn_obj.output("attachment")})

                    message_end = {}
                    if cpn_obj.get_param("status"):
                        message_end["status"] = cpn_obj.get_param("status")
                    if isinstance(cpn_obj.output("attachment"), dict):
                        message_end["attachment"] = cpn_obj.output("attachment")
                    if cite:
                        message_end["reference"] = self.get_reference()
                    yield decorate("message_end", message_end)
            while partials:
                _cpn_obj = self.get_component_obj(partials[0])
@ -462,7 +574,7 @@ class Canvas(Graph):
                    else:
                        self.error = cpn_obj.error()

                if cpn_obj.component_name.lower() not in ("iteration", "loop"):
                    if isinstance(cpn_obj.output("content"), partial):
                        if self.error:
                            cpn_obj.set_output("content", None)
@ -487,14 +599,16 @@ class Canvas(Graph):
                    for cpn_id in cpn_ids:
                        _append_path(cpn_id)

                if cpn_obj.component_name.lower() in ("iterationitem", "loopitem") and cpn_obj.end():
                    iter = cpn_obj.get_parent()
                    yield _node_finished(iter)
                    _extend_path(self.get_component(cpn["parent_id"])["downstream"])
                elif cpn_obj.component_name.lower() in ["categorize", "switch"]:
                    _extend_path(cpn_obj.output("_next"))
                elif cpn_obj.component_name.lower() in ("iteration", "loop"):
                    _append_path(cpn_obj.get_start())
                elif cpn_obj.component_name.lower() == "exitloop" and cpn_obj.get_parent().component_name.lower() == "loop":
                    _extend_path(self.get_component(cpn["parent_id"])["downstream"])
                elif not cpn["downstream"] and cpn_obj.get_parent():
                    _append_path(cpn_obj.get_parent().get_start())
                else:
@ -530,6 +644,7 @@ class Canvas(Graph):
                "created_at": st,
            })
            self.history.append(("assistant", self.get_component_obj(self.path[-1]).output()))
            self.globals["sys.history"].append(f"{self.history[-1][0]}: {self.history[-1][1]}")
        elif "Task has been canceled" in self.error:
            yield decorate("workflow_finished",
                           {
@ -550,6 +665,50 @@ class Canvas(Graph):
            return False
        return True

    def tts(self, tts_mdl, text):
        def clean_tts_text(text: str) -> str:
            if not text:
                return ""

            text = text.encode("utf-8", "ignore").decode("utf-8", "ignore")

            text = re.sub(r"[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]", "", text)

            emoji_pattern = re.compile(
                "[\U0001F600-\U0001F64F"
                "\U0001F300-\U0001F5FF"
                "\U0001F680-\U0001F6FF"
                "\U0001F1E0-\U0001F1FF"
                "\U00002700-\U000027BF"
                "\U0001F900-\U0001F9FF"
                "\U0001FA70-\U0001FAFF"
                "\U0001FAD0-\U0001FAFF]+",
                flags=re.UNICODE
            )
            text = emoji_pattern.sub("", text)

            text = re.sub(r"\s+", " ", text).strip()

            MAX_LEN = 500
            if len(text) > MAX_LEN:
                text = text[:MAX_LEN]

            return text

        if not tts_mdl or not text:
            return None
        text = clean_tts_text(text)
        if not text:
            return None
        bin = b""
        try:
            for chunk in tts_mdl.tts(text):
                bin += chunk
        except Exception as e:
            logging.error(f"TTS failed: {e}, text={text!r}")
            return None
        return binascii.hexlify(bin).decode("utf-8")

    def get_history(self, window_size):
        convs = []
        if window_size <= 0:
@ -563,6 +722,7 @@ class Canvas(Graph):

    def add_user_input(self, question):
        self.history.append(("user", question))
        self.globals["sys.history"].append(f"{self.history[-1][0]}: {self.history[-1][1]}")

    def get_prologue(self):
        return self.components["begin"]["obj"]._param.prologue
@ -570,6 +730,9 @@ class Canvas(Graph):
    def get_mode(self):
        return self.components["begin"]["obj"]._param.mode

    def get_sys_query(self):
        return self.globals.get("sys.query", "")

    def set_global_param(self, **kwargs):
        self.globals.update(kwargs)

@ -579,21 +742,30 @@ class Canvas(Graph):
    def get_component_input_elements(self, cpnnm):
        return self.components[cpnnm]["obj"].get_input_elements()

    async def get_files_async(self, files: Union[None, list[dict]]) -> list[str]:
        if not files:
            return []

        def image_to_base64(file):
            return "data:{};base64,{}".format(file["mime_type"],
                                              base64.b64encode(FileService.get_blob(file["created_by"], file["id"])).decode("utf-8"))

        loop = asyncio.get_running_loop()
        tasks = []
        for file in files:
            if file["mime_type"].find("image") >= 0:
                tasks.append(loop.run_in_executor(self._thread_pool, image_to_base64, file))
                continue
            tasks.append(loop.run_in_executor(self._thread_pool, FileService.parse, file["name"], FileService.get_blob(file["created_by"], file["id"]), True, file["created_by"]))
        return await asyncio.gather(*tasks)

    def get_files(self, files: Union[None, list[dict]]) -> list[str]:
        """
        Synchronous wrapper for get_files_async, used by sync component invoke paths.
        """
        loop = getattr(self, "_loop", None)
        if loop and loop.is_running():
            return asyncio.run_coroutine_threadsafe(self.get_files_async(files), loop).result()

        return asyncio.run(self.get_files_async(files))
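The wrapper above picks between two bridges: if the canvas loop is already running (the caller is a worker thread), it schedules the coroutine onto that loop with run_coroutine_threadsafe; otherwise it spins up a private loop with asyncio.run. A generic restatement of the pattern (names are illustrative):

# Illustrative sketch of the sync-over-async bridge used by get_files().
import asyncio

async def fetch(x):
    await asyncio.sleep(0)
    return x

def fetch_sync(x, loop=None):
    # Called from a worker thread while `loop` runs elsewhere: hand the
    # coroutine to that loop and block on the returned Future.
    if loop and loop.is_running():
        return asyncio.run_coroutine_threadsafe(fetch(x), loop).result()
    # No loop running in this thread: a private event loop is fine.
    return asyncio.run(fetch(x))

print(fetch_sync(42))  # 42
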
    def tool_use_callback(self, agent_id: str, func_name: str, params: dict, result: Any, elapsed_time=None):
        agent_ids = agent_id.split("-->")
@ -646,5 +818,4 @@ class Canvas(Graph):
        return self.memory

    def get_component_thoughts(self, cpn_id) -> str:
        return self.components.get(cpn_id)["obj"].thoughts()
@ -13,10 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import json
import logging
import os
import re
from concurrent.futures import ThreadPoolExecutor
from copy import deepcopy
from functools import partial
from typing import Any
@ -28,8 +29,8 @@ from api.db.services.llm_service import LLMBundle
from api.db.services.tenant_llm_service import TenantLLMService
from api.db.services.mcp_server_service import MCPServerService
from common.connection_utils import timeout
from rag.prompts.generator import next_step_async, COMPLETE_TASK, \
    citation_prompt, kb_prompt, citation_plus, full_question, message_fit_in, structured_output_prompt
from common.mcp_tool_call_conn import MCPToolCallSession, mcp_tool_metadata_to_openai_tool
from agent.component.llm import LLMParam, LLM

@ -83,9 +84,11 @@ class Agent(LLM, ToolBase):
    def __init__(self, canvas, id, param: LLMParam):
        LLM.__init__(self, canvas, id, param)
        self.tools = {}
        for idx, cpn in enumerate(self._param.tools):
            cpn = self._load_tool_obj(cpn)
            original_name = cpn.get_meta()["function"]["name"]
            indexed_name = f"{original_name}_{idx}"
            self.tools[indexed_name] = cpn

        self.chat_mdl = LLMBundle(self._canvas.get_tenant_id(), TenantLLMService.llm_id2llm_type(self._param.llm_id), self._param.llm_id,
                                  max_retries=self._param.max_retries,
@ -93,7 +96,12 @@ class Agent(LLM, ToolBase):
                                  max_rounds=self._param.max_rounds,
                                  verbose_tool_use=True
                                  )
        self.tool_meta = []
        for indexed_name, tool_obj in self.tools.items():
            original_meta = tool_obj.get_meta()
            indexed_meta = deepcopy(original_meta)
            indexed_meta["function"]["name"] = indexed_name
            self.tool_meta.append(indexed_meta)

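Suffixing each tool name with its position keeps two tools of the same component type from clobbering each other in the registry, and the metadata is deep-copied so the rename does not leak into the shared definition. A toy illustration of the collision the suffix avoids:

# Illustrative sketch, not repo code: why indexed tool names are needed.
from copy import deepcopy

metas = [{"function": {"name": "search"}}, {"function": {"name": "search"}}]

# Keyed by bare name, the second "search" silently replaces the first.
flat = {m["function"]["name"]: m for m in metas}
assert len(flat) == 1

# Keyed by name_{idx}, both survive; deepcopy keeps the originals intact.
indexed = {}
for idx, m in enumerate(metas):
    m2 = deepcopy(m)
    m2["function"]["name"] = f"{m['function']['name']}_{idx}"
    indexed[m2["function"]["name"]] = m2
assert sorted(indexed) == ["search_0", "search_1"]
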
        for mcp in self._param.mcp:
            _, mcp_server = MCPServerService.get_by_id(mcp["mcp_id"])
@ -107,7 +115,8 @@ class Agent(LLM, ToolBase):

    def _load_tool_obj(self, cpn: dict) -> object:
        from agent.component import component_class
        tool_name = cpn["component_name"]
        param = component_class(tool_name + "Param")()
        param.update(cpn["params"])
        try:
            param.check()
@ -137,8 +146,34 @@ class Agent(LLM, ToolBase):
            res.update(cpn.get_input_form())
        return res

    def _get_output_schema(self):
        try:
            cand = self._param.outputs.get("structured")
        except Exception:
            return None

        if isinstance(cand, dict):
            if isinstance(cand.get("properties"), dict) and len(cand["properties"]) > 0:
                return cand
            for k in ("schema", "structured"):
                if isinstance(cand.get(k), dict) and isinstance(cand[k].get("properties"), dict) and len(cand[k]["properties"]) > 0:
                    return cand[k]

        return None

    async def _force_format_to_schema_async(self, text: str, schema_prompt: str) -> str:
        fmt_msgs = [
            {"role": "system", "content": schema_prompt + "\nIMPORTANT: Output ONLY valid JSON. No markdown, no extra text."},
            {"role": "user", "content": text},
        ]
        _, fmt_msgs = message_fit_in(fmt_msgs, int(self.chat_mdl.max_length * 0.97))
        return await self._generate_async(fmt_msgs)

    def _invoke(self, **kwargs):
        return asyncio.run(self._invoke_async(**kwargs))

    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 20*60)))
    async def _invoke_async(self, **kwargs):
        if self.check_if_canceled("Agent processing"):
            return

@ -157,20 +192,25 @@ class Agent(LLM, ToolBase):
        if not self.tools:
            if self.check_if_canceled("Agent processing"):
                return
            return await LLM._invoke_async(self, **kwargs)

        prompt, msg, user_defined_prompt = self._prepare_prompt_variables()
        output_schema = self._get_output_schema()
        schema_prompt = ""
        if output_schema:
            schema = json.dumps(output_schema, ensure_ascii=False, indent=2)
            schema_prompt = structured_output_prompt(schema)

        downstreams = self._canvas.get_component(self._id)["downstream"] if self._canvas.get_component(self._id) else []
        ex = self.exception_handler()
        if any([self._canvas.get_component_obj(cid).component_name.lower() == "message" for cid in downstreams]) and not (ex and ex["goto"]) and not output_schema:
            self.set_output("content", partial(self.stream_output_with_tools_async, prompt, deepcopy(msg), user_defined_prompt))
            return

        _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
        use_tools = []
        ans = ""
        async for delta_ans, _tk in self._react_with_tools_streamly_async_simple(prompt, msg, use_tools, user_defined_prompt, schema_prompt=schema_prompt):
            if self.check_if_canceled("Agent processing"):
                return
            ans += delta_ans
@ -183,16 +223,38 @@ class Agent(LLM, ToolBase):
|
||||
self.set_output("_ERROR", ans)
|
||||
return
|
||||
|
||||
if output_schema:
|
||||
error = ""
|
||||
for _ in range(self._param.max_retries + 1):
|
||||
try:
|
||||
def clean_formated_answer(ans: str) -> str:
|
||||
ans = re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)
|
||||
ans = re.sub(r"^.*```json", "", ans, flags=re.DOTALL)
|
||||
return re.sub(r"```\n*$", "", ans, flags=re.DOTALL)
|
||||
obj = json_repair.loads(clean_formated_answer(ans))
|
||||
self.set_output("structured", obj)
|
||||
if use_tools:
|
||||
self.set_output("use_tools", use_tools)
|
||||
return obj
|
||||
except Exception:
|
||||
error = "The answer cannot be parsed as JSON"
|
||||
ans = await self._force_format_to_schema_async(ans, schema_prompt)
|
||||
if ans.find("**ERROR**") >= 0:
|
||||
continue
|
||||
|
||||
self.set_output("_ERROR", error)
|
||||
return
|
||||
|
||||
self.set_output("content", ans)
|
||||
if use_tools:
|
||||
self.set_output("use_tools", use_tools)
|
||||
return ans
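# A quick illustration of what the clean_formated_answer helper above strips
# before JSON repair, using a made-up model reply:
import re

def clean_formated_answer(ans: str) -> str:
    ans = re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)
    ans = re.sub(r"^.*```json", "", ans, flags=re.DOTALL)
    return re.sub(r"```\n*$", "", ans, flags=re.DOTALL)

raw = "<think>plan...</think>```json\n{\"title\": \"Q3 report\"}\n```"
assert clean_formated_answer(raw).strip() == '{"title": "Q3 report"}'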

-    def stream_output_with_tools(self, prompt, msg, user_defined_prompt={}):
+    async def stream_output_with_tools_async(self, prompt, msg, user_defined_prompt={}):
         _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
         answer_without_toolcall = ""
         use_tools = []
-        for delta_ans,_ in self._react_with_tools_streamly(prompt, msg, use_tools, user_defined_prompt):
+        async for delta_ans, _ in self._react_with_tools_streamly_async_simple(prompt, msg, use_tools, user_defined_prompt):
             if self.check_if_canceled("Agent streaming"):
                 return

@@ -210,55 +272,58 @@ class Agent(LLM, ToolBase):
         if use_tools:
             self.set_output("use_tools", use_tools)

-    def _gen_citations(self, text):
-        retrievals = self._canvas.get_reference()
-        retrievals = {"chunks": list(retrievals["chunks"].values()), "doc_aggs": list(retrievals["doc_aggs"].values())}
-        formated_refer = kb_prompt(retrievals, self.chat_mdl.max_length, True)
-        for delta_ans in self._generate_streamly([{"role": "system", "content": citation_plus("\n\n".join(formated_refer))},
-                                                  {"role": "user", "content": text}
-                                                  ]):
-            yield delta_ans
-
-    def _react_with_tools_streamly(self, prompt, history: list[dict], use_tools, user_defined_prompt={}):
+    async def _react_with_tools_streamly_async_simple(self, prompt, history: list[dict], use_tools, user_defined_prompt={}, schema_prompt: str = ""):
         token_count = 0
         tool_metas = self.tool_meta
         hist = deepcopy(history)
         last_calling = ""
         if len(hist) > 3:
             st = timer()
-            user_request = full_question(messages=history, chat_mdl=self.chat_mdl)
+            user_request = await full_question(messages=history, chat_mdl=self.chat_mdl)
             self.callback("Multi-turn conversation optimization", {}, user_request, elapsed_time=timer()-st)
         else:
             user_request = history[-1]["content"]

-        def use_tool(name, args):
-            nonlocal hist, use_tools, token_count,last_calling,user_request
+        def build_task_desc(prompt: str, user_request: str, user_defined_prompt: dict | None = None) -> str:
+            """Build a minimal task_desc by concatenating prompt, query, and tool schemas."""
+            user_defined_prompt = user_defined_prompt or {}
+
+            task_desc = (
+                "### Agent Prompt\n"
+                f"{prompt}\n\n"
+                "### User Request\n"
+                f"{user_request}\n\n"
+            )
+
+            if user_defined_prompt:
+                udp_json = json.dumps(user_defined_prompt, ensure_ascii=False, indent=2)
+                task_desc += "\n### User Defined Prompts\n" + udp_json + "\n"
+
+            return task_desc
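# For a concrete feel of the helper above, with a hypothetical prompt and
# request (both invented for illustration):
prompt = "You are a research assistant."
user_request = "Summarize the Q3 sales numbers."

task_desc = (
    "### Agent Prompt\n"
    f"{prompt}\n\n"
    "### User Request\n"
    f"{user_request}\n\n"
)
print(task_desc)
# ### Agent Prompt
# You are a research assistant.
#
# ### User Request
# Summarize the Q3 sales numbers.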


+        async def use_tool_async(name, args):
+            nonlocal hist, use_tools, last_calling
             logging.info(f"{last_calling=} == {name=}")
             # Summarize of function calling
             #if all([
             #    isinstance(self.toolcall_session.get_tool_obj(name), Agent),
             #    last_calling,
             #    last_calling != name
             #]):
             #    self.toolcall_session.get_tool_obj(name).add2system_prompt(f"The chat history with other agents are as following: \n" + self.get_useful_memory(user_request, str(args["user_prompt"]),user_defined_prompt))
             last_calling = name
-            tool_response = self.toolcall_session.tool_call(name, args)
+            tool_response = await self.toolcall_session.tool_call_async(name, args)
             use_tools.append({
                 "name": name,
                 "arguments": args,
                 "results": tool_response
             })
             # self.callback("add_memory", {}, "...")
             #self.add_memory(hist[-2]["content"], hist[-1]["content"], name, args, str(tool_response), user_defined_prompt)

             return name, tool_response

-        def complete():
+        async def complete():
             nonlocal hist
             need2cite = self._param.cite and self._canvas.get_reference()["chunks"] and self._id.find("-->") < 0
+            if schema_prompt:
+                need2cite = False
             cited = False
-            if hist[0]["role"] == "system" and need2cite:
-                if len(hist) < 7:
+            if hist and hist[0]["role"] == "system":
+                if schema_prompt:
+                    hist[0]["content"] += "\n" + schema_prompt
+                if need2cite and len(hist) < 7:
                     hist[0]["content"] += citation_prompt()
                     cited = True
             yield "", token_count
@@ -267,7 +332,7 @@ class Agent(LLM, ToolBase):
             if len(hist) > 12:
                 _hist = [hist[0], hist[1], *hist[-10:]]
             entire_txt = ""
-            for delta_ans in self._generate_streamly(_hist):
+            async for delta_ans in self._generate_streamly(_hist):
                 if not need2cite or cited:
                     yield delta_ans, 0
                 entire_txt += delta_ans
@@ -276,7 +341,7 @@ class Agent(LLM, ToolBase):

             st = timer()
             txt = ""
-            for delta_ans in self._gen_citations(entire_txt):
+            async for delta_ans in self._gen_citations_async(entire_txt):
                 if self.check_if_canceled("Agent streaming"):
                     return
                 yield delta_ans, 0
@@ -284,6 +349,21 @@ class Agent(LLM, ToolBase):

             self.callback("gen_citations", {}, txt, elapsed_time=timer()-st)

+        def build_observation(tool_call_res: list[tuple]) -> str:
+            """
+            Build an Observation from tool call results.
+            No LLM involved.
+            """
+            if not tool_call_res:
+                return ""
+
+            lines = ["Observation:"]
+            for name, result in tool_call_res:
+                lines.append(f"[{name} result]")
+                lines.append(str(result))
+
+            return "\n".join(lines)

         def append_user_content(hist, content):
             if hist[-1]["role"] == "user":
                 hist[-1]["content"] += content
@@ -291,14 +371,14 @@
                 hist.append({"role": "user", "content": content})

         st = timer()
-        task_desc = analyze_task(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
+        task_desc = build_task_desc(prompt, user_request, user_defined_prompt)
         self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
         for _ in range(self._param.max_rounds + 1):
             if self.check_if_canceled("Agent streaming"):
                 return
-            response, tk = next_step(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
+            response, tk = await next_step_async(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
             # self.callback("next_step", {}, str(response)[:256]+"...")
-            token_count += tk
+            token_count += tk or 0
             hist.append({"role": "assistant", "content": response})
             try:
                 functions = json_repair.loads(re.sub(r"```.*", "", response))
@@ -307,23 +387,24 @@ class Agent(LLM, ToolBase):
                 for f in functions:
                     if not isinstance(f, dict):
                         raise TypeError(f"An object type should be returned, but `{f}`")
-                with ThreadPoolExecutor(max_workers=5) as executor:
-                    thr = []
-                    for func in functions:
-                        name = func["name"]
-                        args = func["arguments"]
-                        if name == COMPLETE_TASK:
-                            append_user_content(hist, f"Respond with a formal answer. FORGET(DO NOT mention) about `{COMPLETE_TASK}`. The language for the response MUST be as the same as the first user request.\n")
-                            for txt, tkcnt in complete():
-                                yield txt, tkcnt
-                            return
-
-                        thr.append(executor.submit(use_tool, name, args))
+                tool_tasks = []
+                for func in functions:
+                    name = func["name"]
+                    args = func["arguments"]
+                    if name == COMPLETE_TASK:
+                        append_user_content(hist, f"Respond with a formal answer. FORGET(DO NOT mention) about `{COMPLETE_TASK}`. The language for the response MUST be as the same as the first user request.\n")
+                        async for txt, tkcnt in complete():
+                            yield txt, tkcnt
+                        return

-                    st = timer()
-                    reflection = reflect(self.chat_mdl, hist, [th.result() for th in thr], user_defined_prompt)
-                    append_user_content(hist, reflection)
-                    self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)
+                    tool_tasks.append(asyncio.create_task(use_tool_async(name, args)))
+
+                results = await asyncio.gather(*tool_tasks) if tool_tasks else []
+                st = timer()
+                reflection = build_observation(results)
+                append_user_content(hist, reflection)
+                self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)

             except Exception as e:
                 logging.exception(msg=f"Wrong JSON argument format in LLM ReAct response: {e}")
@@ -347,21 +428,146 @@ Respond immediately with your final comprehensive answer.
             return
         append_user_content(hist, final_instruction)

-        for txt, tkcnt in complete():
+        async for txt, tkcnt in complete():
             yield txt, tkcnt

     def get_useful_memory(self, goal: str, sub_goal:str, topn=3, user_defined_prompt:dict={}) -> str:
         # self.callback("get_useful_memory", {"topn": 3}, "...")
         mems = self._canvas.get_memory()
         rank = rank_memories(self.chat_mdl, goal, sub_goal, [summ for (user, assist, summ) in mems], user_defined_prompt)
         try:
             rank = json_repair.loads(re.sub(r"```.*", "", rank))[:topn]
             mems = [mems[r] for r in rank]
             return "\n\n".join([f"User: {u}\nAgent: {a}" for u, a,_ in mems])
         except Exception as e:
             logging.exception(e)
+    # async def _react_with_tools_streamly_async(self, prompt, history: list[dict], use_tools, user_defined_prompt={}, schema_prompt: str = ""):
+    #     token_count = 0
+    #     tool_metas = self.tool_meta
+    #     hist = deepcopy(history)
+    #     last_calling = ""
+    #     if len(hist) > 3:
+    #         st = timer()
+    #         user_request = await full_question(messages=history, chat_mdl=self.chat_mdl)
+    #         self.callback("Multi-turn conversation optimization", {}, user_request, elapsed_time=timer()-st)
+    #     else:
+    #         user_request = history[-1]["content"]

             return "Error occurred."
+    # async def use_tool_async(name, args):
+    #     nonlocal hist, use_tools, last_calling
+    #     logging.info(f"{last_calling=} == {name=}")
+    #     last_calling = name
+    #     tool_response = await self.toolcall_session.tool_call_async(name, args)
+    #     use_tools.append({
+    #         "name": name,
+    #         "arguments": args,
+    #         "results": tool_response
+    #     })
+    #     # self.callback("add_memory", {}, "...")
+    #     #self.add_memory(hist[-2]["content"], hist[-1]["content"], name, args, str(tool_response), user_defined_prompt)

+    #     return name, tool_response

+    # async def complete():
+    #     nonlocal hist
+    #     need2cite = self._param.cite and self._canvas.get_reference()["chunks"] and self._id.find("-->") < 0
+    #     if schema_prompt:
+    #         need2cite = False
+    #     cited = False
+    #     if hist and hist[0]["role"] == "system":
+    #         if schema_prompt:
+    #             hist[0]["content"] += "\n" + schema_prompt
+    #         if need2cite and len(hist) < 7:
+    #             hist[0]["content"] += citation_prompt()
+    #             cited = True
+    #     yield "", token_count

+    #     _hist = hist
+    #     if len(hist) > 12:
+    #         _hist = [hist[0], hist[1], *hist[-10:]]
+    #     entire_txt = ""
+    #     async for delta_ans in self._generate_streamly(_hist):
+    #         if not need2cite or cited:
+    #             yield delta_ans, 0
+    #         entire_txt += delta_ans
+    #     if not need2cite or cited:
+    #         return

+    #     st = timer()
+    #     txt = ""
+    #     async for delta_ans in self._gen_citations_async(entire_txt):
+    #         if self.check_if_canceled("Agent streaming"):
+    #             return
+    #         yield delta_ans, 0
+    #         txt += delta_ans

+    #     self.callback("gen_citations", {}, txt, elapsed_time=timer()-st)

+    # def append_user_content(hist, content):
+    #     if hist[-1]["role"] == "user":
+    #         hist[-1]["content"] += content
+    #     else:
+    #         hist.append({"role": "user", "content": content})

+    # st = timer()
+    # task_desc = await analyze_task_async(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
+    # self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
+    # for _ in range(self._param.max_rounds + 1):
+    #     if self.check_if_canceled("Agent streaming"):
+    #         return
+    #     response, tk = await next_step_async(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
+    #     # self.callback("next_step", {}, str(response)[:256]+"...")
+    #     token_count += tk or 0
+    #     hist.append({"role": "assistant", "content": response})
+    #     try:
+    #         functions = json_repair.loads(re.sub(r"```.*", "", response))
+    #         if not isinstance(functions, list):
+    #             raise TypeError(f"List should be returned, but `{functions}`")
+    #         for f in functions:
+    #             if not isinstance(f, dict):
+    #                 raise TypeError(f"An object type should be returned, but `{f}`")

+    #         tool_tasks = []
+    #         for func in functions:
+    #             name = func["name"]
+    #             args = func["arguments"]
+    #             if name == COMPLETE_TASK:
+    #                 append_user_content(hist, f"Respond with a formal answer. FORGET(DO NOT mention) about `{COMPLETE_TASK}`. The language for the response MUST be as the same as the first user request.\n")
+    #                 async for txt, tkcnt in complete():
+    #                     yield txt, tkcnt
+    #                 return

+    #             tool_tasks.append(asyncio.create_task(use_tool_async(name, args)))

+    #         results = await asyncio.gather(*tool_tasks) if tool_tasks else []
+    #         st = timer()
+    #         reflection = await reflect_async(self.chat_mdl, hist, results, user_defined_prompt)
+    #         append_user_content(hist, reflection)
+    #         self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)

+    #     except Exception as e:
+    #         logging.exception(msg=f"Wrong JSON argument format in LLM ReAct response: {e}")
+    #         e = f"\nTool call error, please correct the input parameter of response format and call it again.\n *** Exception ***\n{e}"
+    #         append_user_content(hist, str(e))

+    # logging.warning( f"Exceed max rounds: {self._param.max_rounds}")
+    # final_instruction = f"""
+    # {user_request}
+    # IMPORTANT: You have reached the conversation limit. Based on ALL the information and research you have gathered so far, please provide a DIRECT and COMPREHENSIVE final answer to the original request.
+    # Instructions:
+    # 1. SYNTHESIZE all information collected during this conversation
+    # 2. Provide a COMPLETE response using existing data - do not suggest additional research
+    # 3. Structure your response as a FINAL DELIVERABLE, not a plan
+    # 4. If information is incomplete, state what you found and provide the best analysis possible with available data
+    # 5. DO NOT mention conversation limits or suggest further steps
+    # 6. Focus on delivering VALUE with the information already gathered
+    # Respond immediately with your final comprehensive answer.
+    # """
+    # if self.check_if_canceled("Agent final instruction"):
+    #     return
+    # append_user_content(hist, final_instruction)

+    # async for txt, tkcnt in complete():
+    #     yield txt, tkcnt

+    async def _gen_citations_async(self, text):
+        retrievals = self._canvas.get_reference()
+        retrievals = {"chunks": list(retrievals["chunks"].values()), "doc_aggs": list(retrievals["doc_aggs"].values())}
+        formated_refer = kb_prompt(retrievals, self.chat_mdl.max_length, True)
+        async for delta_ans in self._generate_streamly([{"role": "system", "content": citation_plus("\n\n".join(formated_refer))},
+                                                        {"role": "user", "content": text}
+                                                        ]):
+            yield delta_ans

     def reset(self, only_output=False):
         """
@@ -369,7 +575,7 @@ Respond immediately with your final comprehensive answer.
         """
         for k in self._param.outputs.keys():
             self._param.outputs[k]["value"] = None

         for k, cpn in self.tools.items():
             if hasattr(cpn, "reset") and callable(cpn.reset):
                 cpn.reset()
@@ -378,4 +584,3 @@ Respond immediately with your final comprehensive answer.
         for k in self._param.inputs.keys():
             self._param.inputs[k]["value"] = None
         self._param.debug_inputs = {}
-
@@ -14,6 +14,7 @@
 # limitations under the License.
 #

+import asyncio
 import re
 import time
 from abc import ABC
@@ -23,11 +24,13 @@ import os
 import logging
 from typing import Any, List, Union
 import pandas as pd
-import trio
 from agent import settings
 from common.connection_utils import timeout


+from common.misc_utils import thread_pool_exec

 _FEEDED_DEPRECATED_PARAMS = "_feeded_deprecated_params"
 _DEPRECATED_PARAMS = "_deprecated_params"
 _USER_FEEDED_PARAMS = "_user_feeded_params"
@@ -97,7 +100,7 @@ class ComponentParamBase(ABC):
     def _recursive_convert_obj_to_dict(obj):
         ret_dict = {}
         if isinstance(obj, dict):
-            for k,v in obj.items():
+            for k, v in obj.items():
                 if isinstance(v, dict) or (v and type(v).__name__ not in dir(builtins)):
                     ret_dict[k] = _recursive_convert_obj_to_dict(v)
                 else:
@@ -253,96 +256,65 @@ class ComponentParamBase(ABC):
         self._validate_param(attr, validation_json)

     @staticmethod
-    def check_string(param, descr):
+    def check_string(param, description):
         if type(param).__name__ not in ["str"]:
-            raise ValueError(
-                descr + " {} not supported, should be string type".format(param)
-            )
+            raise ValueError(description + " {} not supported, should be string type".format(param))

     @staticmethod
-    def check_empty(param, descr):
+    def check_empty(param, description):
         if not param:
-            raise ValueError(
-                descr + " does not support empty value."
-            )
+            raise ValueError(description + " does not support empty value.")

     @staticmethod
-    def check_positive_integer(param, descr):
+    def check_positive_integer(param, description):
         if type(param).__name__ not in ["int", "long"] or param <= 0:
-            raise ValueError(
-                descr + " {} not supported, should be positive integer".format(param)
-            )
+            raise ValueError(description + " {} not supported, should be positive integer".format(param))

     @staticmethod
-    def check_positive_number(param, descr):
+    def check_positive_number(param, description):
         if type(param).__name__ not in ["float", "int", "long"] or param <= 0:
-            raise ValueError(
-                descr + " {} not supported, should be positive numeric".format(param)
-            )
+            raise ValueError(description + " {} not supported, should be positive numeric".format(param))

     @staticmethod
-    def check_nonnegative_number(param, descr):
+    def check_nonnegative_number(param, description):
         if type(param).__name__ not in ["float", "int", "long"] or param < 0:
-            raise ValueError(
-                descr
-                + " {} not supported, should be non-negative numeric".format(param)
-            )
+            raise ValueError(description + " {} not supported, should be non-negative numeric".format(param))

     @staticmethod
-    def check_decimal_float(param, descr):
+    def check_decimal_float(param, description):
         if type(param).__name__ not in ["float", "int"] or param < 0 or param > 1:
-            raise ValueError(
-                descr
-                + " {} not supported, should be a float number in range [0, 1]".format(
-                    param
-                )
-            )
+            raise ValueError(description + " {} not supported, should be a float number in range [0, 1]".format(param))

     @staticmethod
-    def check_boolean(param, descr):
+    def check_boolean(param, description):
         if type(param).__name__ != "bool":
-            raise ValueError(
-                descr + " {} not supported, should be bool type".format(param)
-            )
+            raise ValueError(description + " {} not supported, should be bool type".format(param))

     @staticmethod
-    def check_open_unit_interval(param, descr):
+    def check_open_unit_interval(param, description):
         if type(param).__name__ not in ["float"] or param <= 0 or param >= 1:
-            raise ValueError(
-                descr + " should be a numeric number between 0 and 1 exclusively"
-            )
+            raise ValueError(description + " should be a numeric number between 0 and 1 exclusively")

     @staticmethod
-    def check_valid_value(param, descr, valid_values):
+    def check_valid_value(param, description, valid_values):
         if param not in valid_values:
-            raise ValueError(
-                descr
-                + " {} is not supported, it should be in {}".format(param, valid_values)
-            )
+            raise ValueError(description + " {} is not supported, it should be in {}".format(param, valid_values))

     @staticmethod
-    def check_defined_type(param, descr, types):
+    def check_defined_type(param, description, types):
         if type(param).__name__ not in types:
-            raise ValueError(
-                descr + " {} not supported, should be one of {}".format(param, types)
-            )
+            raise ValueError(description + " {} not supported, should be one of {}".format(param, types))

     @staticmethod
-    def check_and_change_lower(param, valid_list, descr=""):
+    def check_and_change_lower(param, valid_list, description=""):
         if type(param).__name__ != "str":
-            raise ValueError(
-                descr
-                + " {} not supported, should be one of {}".format(param, valid_list)
-            )
+            raise ValueError(description + " {} not supported, should be one of {}".format(param, valid_list))

         lower_param = param.lower()
         if lower_param in valid_list:
             return lower_param
         else:
-            raise ValueError(
-                descr
-                + " {} not supported, should be one of {}".format(param, valid_list)
-            )
+            raise ValueError(description + " {} not supported, should be one of {}".format(param, valid_list))
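# A short usage sketch of the renamed validators; values are arbitrary, and the
# import path assumes the repo layout shown in this diff:
from agent.component.base import ComponentParamBase

ComponentParamBase.check_positive_integer(4, "[Agent] Max rounds")   # passes silently
ComponentParamBase.check_decimal_float(0.3, "[Agent] Top P")         # passes silently
assert ComponentParamBase.check_and_change_lower("Task", ["task"], "[Begin] Mode") == "task"

try:
    ComponentParamBase.check_boolean("yes", "[Agent] Cite")
except ValueError as e:
    print(e)  # [Agent] Cite yes not supported, should be bool type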

     @staticmethod
     def _greater_equal_than(value, limit):
@@ -374,16 +346,16 @@ class ComponentParamBase(ABC):
     def _not_in(value, wrong_value_list):
         return value not in wrong_value_list

-    def _warn_deprecated_param(self, param_name, descr):
+    def _warn_deprecated_param(self, param_name, description):
         if self._deprecated_params_set.get(param_name):
             logging.warning(
-                f"{descr} {param_name} is deprecated and ignored in this version."
+                f"{description} {param_name} is deprecated and ignored in this version."
             )

-    def _warn_to_deprecate_param(self, param_name, descr, new_param):
+    def _warn_to_deprecate_param(self, param_name, description, new_param):
         if self._deprecated_params_set.get(param_name):
             logging.warning(
-                f"{descr} {param_name} will be deprecated in future release; "
+                f"{description} {param_name} will be deprecated in future release; "
                 f"please use {new_param} instead."
             )
             return True
@@ -392,8 +364,8 @@ class ComponentParamBase(ABC):

 class ComponentBase(ABC):
     component_name: str
-    thread_limiter = trio.CapacityLimiter(int(os.environ.get('MAX_CONCURRENT_CHATS', 10)))
-    variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"
+    thread_limiter = asyncio.Semaphore(int(os.environ.get("MAX_CONCURRENT_CHATS", 10)))
+    variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.-]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"
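# The widened character class now admits hyphens in the variable-name part of a
# component reference. A quick check with the pattern copied from above (the
# sample references are invented):
import re

patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.-]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"
assert re.search(patt, "{Agent:x1@chunk-size}")   # hyphenated name now matches
assert re.search(patt, "{sys.query}")             # system variables still match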

     def __str__(self):
         """
@@ -407,10 +379,11 @@ class ComponentBase(ABC):
             "params": {}
         }}""".format(self.component_name,
                      self._param
                      )
                      )

     def __init__(self, canvas, id, param: ComponentParamBase):
+        from agent.canvas import Graph  # Local import to avoid cyclic dependency

         assert isinstance(canvas, Graph), "canvas must be an instance of Canvas"
         self._canvas = canvas
         self._id = id
@@ -445,14 +418,42 @@ class ComponentBase(ABC):
         self.set_output("_elapsed_time", time.perf_counter() - self.output("_created_time"))
         return self.output()

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
+    async def invoke_async(self, **kwargs) -> dict[str, Any]:
+        """
+        Async wrapper for component invocation.
+        Prefers coroutine `_invoke_async` if present; otherwise falls back to `_invoke`.
+        Handles timing and error recording consistently with `invoke`.
+        """
+        self.set_output("_created_time", time.perf_counter())
+        try:
+            if self.check_if_canceled("Component processing"):
+                return
+
+            fn_async = getattr(self, "_invoke_async", None)
+            if fn_async and asyncio.iscoroutinefunction(fn_async):
+                await fn_async(**kwargs)
+            elif asyncio.iscoroutinefunction(self._invoke):
+                await self._invoke(**kwargs)
+            else:
+                await thread_pool_exec(self._invoke, **kwargs)
+        except Exception as e:
+            if self.get_exception_default_value():
+                self.set_exception_default_value()
+            else:
+                self.set_output("_ERROR", str(e))
+            logging.exception(e)
+        self._param.debug_inputs = {}
+        self.set_output("_elapsed_time", time.perf_counter() - self.output("_created_time"))
+        return self.output()
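# A stripped-down model of the dispatch order above: a coroutine _invoke_async
# wins, a coroutine _invoke comes second, and a plain sync _invoke is pushed to
# a thread. The component classes are toys; asyncio.to_thread stands in for the
# repo's thread_pool_exec helper.
import asyncio

class SyncOnly:
    def _invoke(self, **kw):
        return "ran sync"

class AsyncNative:
    async def _invoke_async(self, **kw):
        return "ran async"

async def dispatch(cpn, **kw):
    fn_async = getattr(cpn, "_invoke_async", None)
    if fn_async and asyncio.iscoroutinefunction(fn_async):
        return await fn_async(**kw)
    if asyncio.iscoroutinefunction(cpn._invoke):
        return await cpn._invoke(**kw)
    return await asyncio.to_thread(cpn._invoke, **kw)

assert asyncio.run(dispatch(AsyncNative())) == "ran async"
assert asyncio.run(dispatch(SyncOnly())) == "ran sync"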
+
+    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10 * 60)))
     def _invoke(self, **kwargs):
         raise NotImplementedError()

-    def output(self, var_nm: str=None) -> Union[dict[str, Any], Any]:
+    def output(self, var_nm: str = None) -> Union[dict[str, Any], Any]:
         if var_nm:
             return self._param.outputs.get(var_nm, {}).get("value", "")
-        return {k: o.get("value") for k,o in self._param.outputs.items()}
+        return {k: o.get("value") for k, o in self._param.outputs.items()}

     def set_output(self, key: str, value: Any):
         if key not in self._param.outputs:
@@ -463,18 +464,18 @@ class ComponentBase(ABC):
         return self._param.outputs.get("_ERROR", {}).get("value")

     def reset(self, only_output=False):
-        outputs: dict = self._param.outputs # for better performance
+        outputs: dict = self._param.outputs  # for better performance
         for k in outputs.keys():
             outputs[k]["value"] = None
         if only_output:
             return

-        inputs: dict = self._param.inputs # for better performance
+        inputs: dict = self._param.inputs  # for better performance
         for k in inputs.keys():
             inputs[k]["value"] = None
         self._param.debug_inputs = {}

-    def get_input(self, key: str=None) -> Union[Any, dict[str, Any]]:
+    def get_input(self, key: str = None) -> Union[Any, dict[str, Any]]:
         if key:
             return self._param.inputs.get(key, {}).get("value")

@@ -498,13 +499,13 @@ class ComponentBase(ABC):

     def get_input_elements_from_text(self, txt: str) -> dict[str, dict[str, str]]:
         res = {}
-        for r in re.finditer(self.variable_ref_patt, txt, flags=re.IGNORECASE|re.DOTALL):
+        for r in re.finditer(self.variable_ref_patt, txt, flags=re.IGNORECASE | re.DOTALL):
             exp = r.group(1)
-            cpn_id, var_nm = exp.split("@") if exp.find("@")>0 else ("", exp)
+            cpn_id, var_nm = exp.split("@") if exp.find("@") > 0 else ("", exp)
             res[exp] = {
-                "name": (self._canvas.get_component_name(cpn_id) +f"@{var_nm}") if cpn_id else exp,
+                "name": (self._canvas.get_component_name(cpn_id) + f"@{var_nm}") if cpn_id else exp,
                 "value": self._canvas.get_variable_value(exp),
-                "_retrival": self._canvas.get_variable_value(f"{cpn_id}@_references") if cpn_id else None,
+                "_retrieval": self._canvas.get_variable_value(f"{cpn_id}@_references") if cpn_id else None,
                 "_cpn_id": cpn_id
             }
         return res
@@ -555,6 +556,7 @@ class ComponentBase(ABC):
         for n, v in kv.items():
             def repl(_match, val=v):
                 return str(val) if val is not None else ""
+
             content = re.sub(
                 r"\{%s\}" % re.escape(n),
                 repl,

@@ -14,6 +14,7 @@
 # limitations under the License.
 #
 from agent.component.fillup import UserFillUpParam, UserFillUp
+from api.db.services.file_service import FileService


 class BeginParam(UserFillUpParam):
@@ -27,7 +28,7 @@ class BeginParam(UserFillUpParam):
         self.prologue = "Hi! I'm your smart assistant. What can I do for you?"

     def check(self):
-        self.check_valid_value(self.mode, "The 'mode' should be either `conversational` or `task`", ["conversational", "task"])
+        self.check_valid_value(self.mode, "The 'mode' should be either `conversational` or `task`", ["conversational", "task","Webhook"])

     def get_input_form(self) -> dict[str, dict]:
         return getattr(self, "inputs")
@@ -48,7 +49,7 @@ class Begin(UserFillUp):
             if v.get("optional") and v.get("value", None) is None:
                 v = None
             else:
-                v = self._canvas.get_files([v["value"]])
+                v = FileService.get_files([v["value"]])
         else:
             v = v.get("value")
         self.set_output(k, v)

@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import asyncio
 import logging
 import os
 import re
@@ -96,20 +97,30 @@ Here's description of each category:
 class Categorize(LLM, ABC):
     component_name = "Categorize"

     def get_input_elements(self) -> dict[str, dict]:
+        query_key = self._param.query or "sys.query"
+        elements = self.get_input_elements_from_text(f"{{{query_key}}}")
+        if not elements:
+            logging.warning(f"[Categorize] input element not detected for query key: {query_key}")
+        return elements

-    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
-    def _invoke(self, **kwargs):
+    async def _invoke_async(self, **kwargs):
         if self.check_if_canceled("Categorize processing"):
             return

         msg = self._canvas.get_history(self._param.message_history_window_size)
         if not msg:
             msg = [{"role": "user", "content": ""}]
-        if kwargs.get("sys.query"):
-            msg[-1]["content"] = kwargs["sys.query"]
-            self.set_input_value("sys.query", kwargs["sys.query"])
+        query_key = self._param.query or "sys.query"
+        if query_key in kwargs:
+            query_value = kwargs[query_key]
         else:
-            msg[-1]["content"] = self._canvas.get_variable_value(self._param.query)
-            self.set_input_value(self._param.query, msg[-1]["content"])
+            query_value = self._canvas.get_variable_value(query_key)
+        if query_value is None:
+            query_value = ""
+        msg[-1]["content"] = query_value
+        self.set_input_value(query_key, msg[-1]["content"])
         self._param.update_prompt()
         chat_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT, self._param.llm_id)

@@ -121,7 +132,7 @@ class Categorize(LLM, ABC):
         if self.check_if_canceled("Categorize processing"):
             return

-        ans = chat_mdl.chat(self._param.sys_prompt, [{"role": "user", "content": user_prompt}], self._param.gen_conf())
+        ans = await chat_mdl.async_chat(self._param.sys_prompt, [{"role": "user", "content": user_prompt}], self._param.gen_conf())
         logging.info(f"input: {user_prompt}, answer: {str(ans)}")
         if ERROR_PREFIX in ans:
             raise Exception(ans)
@@ -136,7 +147,7 @@ class Categorize(LLM, ABC):
             category_counts[c] = count

         cpn_ids = list(self._param.category_description.items())[-1][1]["to"]
-        max_category = list(self._param.category_description.keys())[0]
+        max_category = list(self._param.category_description.keys())[-1]
         if any(category_counts.values()):
             max_category = max(category_counts.items(), key=lambda x: x[1])[0]
             cpn_ids = self._param.category_description[max_category]["to"]
@@ -144,5 +155,9 @@ class Categorize(LLM, ABC):
         self.set_output("category_name", max_category)
         self.set_output("_next", cpn_ids)

+    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
+    def _invoke(self, **kwargs):
+        return asyncio.run(self._invoke_async(**kwargs))
+
     def thoughts(self) -> str:
         return "Which should it falls into {}? ...".format(",".join([f"`{c}`" for c, _ in self._param.category_description.items()]))

1570  agent/component/docs_generator.py  (new file — diff suppressed because it is too large)
 401  agent/component/excel_processor.py  (new file)
@@ -0,0 +1,401 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
ExcelProcessor Component

A component for reading, processing, and generating Excel files in RAGFlow agents.
Supports multiple Excel file inputs, data transformation, and Excel output generation.
"""

import logging
import os
from abc import ABC
from io import BytesIO

import pandas as pd

from agent.component.base import ComponentBase, ComponentParamBase
from api.db.services.file_service import FileService
from api.utils.api_utils import timeout
from common import settings
from common.misc_utils import get_uuid


class ExcelProcessorParam(ComponentParamBase):
    """
    Define the ExcelProcessor component parameters.
    """
    def __init__(self):
        super().__init__()
        # Input configuration
        self.input_files = []  # Variable references to uploaded files
        self.operation = "read"  # read, merge, transform, output

        # Processing options
        self.sheet_selection = "all"  # all, first, or comma-separated sheet names
        self.merge_strategy = "concat"  # concat, join
        self.join_on = ""  # Column name for join operations

        # Transform options (for LLM-guided transformations)
        self.transform_instructions = ""
        self.transform_data = ""  # Variable reference to transformation data

        # Output options
        self.output_format = "xlsx"  # xlsx, csv
        self.output_filename = "output"

        # Component outputs
        self.outputs = {
            "data": {
                "type": "object",
                "value": {}
            },
            "summary": {
                "type": "str",
                "value": ""
            },
            "markdown": {
                "type": "str",
                "value": ""
            }
        }

    def check(self):
        self.check_valid_value(
            self.operation,
            "[ExcelProcessor] Operation",
            ["read", "merge", "transform", "output"]
        )
        self.check_valid_value(
            self.output_format,
            "[ExcelProcessor] Output format",
            ["xlsx", "csv"]
        )
        return True
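# A plausible parameter payload for this component; every field value below is
# invented for illustration, including the variable reference:
params = {
    "input_files": ["{begin@sales_report}"],   # variable refs to uploaded files
    "operation": "merge",
    "sheet_selection": "first",
    "merge_strategy": "join",
    "join_on": "order_id",
    "output_format": "xlsx",
    "output_filename": "merged_sales",
}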


class ExcelProcessor(ComponentBase, ABC):
    """
    Excel processing component for RAGFlow agents.

    Operations:
    - read: Parse Excel files into structured data
    - merge: Combine multiple Excel files
    - transform: Apply data transformations based on instructions
    - output: Generate Excel file output
    """
    component_name = "ExcelProcessor"

    def get_input_form(self) -> dict[str, dict]:
        """Define input form for the component."""
        res = {}
        for ref in (self._param.input_files or []):
            for k, o in self.get_input_elements_from_text(ref).items():
                res[k] = {"name": o.get("name", ""), "type": "file"}
        if self._param.transform_data:
            for k, o in self.get_input_elements_from_text(self._param.transform_data).items():
                res[k] = {"name": o.get("name", ""), "type": "object"}
        return res

    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
    def _invoke(self, **kwargs):
        if self.check_if_canceled("ExcelProcessor processing"):
            return

        operation = self._param.operation.lower()

        if operation == "read":
            self._read_excels()
        elif operation == "merge":
            self._merge_excels()
        elif operation == "transform":
            self._transform_data()
        elif operation == "output":
            self._output_excel()
        else:
            self.set_output("summary", f"Unknown operation: {operation}")

    def _get_file_content(self, file_ref: str) -> tuple[bytes, str]:
        """
        Get file content from a variable reference.
        Returns (content_bytes, filename).
        """
        value = self._canvas.get_variable_value(file_ref)
        if value is None:
            return None, None

        # Handle different value formats
        if isinstance(value, dict):
            # File reference from Begin/UserFillUp component
            file_id = value.get("id") or value.get("file_id")
            created_by = value.get("created_by") or self._canvas.get_tenant_id()
            filename = value.get("name") or value.get("filename", "unknown.xlsx")
            if file_id:
                content = FileService.get_blob(created_by, file_id)
                return content, filename
        elif isinstance(value, list) and len(value) > 0:
            # List of file references - return first
            return self._get_file_content_from_list(value[0])
        elif isinstance(value, str):
            # Could be base64 encoded or a path
            if value.startswith("data:"):
                import base64
                # Extract base64 content
                _, encoded = value.split(",", 1)
                return base64.b64decode(encoded), "uploaded.xlsx"

        return None, None

    def _get_file_content_from_list(self, item) -> tuple[bytes, str]:
        """Extract file content from a list item."""
        if isinstance(item, dict):
            return self._get_file_content(item)
        return None, None

    def _parse_excel_to_dataframes(self, content: bytes, filename: str) -> dict[str, pd.DataFrame]:
        """Parse Excel content into a dictionary of DataFrames (one per sheet)."""
        try:
            excel_file = BytesIO(content)

            if filename.lower().endswith(".csv"):
                df = pd.read_csv(excel_file)
                return {"Sheet1": df}
            else:
                # Read all sheets
                xlsx = pd.ExcelFile(excel_file, engine='openpyxl')
                sheet_selection = self._param.sheet_selection

                if sheet_selection == "all":
                    sheets_to_read = xlsx.sheet_names
                elif sheet_selection == "first":
                    sheets_to_read = [xlsx.sheet_names[0]] if xlsx.sheet_names else []
                else:
                    # Comma-separated sheet names
                    requested = [s.strip() for s in sheet_selection.split(",")]
                    sheets_to_read = [s for s in requested if s in xlsx.sheet_names]

                dfs = {}
                for sheet in sheets_to_read:
                    dfs[sheet] = pd.read_excel(xlsx, sheet_name=sheet)
                return dfs

        except Exception as e:
            logging.error(f"Error parsing Excel file {filename}: {e}")
            return {}

    def _read_excels(self):
        """Read and parse Excel files into structured data."""
        all_data = {}
        summaries = []
        markdown_parts = []

        for file_ref in (self._param.input_files or []):
            if self.check_if_canceled("ExcelProcessor reading"):
                return

            # Get variable value
            value = self._canvas.get_variable_value(file_ref)
            self.set_input_value(file_ref, str(value)[:200] if value else "")

            if value is None:
                continue

            # Handle file content
            content, filename = self._get_file_content(file_ref)
            if content is None:
                continue

            # Parse Excel
            dfs = self._parse_excel_to_dataframes(content, filename)

            for sheet_name, df in dfs.items():
                key = f"{filename}_{sheet_name}" if len(dfs) > 1 else filename
                all_data[key] = df.to_dict(orient="records")

                # Build summary
                summaries.append(f"**{key}**: {len(df)} rows, {len(df.columns)} columns ({', '.join(df.columns.tolist()[:5])}{'...' if len(df.columns) > 5 else ''})")

                # Build markdown table
                markdown_parts.append(f"### {key}\n\n{df.head(10).to_markdown(index=False)}\n")

        # Set outputs
        self.set_output("data", all_data)
        self.set_output("summary", "\n".join(summaries) if summaries else "No Excel files found")
        self.set_output("markdown", "\n\n".join(markdown_parts) if markdown_parts else "No data")

    def _merge_excels(self):
        """Merge multiple Excel files/sheets into one."""
        all_dfs = []

        for file_ref in (self._param.input_files or []):
            if self.check_if_canceled("ExcelProcessor merging"):
                return

            value = self._canvas.get_variable_value(file_ref)
            self.set_input_value(file_ref, str(value)[:200] if value else "")

            if value is None:
                continue

            content, filename = self._get_file_content(file_ref)
            if content is None:
                continue

            dfs = self._parse_excel_to_dataframes(content, filename)
            all_dfs.extend(dfs.values())

        if not all_dfs:
            self.set_output("data", {})
            self.set_output("summary", "No data to merge")
            return

        # Merge strategy
        if self._param.merge_strategy == "concat":
            merged_df = pd.concat(all_dfs, ignore_index=True)
        elif self._param.merge_strategy == "join" and self._param.join_on:
            # Join on specified column
            merged_df = all_dfs[0]
            for df in all_dfs[1:]:
                merged_df = merged_df.merge(df, on=self._param.join_on, how="outer")
        else:
            merged_df = pd.concat(all_dfs, ignore_index=True)

        self.set_output("data", {"merged": merged_df.to_dict(orient="records")})
        self.set_output("summary", f"Merged {len(all_dfs)} sources into {len(merged_df)} rows, {len(merged_df.columns)} columns")
        self.set_output("markdown", merged_df.head(20).to_markdown(index=False))

    def _transform_data(self):
        """Apply transformations to data based on instructions or input data."""
        # Get the data to transform
        transform_ref = self._param.transform_data
        if not transform_ref:
            self.set_output("summary", "No transform data reference provided")
            return

        data = self._canvas.get_variable_value(transform_ref)
        self.set_input_value(transform_ref, str(data)[:300] if data else "")

        if data is None:
            self.set_output("summary", "Transform data is empty")
            return

        # Convert to DataFrame
        if isinstance(data, dict):
            # Could be {"sheet": [rows]} format
            if all(isinstance(v, list) for v in data.values()):
                # Multiple sheets
                all_markdown = []
                for sheet_name, rows in data.items():
                    df = pd.DataFrame(rows)
                    all_markdown.append(f"### {sheet_name}\n\n{df.to_markdown(index=False)}")
                self.set_output("data", data)
                self.set_output("markdown", "\n\n".join(all_markdown))
            else:
                df = pd.DataFrame([data])
                self.set_output("data", df.to_dict(orient="records"))
                self.set_output("markdown", df.to_markdown(index=False))
        elif isinstance(data, list):
            df = pd.DataFrame(data)
            self.set_output("data", df.to_dict(orient="records"))
            self.set_output("markdown", df.to_markdown(index=False))
        else:
            self.set_output("data", {"raw": str(data)})
            self.set_output("markdown", str(data))

        self.set_output("summary", "Transformed data ready for processing")

    def _output_excel(self):
        """Generate Excel file output from data."""
        # Get data from transform_data reference
        transform_ref = self._param.transform_data
        if not transform_ref:
            self.set_output("summary", "No data reference for output")
            return

        data = self._canvas.get_variable_value(transform_ref)
        self.set_input_value(transform_ref, str(data)[:300] if data else "")

        if data is None:
            self.set_output("summary", "No data to output")
            return

        try:
            # Prepare DataFrames
            if isinstance(data, dict):
                if all(isinstance(v, list) for v in data.values()):
                    # Multi-sheet format
                    dfs = {k: pd.DataFrame(v) for k, v in data.items()}
                else:
                    dfs = {"Sheet1": pd.DataFrame([data])}
            elif isinstance(data, list):
                dfs = {"Sheet1": pd.DataFrame(data)}
            else:
                self.set_output("summary", "Invalid data format for Excel output")
                return

            # Generate output
            doc_id = get_uuid()

            if self._param.output_format == "csv":
                # For CSV, only output first sheet
                first_df = list(dfs.values())[0]
                binary_content = first_df.to_csv(index=False).encode("utf-8")
                filename = f"{self._param.output_filename}.csv"
            else:
                # Excel output
                excel_io = BytesIO()
                with pd.ExcelWriter(excel_io, engine='openpyxl') as writer:
                    for sheet_name, df in dfs.items():
                        # Sanitize sheet name (max 31 chars, no special chars)
                        safe_name = sheet_name[:31].replace("/", "_").replace("\\", "_")
                        df.to_excel(writer, sheet_name=safe_name, index=False)
                excel_io.seek(0)
                binary_content = excel_io.read()
                filename = f"{self._param.output_filename}.xlsx"

            # Store file
            settings.STORAGE_IMPL.put(self._canvas._tenant_id, doc_id, binary_content)

            # Set attachment output
            self.set_output("attachment", {
                "doc_id": doc_id,
                "format": self._param.output_format,
                "file_name": filename
            })

            total_rows = sum(len(df) for df in dfs.values())
            self.set_output("summary", f"Generated {filename} with {len(dfs)} sheet(s), {total_rows} total rows")
            self.set_output("data", {k: v.to_dict(orient="records") for k, v in dfs.items()})

            logging.info(f"ExcelProcessor: Generated {filename} as {doc_id}")

        except Exception as e:
            logging.error(f"ExcelProcessor output error: {e}")
            self.set_output("summary", f"Error generating output: {str(e)}")

    def thoughts(self) -> str:
        """Return component thoughts for UI display."""
        op = self._param.operation
        if op == "read":
            return "Reading Excel files..."
        elif op == "merge":
            return "Merging Excel data..."
        elif op == "transform":
            return "Transforming data..."
        elif op == "output":
            return "Generating Excel output..."
        return "Processing Excel..."
@@ -13,26 +13,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from agent.component.base import ComponentParamBase, ComponentBase
+from abc import ABC
+from agent.component.base import ComponentBase, ComponentParamBase


-class WebhookParam(ComponentParamBase):
-
-    """
-    Define the Begin component parameters.
-    """
-    def __init__(self):
-        super().__init__()
-
-    def get_input_form(self) -> dict[str, dict]:
-        return getattr(self, "inputs")
+class ExitLoopParam(ComponentParamBase, ABC):
     def check(self):
         return True


-class Webhook(ComponentBase):
-    component_name = "Webhook"
+class ExitLoop(ComponentBase, ABC):
+    component_name = "ExitLoop"

     def _invoke(self, **kwargs):
         pass

     def thoughts(self) -> str:
-        return ""
+        return ""
@@ -18,6 +18,7 @@ import re
 from functools import partial

 from agent.component.base import ComponentParamBase, ComponentBase
+from api.db.services.file_service import FileService


 class UserFillUpParam(ComponentParamBase):
@@ -63,6 +64,13 @@ class UserFillUp(ComponentBase):
         for k, v in kwargs.get("inputs", {}).items():
             if self.check_if_canceled("UserFillUp processing"):
                 return
+            if isinstance(v, dict) and v.get("type", "").lower().find("file") >=0:
+                if v.get("optional") and v.get("value", None) is None:
+                    v = None
+                else:
+                    v = FileService.get_files([v["value"]])
+            else:
+                v = v.get("value")
             self.set_output(k, v)

     def thoughts(self) -> str:

@@ -32,7 +32,7 @@ class IterationParam(ComponentParamBase):
     def __init__(self):
         super().__init__()
         self.items_ref = ""
-        self.veriable={}
+        self.variable={}

     def get_input_form(self) -> dict[str, dict]:
         return {

@@ -13,12 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import asyncio
 import json
 import logging
 import os
 import re
 from copy import deepcopy
-from typing import Any, Generator
+from typing import Any, AsyncGenerator
 import json_repair
 from functools import partial
 from common.constants import LLMType
@@ -55,7 +56,6 @@ class LLMParam(ComponentParamBase):
         self.check_nonnegative_number(int(self.max_tokens), "[Agent] Max tokens")
         self.check_decimal_float(float(self.top_p), "[Agent] Top P")
         self.check_empty(self.llm_id, "[Agent] LLM")
-        self.check_empty(self.sys_prompt, "[Agent] System prompt")
         self.check_empty(self.prompts, "[Agent] User prompt")

     def gen_conf(self):
@@ -166,25 +166,67 @@ class LLM(ComponentBase):
         sys_prompt = re.sub(rf"<{tag}>(.*?)</{tag}>", "", sys_prompt, flags=re.DOTALL|re.IGNORECASE)
         return pts, sys_prompt

-    def _generate(self, msg:list[dict], **kwargs) -> str:
+    async def _generate_async(self, msg: list[dict], **kwargs) -> str:
         if not self.imgs:
-            return self.chat_mdl.chat(msg[0]["content"], msg[1:], self._param.gen_conf(), **kwargs)
-        return self.chat_mdl.chat(msg[0]["content"], msg[1:], self._param.gen_conf(), images=self.imgs, **kwargs)
+            return await self.chat_mdl.async_chat(msg[0]["content"], msg[1:], self._param.gen_conf(), **kwargs)
+        return await self.chat_mdl.async_chat(msg[0]["content"], msg[1:], self._param.gen_conf(), images=self.imgs, **kwargs)

-    def _generate_streamly(self, msg:list[dict], **kwargs) -> Generator[str, None, None]:
-        ans = ""
+    async def _generate_streamly(self, msg: list[dict], **kwargs) -> AsyncGenerator[str, None]:
+        async def delta_wrapper(txt_iter):
+            ans = ""
             last_idx = 0
             endswith_think = False

             def delta(txt):
                 nonlocal ans, last_idx, endswith_think
                 delta_ans = txt[last_idx:]
                 ans = txt

                 if delta_ans.find("<think>") == 0:
                     last_idx += len("<think>")
                     return "<think>"
                 elif delta_ans.find("<think>") > 0:
                     delta_ans = txt[last_idx:last_idx + delta_ans.find("<think>")]
                     last_idx += delta_ans.find("<think>")
                     return delta_ans
                 elif delta_ans.endswith("</think>"):
                     endswith_think = True
                 elif endswith_think:
                     endswith_think = False
                     return "</think>"

                 last_idx = len(ans)
                 if ans.endswith("</think>"):
                     last_idx -= len("</think>")
                 return re.sub(r"(<think>|</think>)", "", delta_ans)

+            async for t in txt_iter:
+                yield delta(t)

+        if not self.imgs:
+            async for t in delta_wrapper(self.chat_mdl.async_chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf(), **kwargs)):
+                yield t
+            return
+
+        async for t in delta_wrapper(self.chat_mdl.async_chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf(), images=self.imgs, **kwargs)):
+            yield t
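# How the wrapper above turns a cumulative stream into deltas: each streamed
# chunk is the full text so far, and only the new tail is emitted, with
# "<think>"/"</think>" surfaced as standalone markers. A compressed synchronous
# re-creation over a made-up token stream (the real delta() additionally splits
# around the markers):
stream = ["<think>", "<think>plan", "<think>plan</think>", "<think>plan</think>Answer"]

last = ""
for txt in stream:
    delta = txt[len(last):]
    last = txt
    print(repr(delta))
# '<think>' 'plan' '</think>' 'Answer'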
|
||||
|
||||
    async def _stream_output_async(self, prompt, msg):
        _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
        answer = ""
        last_idx = 0
        endswith_think = False

        def delta(txt):
            nonlocal ans, last_idx, endswith_think
            nonlocal answer, last_idx, endswith_think
            delta_ans = txt[last_idx:]
            ans = txt
            answer = txt

            if delta_ans.find("<think>") == 0:
                last_idx += len("<think>")
                return "<think>"
            elif delta_ans.find("<think>") > 0:
                delta_ans = txt[last_idx:last_idx+delta_ans.find("<think>")]
                delta_ans = txt[last_idx:last_idx + delta_ans.find("<think>")]
                last_idx += delta_ans.find("<think>")
                return delta_ans
            elif delta_ans.endswith("</think>"):
@@ -193,20 +235,33 @@ class LLM(ComponentBase):
                endswith_think = False
                return "</think>"

            last_idx = len(ans)
            if ans.endswith("</think>"):
            last_idx = len(answer)
            if answer.endswith("</think>"):
                last_idx -= len("</think>")
            return re.sub(r"(<think>|</think>)", "", delta_ans)

        if not self.imgs:
            for txt in self.chat_mdl.chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf(), **kwargs):
                yield delta(txt)
        else:
            for txt in self.chat_mdl.chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf(), images=self.imgs, **kwargs):
                yield delta(txt)
        stream_kwargs = {"images": self.imgs} if self.imgs else {}
        async for ans in self.chat_mdl.async_chat_streamly(msg[0]["content"], msg[1:], self._param.gen_conf(), **stream_kwargs):
            if self.check_if_canceled("LLM streaming"):
                return

            if isinstance(ans, int):
                continue

            if ans.find("**ERROR**") >= 0:
                if self.get_exception_default_value():
                    self.set_output("content", self.get_exception_default_value())
                    yield self.get_exception_default_value()
                else:
                    self.set_output("_ERROR", ans)
                return

            yield delta(ans)

        self.set_output("content", answer)

    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
    def _invoke(self, **kwargs):
    async def _invoke_async(self, **kwargs):
        if self.check_if_canceled("LLM processing"):
            return

@@ -217,22 +272,25 @@ class LLM(ComponentBase):

        prompt, msg, _ = self._prepare_prompt_variables()
        error: str = ""
        output_structure=None
        output_structure = None
        try:
            output_structure = self._param.outputs['structured']
            output_structure = self._param.outputs["structured"]
        except Exception:
            pass
        if output_structure and isinstance(output_structure, dict) and output_structure.get("properties"):
            schema=json.dumps(output_structure, ensure_ascii=False, indent=2)
            prompt += structured_output_prompt(schema)
            for _ in range(self._param.max_retries+1):
        if output_structure and isinstance(output_structure, dict) and output_structure.get("properties") and len(output_structure["properties"]) > 0:
            schema = json.dumps(output_structure, ensure_ascii=False, indent=2)
            prompt_with_schema = prompt + structured_output_prompt(schema)
            for _ in range(self._param.max_retries + 1):
                if self.check_if_canceled("LLM processing"):
                    return

                _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
                _, msg_fit = message_fit_in(
                    [{"role": "system", "content": prompt_with_schema}, *deepcopy(msg)],
                    int(self.chat_mdl.max_length * 0.97),
                )
                error = ""
                ans = self._generate(msg)
                msg.pop(0)
                ans = await self._generate_async(msg_fit)
                msg_fit.pop(0)
                if ans.find("**ERROR**") >= 0:
                    logging.error(f"LLM response error: {ans}")
                    error = ans
@@ -241,7 +299,7 @@ class LLM(ComponentBase):
                        self.set_output("structured", json_repair.loads(clean_formated_answer(ans)))
                        return
                    except Exception:
                        msg.append({"role": "user", "content": "The answer can't be parsed as JSON"})
                        msg_fit.append({"role": "user", "content": "The answer can't be parsed as JSON"})
                        error = "The answer can't be parsed as JSON"
            if error:
                self.set_output("_ERROR", error)
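The structured-output path above retries until the model's reply parses as JSON, feeding each parse failure back to the model as a user turn. A standalone sketch of that pattern; here `chat` and the schema-appending prompt are stand-ins for the component's model handle and `structured_output_prompt` helper:

```python
# Sketch of the structured-output retry loop, under stated assumptions:
# `chat(messages)` returns the model's text answer.
import json
import json_repair  # tolerant JSON parser used by the component


def generate_structured(chat, prompt: str, schema: dict, max_retries: int = 2):
    messages = [{"role": "system",
                 "content": prompt + "\nReturn JSON matching:\n" + json.dumps(schema)}]
    for _ in range(max_retries + 1):
        ans = chat(messages)
        try:
            # json_repair fixes minor syntax slips (trailing commas, quotes) before parsing.
            return json_repair.loads(ans)
        except Exception:
            # Feed the failure back so the model can correct itself next round.
            messages.append({"role": "user",
                             "content": "The answer can't be parsed as JSON"})
    return None
```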
@@ -249,18 +307,23 @@ class LLM(ComponentBase):

        downstreams = self._canvas.get_component(self._id)["downstream"] if self._canvas.get_component(self._id) else []
        ex = self.exception_handler()
        if any([self._canvas.get_component_obj(cid).component_name.lower()=="message" for cid in downstreams]) and not (ex and ex["goto"]):
            self.set_output("content", partial(self._stream_output, prompt, msg))
        if any([self._canvas.get_component_obj(cid).component_name.lower() == "message" for cid in downstreams]) and not (
            ex and ex["goto"]
        ):
            self.set_output("content", partial(self._stream_output_async, prompt, deepcopy(msg)))
            return

        for _ in range(self._param.max_retries+1):
        error = ""
        for _ in range(self._param.max_retries + 1):
            if self.check_if_canceled("LLM processing"):
                return

            _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
            _, msg_fit = message_fit_in(
                [{"role": "system", "content": prompt}, *deepcopy(msg)], int(self.chat_mdl.max_length * 0.97)
            )
            error = ""
            ans = self._generate(msg)
            msg.pop(0)
            ans = await self._generate_async(msg_fit)
            msg_fit.pop(0)
            if ans.find("**ERROR**") >= 0:
                logging.error(f"LLM response error: {ans}")
                error = ans
@@ -274,26 +337,12 @@ class LLM(ComponentBase):
            else:
                self.set_output("_ERROR", error)

    def _stream_output(self, prompt, msg):
        _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
        answer = ""
        for ans in self._generate_streamly(msg):
            if self.check_if_canceled("LLM streaming"):
                return
    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
    def _invoke(self, **kwargs):
        return asyncio.run(self._invoke_async(**kwargs))

            if ans.find("**ERROR**") >= 0:
                if self.get_exception_default_value():
                    self.set_output("content", self.get_exception_default_value())
                    yield self.get_exception_default_value()
                else:
                    self.set_output("_ERROR", ans)
                return
            yield ans
            answer += ans
        self.set_output("content", answer)

    def add_memory(self, user:str, assist:str, func_name: str, params: dict, results: str, user_defined_prompt:dict={}):
        summ = tool_call_summary(self.chat_mdl, func_name, params, results, user_defined_prompt)
    async def add_memory(self, user:str, assist:str, func_name: str, params: dict, results: str, user_defined_prompt:dict={}):
        summ = await tool_call_summary(self.chat_mdl, func_name, params, results, user_defined_prompt)
        logging.info(f"[MEMORY]: {summ}")
        self._canvas.add_memory(user, assist, summ)

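The sync entry point survives this migration as a thin bridge over the async body. A minimal sketch of the `_invoke` to `_invoke_async` pattern; `nest_asyncio` (applied in this PR's message.py changes) is what lets `asyncio.run()` work even when an event loop is already running:

```python
# Sketch of the sync-to-async bridge; Component and its methods are
# illustrative stand-ins, not the repo's real base class.
import asyncio


class Component:
    async def _invoke_async(self, **kwargs):
        await asyncio.sleep(0)  # placeholder for real async work
        return kwargs

    def _invoke(self, **kwargs):
        # Blocks until the async implementation finishes.
        return asyncio.run(self._invoke_async(**kwargs))


print(Component()._invoke(x=1))  # {'x': 1}
```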
agent/component/loop.py (new file, 80 lines)
@@ -0,0 +1,80 @@
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC
from agent.component.base import ComponentBase, ComponentParamBase


class LoopParam(ComponentParamBase):
    """
    Define the Loop component parameters.
    """

    def __init__(self):
        super().__init__()
        self.loop_variables = []
        self.loop_termination_condition = []
        self.maximum_loop_count = 0

    def get_input_form(self) -> dict[str, dict]:
        return {
            "items": {
                "type": "json",
                "name": "Items"
            }
        }

    def check(self):
        return True


class Loop(ComponentBase, ABC):
    component_name = "Loop"

    def get_start(self):
        for cid in self._canvas.components.keys():
            if self._canvas.get_component(cid)["obj"].component_name.lower() != "loopitem":
                continue
            if self._canvas.get_component(cid)["parent_id"] == self._id:
                return cid

    def _invoke(self, **kwargs):
        if self.check_if_canceled("Loop processing"):
            return

        for item in self._param.loop_variables:
            if any([not item.get("variable"), not item.get("input_mode"), not item.get("value"), not item.get("type")]):
                raise AssertionError("Loop variable is not complete.")
            if item["input_mode"] == "variable":
                self.set_output(item["variable"], self._canvas.get_variable_value(item["value"]))
            elif item["input_mode"] == "constant":
                self.set_output(item["variable"], item["value"])
            else:
                if item["type"] == "number":
                    self.set_output(item["variable"], 0)
                elif item["type"] == "string":
                    self.set_output(item["variable"], "")
                elif item["type"] == "boolean":
                    self.set_output(item["variable"], False)
                elif item["type"].startswith("object"):
                    self.set_output(item["variable"], {})
                elif item["type"].startswith("array"):
                    self.set_output(item["variable"], [])
                else:
                    self.set_output(item["variable"], "")

    def thoughts(self) -> str:
        return "Loop from canvas."
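For reference, a sketch of the `loop_variables` entries that `Loop._invoke` above expects; the field values are illustrative only, inferred from the checks in that method:

```python
# Hypothetical loop_variables configuration; every entry needs all four keys
# ("variable", "input_mode", "value", "type") or _invoke raises.
loop_variables = [
    {   # seeded from another component's output
        "variable": "remaining",
        "input_mode": "variable",
        "value": "begin@item_count",   # canvas variable reference (illustrative)
        "type": "number",
    },
    {   # seeded from a literal
        "variable": "greeting",
        "input_mode": "constant",
        "value": "hello",
        "type": "string",
    },
]
```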
agent/component/loopitem.py (new file, 167 lines)
@@ -0,0 +1,167 @@
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC
from agent.component.base import ComponentBase, ComponentParamBase


class LoopItemParam(ComponentParamBase):
    """
    Define the LoopItem component parameters.
    """
    def check(self):
        return True


class LoopItem(ComponentBase, ABC):
    component_name = "LoopItem"

    def __init__(self, canvas, id, param: ComponentParamBase):
        super().__init__(canvas, id, param)
        self._idx = 0

    def _invoke(self, **kwargs):
        if self.check_if_canceled("LoopItem processing"):
            return
        parent = self.get_parent()
        maximum_loop_count = parent._param.maximum_loop_count
        if self._idx >= maximum_loop_count:
            self._idx = -1
            return
        if self._idx > 0:
            if self.check_if_canceled("LoopItem processing"):
                return
        self._idx += 1

    def evaluate_condition(self, var, operator, value):
        if isinstance(var, str):
            if operator == "contains":
                return value in var
            elif operator == "not contains":
                return value not in var
            elif operator == "start with":
                return var.startswith(value)
            elif operator == "end with":
                return var.endswith(value)
            elif operator == "is":
                return var == value
            elif operator == "is not":
                return var != value
            elif operator == "empty":
                return var == ""
            elif operator == "not empty":
                return var != ""

        elif isinstance(var, (int, float)):
            if operator == "=":
                return var == value
            elif operator == "≠":
                return var != value
            elif operator == ">":
                return var > value
            elif operator == "<":
                return var < value
            elif operator == "≥":
                return var >= value
            elif operator == "≤":
                return var <= value
            elif operator == "empty":
                return var is None
            elif operator == "not empty":
                return var is not None

        elif isinstance(var, bool):
            if operator == "is":
                return var is value
            elif operator == "is not":
                return var is not value
            elif operator == "empty":
                return var is None
            elif operator == "not empty":
                return var is not None

        elif isinstance(var, dict):
            if operator == "empty":
                return len(var) == 0
            elif operator == "not empty":
                return len(var) > 0

        elif isinstance(var, list):
            if operator == "contains":
                return value in var
            elif operator == "not contains":
                return value not in var
            elif operator == "is":
                return var == value
            elif operator == "is not":
                return var != value
            elif operator == "empty":
                return len(var) == 0
            elif operator == "not empty":
                return len(var) > 0

        elif var is None:
            if operator == "empty":
                return True
            return False

        raise Exception(f"Invalid operator: {operator}")
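A usage sketch for `evaluate_condition` above: operators are plain strings, and which comparison runs depends on the runtime type of `var`. The method never touches `self`, so it can be exercised unbound for a quick check:

```python
# Unbound calls (self=None is ignored by the method body); operator strings
# match the branches defined in evaluate_condition.
assert LoopItem.evaluate_condition(None, "hello world", "contains", "world")
assert LoopItem.evaluate_condition(None, 3, "≥", 2)
assert LoopItem.evaluate_condition(None, [], "empty", None)
assert not LoopItem.evaluate_condition(None, {"k": 1}, "empty", None)
```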
    def end(self):
        if self._idx == -1:
            return True
        parent = self.get_parent()
        logical_operator = parent._param.logical_operator if hasattr(parent._param, "logical_operator") else "and"
        conditions = []
        for item in parent._param.loop_termination_condition:
            if not item.get("variable") or not item.get("operator"):
                raise ValueError("Loop condition is incomplete.")
            var = self._canvas.get_variable_value(item["variable"])
            operator = item["operator"]
            input_mode = item.get("input_mode", "constant")

            if input_mode == "variable":
                value = self._canvas.get_variable_value(item.get("value", ""))
            elif input_mode == "constant":
                value = item.get("value", "")
            else:
                raise ValueError("Invalid input mode.")
            conditions.append(self.evaluate_condition(var, operator, value))
        should_end = (
            all(conditions) if logical_operator == "and"
            else any(conditions) if logical_operator == "or"
            else None
        )
        if should_end is None:
            raise ValueError("Invalid logical operator, should be 'and' or 'or'.")

        if should_end:
            self._idx = -1
            return True

        return False

    def next(self):
        if self._idx == -1:
            self._idx = 0
        else:
            self._idx += 1
        if self._idx >= len(self._items):
            self._idx = -1
            return False

    def thoughts(self) -> str:
        return "Next turn..."
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import nest_asyncio
nest_asyncio.apply()
import inspect
import json
import os
import random
@@ -29,6 +33,8 @@ from common.connection_utils import timeout
from common.misc_utils import get_uuid
from common import settings

from api.db.joint_services.memory_message_service import queue_save_to_memory_task


class MessageParam(ComponentParamBase):
    """
@@ -39,6 +45,7 @@ class MessageParam(ComponentParamBase):
        self.content = []
        self.stream = True
        self.output_format = None  # default output format
        self.auto_play = False
        self.outputs = {
            "content": {
                "type": "str"
@@ -66,8 +73,12 @@ class Message(ComponentBase):
                v = ""
            ans = ""
            if isinstance(v, partial):
                for t in v():
                    ans += t
                iter_obj = v()
                if inspect.isasyncgen(iter_obj):
                    ans = asyncio.run(self._consume_async_gen(iter_obj))
                else:
                    for t in iter_obj:
                        ans += t
            elif isinstance(v, list) and delimiter:
                ans = delimiter.join([str(vv) for vv in v])
            elif not isinstance(v, str):
@@ -89,7 +100,13 @@ class Message(ComponentBase):
                _kwargs[_n] = v
        return script, _kwargs

    def _stream(self, rand_cnt:str):
    async def _consume_async_gen(self, agen):
        buf = ""
        async for t in agen:
            buf += t
        return buf

    async def _stream(self, rand_cnt:str):
        s = 0
        all_content = ""
        cache = {}
@@ -111,15 +128,27 @@ class Message(ComponentBase):
                    v = ""
                if isinstance(v, partial):
                    cnt = ""
                    for t in v():
                        if self.check_if_canceled("Message streaming"):
                            return
                    iter_obj = v()
                    if inspect.isasyncgen(iter_obj):
                        async for t in iter_obj:
                            if self.check_if_canceled("Message streaming"):
                                return

                            all_content += t
                            cnt += t
                            yield t
                        all_content += t
                        cnt += t
                        yield t
                    else:
                        for t in iter_obj:
                            if self.check_if_canceled("Message streaming"):
                                return

                            all_content += t
                            cnt += t
                            yield t
                    self.set_input_value(exp, cnt)
                    continue
                elif inspect.isawaitable(v):
                    v = await v
                elif not isinstance(v, str):
                    try:
                        v = json.dumps(v, ensure_ascii=False)
@@ -139,6 +168,7 @@ class Message(ComponentBase):

        self.set_output("content", all_content)
        self._convert_content(all_content)
        await self._save_to_memory(all_content)

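The dispatch above lets a Message binding be a plain value, a sync generator, or, after this migration, an async generator (the new `partial(self._stream_output_async, ...)`). A self-contained sketch of that pattern:

```python
# Sketch: consume a value that may be an async generator, a sync generator,
# or a plain object, mirroring the inspect-based dispatch in Message.
import asyncio
import inspect


def consume(v) -> str:
    if inspect.isasyncgen(v):
        async def drain(agen):
            buf = ""
            async for t in agen:
                buf += t
            return buf
        return asyncio.run(drain(v))
    if inspect.isgenerator(v):
        return "".join(v)
    return str(v)


async def agen():
    yield "a"
    yield "b"

print(consume(agen()))            # "ab"
print(consume(t for t in "xy"))   # "xy"
print(consume(42))                # "42"
```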
    def _is_jinjia2(self, content:str) -> bool:
        patt = [
@@ -171,18 +201,61 @@ class Message(ComponentBase):

        self.set_output("content", content)
        self._convert_content(content)
        self._save_to_memory(content)

    def thoughts(self) -> str:
        return ""

    def _parse_markdown_table_lines(self, table_lines: list):
        """
        Parse a list of Markdown table lines into a pandas DataFrame.

        Args:
            table_lines: List of strings, each representing a row in the Markdown table
                (excluding separator lines like |---|---|)

        Returns:
            pandas DataFrame with the table data, or None if parsing fails
        """
        import pandas as pd

        if not table_lines:
            return None

        rows = []
        headers = None

        for line in table_lines:
            # Split by | and clean up
            cells = [cell.strip() for cell in line.split('|')]
            # Drop empty cells, including the empty first/last elements produced
            # by leading/trailing | characters
            cells = [c for c in cells if c]

            if headers is None:
                headers = cells
            else:
                rows.append(cells)

        if headers and rows:
            # Ensure all rows have same number of columns as headers
            normalized_rows = []
            for row in rows:
                while len(row) < len(headers):
                    row.append('')
                normalized_rows.append(row[:len(headers)])

            return pd.DataFrame(normalized_rows, columns=headers)

        return None
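A usage sketch for `_parse_markdown_table_lines`; it assumes separator rows (`|---|---|`) were already filtered out by the caller, as `_convert_content` does below, and `message` here is a hypothetical Message instance:

```python
# Hypothetical call; the first line becomes the header row.
lines = [
    "| name | score |",
    "| alice | 90 |",
    "| bob | 85 |",
]
df = message._parse_markdown_table_lines(lines)
print(df)
# Roughly:
#     name score
# 0  alice    90
# 1    bob    85
```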
    def _convert_content(self, content):
        if not self._param.output_format:
            return

        import pypandoc
        doc_id = get_uuid()

        if self._param.output_format.lower() not in {"markdown", "html", "pdf", "docx"}:
        if self._param.output_format.lower() not in {"markdown", "html", "pdf", "docx", "xlsx"}:
            self._param.output_format = "markdown"

        try:
@@ -202,6 +275,119 @@ class Message(ComponentBase):

                binary_content = converted.encode("utf-8")

            elif self._param.output_format == "xlsx":
                import pandas as pd
                from io import BytesIO

                # Debug: log the content being parsed
                logging.info(f"XLSX Parser: Content length={len(content) if content else 0}, first 500 chars: {content[:500] if content else 'None'}")

                # Try to parse ALL Markdown tables from the content
                # Each table will be written to a separate sheet
                tables = []  # List of (sheet_name, dataframe)

                if isinstance(content, str):
                    lines = content.strip().split('\n')
                    logging.info(f"XLSX Parser: Total lines={len(lines)}, lines starting with '|': {sum(1 for line in lines if line.strip().startswith('|'))}")
                    current_table_lines = []
                    current_table_title = None
                    pending_title = None
                    in_table = False
                    table_count = 0

                    for i, line in enumerate(lines):
                        stripped = line.strip()

                        # Check for potential table title (lines before a table)
                        # Look for patterns like "Table 1:", "## Table", or markdown headers
                        if not in_table and stripped and not stripped.startswith('|'):
                            # Check if this could be a table title
                            lower_stripped = stripped.lower()
                            if (lower_stripped.startswith('table') or
                                    stripped.startswith('#') or
                                    ':' in stripped):
                                pending_title = stripped.lstrip('#').strip()

                        if stripped.startswith('|') and '|' in stripped[1:]:
                            # Check if this is a separator line (|---|---|)
                            cleaned = stripped.replace(' ', '').replace('|', '').replace('-', '').replace(':', '')
                            if cleaned == '':
                                continue  # Skip separator line

                            if not in_table:
                                # Starting a new table
                                in_table = True
                                current_table_lines = []
                                current_table_title = pending_title
                                pending_title = None

                            current_table_lines.append(stripped)

                        elif in_table and not stripped.startswith('|'):
                            # End of current table - save it
                            if current_table_lines:
                                df = self._parse_markdown_table_lines(current_table_lines)
                                if df is not None and not df.empty:
                                    table_count += 1
                                    # Generate sheet name
                                    if current_table_title:
                                        # Clean and truncate title for sheet name
                                        sheet_name = current_table_title[:31]
                                        sheet_name = sheet_name.replace('/', '_').replace('\\', '_').replace('*', '').replace('?', '').replace('[', '').replace(']', '').replace(':', '')
                                    else:
                                        sheet_name = f"Table_{table_count}"
                                    tables.append((sheet_name, df))

                            # Reset for next table
                            in_table = False
                            current_table_lines = []
                            current_table_title = None

                            # Check if this line could be a title for the next table
                            if stripped:
                                lower_stripped = stripped.lower()
                                if (lower_stripped.startswith('table') or
                                        stripped.startswith('#') or
                                        ':' in stripped):
                                    pending_title = stripped.lstrip('#').strip()

                    # Don't forget the last table if content ends with a table
                    if in_table and current_table_lines:
                        df = self._parse_markdown_table_lines(current_table_lines)
                        if df is not None and not df.empty:
                            table_count += 1
                            if current_table_title:
                                sheet_name = current_table_title[:31]
                                sheet_name = sheet_name.replace('/', '_').replace('\\', '_').replace('*', '').replace('?', '').replace('[', '').replace(']', '').replace(':', '')
                            else:
                                sheet_name = f"Table_{table_count}"
                            tables.append((sheet_name, df))

                # Fallback: if no tables found, create single sheet with content
                if not tables:
                    df = pd.DataFrame({"Content": [content if content else ""]})
                    tables = [("Data", df)]

                # Write all tables to Excel, each in a separate sheet
                excel_io = BytesIO()
                with pd.ExcelWriter(excel_io, engine='openpyxl') as writer:
                    used_names = set()
                    for sheet_name, df in tables:
                        # Ensure unique sheet names
                        original_name = sheet_name
                        counter = 1
                        while sheet_name in used_names:
                            suffix = f"_{counter}"
                            sheet_name = original_name[:31-len(suffix)] + suffix
                            counter += 1
                        used_names.add(sheet_name)
                        df.to_excel(writer, sheet_name=sheet_name, index=False)

                excel_io.seek(0)
                binary_content = excel_io.read()

                logging.info(f"Generated Excel with {len(tables)} sheet(s): {[t[0] for t in tables]}")

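To make the xlsx branch concrete, a hedged sketch of its expected input/output contract; sheet names are derived from title lines preceding each table, with Excel-invalid characters stripped and names capped at 31 characters:

```python
# Illustrative input for the xlsx branch above (not from the source).
content = """## Table 1: Scores
| name | score |
|------|-------|
| alice | 90 |

Table 2: Teams
| team | size |
|------|------|
| red | 3 |
"""
# After conversion, the workbook would hold two sheets, roughly named
# "Table 1 Scores" and "Table 2 Teams" (the ':' is stripped as an
# Excel-invalid sheet-name character).
```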
            else:  # pdf, docx
                with tempfile.NamedTemporaryFile(suffix=f".{self._param.output_format}", delete=False) as tmp:
                    tmp_name = tmp.name
@@ -231,11 +417,24 @@ class Message(ComponentBase):

            settings.STORAGE_IMPL.put(self._canvas._tenant_id, doc_id, binary_content)
            self.set_output("attachment", {
                "doc_id": doc_id,
                "format": self._param.output_format,
                "file_name": f"{doc_id[:8]}.{self._param.output_format}"})

            logging.info(f"Converted content uploaded as {doc_id} (format={self._param.output_format})")

        except Exception as e:
            logging.error(f"Error converting content to {self._param.output_format}: {e}")

    async def _save_to_memory(self, content):
        if not hasattr(self._param, "memory_ids") or not self._param.memory_ids:
            return True, "No memory selected."

        message_dict = {
            "user_id": self._canvas._tenant_id,
            "agent_id": self._canvas._id,
            "session_id": self._canvas.task_id,
            "user_input": self._canvas.get_sys_query(),
            "agent_response": content
        }
        return await queue_save_to_memory_task(self._param.memory_ids, message_dict)

@@ -32,7 +32,7 @@ all: setup start
# 🌱 Initialize environment + install dependencies
setup: ensure_env ensure_uv
	@echo "📦 Installing dependencies with uv..."
	@$(UV) sync --python 3.11
	@$(UV) sync --python 3.12
	source $(ACTIVATE_SCRIPT) && \
	export PYTHONPATH=$(PYTHONPATH)
	@$(UV) pip install -r executor_manager/requirements.txt
@@ -24,7 +24,7 @@ A secure, pluggable code execution backend for RAGFlow and beyond.

- Linux distro compatible with gVisor
- [gVisor](https://gvisor.dev/docs/user_guide/install/)
- Docker >= `24.0.0`
- Docker >= `25.0` (API 1.44+) — executor manager now bundles Docker CLI `29.1.0` to match newer daemons.
- Docker Compose >= `v2.26.1` like [RAGFlow](https://github.com/infiniflow/ragflow)
- [uv](https://docs.astral.sh/uv/) as package and project manager

@@ -34,6 +34,10 @@ A secure, pluggable code execution backend for RAGFlow and beyond.

---

> ⚠️ **New Docker CLI requirement**
>
> If you see `client version 1.43 is too old. Minimum supported API version is 1.44`, pull the latest `infiniflow/sandbox-executor-manager:latest` (rebuilt with Docker CLI `29.1.0`) or rebuild it in `./sandbox/executor_manager`. Older images shipped Docker 24.x, which cannot talk to newer Docker daemons.

### 🐳 Build Docker Base Images

We use isolated base images for secure containerized execution:
@@ -117,7 +121,7 @@ make logs # With Make
### 🧰 Makefile Toolbox

| Command | Description |
| ----------------- | ------------------------------------------------ |
|-------------------|--------------------------------------------------|
| `make` | Setup, build, launch and test all at once |
| `make setup` | Initialize environment and install uv |
| `make ensure_env` | Auto-create `.env` if missing |
@@ -179,7 +183,7 @@ This security model strikes a balance between **robust isolation** and **develop
Currently, the following languages are officially supported:

| Language | Priority |
| -------- | -------- |
|----------|----------|
| Python | High |
| Node.js | Medium |

@@ -292,6 +296,22 @@ Follow this checklist to troubleshoot:
  127.0.0.1 es01 infinity mysql minio redis sandbox-executor-manager
  ```

- [ ] **Are you running the latest executor manager image?**

  **Common error:**

  `docker: Error response from daemon: client version 1.43 is too old. Minimum supported API version is 1.44`

  **Fix:**

  Pull the refreshed image that bundles Docker CLI `29.1.0`, or rebuild it in `./sandbox/executor_manager`:

  ```bash
  docker pull infiniflow/sandbox-executor-manager:latest
  # or
  docker build -t sandbox-executor-manager:latest ./sandbox/executor_manager
  ```

- [ ] **Have you enabled sandbox-related configurations in RAGFlow?**

  Double-check that all sandbox settings are correctly enabled in your RAGFlow configuration.
agent/sandbox/client.py (new file, 239 lines)
@@ -0,0 +1,239 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Sandbox client for agent components.

This module provides a unified interface for agent components to interact
with the configured sandbox provider.
"""

import json
import logging
from typing import Dict, Any, Optional

from api.db.services.system_settings_service import SystemSettingsService
from agent.sandbox.providers import ProviderManager
from agent.sandbox.providers.base import ExecutionResult

logger = logging.getLogger(__name__)


# Global provider manager instance
_provider_manager: Optional[ProviderManager] = None


def get_provider_manager() -> ProviderManager:
    """
    Get the global provider manager instance.

    Returns:
        ProviderManager instance with active provider loaded
    """
    global _provider_manager

    if _provider_manager is not None:
        return _provider_manager

    # Initialize provider manager with system settings
    _provider_manager = ProviderManager()
    _load_provider_from_settings()

    return _provider_manager


def _load_provider_from_settings() -> None:
    """
    Load sandbox provider from system settings and configure the provider manager.

    This function reads the system settings to determine which provider is active
    and initializes it with the appropriate configuration.
    """
    global _provider_manager

    if _provider_manager is None:
        return

    try:
        # Get active provider type
        provider_type_settings = SystemSettingsService.get_by_name("sandbox.provider_type")
        if not provider_type_settings:
            raise RuntimeError(
                "Sandbox provider type not configured. Please set 'sandbox.provider_type' in system settings."
            )
        provider_type = provider_type_settings[0].value

        # Get provider configuration
        provider_config_settings = SystemSettingsService.get_by_name(f"sandbox.{provider_type}")

        if not provider_config_settings:
            logger.warning(f"No configuration found for provider: {provider_type}")
            config = {}
        else:
            try:
                config = json.loads(provider_config_settings[0].value)
            except json.JSONDecodeError as e:
                logger.error(f"Failed to parse sandbox config for {provider_type}: {e}")
                config = {}

        # Import and instantiate the provider
        from agent.sandbox.providers import (
            SelfManagedProvider,
            AliyunCodeInterpreterProvider,
            E2BProvider,
        )

        provider_classes = {
            "self_managed": SelfManagedProvider,
            "aliyun_codeinterpreter": AliyunCodeInterpreterProvider,
            "e2b": E2BProvider,
        }

        if provider_type not in provider_classes:
            logger.error(f"Unknown provider type: {provider_type}")
            return

        provider_class = provider_classes[provider_type]
        provider = provider_class()

        # Initialize the provider
        if not provider.initialize(config):
            logger.error(f"Failed to initialize sandbox provider: {provider_type}. Config keys: {list(config.keys())}")
            return

        # Set the active provider
        _provider_manager.set_provider(provider_type, provider)
        logger.info(f"Sandbox provider '{provider_type}' initialized successfully")

    except Exception as e:
        logger.error(f"Failed to load sandbox provider from settings: {e}")
        import traceback
        traceback.print_exc()


def reload_provider() -> None:
    """
    Reload the sandbox provider from system settings.

    Use this function when sandbox settings have been updated.
    """
    global _provider_manager
    _provider_manager = None
    _load_provider_from_settings()


def execute_code(
    code: str,
    language: str = "python",
    timeout: int = 30,
    arguments: Optional[Dict[str, Any]] = None
) -> ExecutionResult:
    """
    Execute code in the configured sandbox.

    This is the main entry point for agent components to execute code.

    Args:
        code: Source code to execute
        language: Programming language (python, nodejs, javascript)
        timeout: Maximum execution time in seconds
        arguments: Optional arguments dict to pass to main() function

    Returns:
        ExecutionResult containing stdout, stderr, exit_code, and metadata

    Raises:
        RuntimeError: If no provider is configured or execution fails
    """
    provider_manager = get_provider_manager()

    if not provider_manager.is_configured():
        raise RuntimeError(
            "No sandbox provider configured. Please configure sandbox settings in the admin panel."
        )

    provider = provider_manager.get_provider()

    # Create a sandbox instance
    instance = provider.create_instance(template=language)

    try:
        # Execute the code
        result = provider.execute_code(
            instance_id=instance.instance_id,
            code=code,
            language=language,
            timeout=timeout,
            arguments=arguments
        )

        return result

    finally:
        # Clean up the instance
        try:
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            logger.warning(f"Failed to destroy sandbox instance {instance.instance_id}: {e}")

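A usage sketch for the client API above. Per the docstrings, providers expect the snippet to define a `main()` entry point, which the provider wraps and calls with `arguments`; the printed output below is illustrative:

```python
# Hypothetical call site in an agent component.
from agent.sandbox.client import execute_code

code = """
def main(a, b):
    return {"sum": a + b}
"""
result = execute_code(code, language="python", timeout=10,
                      arguments={"a": 1, "b": 2})
# ExecutionResult fields per the docstring; values depend on the provider.
print(result.exit_code, result.stdout)  # e.g. 0 {"sum": 3}
```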
def health_check() -> bool:
    """
    Check if the sandbox provider is healthy.

    Returns:
        True if provider is configured and healthy, False otherwise
    """
    try:
        provider_manager = get_provider_manager()

        if not provider_manager.is_configured():
            return False

        provider = provider_manager.get_provider()
        return provider.health_check()

    except Exception as e:
        logger.error(f"Sandbox health check failed: {e}")
        return False


def get_provider_info() -> Dict[str, Any]:
    """
    Get information about the current sandbox provider.

    Returns:
        Dictionary with provider information:
        - provider_type: Type of the active provider
        - configured: Whether provider is configured
        - healthy: Whether provider is healthy
    """
    try:
        provider_manager = get_provider_manager()

        return {
            "provider_type": provider_manager.get_provider_name(),
            "configured": provider_manager.is_configured(),
            "healthy": health_check(),
        }

    except Exception as e:
        logger.error(f"Failed to get provider info: {e}")
        return {
            "provider_type": None,
            "configured": False,
            "healthy": False,
        }
agent/sandbox/executor_manager/Dockerfile (new file, 37 lines)
@@ -0,0 +1,37 @@
FROM python:3.11-slim-bookworm

RUN grep -rl 'deb.debian.org' /etc/apt/ | xargs sed -i 's|http[s]*://deb.debian.org|https://mirrors.tuna.tsinghua.edu.cn|g' && \
    apt-get update && \
    apt-get install -y curl gcc && \
    rm -rf /var/lib/apt/lists/*

ARG TARGETARCH
ARG TARGETVARIANT

RUN set -eux; \
    case "${TARGETARCH}${TARGETVARIANT}" in \
        amd64) DOCKER_ARCH=x86_64 ;; \
        arm64) DOCKER_ARCH=aarch64 ;; \
        armv7) DOCKER_ARCH=armhf ;; \
        armv6) DOCKER_ARCH=armel ;; \
        arm64v8) DOCKER_ARCH=aarch64 ;; \
        arm64v7) DOCKER_ARCH=armhf ;; \
        arm*) DOCKER_ARCH=armhf ;; \
        ppc64le) DOCKER_ARCH=ppc64le ;; \
        s390x) DOCKER_ARCH=s390x ;; \
        *) echo "Unsupported architecture: ${TARGETARCH}${TARGETVARIANT}" && exit 1 ;; \
    esac; \
    echo "Downloading Docker for architecture: ${DOCKER_ARCH}"; \
    curl -fsSL "https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-29.1.0.tgz" | \
        tar xz -C /usr/local/bin --strip-components=1 docker/docker; \
    ln -sf /usr/local/bin/docker /usr/bin/docker

COPY --from=ghcr.io/astral-sh/uv:0.7.5 /uv /uvx /bin/
ENV UV_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple

WORKDIR /app
COPY . .

RUN uv pip install --system -r requirements.txt

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "9385"]
@@ -122,15 +122,15 @@ async def create_container(name: str, language: SupportLanguage) -> bool:
    logger.info(f"Sandbox config:\n\t {create_args}")

    try:
        returncode, _, stderr = await async_run_command(*create_args, timeout=10)
        if returncode != 0:
        return_code, _, stderr = await async_run_command(*create_args, timeout=10)
        if return_code != 0:
            logger.error(f"❌ Container creation failed {name}: {stderr}")
            return False

        if language == SupportLanguage.NODEJS:
            copy_cmd = ["docker", "exec", name, "bash", "-c", "cp -a /app/node_modules /workspace/"]
            returncode, _, stderr = await async_run_command(*copy_cmd, timeout=10)
            if returncode != 0:
            return_code, _, stderr = await async_run_command(*copy_cmd, timeout=10)
            if return_code != 0:
                logger.error(f"❌ Failed to prepare dependencies for {name}: {stderr}")
                return False

@@ -185,7 +185,7 @@ async def allocate_container_blocking(language: SupportLanguage, timeout=10) ->
async def container_is_running(name: str) -> bool:
    """Asynchronously check the container status"""
    try:
        returncode, stdout, _ = await async_run_command("docker", "inspect", "-f", "{{.State.Running}}", name, timeout=2)
        return returncode == 0 and stdout.strip() == "true"
        return_code, stdout, _ = await async_run_command("docker", "inspect", "-f", "{{.State.Running}}", name, timeout=2)
        return return_code == 0 and stdout.strip() == "true"
    except Exception:
        return False
agent/sandbox/providers/__init__.py (new file, 43 lines)
@@ -0,0 +1,43 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Sandbox providers package.

This package contains:
- base.py: Base interface for all sandbox providers
- manager.py: Provider manager for managing active provider
- self_managed.py: Self-managed provider implementation (wraps existing executor_manager)
- aliyun_codeinterpreter.py: Aliyun Code Interpreter provider implementation
  Official Documentation: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter
- e2b.py: E2B provider implementation
"""

from .base import SandboxProvider, SandboxInstance, ExecutionResult
from .manager import ProviderManager
from .self_managed import SelfManagedProvider
from .aliyun_codeinterpreter import AliyunCodeInterpreterProvider
from .e2b import E2BProvider

__all__ = [
    "SandboxProvider",
    "SandboxInstance",
    "ExecutionResult",
    "ProviderManager",
    "SelfManagedProvider",
    "AliyunCodeInterpreterProvider",
    "E2BProvider",
]
agent/sandbox/providers/aliyun_codeinterpreter.py (new file, 512 lines)
@@ -0,0 +1,512 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Aliyun Code Interpreter provider implementation.

This provider integrates with Aliyun Function Compute Code Interpreter service
for secure code execution in serverless microVMs using the official agentrun-sdk.

Official Documentation: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter
Official SDK: https://github.com/Serverless-Devs/agentrun-sdk-python

https://api.aliyun.com/api/AgentRun/2025-09-10/CreateTemplate?lang=PYTHON
https://api.aliyun.com/api/AgentRun/2025-09-10/CreateSandbox?lang=PYTHON
"""

import logging
import os
import time
from typing import Dict, Any, List, Optional
from datetime import datetime, timezone

from agentrun.sandbox import TemplateType, CodeLanguage, Template, TemplateInput, Sandbox
from agentrun.utils.config import Config
from agentrun.utils.exception import ServerError

from .base import SandboxProvider, SandboxInstance, ExecutionResult

logger = logging.getLogger(__name__)


class AliyunCodeInterpreterProvider(SandboxProvider):
    """
    Aliyun Code Interpreter provider implementation.

    This provider uses the official agentrun-sdk to interact with
    Aliyun Function Compute's Code Interpreter service.
    """

    def __init__(self):
        self.access_key_id: Optional[str] = None
        self.access_key_secret: Optional[str] = None
        self.account_id: Optional[str] = None
        self.region: str = "cn-hangzhou"
        self.template_name: str = ""
        self.timeout: int = 30
        self._initialized: bool = False
        self._config: Optional[Config] = None
    def initialize(self, config: Dict[str, Any]) -> bool:
        """
        Initialize the provider with Aliyun credentials.

        Args:
            config: Configuration dictionary with keys:
                - access_key_id: Aliyun AccessKey ID
                - access_key_secret: Aliyun AccessKey Secret
                - account_id: Aliyun primary account ID (主账号ID)
                - region: Region (default: "cn-hangzhou")
                - template_name: Optional sandbox template name
                - timeout: Request timeout in seconds (default: 30, max 30)

        Returns:
            True if initialization successful, False otherwise
        """
        # Get values from config or environment variables
        access_key_id = config.get("access_key_id") or os.getenv("AGENTRUN_ACCESS_KEY_ID")
        access_key_secret = config.get("access_key_secret") or os.getenv("AGENTRUN_ACCESS_KEY_SECRET")
        account_id = config.get("account_id") or os.getenv("AGENTRUN_ACCOUNT_ID")
        region = config.get("region") or os.getenv("AGENTRUN_REGION", "cn-hangzhou")

        self.access_key_id = access_key_id
        self.access_key_secret = access_key_secret
        self.account_id = account_id
        self.region = region
        self.template_name = config.get("template_name", "")
        self.timeout = min(config.get("timeout", 30), 30)  # Max 30 seconds

        logger.info(f"Aliyun Code Interpreter: Initializing with account_id={self.account_id}, region={self.region}")

        # Validate required fields
        if not self.access_key_id or not self.access_key_secret:
            logger.error("Aliyun Code Interpreter: Missing access_key_id or access_key_secret")
            return False

        if not self.account_id:
            logger.error("Aliyun Code Interpreter: Missing account_id (主账号ID)")
            return False

        # Create SDK configuration
        try:
            logger.info(f"Aliyun Code Interpreter: Creating Config object with account_id={self.account_id}")
            self._config = Config(
                access_key_id=self.access_key_id,
                access_key_secret=self.access_key_secret,
                account_id=self.account_id,
                region_id=self.region,
                timeout=self.timeout,
            )
            logger.info("Aliyun Code Interpreter: Config object created successfully")

            # Verify connection with health check
            if not self.health_check():
                logger.error(f"Aliyun Code Interpreter: Health check failed for region {self.region}")
                return False

            self._initialized = True
            logger.info(f"Aliyun Code Interpreter: Initialized successfully for region {self.region}")
            return True

        except Exception as e:
            logger.error(f"Aliyun Code Interpreter: Initialization failed - {str(e)}")
            return False
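A sketch of the configuration dict that `initialize()` above expects; all values here are placeholders, and the `AGENTRUN_*` environment variables serve as fallbacks when a key is absent:

```python
# Placeholder credentials; never commit real keys.
config = {
    "access_key_id": "LTAI5t...",    # or env AGENTRUN_ACCESS_KEY_ID
    "access_key_secret": "...",      # or env AGENTRUN_ACCESS_KEY_SECRET
    "account_id": "1234567890",      # Aliyun primary account ID
    "region": "cn-hangzhou",
    "template_name": "",             # optional pre-built sandbox template
    "timeout": 30,                   # capped at 30 seconds
}
provider = AliyunCodeInterpreterProvider()
ok = provider.initialize(config)  # False if credentials or health check fail
```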
    def create_instance(self, template: str = "python") -> SandboxInstance:
        """
        Create a new sandbox instance in Aliyun Code Interpreter.

        Args:
            template: Programming language (python, javascript)

        Returns:
            SandboxInstance object

        Raises:
            RuntimeError: If instance creation fails
        """
        if not self._initialized or not self._config:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # Normalize language
        language = self._normalize_language(template)

        try:
            # Get or create template
            from agentrun.sandbox import Sandbox

            if self.template_name:
                # Use existing template
                template_name = self.template_name
            else:
                # Try to get default template, or create one if it doesn't exist
                default_template_name = f"ragflow-{language}-default"
                try:
                    # Check if template exists
                    Template.get_by_name(default_template_name, config=self._config)
                    template_name = default_template_name
                except Exception:
                    # Create default template if it doesn't exist
                    template_input = TemplateInput(
                        template_name=default_template_name,
                        template_type=TemplateType.CODE_INTERPRETER,
                    )
                    Template.create(template_input, config=self._config)
                    template_name = default_template_name

            # Create sandbox directly
            sandbox = Sandbox.create(
                template_type=TemplateType.CODE_INTERPRETER,
                template_name=template_name,
                sandbox_idle_timeout_seconds=self.timeout,
                config=self._config,
            )

            instance_id = sandbox.sandbox_id

            return SandboxInstance(
                instance_id=instance_id,
                provider="aliyun_codeinterpreter",
                status="READY",
                metadata={
                    "language": language,
                    "region": self.region,
                    "account_id": self.account_id,
                    "template_name": template_name,
                    "created_at": datetime.now(timezone.utc).isoformat(),
                },
            )

        except ServerError as e:
            raise RuntimeError(f"Failed to create sandbox instance: {str(e)}")
        except Exception as e:
            raise RuntimeError(f"Unexpected error creating instance: {str(e)}")
    def execute_code(self, instance_id: str, code: str, language: str, timeout: int = 10, arguments: Optional[Dict[str, Any]] = None) -> ExecutionResult:
        """
        Execute code in the Aliyun Code Interpreter instance.

        Args:
            instance_id: ID of the sandbox instance
            code: Source code to execute
            language: Programming language (python, javascript)
            timeout: Maximum execution time in seconds (max 30)
            arguments: Optional arguments dict to pass to main() function

        Returns:
            ExecutionResult containing stdout, stderr, exit_code, and metadata

        Raises:
            RuntimeError: If execution fails
            TimeoutError: If execution exceeds timeout
        """
        if not self._initialized or not self._config:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # Normalize language
        normalized_lang = self._normalize_language(language)

        # Enforce 30-second hard limit
        timeout = min(timeout or self.timeout, 30)

        try:
            # Connect to existing sandbox instance
            sandbox = Sandbox.connect(sandbox_id=instance_id, config=self._config)

            # Convert language string to CodeLanguage enum
            code_language = CodeLanguage.PYTHON if normalized_lang == "python" else CodeLanguage.JAVASCRIPT

            # Wrap code to call main() function
            # Matches self_managed provider behavior: call main(**arguments)
            if normalized_lang == "python":
                # Build arguments string for main() call
                if arguments:
                    import json as json_module
                    args_json = json_module.dumps(arguments)
                    wrapped_code = f'''{code}

if __name__ == "__main__":
    import json
    result = main(**{args_json})
    print(json.dumps(result) if isinstance(result, dict) else result)
'''
                else:
                    wrapped_code = f'''{code}

if __name__ == "__main__":
    import json
    result = main()
    print(json.dumps(result) if isinstance(result, dict) else result)
'''
            else:  # javascript
                if arguments:
                    import json as json_module
                    args_json = json_module.dumps(arguments)
                    wrapped_code = f'''{code}

// Call main and output result
const result = main({args_json});
console.log(typeof result === 'object' ? JSON.stringify(result) : String(result));
'''
                else:
                    wrapped_code = f'''{code}

// Call main and output result
const result = main();
console.log(typeof result === 'object' ? JSON.stringify(result) : String(result));
'''
            logger.debug(f"Aliyun Code Interpreter: Wrapped code (first 200 chars): {wrapped_code[:200]}")

            start_time = time.time()

            # Execute code using SDK's simplified execute endpoint
            logger.info(f"Aliyun Code Interpreter: Executing code (language={normalized_lang}, timeout={timeout})")
            logger.debug(f"Aliyun Code Interpreter: Original code (first 200 chars): {code[:200]}")
            result = sandbox.context.execute(
                code=wrapped_code,
                language=code_language,
                timeout=timeout,
            )

            execution_time = time.time() - start_time
            logger.info(f"Aliyun Code Interpreter: Execution completed in {execution_time:.2f}s")
            logger.debug(f"Aliyun Code Interpreter: Raw SDK result: {result}")

            # Parse execution result
            results = result.get("results", []) if isinstance(result, dict) else []
            logger.info(f"Aliyun Code Interpreter: Parsed {len(results)} result items")

            # Extract stdout and stderr from results
            stdout_parts = []
            stderr_parts = []
            exit_code = 0
            execution_status = "ok"

            for item in results:
                result_type = item.get("type", "")
                text = item.get("text", "")

                if result_type == "stdout":
                    stdout_parts.append(text)
                elif result_type == "stderr":
                    stderr_parts.append(text)
                    exit_code = 1  # Error occurred
                elif result_type == "endOfExecution":
                    execution_status = item.get("status", "ok")
                    if execution_status != "ok":
                        exit_code = 1
                elif result_type == "error":
                    stderr_parts.append(text)
                    exit_code = 1

            stdout = "\n".join(stdout_parts)
            stderr = "\n".join(stderr_parts)

            logger.info(f"Aliyun Code Interpreter: stdout length={len(stdout)}, stderr length={len(stderr)}, exit_code={exit_code}")
            if stdout:
                logger.debug(f"Aliyun Code Interpreter: stdout (first 200 chars): {stdout[:200]}")
            if stderr:
                logger.debug(f"Aliyun Code Interpreter: stderr (first 200 chars): {stderr[:200]}")

            return ExecutionResult(
                stdout=stdout,
                stderr=stderr,
                exit_code=exit_code,
                execution_time=execution_time,
                metadata={
                    "instance_id": instance_id,
                    "language": normalized_lang,
                    "context_id": result.get("contextId") if isinstance(result, dict) else None,
                    "timeout": timeout,
                },
            )

        except ServerError as e:
            if "timeout" in str(e).lower():
                raise TimeoutError(f"Execution timed out after {timeout} seconds")
            raise RuntimeError(f"Failed to execute code: {str(e)}")
        except Exception as e:
            raise RuntimeError(f"Unexpected error during execution: {str(e)}")
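To make the wrapping step concrete, a sketch reproducing what the Python branch above generates for `arguments={"n": 2}`; running it locally shows what the sandbox would print:

```python
# Illustrative reproduction of the wrapper; `code` is a toy snippet, not from
# the source, and exec() here only simulates the sandbox's script execution.
code = 'def main(n):\n    return {"square": n * n}'
wrapped = f'''{code}

if __name__ == "__main__":
    import json
    result = main(**{{"n": 2}})
    print(json.dumps(result) if isinstance(result, dict) else result)
'''
exec(compile(wrapped, "<sandbox>", "exec"), {"__name__": "__main__"})
# prints: {"square": 4}
```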
    def destroy_instance(self, instance_id: str) -> bool:
        """
        Destroy an Aliyun Code Interpreter instance.

        Args:
            instance_id: ID of the instance to destroy

        Returns:
            True if destruction successful, False otherwise
        """
        if not self._initialized or not self._config:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        try:
            # Delete sandbox by ID directly
            Sandbox.delete_by_id(sandbox_id=instance_id)

            logger.info(f"Successfully destroyed sandbox instance {instance_id}")
            return True

        except ServerError as e:
            logger.error(f"Failed to destroy instance {instance_id}: {str(e)}")
            return False
        except Exception as e:
            logger.error(f"Unexpected error destroying instance {instance_id}: {str(e)}")
            return False

    def health_check(self) -> bool:
        """
        Check if the Aliyun Code Interpreter service is accessible.

        Returns:
            True if provider is healthy, False otherwise
        """
        if not self._initialized and not (self.access_key_id and self.account_id):
            return False

        try:
            # Try to list templates to verify connection
            from agentrun.sandbox import Template

            templates = Template.list(config=self._config)
            return templates is not None

        except Exception as e:
            logger.warning(f"Aliyun Code Interpreter health check failed: {str(e)}")
            # If we get any response (even an error), the service is reachable
            return "connection" not in str(e).lower()

    def get_supported_languages(self) -> List[str]:
        """
        Get list of supported programming languages.

        Returns:
            List of language identifiers
        """
        return ["python", "javascript"]
@staticmethod
|
||||
def get_config_schema() -> Dict[str, Dict]:
|
||||
"""
|
||||
Return configuration schema for Aliyun Code Interpreter provider.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping field names to their schema definitions
|
||||
"""
|
||||
return {
|
||||
"access_key_id": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"label": "Access Key ID",
|
||||
"placeholder": "LTAI5t...",
|
||||
"description": "Aliyun AccessKey ID for authentication",
|
||||
"secret": False,
|
||||
},
|
||||
"access_key_secret": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"label": "Access Key Secret",
|
||||
"placeholder": "••••••••••••••••",
|
||||
"description": "Aliyun AccessKey Secret for authentication",
|
||||
"secret": True,
|
||||
},
|
||||
"account_id": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"label": "Account ID",
|
||||
"placeholder": "1234567890...",
|
||||
"description": "Aliyun primary account ID (主账号ID), required for API calls",
|
||||
},
|
||||
"region": {
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"label": "Region",
|
||||
"default": "cn-hangzhou",
|
||||
"description": "Aliyun region for Code Interpreter service",
|
||||
"options": ["cn-hangzhou", "cn-beijing", "cn-shanghai", "cn-shenzhen", "cn-guangzhou"],
|
||||
},
|
||||
"template_name": {
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"label": "Template Name",
|
||||
"placeholder": "my-interpreter",
|
||||
"description": "Optional sandbox template name for pre-configured environments",
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"required": False,
|
||||
"label": "Execution Timeout (seconds)",
|
||||
"default": 30,
|
||||
"min": 1,
|
||||
"max": 30,
|
||||
"description": "Code execution timeout (max 30 seconds - hard limit)",
|
||||
},
|
||||
}
|
||||
|
||||
def validate_config(self, config: Dict[str, Any]) -> tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Validate Aliyun-specific configuration.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary to validate
|
||||
|
||||
Returns:
|
||||
Tuple of (is_valid, error_message)
|
||||
"""
|
||||
# Validate access key format
|
||||
access_key_id = config.get("access_key_id", "")
|
||||
if access_key_id and not access_key_id.startswith("LTAI"):
|
||||
return False, "Invalid AccessKey ID format (should start with 'LTAI')"
|
||||
|
||||
# Validate account ID
|
||||
account_id = config.get("account_id", "")
|
||||
if not account_id:
|
||||
return False, "Account ID is required"
|
||||
|
||||
# Validate region
|
||||
valid_regions = ["cn-hangzhou", "cn-beijing", "cn-shanghai", "cn-shenzhen", "cn-guangzhou"]
|
||||
region = config.get("region", "cn-hangzhou")
|
||||
if region and region not in valid_regions:
|
||||
return False, f"Invalid region. Must be one of: {', '.join(valid_regions)}"
|
||||
|
||||
# Validate timeout range (max 30 seconds)
|
||||
timeout = config.get("timeout", 30)
|
||||
if isinstance(timeout, int) and (timeout < 1 or timeout > 30):
|
||||
return False, "Timeout must be between 1 and 30 seconds"
|
||||
|
||||
return True, None
|
||||
|
||||
def _normalize_language(self, language: str) -> str:
|
||||
"""
|
||||
Normalize language identifier to Aliyun format.
|
||||
|
||||
Args:
|
||||
language: Language identifier (python, python3, javascript, nodejs)
|
||||
|
||||
Returns:
|
||||
Normalized language identifier
|
||||
"""
|
||||
if not language:
|
||||
return "python"
|
||||
|
||||
lang_lower = language.lower()
|
||||
if lang_lower in ("python", "python3"):
|
||||
return "python"
|
||||
elif lang_lower in ("javascript", "nodejs"):
|
||||
return "javascript"
|
||||
else:
|
||||
return language
|
||||
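For clarity, the result-aggregation contract implemented above boils down to the following self-contained sketch (the `results` payload here is fabricated for illustration; real items come from the agentrun SDK):

```python
# Illustrative only: a fabricated result stream in the shape the parser above
# expects ("type"/"text"/"status" keys).
results = [
    {"type": "stdout", "text": "step 1 ok"},
    {"type": "stderr", "text": "warning: deprecated API"},
    {"type": "endOfExecution", "status": "ok"},
]

stdout_parts, stderr_parts, exit_code = [], [], 0
for item in results:
    kind = item.get("type", "")
    if kind == "stdout":
        stdout_parts.append(item.get("text", ""))
    elif kind in ("stderr", "error"):
        stderr_parts.append(item.get("text", ""))
        exit_code = 1  # any stderr/error output marks the run as failed
    elif kind == "endOfExecution" and item.get("status", "ok") != "ok":
        exit_code = 1

print("\n".join(stdout_parts))  # "step 1 ok"
print(exit_code)                # 1
```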
agent/sandbox/providers/base.py (new file, 212 lines)
@@ -0,0 +1,212 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Base interface for sandbox providers.

Each sandbox provider (self-managed, SaaS) implements this interface
to provide code execution capabilities.
"""

from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Dict, Any, Optional, List


@dataclass
class SandboxInstance:
    """Represents a sandbox execution instance"""
    instance_id: str
    provider: str
    status: str  # running, stopped, error
    metadata: Dict[str, Any]

    def __post_init__(self):
        if self.metadata is None:
            self.metadata = {}


@dataclass
class ExecutionResult:
    """Result of code execution in a sandbox"""
    stdout: str
    stderr: str
    exit_code: int
    execution_time: float  # in seconds
    metadata: Dict[str, Any]

    def __post_init__(self):
        if self.metadata is None:
            self.metadata = {}


class SandboxProvider(ABC):
    """
    Base interface for all sandbox providers.

    Each provider implementation (self-managed, Aliyun OpenSandbox, E2B, etc.)
    must implement these methods to provide code execution capabilities.
    """

    @abstractmethod
    def initialize(self, config: Dict[str, Any]) -> bool:
        """
        Initialize the provider with configuration.

        Args:
            config: Provider-specific configuration dictionary

        Returns:
            True if initialization successful, False otherwise
        """
        pass

    @abstractmethod
    def create_instance(self, template: str = "python") -> SandboxInstance:
        """
        Create a new sandbox instance.

        Args:
            template: Programming language/template for the instance
                      (e.g., "python", "nodejs", "bash")

        Returns:
            SandboxInstance object representing the created instance

        Raises:
            RuntimeError: If instance creation fails
        """
        pass

    @abstractmethod
    def execute_code(
        self,
        instance_id: str,
        code: str,
        language: str,
        timeout: int = 10,
        arguments: Optional[Dict[str, Any]] = None
    ) -> ExecutionResult:
        """
        Execute code in a sandbox instance.

        Args:
            instance_id: ID of the sandbox instance
            code: Source code to execute
            language: Programming language (python, javascript, etc.)
            timeout: Maximum execution time in seconds
            arguments: Optional arguments dict to pass to main() function

        Returns:
            ExecutionResult containing stdout, stderr, exit_code, and metadata

        Raises:
            RuntimeError: If execution fails
            TimeoutError: If execution exceeds timeout
        """
        pass

    @abstractmethod
    def destroy_instance(self, instance_id: str) -> bool:
        """
        Destroy a sandbox instance.

        Args:
            instance_id: ID of the instance to destroy

        Returns:
            True if destruction successful, False otherwise

        Raises:
            RuntimeError: If destruction fails
        """
        pass

    @abstractmethod
    def health_check(self) -> bool:
        """
        Check if the provider is healthy and accessible.

        Returns:
            True if provider is healthy, False otherwise
        """
        pass

    @abstractmethod
    def get_supported_languages(self) -> List[str]:
        """
        Get list of supported programming languages.

        Returns:
            List of language identifiers (e.g., ["python", "javascript", "go"])
        """
        pass

    @staticmethod
    def get_config_schema() -> Dict[str, Dict]:
        """
        Return configuration schema for this provider.

        The schema defines what configuration fields are required/optional,
        their types, validation rules, and UI labels.

        Returns:
            Dictionary mapping field names to their schema definitions.

        Example:
            {
                "endpoint": {
                    "type": "string",
                    "required": True,
                    "label": "API Endpoint",
                    "placeholder": "http://localhost:9385"
                },
                "timeout": {
                    "type": "integer",
                    "default": 30,
                    "label": "Timeout (seconds)",
                    "min": 5,
                    "max": 300
                }
            }
        """
        return {}

    def validate_config(self, config: Dict[str, Any]) -> tuple[bool, Optional[str]]:
        """
        Validate provider-specific configuration.

        This method allows providers to implement custom validation logic beyond
        the basic schema validation. Override this method to add provider-specific
        checks like URL format validation, API key format validation, etc.

        Args:
            config: Configuration dictionary to validate

        Returns:
            Tuple of (is_valid, error_message):
                - is_valid: True if configuration is valid, False otherwise
                - error_message: Error message if invalid, None if valid

        Example:
            >>> def validate_config(self, config):
            >>>     endpoint = config.get("endpoint", "")
            >>>     if not endpoint.startswith(("http://", "https://")):
            >>>         return False, "Endpoint must start with http:// or https://"
            >>>     return True, None
        """
        # Default implementation: no custom validation
        return True, None
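To illustrate the contract, a minimal in-memory provider could look like the following sketch (hypothetical `EchoProvider`, not part of this changeset; it "executes" code by echoing it back, and the import path assumes the layout introduced here):

```python
import time
import uuid
from typing import Dict, Any, List

from agent.sandbox.providers.base import SandboxProvider, SandboxInstance, ExecutionResult


class EchoProvider(SandboxProvider):
    """Hypothetical provider used only to illustrate the interface."""

    def initialize(self, config: Dict[str, Any]) -> bool:
        self._initialized = True
        return True

    def create_instance(self, template: str = "python") -> SandboxInstance:
        return SandboxInstance(instance_id=str(uuid.uuid4()), provider="echo",
                               status="running", metadata={"language": template})

    def execute_code(self, instance_id: str, code: str, language: str,
                     timeout: int = 10, arguments=None) -> ExecutionResult:
        start = time.time()
        # No real sandbox: simply echo the submitted code back as stdout.
        return ExecutionResult(stdout=code, stderr="", exit_code=0,
                               execution_time=time.time() - start, metadata={})

    def destroy_instance(self, instance_id: str) -> bool:
        return True

    def health_check(self) -> bool:
        return True

    def get_supported_languages(self) -> List[str]:
        return ["python"]
```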
agent/sandbox/providers/e2b.py (new file, 233 lines)
@@ -0,0 +1,233 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
E2B provider implementation.

This provider integrates with E2B Cloud for cloud-based code execution
using Firecracker microVMs.
"""

import uuid
from typing import Dict, Any, List, Optional

from .base import SandboxProvider, SandboxInstance, ExecutionResult


class E2BProvider(SandboxProvider):
    """
    E2B provider implementation.

    This provider uses the E2B Cloud service for secure code execution
    in Firecracker microVMs.
    """

    def __init__(self):
        self.api_key: str = ""
        self.region: str = "us"
        self.timeout: int = 30
        self._initialized: bool = False

    def initialize(self, config: Dict[str, Any]) -> bool:
        """
        Initialize the provider with E2B credentials.

        Args:
            config: Configuration dictionary with keys:
                - api_key: E2B API key
                - region: Region (us, eu) (default: "us")
                - timeout: Request timeout in seconds (default: 30)

        Returns:
            True if initialization successful, False otherwise
        """
        self.api_key = config.get("api_key", "")
        self.region = config.get("region", "us")
        self.timeout = config.get("timeout", 30)

        # Validate required fields
        if not self.api_key:
            return False

        # TODO: Implement actual E2B API client initialization
        # For now, we'll mark as initialized but actual API calls will fail
        self._initialized = True
        return True

    def create_instance(self, template: str = "python") -> SandboxInstance:
        """
        Create a new sandbox instance in E2B.

        Args:
            template: Programming language template (python, nodejs, go, bash)

        Returns:
            SandboxInstance object

        Raises:
            RuntimeError: If instance creation fails
        """
        if not self._initialized:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # Normalize language
        language = self._normalize_language(template)

        # TODO: Implement actual E2B API call
        # POST /sandbox with template
        instance_id = str(uuid.uuid4())

        return SandboxInstance(
            instance_id=instance_id,
            provider="e2b",
            status="running",
            metadata={
                "language": language,
                "region": self.region,
            }
        )

    def execute_code(
        self,
        instance_id: str,
        code: str,
        language: str,
        timeout: int = 10,
        arguments: Optional[Dict[str, Any]] = None
    ) -> ExecutionResult:
        """
        Execute code in the E2B instance.

        Args:
            instance_id: ID of the sandbox instance
            code: Source code to execute
            language: Programming language (python, nodejs, go, bash)
            timeout: Maximum execution time in seconds
            arguments: Optional arguments dict to pass to main() function

        Returns:
            ExecutionResult containing stdout, stderr, exit_code, and metadata

        Raises:
            RuntimeError: If execution fails
            TimeoutError: If execution exceeds timeout
        """
        if not self._initialized:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # TODO: Implement actual E2B API call
        # POST /sandbox/{sandboxID}/execute

        raise RuntimeError(
            "E2B provider is not yet fully implemented. "
            "Please use the self-managed provider or implement the E2B API integration. "
            "See https://github.com/e2b-dev/e2b for API documentation."
        )

    def destroy_instance(self, instance_id: str) -> bool:
        """
        Destroy an E2B instance.

        Args:
            instance_id: ID of the instance to destroy

        Returns:
            True if destruction successful, False otherwise
        """
        if not self._initialized:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # TODO: Implement actual E2B API call
        # DELETE /sandbox/{sandboxID}
        return True

    def health_check(self) -> bool:
        """
        Check if the E2B service is accessible.

        Returns:
            True if provider is healthy, False otherwise
        """
        if not self._initialized:
            return False

        # TODO: Implement actual E2B health check API call
        # GET /healthz or similar
        # For now, return True if initialized with API key
        return bool(self.api_key)

    def get_supported_languages(self) -> List[str]:
        """
        Get list of supported programming languages.

        Returns:
            List of language identifiers
        """
        return ["python", "nodejs", "javascript", "go", "bash"]

    @staticmethod
    def get_config_schema() -> Dict[str, Dict]:
        """
        Return configuration schema for E2B provider.

        Returns:
            Dictionary mapping field names to their schema definitions
        """
        return {
            "api_key": {
                "type": "string",
                "required": True,
                "label": "API Key",
                "placeholder": "e2b_sk_...",
                "description": "E2B API key for authentication",
                "secret": True,
            },
            "region": {
                "type": "string",
                "required": False,
                "label": "Region",
                "default": "us",
                "description": "E2B service region (us or eu)",
            },
            "timeout": {
                "type": "integer",
                "required": False,
                "label": "Request Timeout (seconds)",
                "default": 30,
                "min": 5,
                "max": 300,
                "description": "API request timeout for code execution",
            }
        }

    def _normalize_language(self, language: str) -> str:
        """
        Normalize language identifier to E2B template format.

        Args:
            language: Language identifier

        Returns:
            Normalized language identifier
        """
        if not language:
            return "python"

        lang_lower = language.lower()
        if lang_lower in ("python", "python3"):
            return "python"
        elif lang_lower in ("javascript", "nodejs"):
            return "nodejs"
        else:
            return language
agent/sandbox/providers/manager.py (new file, 78 lines)
@@ -0,0 +1,78 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Provider manager for sandbox providers.

Since sandbox configuration is global (system-level), we only use one
active provider at a time. This manager is a thin wrapper that holds a reference
to the currently active provider.
"""

from typing import Optional
from .base import SandboxProvider


class ProviderManager:
    """
    Manages the currently active sandbox provider.

    With global configuration, there's only one active provider at a time.
    This manager simply holds a reference to that provider.
    """

    def __init__(self):
        """Initialize an empty provider manager."""
        self.current_provider: Optional[SandboxProvider] = None
        self.current_provider_name: Optional[str] = None

    def set_provider(self, name: str, provider: SandboxProvider):
        """
        Set the active provider.

        Args:
            name: Provider identifier (e.g., "self_managed", "e2b")
            provider: Provider instance
        """
        self.current_provider = provider
        self.current_provider_name = name

    def get_provider(self) -> Optional[SandboxProvider]:
        """
        Get the active provider.

        Returns:
            Currently active SandboxProvider instance, or None if not set
        """
        return self.current_provider

    def get_provider_name(self) -> Optional[str]:
        """
        Get the active provider name.

        Returns:
            Provider name (e.g., "self_managed"), or None if not set
        """
        return self.current_provider_name

    def is_configured(self) -> bool:
        """
        Check if a provider is configured.

        Returns:
            True if a provider is set, False otherwise
        """
        return self.current_provider is not None
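A hedged sketch of the intended wiring (the real call sites live in the admin service, which is outside this excerpt; import paths assume the layout introduced in this changeset, and `initialize` succeeds only if a live executor_manager is reachable):

```python
from agent.sandbox.providers.manager import ProviderManager
from agent.sandbox.providers.self_managed import SelfManagedProvider

manager = ProviderManager()

# Construct and initialize the one globally active provider.
provider = SelfManagedProvider()
if provider.initialize({"endpoint": "http://localhost:9385"}):
    manager.set_provider("self_managed", provider)

# Later, call sites only consult the manager.
if manager.is_configured():
    active = manager.get_provider()
    print(manager.get_provider_name(), active.get_supported_languages())
```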
agent/sandbox/providers/self_managed.py (new file, 359 lines)
@@ -0,0 +1,359 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Self-managed sandbox provider implementation.

This provider wraps the existing executor_manager HTTP API which manages
a pool of Docker containers with gVisor for secure code execution.
"""

import base64
import time
import uuid
from typing import Dict, Any, List, Optional

import requests

from .base import SandboxProvider, SandboxInstance, ExecutionResult


class SelfManagedProvider(SandboxProvider):
    """
    Self-managed sandbox provider using Docker with gVisor.

    This provider communicates with the executor_manager HTTP API
    which manages a pool of containers for code execution.
    """

    def __init__(self):
        self.endpoint: str = "http://localhost:9385"
        self.timeout: int = 30
        self.max_retries: int = 3
        self.pool_size: int = 10
        self._initialized: bool = False

    def initialize(self, config: Dict[str, Any]) -> bool:
        """
        Initialize the provider with configuration.

        Args:
            config: Configuration dictionary with keys:
                - endpoint: HTTP endpoint (default: "http://localhost:9385")
                - timeout: Request timeout in seconds (default: 30)
                - max_retries: Maximum retry attempts (default: 3)
                - pool_size: Container pool size for info (default: 10)

        Returns:
            True if initialization successful, False otherwise
        """
        self.endpoint = config.get("endpoint", "http://localhost:9385")
        self.timeout = config.get("timeout", 30)
        self.max_retries = config.get("max_retries", 3)
        self.pool_size = config.get("pool_size", 10)

        # Validate endpoint is accessible
        if not self.health_check():
            # Try to fall back to SANDBOX_HOST from settings if we are using localhost
            if "localhost" in self.endpoint or "127.0.0.1" in self.endpoint:
                try:
                    from api import settings
                    if settings.SANDBOX_HOST and settings.SANDBOX_HOST not in self.endpoint:
                        original_endpoint = self.endpoint
                        self.endpoint = f"http://{settings.SANDBOX_HOST}:9385"
                        if self.health_check():
                            import logging
                            logging.warning(f"Sandbox self_managed: Connected using settings.SANDBOX_HOST fallback: {self.endpoint} (original: {original_endpoint})")
                            self._initialized = True
                            return True
                        else:
                            self.endpoint = original_endpoint  # Restore if fallback also fails
                except ImportError:
                    pass

            return False

        self._initialized = True
        return True

    def create_instance(self, template: str = "python") -> SandboxInstance:
        """
        Create a new sandbox instance.

        Note: For the self-managed provider, instances are managed internally
        by the executor_manager's container pool. This method returns
        a logical instance handle.

        Args:
            template: Programming language (python, nodejs)

        Returns:
            SandboxInstance object

        Raises:
            RuntimeError: If instance creation fails
        """
        if not self._initialized:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # Normalize language
        language = self._normalize_language(template)

        # The executor_manager manages instances internally via container pool
        # We create a logical instance ID for tracking
        instance_id = str(uuid.uuid4())

        return SandboxInstance(
            instance_id=instance_id,
            provider="self_managed",
            status="running",
            metadata={
                "language": language,
                "endpoint": self.endpoint,
                "pool_size": self.pool_size,
            }
        )

    def execute_code(
        self,
        instance_id: str,
        code: str,
        language: str,
        timeout: int = 10,
        arguments: Optional[Dict[str, Any]] = None
    ) -> ExecutionResult:
        """
        Execute code in the sandbox.

        Args:
            instance_id: ID of the sandbox instance (not used for self-managed)
            code: Source code to execute
            language: Programming language (python, nodejs, javascript)
            timeout: Maximum execution time in seconds
            arguments: Optional arguments dict to pass to main() function

        Returns:
            ExecutionResult containing stdout, stderr, exit_code, and metadata

        Raises:
            RuntimeError: If execution fails
            TimeoutError: If execution exceeds timeout
        """
        if not self._initialized:
            raise RuntimeError("Provider not initialized. Call initialize() first.")

        # Normalize language
        normalized_lang = self._normalize_language(language)

        # Prepare request
        code_b64 = base64.b64encode(code.encode("utf-8")).decode("utf-8")
        payload = {
            "code_b64": code_b64,
            "language": normalized_lang,
            "arguments": arguments or {}
        }

        url = f"{self.endpoint}/run"
        exec_timeout = timeout or self.timeout

        start_time = time.time()

        try:
            response = requests.post(
                url,
                json=payload,
                timeout=exec_timeout,
                headers={"Content-Type": "application/json"}
            )

            execution_time = time.time() - start_time

            if response.status_code != 200:
                raise RuntimeError(
                    f"HTTP {response.status_code}: {response.text}"
                )

            result = response.json()

            return ExecutionResult(
                stdout=result.get("stdout", ""),
                stderr=result.get("stderr", ""),
                exit_code=result.get("exit_code", 0),
                execution_time=execution_time,
                metadata={
                    "status": result.get("status"),
                    "time_used_ms": result.get("time_used_ms"),
                    "memory_used_kb": result.get("memory_used_kb"),
                    "detail": result.get("detail"),
                    "instance_id": instance_id,
                }
            )

        except requests.Timeout:
            raise TimeoutError(
                f"Execution timed out after {exec_timeout} seconds"
            )

        except requests.RequestException as e:
            raise RuntimeError(f"HTTP request failed: {str(e)}")

    def destroy_instance(self, instance_id: str) -> bool:
        """
        Destroy a sandbox instance.

        Note: For the self-managed provider, instances are returned to the
        internal pool automatically by executor_manager after execution.
        This is a no-op for tracking purposes.

        Args:
            instance_id: ID of the instance to destroy

        Returns:
            True (always succeeds for self-managed)
        """
        # The executor_manager manages container lifecycle internally
        # Container is returned to pool after execution
        return True

    def health_check(self) -> bool:
        """
        Check if the provider is healthy and accessible.

        Returns:
            True if provider is healthy, False otherwise
        """
        try:
            url = f"{self.endpoint}/healthz"
            response = requests.get(url, timeout=5)
            return response.status_code == 200
        except Exception:
            return False

    def get_supported_languages(self) -> List[str]:
        """
        Get list of supported programming languages.

        Returns:
            List of language identifiers
        """
        return ["python", "nodejs", "javascript"]

    @staticmethod
    def get_config_schema() -> Dict[str, Dict]:
        """
        Return configuration schema for self-managed provider.

        Returns:
            Dictionary mapping field names to their schema definitions
        """
        return {
            "endpoint": {
                "type": "string",
                "required": True,
                "label": "Executor Manager Endpoint",
                "placeholder": "http://localhost:9385",
                "default": "http://localhost:9385",
                "description": "HTTP endpoint of the executor_manager service"
            },
            "timeout": {
                "type": "integer",
                "required": False,
                "label": "Request Timeout (seconds)",
                "default": 30,
                "min": 5,
                "max": 300,
                "description": "HTTP request timeout for code execution"
            },
            "max_retries": {
                "type": "integer",
                "required": False,
                "label": "Max Retries",
                "default": 3,
                "min": 0,
                "max": 10,
                "description": "Maximum number of retry attempts for failed requests"
            },
            "pool_size": {
                "type": "integer",
                "required": False,
                "label": "Container Pool Size",
                "default": 10,
                "min": 1,
                "max": 100,
                "description": "Size of the container pool (configured in executor_manager)"
            }
        }

    def _normalize_language(self, language: str) -> str:
        """
        Normalize language identifier to executor_manager format.

        Args:
            language: Language identifier (python, python3, nodejs, javascript)

        Returns:
            Normalized language identifier
        """
        if not language:
            return "python"

        lang_lower = language.lower()
        if lang_lower in ("python", "python3"):
            return "python"
        elif lang_lower in ("javascript", "nodejs"):
            return "nodejs"
        else:
            return language

    def validate_config(self, config: dict) -> tuple[bool, Optional[str]]:
        """
        Validate self-managed provider configuration.

        Performs custom validation beyond the basic schema validation,
        such as checking URL format.

        Args:
            config: Configuration dictionary to validate

        Returns:
            Tuple of (is_valid, error_message)
        """
        # Validate endpoint URL format (http:// covers localhost and IPs)
        endpoint = config.get("endpoint", "")
        if endpoint and not endpoint.startswith(("http://", "https://")):
            return False, f"Invalid endpoint format: {endpoint}. Must start with http:// or https://"

        # Validate pool_size is positive
        pool_size = config.get("pool_size", 10)
        if isinstance(pool_size, int) and pool_size <= 0:
            return False, "Pool size must be greater than 0"

        # Validate timeout is reasonable
        timeout = config.get("timeout", 30)
        if isinstance(timeout, int) and (timeout < 1 or timeout > 600):
            return False, "Timeout must be between 1 and 600 seconds"

        # Validate max_retries
        max_retries = config.get("max_retries", 3)
        if isinstance(max_retries, int) and (max_retries < 0 or max_retries > 10):
            return False, "Max retries must be between 0 and 10"

        return True, None
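End to end, the self-managed path can be exercised as in the following sketch (assumes an executor_manager instance is reachable at the default endpoint; otherwise `initialize` returns False):

```python
from agent.sandbox.providers.self_managed import SelfManagedProvider

provider = SelfManagedProvider()
if not provider.initialize({"endpoint": "http://localhost:9385", "timeout": 30}):
    raise SystemExit("executor_manager is not reachable")

instance = provider.create_instance("python")
result = provider.execute_code(
    instance_id=instance.instance_id,
    code="print('hello from the sandbox')",
    language="python",
    timeout=10,
)
print(result.stdout, result.exit_code)
provider.destroy_instance(instance.instance_id)  # no-op: the container pool is managed internally
```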
@@ -3,7 +3,7 @@ name = "gvisor-sandbox"
 version = "0.1.0"
 description = "Add your description here"
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.12,<3.15"
 dependencies = [
     "fastapi>=0.115.12",
     "httpx>=0.28.1",
@@ -1,4 +1,4 @@
-FROM node:24-bookworm-slim
+FROM node:24.13-bookworm-slim

 RUN npm config set registry https://registry.npmmirror.com
agent/sandbox/tests/MIGRATION_GUIDE.md (new file, 261 lines)
@@ -0,0 +1,261 @@
# Aliyun Code Interpreter Provider - Using the Official SDK

## Important Changes

### Official resources
- **Code Interpreter API**: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter
- **Official SDK**: https://github.com/Serverless-Devs/agentrun-sdk-python
- **SDK documentation**: https://docs.agent.run

## Benefits of using the official SDK

Migrating from hand-written HTTP requests to the official SDK (`agentrun-sdk`) brings the following benefits:

### 1. **Automatic request signing**
- The SDK handles Aliyun API signing automatically (no need to hand-implement the `Authorization` header)
- Supports multiple authentication methods: AccessKey, STS Token
- Reads credentials from environment variables automatically

### 2. **Simplified API**
```python
# Old implementation (hand-written HTTP request)
response = requests.post(
    f"{DATA_ENDPOINT}/sandboxes/{sandbox_id}/execute",
    headers={"X-Acs-Parent-Id": account_id},
    json={"code": code, "language": "python"}
)

# New implementation (using the SDK)
sandbox = CodeInterpreterSandbox(template_name="python-sandbox", config=config)
result = sandbox.context.execute(code="print('hello')")
```

### 3. **Better error handling**
- Structured exception types (`ServerError`) (see the sketch below)
- Built-in retry mechanism
- Detailed error messages
|
||||
|
||||
### 1. 文件重命名
|
||||
|
||||
| 旧文件名 | 新文件名 | 说明 |
|
||||
|---------|---------|------|
|
||||
| `aliyun_opensandbox.py` | `aliyun_codeinterpreter.py` | 提供商实现 |
|
||||
| `test_aliyun_provider.py` | `test_aliyun_codeinterpreter.py` | 单元测试 |
|
||||
| `test_aliyun_integration.py` | `test_aliyun_codeinterpreter_integration.py` | 集成测试 |
|
||||
|
||||
### 2. 配置字段变更
|
||||
|
||||
#### 旧配置(OpenSandbox)
|
||||
```json
|
||||
{
|
||||
"access_key_id": "LTAI5t...",
|
||||
"access_key_secret": "...",
|
||||
"region": "cn-hangzhou",
|
||||
"workspace_id": "ws-xxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
#### 新配置(Code Interpreter)
|
||||
```json
|
||||
{
|
||||
"access_key_id": "LTAI5t...",
|
||||
"access_key_secret": "...",
|
||||
"account_id": "1234567890...", // 新增:阿里云主账号ID(必需)
|
||||
"region": "cn-hangzhou",
|
||||
"template_name": "python-sandbox", // 新增:沙箱模板名称
|
||||
"timeout": 30 // 最大 30 秒(硬限制)
|
||||
}
|
||||
```
|
||||
|
||||
### 3. 关键差异
|
||||
|
||||
| 特性 | OpenSandbox | Code Interpreter |
|
||||
|------|-------------|-----------------|
|
||||
| **API 端点** | `opensandbox.{region}.aliyuncs.com` | `agentrun.{region}.aliyuncs.com` (控制面) |
|
||||
| **API 版本** | `2024-01-01` | `2025-09-10` |
|
||||
| **认证** | 需要 AccessKey | 需要 AccessKey + 主账号ID |
|
||||
| **请求头** | 标准签名 | 需要 `X-Acs-Parent-Id` 头 |
|
||||
| **超时限制** | 可配置 | **最大 30 秒**(硬限制) |
|
||||
| **上下文** | 不支持 | 支持上下文(Jupyter kernel) |
|
||||
|
||||
### 4. API 调用方式变更
|
||||
|
||||
#### 旧实现(假设的 OpenSandbox)
|
||||
```python
|
||||
# 单一端点
|
||||
API_ENDPOINT = "https://opensandbox.cn-hangzhou.aliyuncs.com"
|
||||
|
||||
# 简单的请求/响应
|
||||
response = requests.post(
|
||||
f"{API_ENDPOINT}/execute",
|
||||
json={"code": "print('hello')", "language": "python"}
|
||||
)
|
||||
```
|
||||
|
||||
#### 新实现(Code Interpreter)
|
||||
```python
|
||||
# 控制面 API - 管理沙箱生命周期
|
||||
CONTROL_ENDPOINT = "https://agentrun.cn-hangzhou.aliyuncs.com/2025-09-10"
|
||||
|
||||
# 数据面 API - 执行代码
|
||||
DATA_ENDPOINT = "https://{account_id}.agentrun-data.cn-hangzhou.aliyuncs.com"
|
||||
|
||||
# 创建沙箱(控制面)
|
||||
response = requests.post(
|
||||
f"{CONTROL_ENDPOINT}/sandboxes",
|
||||
headers={"X-Acs-Parent-Id": account_id},
|
||||
json={"templateName": "python-sandbox"}
|
||||
)
|
||||
|
||||
# 执行代码(数据面)
|
||||
response = requests.post(
|
||||
f"{DATA_ENDPOINT}/sandboxes/{sandbox_id}/execute",
|
||||
headers={"X-Acs-Parent-Id": account_id},
|
||||
json={"code": "print('hello')", "language": "python", "timeout": 30}
|
||||
)
|
||||
```
|
||||
|
||||
### 5. 迁移步骤
|
||||
|
||||
#### 步骤 1: 更新配置
|
||||
|
||||
如果您之前使用的是 `aliyun_opensandbox`:
|
||||
|
||||
**旧配置**:
|
||||
```json
|
||||
{
|
||||
"name": "sandbox.provider_type",
|
||||
"value": "aliyun_opensandbox"
|
||||
}
|
||||
```
|
||||
|
||||
**新配置**:
|
||||
```json
|
||||
{
|
||||
"name": "sandbox.provider_type",
|
||||
"value": "aliyun_codeinterpreter"
|
||||
}
|
||||
```
|
||||
|
||||
#### 步骤 2: 添加必需的 account_id
|
||||
|
||||
在 Aliyun 控制台右上角点击头像,获取主账号 ID:
|
||||
1. 登录 [阿里云控制台](https://ram.console.aliyun.com/manage/ak)
|
||||
2. 点击右上角头像
|
||||
3. 复制主账号 ID(16 位数字)
|
||||
|
||||
#### 步骤 3: 更新环境变量
|
||||
|
||||
```bash
|
||||
# 新增必需的环境变量
|
||||
export ALIYUN_ACCOUNT_ID="1234567890123456"
|
||||
|
||||
# 其他环境变量保持不变
|
||||
export ALIYUN_ACCESS_KEY_ID="LTAI5t..."
|
||||
export ALIYUN_ACCESS_KEY_SECRET="..."
|
||||
export ALIYUN_REGION="cn-hangzhou"
|
||||
```
|
||||
|
||||
#### 步骤 4: 运行测试
|
||||
|
||||
```bash
|
||||
# 单元测试(不需要真实凭据)
|
||||
pytest agent/sandbox/tests/test_aliyun_codeinterpreter.py -v
|
||||
|
||||
# 集成测试(需要真实凭据)
|
||||
pytest agent/sandbox/tests/test_aliyun_codeinterpreter_integration.py -v -m integration
|
||||
```
|
||||
|
||||
## 文件变更清单
|
||||
|
||||
### ✅ 已完成
|
||||
|
||||
- [x] 创建 `aliyun_codeinterpreter.py` - 新的提供商实现
|
||||
- [x] 更新 `sandbox_spec.md` - 规范文档
|
||||
- [x] 更新 `admin/services.py` - 服务管理器
|
||||
- [x] 更新 `providers/__init__.py` - 包导出
|
||||
- [x] 创建 `test_aliyun_codeinterpreter.py` - 单元测试
|
||||
- [x] 创建 `test_aliyun_codeinterpreter_integration.py` - 集成测试
|
||||
|
||||
### 📝 可选清理
|
||||
|
||||
如果您想删除旧的 OpenSandbox 实现:
|
||||
|
||||
```bash
|
||||
# 删除旧文件(可选)
|
||||
rm agent/sandbox/providers/aliyun_opensandbox.py
|
||||
rm agent/sandbox/tests/test_aliyun_provider.py
|
||||
rm agent/sandbox/tests/test_aliyun_integration.py
|
||||
```
|
||||
|
||||
**注意**: 保留旧文件不会影响新功能,只是代码冗余。
|
||||
|
||||
## API 参考
|
||||
|
||||
### 控制面 API(沙箱管理)
|
||||
|
||||
| 端点 | 方法 | 说明 |
|
||||
|------|------|------|
|
||||
| `/sandboxes` | POST | 创建沙箱实例 |
|
||||
| `/sandboxes/{id}/stop` | POST | 停止实例 |
|
||||
| `/sandboxes/{id}` | DELETE | 删除实例 |
|
||||
| `/templates` | GET | 列出模板 |
|
||||
|
||||
### 数据面 API(代码执行)
|
||||
|
||||
| 端点 | 方法 | 说明 |
|
||||
|------|------|------|
|
||||
| `/sandboxes/{id}/execute` | POST | 执行代码(简化版) |
|
||||
| `/sandboxes/{id}/contexts` | POST | 创建上下文 |
|
||||
| `/sandboxes/{id}/contexts/{ctx_id}/execute` | POST | 在上下文中执行 |
|
||||
| `/sandboxes/{id}/health` | GET | 健康检查 |
|
||||
| `/sandboxes/{id}/files` | GET/POST | 文件读写 |
|
||||
| `/sandboxes/{id}/processes/cmd` | POST | 执行 Shell 命令 |
|
||||
|
||||
## 常见问题
|
||||
|
||||
### Q: 为什么要添加 account_id?
|
||||
|
||||
**A**: Code Interpreter API 需要在请求头中提供 `X-Acs-Parent-Id`(阿里云主账号ID)进行身份验证。这是 Aliyun Code Interpreter API 的必需参数。
|
||||
|
||||
### Q: 30 秒超时限制可以绕过吗?
|
||||
|
||||
**A**: 不可以。这是 Aliyun Code Interpreter 的**硬限制**,无法通过配置或请求参数绕过。如果代码执行时间超过 30 秒,请考虑:
|
||||
1. 优化代码逻辑
|
||||
2. 分批处理数据
|
||||
3. 使用上下文保持状态
|
||||
|
||||
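For illustration, a minimal sketch of the batching idea (`process_chunk` is a hypothetical placeholder; the context keeps interpreter state alive between calls, per the feature table above):

```python
# Hypothetical example: split a long-running job into per-chunk executions,
# each of which finishes well within the 30-second hard limit.
sandbox.context.execute(code="results = []")  # state persists in the context
for i in range(10):
    sandbox.context.execute(code=f"results.append(process_chunk({i}))")
```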
### Q: Does the old OpenSandbox configuration still work?

**A**: No. OpenSandbox and Code Interpreter are two different services with incompatible APIs. You must migrate to the new configuration format.

### Q: How do I find the Aliyun primary account ID?

**A**:
1. Log in to the Aliyun console
2. Click the avatar in the top-right corner
3. The "primary account ID" is shown in the pop-up

### Q: Does the migration affect existing functionality?

**A**:
- **Self-managed provider (self_managed)**: Not affected
- **E2B provider**: Not affected
- **Aliyun provider**: Requires updated configuration and re-testing

## Related documentation

- [Official documentation](https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter)
- [Sandbox specification](../docs/develop/sandbox_spec.md)
- [Testing guide](./README.md)
- [Quick start](./QUICKSTART.md)

## Support

If you run into problems:
1. Check the official documentation
2. Verify your configuration
3. Inspect the error messages in the test output
4. Contact the RAGFlow team
agent/sandbox/tests/QUICKSTART.md (new file, 178 lines)
@@ -0,0 +1,178 @@
# Aliyun OpenSandbox Provider - Quick Test Guide

## Test Overview

### 1. Unit tests (no real credentials required)

The unit tests use mocks and do **not** require real Aliyun credentials, so they can be run at any time.

```bash
# Run the Aliyun provider unit tests
pytest agent/sandbox/tests/test_aliyun_provider.py -v

# Expected output:
# test_aliyun_provider.py::TestAliyunOpenSandboxProvider::test_provider_initialization PASSED
# test_aliyun_provider.py::TestAliyunOpenSandboxProvider::test_initialize_success PASSED
# ...
# ========================= 48 passed in 2.34s ==========================
```

### 2. Integration tests (real credentials required)

The integration tests call the real Aliyun API and require configured credentials.

#### Step 1: Configure environment variables

```bash
export ALIYUN_ACCESS_KEY_ID="LTAI5t..."   # Replace with a real Access Key ID
export ALIYUN_ACCESS_KEY_SECRET="..."     # Replace with a real Access Key Secret
export ALIYUN_REGION="cn-hangzhou"        # Optional, defaults to cn-hangzhou
```

#### Step 2: Run the integration tests

```bash
# Run all integration tests
pytest agent/sandbox/tests/test_aliyun_integration.py -v -m integration

# Run a specific test
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check -v
```

#### Step 3: Expected output

```
test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_initialize_provider PASSED
test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check PASSED
test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code PASSED
...
========================== 10 passed in 15.67s ==========================
```

### 3. Test scenarios

#### Basic functionality

```bash
# Health check
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check -v

# Create an instance
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_create_python_instance -v

# Execute code
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code -v

# Destroy an instance
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_destroy_instance -v
```

#### Error handling

```bash
# Code execution errors
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code_with_error -v

# Timeout handling
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code_timeout -v
```

#### Real-world scenarios

```bash
# Data processing workflow
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunRealWorldScenarios::test_data_processing_workflow -v

# String manipulation
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunRealWorldScenarios::test_string_manipulation -v

# Multiple executions
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunRealWorldScenarios::test_multiple_executions_same_instance -v
```

## FAQ

### Q: What if I don't have credentials?

**A:** Just run the unit tests; they don't require real credentials:
```bash
pytest agent/sandbox/tests/test_aliyun_provider.py -v
```

### Q: How do I skip the integration tests?

**A:** Skip them with a pytest marker expression:
```bash
# Run only unit tests, skipping integration tests
pytest agent/sandbox/tests/ -v -m "not integration"
```

### Q: What if the integration tests fail?

**A:** Check the following:

1. **Are the credentials correct?**
   ```bash
   echo $ALIYUN_ACCESS_KEY_ID
   echo $ALIYUN_ACCESS_KEY_SECRET
   ```

2. **Is the network connection working?**
   ```bash
   curl -I https://opensandbox.cn-hangzhou.aliyuncs.com
   ```

3. **Do you have OpenSandbox service permissions?**
   - Log in to the Aliyun console
   - Check that the OpenSandbox service is enabled
   - Check the AccessKey permissions

4. **Inspect the detailed error output**
   ```bash
   pytest agent/sandbox/tests/test_aliyun_integration.py -v -s
   ```

### Q: What if the tests time out?

**A:** Increase the timeout or check the network:
```bash
# Use a longer timeout
pytest agent/sandbox/tests/test_aliyun_integration.py -v --timeout=60
```

## Test command cheat sheet

| Command | Description | Credentials required |
|------|------|---------|
| `pytest agent/sandbox/tests/test_aliyun_provider.py -v` | Unit tests | ❌ |
| `pytest agent/sandbox/tests/test_aliyun_integration.py -v` | Integration tests | ✅ |
| `pytest agent/sandbox/tests/ -v -m "not integration"` | Unit tests only | ❌ |
| `pytest agent/sandbox/tests/ -v -m integration` | Integration tests only | ✅ |
| `pytest agent/sandbox/tests/ -v` | All tests | Partially |

## Getting Aliyun credentials

1. Visit the [Aliyun console](https://ram.console.aliyun.com/manage/ak)
2. Create an AccessKey
3. Save the AccessKey ID and AccessKey Secret
4. Set the environment variables

⚠️ **Security notes:**
- Never hard-code credentials in source code
- Use environment variables or configuration files (see the sketch below)
- Rotate AccessKeys regularly
- Restrict AccessKey permissions
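For example, a test or script can assemble the provider configuration from the environment instead of hard-coding secrets (a minimal sketch using the variable names above):

```python
import os

# Required variables raise KeyError if unset; region falls back to the default.
config = {
    "access_key_id": os.environ["ALIYUN_ACCESS_KEY_ID"],
    "access_key_secret": os.environ["ALIYUN_ACCESS_KEY_SECRET"],
    "region": os.environ.get("ALIYUN_REGION", "cn-hangzhou"),
}
```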
## Next steps

1. ✅ **Run the unit tests** - verify the code logic
2. 🔧 **Configure credentials** - set the environment variables
3. 🚀 **Run the integration tests** - exercise the real API
4. 📊 **Check the results** - make sure all tests pass
5. 🎯 **Integrate into the system** - configure the provider via the admin API

## Need help?

- See the [full documentation](README.md)
- Check the [sandbox specification](../../../../../docs/develop/sandbox_spec.md)
- Contact the RAGFlow team
agent/sandbox/tests/README.md (new file, 213 lines)
@@ -0,0 +1,213 @@
# Sandbox Provider Tests

This directory contains tests for the RAGFlow sandbox provider system.

## Test Structure

```
tests/
├── pytest.ini                      # Pytest configuration
├── test_providers.py               # Unit tests for all providers (mocked)
├── test_aliyun_provider.py         # Unit tests for Aliyun provider (mocked)
├── test_aliyun_integration.py      # Integration tests for Aliyun (real API)
└── sandbox_security_tests_full.py  # Security tests for self-managed provider
```

## Test Types

### 1. Unit Tests (No Credentials Required)

Unit tests use mocks and don't require any external services or credentials.

**Files:**
- `test_providers.py` - Tests for the base provider interface and manager
- `test_aliyun_provider.py` - Tests for the Aliyun provider with mocked API calls

**Run unit tests:**
```bash
# Run all unit tests
pytest agent/sandbox/tests/test_providers.py -v
pytest agent/sandbox/tests/test_aliyun_provider.py -v

# Run specific test
pytest agent/sandbox/tests/test_aliyun_provider.py::TestAliyunOpenSandboxProvider::test_initialize_success -v

# Run all unit tests (skip integration)
pytest agent/sandbox/tests/ -v -m "not integration"
```

### 2. Integration Tests (Real Credentials Required)

Integration tests make real API calls to the Aliyun OpenSandbox service.

**Files:**
- `test_aliyun_integration.py` - Tests with real Aliyun API calls

**Setup environment variables:**
```bash
export ALIYUN_ACCESS_KEY_ID="LTAI5t..."
export ALIYUN_ACCESS_KEY_SECRET="..."
export ALIYUN_REGION="cn-hangzhou"   # Optional, defaults to cn-hangzhou
export ALIYUN_WORKSPACE_ID="ws-..."  # Optional
```

**Run integration tests:**
```bash
# Run only integration tests
pytest agent/sandbox/tests/test_aliyun_integration.py -v -m integration

# Run all tests including integration
pytest agent/sandbox/tests/ -v

# Run specific integration test
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check -v
```

### 3. Security Tests

Security tests validate the security features of the self-managed sandbox provider.

**Files:**
- `sandbox_security_tests_full.py` - Comprehensive security tests

**Run security tests:**
```bash
# Run all security tests
pytest agent/sandbox/tests/sandbox_security_tests_full.py -v

# Run specific security test
pytest agent/sandbox/tests/sandbox_security_tests_full.py -k "test_dangerous_imports" -v
```

## Test Commands

### Quick Test Commands

```bash
# Run all sandbox tests (unit only, fast)
pytest agent/sandbox/tests/ -v -m "not integration" --tb=short

# Run tests with coverage
pytest agent/sandbox/tests/ -v --cov=agent.sandbox --cov-report=term-missing -m "not integration"

# Run tests and stop on first failure
pytest agent/sandbox/tests/ -v -x -m "not integration"

# Run tests in parallel (requires pytest-xdist)
pytest agent/sandbox/tests/ -v -n auto -m "not integration"
```

### Aliyun Provider Testing

```bash
# 1. Run unit tests (no credentials needed)
pytest agent/sandbox/tests/test_aliyun_provider.py -v

# 2. Set up credentials for integration tests
export ALIYUN_ACCESS_KEY_ID="your-key-id"
export ALIYUN_ACCESS_KEY_SECRET="your-secret"
export ALIYUN_REGION="cn-hangzhou"

# 3. Run integration tests (makes real API calls)
pytest agent/sandbox/tests/test_aliyun_integration.py -v

# 4. Test specific scenarios
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code -v
pytest agent/sandbox/tests/test_aliyun_integration.py::TestAliyunRealWorldScenarios -v
```

## Understanding Test Results

### Unit Test Output

```
agent/sandbox/tests/test_aliyun_provider.py::TestAliyunOpenSandboxProvider::test_initialize_success PASSED
agent/sandbox/tests/test_aliyun_provider.py::TestAliyunOpenSandboxProvider::test_create_instance_python PASSED
...
========================== 48 passed in 2.34s ===========================
```

### Integration Test Output

```
agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check PASSED
agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_create_python_instance PASSED
agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_execute_python_code PASSED
...
========================== 10 passed in 15.67s ===========================
```

**Note:** Integration tests will be skipped if credentials are not set:
```
agent/sandbox/tests/test_aliyun_integration.py::TestAliyunOpenSandboxIntegration::test_health_check SKIPPED
...
========================== 10 skipped, 48 passed in 0.12s ===========================
```

## Troubleshooting

### Integration Tests Fail

1. **Check credentials:**
   ```bash
   echo $ALIYUN_ACCESS_KEY_ID
   echo $ALIYUN_ACCESS_KEY_SECRET
   ```

2. **Check network connectivity:**
   ```bash
   curl -I https://opensandbox.cn-hangzhou.aliyuncs.com
   ```

3. **Verify permissions:**
   - Make sure your Aliyun account has the OpenSandbox service enabled
   - Check that your AccessKey has the required permissions

4. **Check region:**
   - Verify the region is correct for your account
   - Try different regions: cn-hangzhou, cn-beijing, cn-shanghai, etc.

### Tests Timeout

If tests time out, increase the timeout in the test configuration or run with a longer timeout:
```bash
pytest agent/sandbox/tests/test_aliyun_integration.py -v --timeout=60
```

### Mock Tests Fail

If unit tests fail, it's likely a code issue, not a credentials issue:
1. Check the test error message
2. Review the code changes
3. Run with verbose output: `pytest -vv`

## Contributing

When adding new providers:

1. **Create unit tests** in `test_{provider}_provider.py` with mocks
2. **Create integration tests** in `test_{provider}_integration.py` with real API calls
3. **Add markers** to distinguish test types
4. **Update this README** with provider-specific testing instructions

Example:
```python
@pytest.mark.integration
def test_new_provider_real_api():
    """Test with real API calls."""
    # Your test here
```

## Continuous Integration

In CI/CD pipelines:

```bash
# Run unit tests only (fast, no credentials)
pytest agent/sandbox/tests/ -v -m "not integration"

# Run integration tests if credentials are available
if [ -n "$ALIYUN_ACCESS_KEY_ID" ]; then
    pytest agent/sandbox/tests/test_aliyun_integration.py -v -m integration
fi
```
@@ -13,3 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+"""
+Sandbox provider tests package.
+"""
agent/sandbox/tests/pytest.ini (new file, 33 lines)
@@ -0,0 +1,33 @@
[pytest]
# Pytest configuration for sandbox tests

# Test discovery patterns
python_files = test_*.py
python_classes = Test*
python_functions = test_*

# Markers for different test types
markers =
    integration: Tests that require external services (Aliyun API, etc.)
    unit: Fast tests that don't require external services
    slow: Tests that take a long time to run

# Test paths
testpaths = .

# Minimum version
minversion = 7.0

# Output options
addopts =
    -v
    --strict-markers
    --tb=short
    --disable-warnings

# Log options
log_cli = false
log_cli_level = INFO

# Coverage options (if using pytest-cov)
# addopts = --cov=agent.sandbox --cov-report=html --cov-report=term
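For reference, a test opts into one of the markers declared above like this (a minimal sketch; `--strict-markers` rejects any marker not declared in this file):

```python
import pytest


@pytest.mark.integration
def test_real_api_roundtrip():
    """Runs only when integration tests are selected (-m integration)."""
    ...
```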
329
agent/sandbox/tests/test_aliyun_codeinterpreter.py
Normal file
329
agent/sandbox/tests/test_aliyun_codeinterpreter.py
Normal file
@ -0,0 +1,329 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Unit tests for Aliyun Code Interpreter provider.

These tests use mocks and don't require real Aliyun credentials.

Official Documentation: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter
Official SDK: https://github.com/Serverless-Devs/agentrun-sdk-python
"""

import pytest
from unittest.mock import patch, MagicMock

from agent.sandbox.providers.base import SandboxProvider
from agent.sandbox.providers.aliyun_codeinterpreter import AliyunCodeInterpreterProvider


class TestAliyunCodeInterpreterProvider:
    """Test AliyunCodeInterpreterProvider implementation."""

    def test_provider_initialization(self):
        """Test provider initialization."""
        provider = AliyunCodeInterpreterProvider()

        assert provider.access_key_id == ""
        assert provider.access_key_secret == ""
        assert provider.account_id == ""
        assert provider.region == "cn-hangzhou"
        assert provider.template_name == ""
        assert provider.timeout == 30
        assert not provider._initialized

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.Template")
    def test_initialize_success(self, mock_template):
        """Test successful initialization."""
        # Mock health check response
        mock_template.list.return_value = []

        provider = AliyunCodeInterpreterProvider()
        result = provider.initialize(
            {
                "access_key_id": "LTAI5tXXXXXXXXXX",
                "access_key_secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
                "account_id": "1234567890123456",
                "region": "cn-hangzhou",
                "template_name": "python-sandbox",
                "timeout": 20,
            }
        )

        assert result is True
        assert provider.access_key_id == "LTAI5tXXXXXXXXXX"
        assert provider.access_key_secret == "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
        assert provider.account_id == "1234567890123456"
        assert provider.region == "cn-hangzhou"
        assert provider.template_name == "python-sandbox"
        assert provider.timeout == 20
        assert provider._initialized

    def test_initialize_missing_credentials(self):
        """Test initialization with missing credentials."""
        provider = AliyunCodeInterpreterProvider()

        # Missing access_key_id
        result = provider.initialize({"access_key_secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"})
        assert result is False

        # Missing access_key_secret
        result = provider.initialize({"access_key_id": "LTAI5tXXXXXXXXXX"})
        assert result is False

        # Missing account_id
        provider2 = AliyunCodeInterpreterProvider()
        result = provider2.initialize({"access_key_id": "LTAI5tXXXXXXXXXX", "access_key_secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"})
        assert result is False

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.Template")
    def test_initialize_default_config(self, mock_template):
        """Test initialization with default config."""
        mock_template.list.return_value = []

        provider = AliyunCodeInterpreterProvider()
        result = provider.initialize({"access_key_id": "LTAI5tXXXXXXXXXX", "access_key_secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "account_id": "1234567890123456"})

        assert result is True
        assert provider.region == "cn-hangzhou"
        assert provider.template_name == ""

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.CodeInterpreterSandbox")
    def test_create_instance_python(self, mock_sandbox_class):
        """Test creating a Python instance."""
        # Mock successful instance creation
        mock_sandbox = MagicMock()
        mock_sandbox.sandbox_id = "01JCED8Z9Y6XQVK8M2NRST5WXY"
        mock_sandbox_class.return_value = mock_sandbox

        provider = AliyunCodeInterpreterProvider()
        provider._initialized = True
        provider._config = MagicMock()

        instance = provider.create_instance("python")

        assert instance.provider == "aliyun_codeinterpreter"
        assert instance.status == "READY"
        assert instance.metadata["language"] == "python"

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.CodeInterpreterSandbox")
    def test_create_instance_javascript(self, mock_sandbox_class):
        """Test creating a JavaScript instance."""
        mock_sandbox = MagicMock()
        mock_sandbox.sandbox_id = "01JCED8Z9Y6XQVK8M2NRST5WXY"
        mock_sandbox_class.return_value = mock_sandbox

        provider = AliyunCodeInterpreterProvider()
        provider._initialized = True
        provider._config = MagicMock()

        instance = provider.create_instance("javascript")

        assert instance.metadata["language"] == "javascript"

    def test_create_instance_not_initialized(self):
        """Test creating instance when provider not initialized."""
        provider = AliyunCodeInterpreterProvider()

        with pytest.raises(RuntimeError, match="Provider not initialized"):
            provider.create_instance("python")

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.CodeInterpreterSandbox")
    def test_execute_code_success(self, mock_sandbox_class):
        """Test successful code execution."""
        # Mock sandbox instance
        mock_sandbox = MagicMock()
        mock_sandbox.context.execute.return_value = {
            "results": [{"type": "stdout", "text": "Hello, World!"}, {"type": "result", "text": "None"}, {"type": "endOfExecution", "status": "ok"}],
            "contextId": "kernel-12345-67890",
        }
        mock_sandbox_class.return_value = mock_sandbox

        provider = AliyunCodeInterpreterProvider()
        provider._initialized = True
        provider._config = MagicMock()

        result = provider.execute_code(instance_id="01JCED8Z9Y6XQVK8M2NRST5WXY", code="print('Hello, World!')", language="python", timeout=10)

        assert result.stdout == "Hello, World!"
        assert result.stderr == ""
        assert result.exit_code == 0
        assert result.execution_time > 0

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.CodeInterpreterSandbox")
    def test_execute_code_timeout(self, mock_sandbox_class):
        """Test code execution timeout."""
        from agentrun.utils.exception import ServerError

        mock_sandbox = MagicMock()
        mock_sandbox.context.execute.side_effect = ServerError(408, "Request timeout")
        mock_sandbox_class.return_value = mock_sandbox

        provider = AliyunCodeInterpreterProvider()
        provider._initialized = True
        provider._config = MagicMock()

        with pytest.raises(TimeoutError, match="Execution timed out"):
            provider.execute_code(instance_id="01JCED8Z9Y6XQVK8M2NRST5WXY", code="while True: pass", language="python", timeout=5)

    @patch("agent.sandbox.providers.aliyun_codeinterpreter.CodeInterpreterSandbox")
    def test_execute_code_with_error(self, mock_sandbox_class):
        """Test code execution with error."""
        mock_sandbox = MagicMock()
        mock_sandbox.context.execute.return_value = {
            "results": [{"type": "stderr", "text": "Traceback..."}, {"type": "error", "text": "NameError: name 'x' is not defined"}, {"type": "endOfExecution", "status": "error"}]
        }
        mock_sandbox_class.return_value = mock_sandbox

        provider = AliyunCodeInterpreterProvider()
        provider._initialized = True
        provider._config = MagicMock()

        result = provider.execute_code(instance_id="01JCED8Z9Y6XQVK8M2NRST5WXY", code="print(x)", language="python")

        assert result.exit_code != 0
        assert len(result.stderr) > 0

    def test_get_supported_languages(self):
        """Test getting supported languages."""
        provider = AliyunCodeInterpreterProvider()

        languages = provider.get_supported_languages()

        assert "python" in languages
        assert "javascript" in languages

    def test_get_config_schema(self):
        """Test getting configuration schema."""
        schema = AliyunCodeInterpreterProvider.get_config_schema()

        assert "access_key_id" in schema
        assert schema["access_key_id"]["required"] is True

        assert "access_key_secret" in schema
        assert schema["access_key_secret"]["required"] is True

        assert "account_id" in schema
        assert schema["account_id"]["required"] is True

        assert "region" in schema
        assert "template_name" in schema
        assert "timeout" in schema

    def test_validate_config_success(self):
        """Test successful configuration validation."""
        provider = AliyunCodeInterpreterProvider()

        is_valid, error_msg = provider.validate_config({"access_key_id": "LTAI5tXXXXXXXXXX", "account_id": "1234567890123456", "region": "cn-hangzhou"})

        assert is_valid is True
        assert error_msg is None

    def test_validate_config_invalid_access_key(self):
        """Test validation with invalid access key format."""
        provider = AliyunCodeInterpreterProvider()

        is_valid, error_msg = provider.validate_config({"access_key_id": "INVALID_KEY"})

        assert is_valid is False
        assert "AccessKey ID format" in error_msg

    def test_validate_config_missing_account_id(self):
        """Test validation with missing account ID."""
        provider = AliyunCodeInterpreterProvider()

        is_valid, error_msg = provider.validate_config({})

        assert is_valid is False
        assert "Account ID" in error_msg

    def test_validate_config_invalid_region(self):
        """Test validation with invalid region."""
        provider = AliyunCodeInterpreterProvider()

        is_valid, error_msg = provider.validate_config(
            {
                "access_key_id": "LTAI5tXXXXXXXXXX",
                "account_id": "1234567890123456",  # Provide required field
                "region": "us-west-1",
            }
        )

        assert is_valid is False
        assert "Invalid region" in error_msg

    def test_validate_config_invalid_timeout(self):
        """Test validation with invalid timeout (> 30 seconds)."""
        provider = AliyunCodeInterpreterProvider()

        is_valid, error_msg = provider.validate_config(
            {
                "access_key_id": "LTAI5tXXXXXXXXXX",
                "account_id": "1234567890123456",  # Provide required field
                "timeout": 60,
            }
        )

        assert is_valid is False
        assert "Timeout must be between 1 and 30 seconds" in error_msg

    def test_normalize_language_python(self):
        """Test normalizing Python language identifier."""
        provider = AliyunCodeInterpreterProvider()

        assert provider._normalize_language("python") == "python"
        assert provider._normalize_language("python3") == "python"
        assert provider._normalize_language("PYTHON") == "python"

    def test_normalize_language_javascript(self):
        """Test normalizing JavaScript language identifier."""
        provider = AliyunCodeInterpreterProvider()

        assert provider._normalize_language("javascript") == "javascript"
        assert provider._normalize_language("nodejs") == "javascript"
        assert provider._normalize_language("JavaScript") == "javascript"


class TestAliyunCodeInterpreterInterface:
    """Test that Aliyun provider correctly implements the interface."""

    def test_aliyun_provider_is_abstract(self):
        """Test that AliyunCodeInterpreterProvider is a SandboxProvider."""
        provider = AliyunCodeInterpreterProvider()

        assert isinstance(provider, SandboxProvider)

    def test_aliyun_provider_has_abstract_methods(self):
        """Test that AliyunCodeInterpreterProvider implements all abstract methods."""
        provider = AliyunCodeInterpreterProvider()

        assert hasattr(provider, "initialize")
        assert callable(provider.initialize)

        assert hasattr(provider, "create_instance")
        assert callable(provider.create_instance)

        assert hasattr(provider, "execute_code")
        assert callable(provider.execute_code)

        assert hasattr(provider, "destroy_instance")
        assert callable(provider.destroy_instance)

        assert hasattr(provider, "health_check")
        assert callable(provider.health_check)

        assert hasattr(provider, "get_supported_languages")
        assert callable(provider.get_supported_languages)
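Read together, these mock-based tests pin down the provider's expected call sequence. A minimal usage sketch implied by the assertions above — the config values are placeholders taken from the tests, and error handling is elided:

    from agent.sandbox.providers.aliyun_codeinterpreter import AliyunCodeInterpreterProvider

    provider = AliyunCodeInterpreterProvider()
    # initialize() returns False on missing credentials rather than raising.
    if provider.initialize({
        "access_key_id": "LTAI5tXXXXXXXXXX",                    # placeholder
        "access_key_secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",  # placeholder
        "account_id": "1234567890123456",                        # placeholder
    }):
        # create_instance() raises RuntimeError if the provider is not initialized.
        instance = provider.create_instance("python")
        result = provider.execute_code(
            instance_id=instance.instance_id,
            code="print('Hello, World!')",
            language="python",
            timeout=10,
        )
        # ExecutionResult exposes stdout, stderr, exit_code, and execution_time.
        print(result.stdout, result.exit_code)
        provider.destroy_instance(instance.instance_id)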
353  agent/sandbox/tests/test_aliyun_codeinterpreter_integration.py  Normal file
@@ -0,0 +1,353 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Integration tests for Aliyun Code Interpreter provider.

These tests require real Aliyun credentials and will make actual API calls.
To run these tests, set the following environment variables:

    export AGENTRUN_ACCESS_KEY_ID="LTAI5t..."
    export AGENTRUN_ACCESS_KEY_SECRET="..."
    export AGENTRUN_ACCOUNT_ID="1234567890..."  # Aliyun primary account ID
    export AGENTRUN_REGION="cn-hangzhou"  # Note: the SDK reads AGENTRUN_REGION directly

Then run:
    pytest agent/sandbox/tests/test_aliyun_codeinterpreter_integration.py -v

Official Documentation: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter
"""

import os
import pytest
from agent.sandbox.providers.aliyun_codeinterpreter import AliyunCodeInterpreterProvider


# Skip all tests if credentials are not provided
pytestmark = pytest.mark.skipif(
    not all(
        [
            os.getenv("AGENTRUN_ACCESS_KEY_ID"),
            os.getenv("AGENTRUN_ACCESS_KEY_SECRET"),
            os.getenv("AGENTRUN_ACCOUNT_ID"),
        ]
    ),
    reason="Aliyun credentials not set. Set AGENTRUN_ACCESS_KEY_ID, AGENTRUN_ACCESS_KEY_SECRET, and AGENTRUN_ACCOUNT_ID.",
)


@pytest.fixture
def aliyun_config():
    """Get Aliyun configuration from environment variables."""
    return {
        "access_key_id": os.getenv("AGENTRUN_ACCESS_KEY_ID"),
        "access_key_secret": os.getenv("AGENTRUN_ACCESS_KEY_SECRET"),
        "account_id": os.getenv("AGENTRUN_ACCOUNT_ID"),
        "region": os.getenv("AGENTRUN_REGION", "cn-hangzhou"),
        "template_name": os.getenv("AGENTRUN_TEMPLATE_NAME", ""),
        "timeout": 30,
    }


@pytest.fixture
def provider(aliyun_config):
    """Create an initialized Aliyun provider."""
    provider = AliyunCodeInterpreterProvider()
    initialized = provider.initialize(aliyun_config)
    if not initialized:
        pytest.skip("Failed to initialize Aliyun provider. Check credentials, account ID, and network.")
    return provider


@pytest.mark.integration
class TestAliyunCodeInterpreterIntegration:
    """Integration tests for Aliyun Code Interpreter provider."""

    def test_initialize_provider(self, aliyun_config):
        """Test provider initialization with real credentials."""
        provider = AliyunCodeInterpreterProvider()
        result = provider.initialize(aliyun_config)

        assert result is True
        assert provider._initialized is True

    def test_health_check(self, provider):
        """Test health check with real API."""
        result = provider.health_check()

        assert result is True

    def test_get_supported_languages(self, provider):
        """Test getting supported languages."""
        languages = provider.get_supported_languages()

        assert "python" in languages
        assert "javascript" in languages
        assert isinstance(languages, list)

    def test_create_python_instance(self, provider):
        """Test creating a Python sandbox instance."""
        try:
            instance = provider.create_instance("python")

            assert instance.provider == "aliyun_codeinterpreter"
            assert instance.status in ["READY", "CREATING"]
            assert instance.metadata["language"] == "python"
            assert len(instance.instance_id) > 0

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Instance creation failed: {str(e)}. API might not be available yet.")

    def test_execute_python_code(self, provider):
        """Test executing Python code in the sandbox."""
        try:
            # Create instance
            instance = provider.create_instance("python")

            # Execute simple code
            result = provider.execute_code(
                instance_id=instance.instance_id,
                code="print('Hello from Aliyun Code Interpreter!')\nprint(42)",
                language="python",
                timeout=30,  # Max 30 seconds
            )

            assert result.exit_code == 0
            assert "Hello from Aliyun Code Interpreter!" in result.stdout
            assert "42" in result.stdout
            assert result.execution_time > 0

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Code execution test failed: {str(e)}. API might not be available yet.")

    def test_execute_python_code_with_arguments(self, provider):
        """Test executing Python code with arguments parameter."""
        try:
            # Create instance
            instance = provider.create_instance("python")

            # Execute code with arguments
            result = provider.execute_code(
                instance_id=instance.instance_id,
                code="""def main(name: str, count: int) -> dict:
    return {"message": f"Hello {name}!" * count}
""",
                language="python",
                timeout=30,
                arguments={"name": "World", "count": 2}
            )

            assert result.exit_code == 0
            assert "Hello World!Hello World!" in result.stdout

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Arguments test failed: {str(e)}. API might not be available yet.")

    def test_execute_python_code_with_error(self, provider):
        """Test executing Python code that produces an error."""
        try:
            # Create instance
            instance = provider.create_instance("python")

            # Execute code with error
            result = provider.execute_code(instance_id=instance.instance_id, code="raise ValueError('Test error')", language="python", timeout=30)

            assert result.exit_code != 0
            assert len(result.stderr) > 0 or "ValueError" in result.stdout

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Error handling test failed: {str(e)}. API might not be available yet.")

    def test_execute_javascript_code(self, provider):
        """Test executing JavaScript code in the sandbox."""
        try:
            # Create instance
            instance = provider.create_instance("javascript")

            # Execute simple code
            result = provider.execute_code(instance_id=instance.instance_id, code="console.log('Hello from JavaScript!');", language="javascript", timeout=30)

            assert result.exit_code == 0
            assert "Hello from JavaScript!" in result.stdout

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"JavaScript execution test failed: {str(e)}. API might not be available yet.")

    def test_execute_javascript_code_with_arguments(self, provider):
        """Test executing JavaScript code with arguments parameter."""
        try:
            # Create instance
            instance = provider.create_instance("javascript")

            # Execute code with arguments
            result = provider.execute_code(
                instance_id=instance.instance_id,
                code="""function main(args) {
    const { name, count } = args;
    return `Hello ${name}!`.repeat(count);
}""",
                language="javascript",
                timeout=30,
                arguments={"name": "World", "count": 2}
            )

            assert result.exit_code == 0
            assert "Hello World!Hello World!" in result.stdout

            # Clean up
            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"JavaScript arguments test failed: {str(e)}. API might not be available yet.")

    def test_destroy_instance(self, provider):
        """Test destroying a sandbox instance."""
        try:
            # Create instance
            instance = provider.create_instance("python")

            # Destroy instance
            result = provider.destroy_instance(instance.instance_id)

            # Note: the API might return True immediately or complete the deletion asynchronously
            assert result is True or result is False
        except Exception as e:
            pytest.skip(f"Destroy instance test failed: {str(e)}. API might not be available yet.")

    def test_config_validation(self, provider):
        """Test configuration validation."""
        # Valid config
        is_valid, error = provider.validate_config({"access_key_id": "LTAI5tXXXXXXXXXX", "account_id": "1234567890123456", "region": "cn-hangzhou", "timeout": 30})
        assert is_valid is True
        assert error is None

        # Invalid access key
        is_valid, error = provider.validate_config({"access_key_id": "INVALID_KEY"})
        assert is_valid is False

        # Missing account ID
        is_valid, error = provider.validate_config({})
        assert is_valid is False
        assert "Account ID" in error

    def test_timeout_limit(self, provider):
        """Test that timeout is limited to 30 seconds."""
        # Timeout > 30 should be clamped to 30
        provider2 = AliyunCodeInterpreterProvider()
        provider2.initialize(
            {
                "access_key_id": os.getenv("AGENTRUN_ACCESS_KEY_ID"),
                "access_key_secret": os.getenv("AGENTRUN_ACCESS_KEY_SECRET"),
                "account_id": os.getenv("AGENTRUN_ACCOUNT_ID"),
                "timeout": 60,  # Request 60 seconds
            }
        )

        # Should be clamped to 30
        assert provider2.timeout == 30


@pytest.mark.integration
class TestAliyunCodeInterpreterScenarios:
    """Test real-world usage scenarios."""

    def test_data_processing_workflow(self, provider):
        """Test a simple data processing workflow."""
        try:
            instance = provider.create_instance("python")

            # Execute data processing code
            code = """
import json
data = [{"name": "Alice", "age": 30}, {"name": "Bob", "age": 25}]
result = json.dumps(data, indent=2)
print(result)
"""
            result = provider.execute_code(instance_id=instance.instance_id, code=code, language="python", timeout=30)

            assert result.exit_code == 0
            assert "Alice" in result.stdout
            assert "Bob" in result.stdout

            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Data processing test failed: {str(e)}")

    def test_string_manipulation(self, provider):
        """Test string manipulation operations."""
        try:
            instance = provider.create_instance("python")

            code = """
text = "Hello, World!"
print(text.upper())
print(text.lower())
print(text.replace("World", "Aliyun"))
"""
            result = provider.execute_code(instance_id=instance.instance_id, code=code, language="python", timeout=30)

            assert result.exit_code == 0
            assert "HELLO, WORLD!" in result.stdout
            assert "hello, world!" in result.stdout
            assert "Hello, Aliyun!" in result.stdout

            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"String manipulation test failed: {str(e)}")

    def test_context_persistence(self, provider):
        """Test code execution with context persistence."""
        try:
            instance = provider.create_instance("python")

            # First execution - define variable
            result1 = provider.execute_code(instance_id=instance.instance_id, code="x = 42\nprint(x)", language="python", timeout=30)
            assert result1.exit_code == 0

            # Second execution - use variable
            # Note: Context persistence depends on whether the contextId is reused
            result2 = provider.execute_code(instance_id=instance.instance_id, code="print(f'x is {x}')", language="python", timeout=30)

            # Context might or might not persist depending on API implementation
            assert result2.exit_code == 0

            provider.destroy_instance(instance.instance_id)
        except Exception as e:
            pytest.skip(f"Context persistence test failed: {str(e)}")


def test_without_credentials():
    """Test that tests are skipped without credentials."""
    # This test should always run (not skipped)
    if all(
        [
            os.getenv("AGENTRUN_ACCESS_KEY_ID"),
            os.getenv("AGENTRUN_ACCESS_KEY_SECRET"),
            os.getenv("AGENTRUN_ACCOUNT_ID"),
        ]
    ):
        assert True  # Credentials are set
    else:
        assert True  # Credentials not set, test still passes
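These integration tests mirror the env-var setup described in the module docstring, and because the class carries the `integration` marker registered in pytest.ini, the whole group can be selected or excluded with one flag. A typical invocation (values below are placeholders, as in the docstring):

    export AGENTRUN_ACCESS_KEY_ID="LTAI5t..."
    export AGENTRUN_ACCESS_KEY_SECRET="..."
    export AGENTRUN_ACCOUNT_ID="1234567890..."
    pytest agent/sandbox/tests -m integration -v

Without those variables, the module-level `pytestmark` skips every test in the file, so the suite stays green on machines without Aliyun access.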
423  agent/sandbox/tests/test_providers.py  Normal file
@@ -0,0 +1,423 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Unit tests for sandbox provider abstraction layer.
"""

import pytest
from unittest.mock import Mock, patch
import requests

from agent.sandbox.providers.base import SandboxProvider, SandboxInstance, ExecutionResult
from agent.sandbox.providers.manager import ProviderManager
from agent.sandbox.providers.self_managed import SelfManagedProvider


class TestSandboxDataclasses:
    """Test sandbox dataclasses."""

    def test_sandbox_instance_creation(self):
        """Test SandboxInstance dataclass creation."""
        instance = SandboxInstance(
            instance_id="test-123",
            provider="self_managed",
            status="running",
            metadata={"language": "python"}
        )

        assert instance.instance_id == "test-123"
        assert instance.provider == "self_managed"
        assert instance.status == "running"
        assert instance.metadata == {"language": "python"}

    def test_sandbox_instance_default_metadata(self):
        """Test SandboxInstance with None metadata."""
        instance = SandboxInstance(
            instance_id="test-123",
            provider="self_managed",
            status="running",
            metadata=None
        )

        assert instance.metadata == {}

    def test_execution_result_creation(self):
        """Test ExecutionResult dataclass creation."""
        result = ExecutionResult(
            stdout="Hello, World!",
            stderr="",
            exit_code=0,
            execution_time=1.5,
            metadata={"status": "success"}
        )

        assert result.stdout == "Hello, World!"
        assert result.stderr == ""
        assert result.exit_code == 0
        assert result.execution_time == 1.5
        assert result.metadata == {"status": "success"}

    def test_execution_result_default_metadata(self):
        """Test ExecutionResult with None metadata."""
        result = ExecutionResult(
            stdout="output",
            stderr="error",
            exit_code=1,
            execution_time=0.5,
            metadata=None
        )

        assert result.metadata == {}


class TestProviderManager:
    """Test ProviderManager functionality."""

    def test_manager_initialization(self):
        """Test ProviderManager initialization."""
        manager = ProviderManager()

        assert manager.current_provider is None
        assert manager.current_provider_name is None
        assert not manager.is_configured()

    def test_set_provider(self):
        """Test setting a provider."""
        manager = ProviderManager()
        mock_provider = Mock(spec=SandboxProvider)

        manager.set_provider("self_managed", mock_provider)

        assert manager.current_provider == mock_provider
        assert manager.current_provider_name == "self_managed"
        assert manager.is_configured()

    def test_get_provider(self):
        """Test getting the current provider."""
        manager = ProviderManager()
        mock_provider = Mock(spec=SandboxProvider)

        manager.set_provider("self_managed", mock_provider)

        assert manager.get_provider() == mock_provider

    def test_get_provider_name(self):
        """Test getting the current provider name."""
        manager = ProviderManager()
        mock_provider = Mock(spec=SandboxProvider)

        manager.set_provider("self_managed", mock_provider)

        assert manager.get_provider_name() == "self_managed"

    def test_get_provider_when_not_set(self):
        """Test getting provider when none is set."""
        manager = ProviderManager()

        assert manager.get_provider() is None
        assert manager.get_provider_name() is None


class TestSelfManagedProvider:
    """Test SelfManagedProvider implementation."""

    def test_provider_initialization(self):
        """Test provider initialization."""
        provider = SelfManagedProvider()

        assert provider.endpoint == "http://localhost:9385"
        assert provider.timeout == 30
        assert provider.max_retries == 3
        assert provider.pool_size == 10
        assert not provider._initialized

    @patch('requests.get')
    def test_initialize_success(self, mock_get):
        """Test successful initialization."""
        mock_response = Mock()
        mock_response.status_code = 200
        mock_get.return_value = mock_response

        provider = SelfManagedProvider()
        result = provider.initialize({
            "endpoint": "http://test-endpoint:9385",
            "timeout": 60,
            "max_retries": 5,
            "pool_size": 20
        })

        assert result is True
        assert provider.endpoint == "http://test-endpoint:9385"
        assert provider.timeout == 60
        assert provider.max_retries == 5
        assert provider.pool_size == 20
        assert provider._initialized
        mock_get.assert_called_once_with("http://test-endpoint:9385/healthz", timeout=5)

    @patch('requests.get')
    def test_initialize_failure(self, mock_get):
        """Test initialization failure."""
        mock_get.side_effect = Exception("Connection error")

        provider = SelfManagedProvider()
        result = provider.initialize({"endpoint": "http://invalid:9385"})

        assert result is False
        assert not provider._initialized

    def test_initialize_default_config(self):
        """Test initialization with default config."""
        with patch('requests.get') as mock_get:
            mock_response = Mock()
            mock_response.status_code = 200
            mock_get.return_value = mock_response

            provider = SelfManagedProvider()
            result = provider.initialize({})

            assert result is True
            assert provider.endpoint == "http://localhost:9385"
            assert provider.timeout == 30

    def test_create_instance_python(self):
        """Test creating a Python instance."""
        provider = SelfManagedProvider()
        provider._initialized = True

        instance = provider.create_instance("python")

        assert instance.provider == "self_managed"
        assert instance.status == "running"
        assert instance.metadata["language"] == "python"
        assert instance.metadata["endpoint"] == "http://localhost:9385"
        assert len(instance.instance_id) > 0  # Verify instance_id exists

    def test_create_instance_nodejs(self):
        """Test creating a Node.js instance."""
        provider = SelfManagedProvider()
        provider._initialized = True

        instance = provider.create_instance("nodejs")

        assert instance.metadata["language"] == "nodejs"

    def test_create_instance_not_initialized(self):
        """Test creating instance when provider not initialized."""
        provider = SelfManagedProvider()

        with pytest.raises(RuntimeError, match="Provider not initialized"):
            provider.create_instance("python")

    @patch('requests.post')
    def test_execute_code_success(self, mock_post):
        """Test successful code execution."""
        mock_response = Mock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            "status": "success",
            "stdout": '{"result": 42}',
            "stderr": "",
            "exit_code": 0,
            "time_used_ms": 100.0,
            "memory_used_kb": 1024.0
        }
        mock_post.return_value = mock_response

        provider = SelfManagedProvider()
        provider._initialized = True

        result = provider.execute_code(
            instance_id="test-123",
            code="def main(): return {'result': 42}",
            language="python",
            timeout=10
        )

        assert result.stdout == '{"result": 42}'
        assert result.stderr == ""
        assert result.exit_code == 0
        assert result.execution_time > 0
        assert result.metadata["status"] == "success"
        assert result.metadata["instance_id"] == "test-123"

    @patch('requests.post')
    def test_execute_code_timeout(self, mock_post):
        """Test code execution timeout."""
        mock_post.side_effect = requests.Timeout()

        provider = SelfManagedProvider()
        provider._initialized = True

        with pytest.raises(TimeoutError, match="Execution timed out"):
            provider.execute_code(
                instance_id="test-123",
                code="while True: pass",
                language="python",
                timeout=5
            )

    @patch('requests.post')
    def test_execute_code_http_error(self, mock_post):
        """Test code execution with HTTP error."""
        mock_response = Mock()
        mock_response.status_code = 500
        mock_response.text = "Internal Server Error"
        mock_post.return_value = mock_response

        provider = SelfManagedProvider()
        provider._initialized = True

        with pytest.raises(RuntimeError, match="HTTP 500"):
            provider.execute_code(
                instance_id="test-123",
                code="invalid code",
                language="python"
            )

    def test_execute_code_not_initialized(self):
        """Test executing code when provider not initialized."""
        provider = SelfManagedProvider()

        with pytest.raises(RuntimeError, match="Provider not initialized"):
            provider.execute_code(
                instance_id="test-123",
                code="print('hello')",
                language="python"
            )

    def test_destroy_instance(self):
        """Test destroying an instance (no-op for self-managed)."""
        provider = SelfManagedProvider()
        provider._initialized = True

        # For self-managed, destroy_instance is a no-op
        result = provider.destroy_instance("test-123")

        assert result is True

    @patch('requests.get')
    def test_health_check_success(self, mock_get):
        """Test successful health check."""
        mock_response = Mock()
        mock_response.status_code = 200
        mock_get.return_value = mock_response

        provider = SelfManagedProvider()

        result = provider.health_check()

        assert result is True
        mock_get.assert_called_once_with("http://localhost:9385/healthz", timeout=5)

    @patch('requests.get')
    def test_health_check_failure(self, mock_get):
        """Test health check failure."""
        mock_get.side_effect = Exception("Connection error")

        provider = SelfManagedProvider()

        result = provider.health_check()

        assert result is False

    def test_get_supported_languages(self):
        """Test getting supported languages."""
        provider = SelfManagedProvider()

        languages = provider.get_supported_languages()

        assert "python" in languages
        assert "nodejs" in languages
        assert "javascript" in languages

    def test_get_config_schema(self):
        """Test getting configuration schema."""
        schema = SelfManagedProvider.get_config_schema()

        assert "endpoint" in schema
        assert schema["endpoint"]["type"] == "string"
        assert schema["endpoint"]["required"] is True
        assert schema["endpoint"]["default"] == "http://localhost:9385"

        assert "timeout" in schema
        assert schema["timeout"]["type"] == "integer"
        assert schema["timeout"]["default"] == 30

        assert "max_retries" in schema
        assert schema["max_retries"]["type"] == "integer"

        assert "pool_size" in schema
        assert schema["pool_size"]["type"] == "integer"

    def test_normalize_language_python(self):
        """Test normalizing Python language identifier."""
        provider = SelfManagedProvider()

        assert provider._normalize_language("python") == "python"
        assert provider._normalize_language("python3") == "python"
        assert provider._normalize_language("PYTHON") == "python"
        assert provider._normalize_language("Python3") == "python"

    def test_normalize_language_javascript(self):
        """Test normalizing JavaScript language identifier."""
        provider = SelfManagedProvider()

        assert provider._normalize_language("javascript") == "nodejs"
        assert provider._normalize_language("nodejs") == "nodejs"
        assert provider._normalize_language("JavaScript") == "nodejs"
        assert provider._normalize_language("NodeJS") == "nodejs"

    def test_normalize_language_default(self):
        """Test language normalization with empty/unknown input."""
        provider = SelfManagedProvider()

        assert provider._normalize_language("") == "python"
        assert provider._normalize_language(None) == "python"
        assert provider._normalize_language("unknown") == "unknown"


class TestProviderInterface:
    """Test that providers correctly implement the interface."""

    def test_self_managed_provider_is_abstract(self):
        """Test that SelfManagedProvider is a SandboxProvider."""
        provider = SelfManagedProvider()

        assert isinstance(provider, SandboxProvider)

    def test_self_managed_provider_has_abstract_methods(self):
        """Test that SelfManagedProvider implements all abstract methods."""
        provider = SelfManagedProvider()

        # Check all abstract methods are implemented
        assert hasattr(provider, 'initialize')
        assert callable(provider.initialize)

        assert hasattr(provider, 'create_instance')
        assert callable(provider.create_instance)

        assert hasattr(provider, 'execute_code')
        assert callable(provider.execute_code)

        assert hasattr(provider, 'destroy_instance')
        assert callable(provider.destroy_instance)

        assert hasattr(provider, 'health_check')
        assert callable(provider.health_check)

        assert hasattr(provider, 'get_supported_languages')
        assert callable(provider.get_supported_languages)
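The manager tests above describe a deliberately small surface: one active provider at a time, installed with `set_provider` and fetched with `get_provider`. A sketch of how the pieces compose, using only calls these tests exercise (the endpoint and payload are illustrative, not production values):

    from agent.sandbox.providers.manager import ProviderManager
    from agent.sandbox.providers.self_managed import SelfManagedProvider

    provider = SelfManagedProvider()
    # initialize() health-checks <endpoint>/healthz and returns False on failure.
    if provider.initialize({"endpoint": "http://localhost:9385", "timeout": 30}):
        manager = ProviderManager()
        manager.set_provider("self_managed", provider)
        assert manager.is_configured()

        active = manager.get_provider()
        instance = active.create_instance("python")
        result = active.execute_code(
            instance_id=instance.instance_id,
            code="def main(): return {'result': 42}",
            language="python",
            timeout=10,
        )
        active.destroy_instance(instance.instance_id)  # a no-op for self_managed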
78  agent/sandbox/tests/verify_sdk.py  Normal file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3
"""
Quick verification script for Aliyun Code Interpreter provider using official SDK.
"""

import importlib.util
import sys

sys.path.insert(0, ".")

print("=" * 60)
print("Aliyun Code Interpreter Provider - SDK Verification")
print("=" * 60)

# Test 1: Import provider
print("\n[1/5] Testing provider import...")
try:
    from agent.sandbox.providers.aliyun_codeinterpreter import AliyunCodeInterpreterProvider

    print("✓ Provider imported successfully")
except ImportError as e:
    print(f"✗ Import failed: {e}")
    sys.exit(1)

# Test 2: Check provider class
print("\n[2/5] Testing provider class...")
provider = AliyunCodeInterpreterProvider()
assert hasattr(provider, "initialize")
assert hasattr(provider, "create_instance")
assert hasattr(provider, "execute_code")
assert hasattr(provider, "destroy_instance")
assert hasattr(provider, "health_check")
print("✓ Provider has all required methods")

# Test 3: Check SDK imports
print("\n[3/5] Testing SDK imports...")
try:
    # Check if agentrun SDK is available using importlib
    if (
        importlib.util.find_spec("agentrun.sandbox") is None
        or importlib.util.find_spec("agentrun.utils.config") is None
        or importlib.util.find_spec("agentrun.utils.exception") is None
    ):
        raise ImportError("agentrun SDK not found")

    # Verify imports work (assign to _ to indicate they're intentionally unused)
    from agentrun.sandbox import CodeInterpreterSandbox, TemplateType, CodeLanguage
    from agentrun.utils.config import Config
    from agentrun.utils.exception import ServerError
    _ = (CodeInterpreterSandbox, TemplateType, CodeLanguage, Config, ServerError)

    print("✓ SDK modules imported successfully")
except ImportError as e:
    print(f"✗ SDK import failed: {e}")
    sys.exit(1)

# Test 4: Check config schema
print("\n[4/5] Testing configuration schema...")
schema = AliyunCodeInterpreterProvider.get_config_schema()
required_fields = ["access_key_id", "access_key_secret", "account_id"]
for field in required_fields:
    assert field in schema
    assert schema[field]["required"] is True
print(f"✓ All required fields present: {', '.join(required_fields)}")

# Test 5: Check supported languages
print("\n[5/5] Testing supported languages...")
languages = provider.get_supported_languages()
assert "python" in languages
assert "javascript" in languages
print(f"✓ Supported languages: {', '.join(languages)}")

print("\n" + "=" * 60)
print("All verification tests passed! ✓")
print("=" * 60)
print("\nNote: This provider now uses the official agentrun-sdk.")
print("SDK Documentation: https://github.com/Serverless-Devs/agentrun-sdk-python")
print("API Documentation: https://help.aliyun.com/zh/functioncompute/fc/sandbox-sandbox-code-interepreter")
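Since the script prepends "." to sys.path and imports `agent.sandbox.providers.aliyun_codeinterpreter`, it is presumably meant to be invoked from the repository root:

    python agent/sandbox/tests/verify_sdk.py

It exits non-zero on the first failed import, which makes it usable as a quick pre-flight check before running the integration suite.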
135  sandbox/uv.lock → agent/sandbox/uv.lock  generated
@ -1,7 +1,16 @@
|
||||
version = 1
|
||||
revision = 2
|
||||
revision = 3
|
||||
requires-python = ">=3.10"
|
||||
|
||||
[[package]]
|
||||
name = "annotated-doc"
|
||||
version = "0.0.4"
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
|
||||
wheels = [
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
version = "0.7.0"
|
||||
@ -16,7 +25,6 @@ name = "anyio"
|
||||
version = "4.9.0"
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
dependencies = [
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "idna" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
@ -53,32 +61,6 @@ version = "3.4.2"
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
|
||||
wheels = [
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
|
||||
@ -141,27 +123,19 @@ wheels = [
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998, upload-time = "2025-01-27T10:46:09.186Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.2.2"
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883, upload-time = "2024-07-12T22:26:00.161Z" }
|
||||
wheels = [
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453, upload-time = "2024-07-12T22:25:58.476Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.12"
|
||||
version = "0.128.0"
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" },
]
[[package]]
@@ -304,33 +278,6 @@ dependencies = [
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
@@ -362,24 +309,6 @@ wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" },
]
[[package]]
@@ -420,14 +349,15 @@ wheels = [

[[package]]
name = "starlette"
version = "0.46.2"
|
||||
version = "0.49.1"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "anyio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/1b/3f/507c21db33b66fb027a332f2cb3abbbe924cc3a79ced12f01ed8645955c9/starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb", size = 2654703, upload-time = "2025-10-28T17:34:10.928Z" }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" },
]
[[package]]
@@ -453,11 +383,11 @@ wheels = [

[[package]]
name = "urllib3"
version = "2.4.0"
|
||||
version = "2.6.3"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]
[[package]]
@@ -467,7 +397,6 @@ source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815, upload-time = "2025-04-19T06:02:50.101Z" }
wheels = [
@@ -480,28 +409,6 @@ version = "1.17.2"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531, upload-time = "2025-01-14T10:35:45.465Z" }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/5a/d1/1daec934997e8b160040c78d7b31789f19b122110a75eca3d4e8da0049e1/wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984", size = 53307, upload-time = "2025-01-14T10:33:13.616Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/1b/7b/13369d42651b809389c1a7153baa01d9700430576c81a2f5c5e460df0ed9/wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22", size = 38486, upload-time = "2025-01-14T10:33:15.947Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/62/bf/e0105016f907c30b4bd9e377867c48c34dc9c6c0c104556c9c9126bd89ed/wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7", size = 38777, upload-time = "2025-01-14T10:33:17.462Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/27/70/0f6e0679845cbf8b165e027d43402a55494779295c4b08414097b258ac87/wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c", size = 83314, upload-time = "2025-01-14T10:33:21.282Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/0f/77/0576d841bf84af8579124a93d216f55d6f74374e4445264cb378a6ed33eb/wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72", size = 74947, upload-time = "2025-01-14T10:33:24.414Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/90/ec/00759565518f268ed707dcc40f7eeec38637d46b098a1f5143bff488fe97/wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061", size = 82778, upload-time = "2025-01-14T10:33:26.152Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f8/5a/7cffd26b1c607b0b0c8a9ca9d75757ad7620c9c0a9b4a25d3f8a1480fafc/wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2", size = 81716, upload-time = "2025-01-14T10:33:27.372Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/7e/09/dccf68fa98e862df7e6a60a61d43d644b7d095a5fc36dbb591bbd4a1c7b2/wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c", size = 74548, upload-time = "2025-01-14T10:33:28.52Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b7/8e/067021fa3c8814952c5e228d916963c1115b983e21393289de15128e867e/wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62", size = 81334, upload-time = "2025-01-14T10:33:29.643Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/4b/0d/9d4b5219ae4393f718699ca1c05f5ebc0c40d076f7e65fd48f5f693294fb/wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563", size = 36427, upload-time = "2025-01-14T10:33:30.832Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/72/6a/c5a83e8f61aec1e1aeef939807602fb880e5872371e95df2137142f5c58e/wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f", size = 38774, upload-time = "2025-01-14T10:33:32.897Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308, upload-time = "2025-01-14T10:33:33.992Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488, upload-time = "2025-01-14T10:33:35.264Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776, upload-time = "2025-01-14T10:33:38.28Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776, upload-time = "2025-01-14T10:33:40.678Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420, upload-time = "2025-01-14T10:33:41.868Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199, upload-time = "2025-01-14T10:33:43.598Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307, upload-time = "2025-01-14T10:33:48.499Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025, upload-time = "2025-01-14T10:33:51.191Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879, upload-time = "2025-01-14T10:33:52.328Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419, upload-time = "2025-01-14T10:33:53.551Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773, upload-time = "2025-01-14T10:33:56.323Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799, upload-time = "2025-01-14T10:33:57.4Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821, upload-time = "2025-01-14T10:33:59.334Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919, upload-time = "2025-01-14T10:34:04.093Z" },