mirror of
https://github.com/enricoros/big-AGI.git
synced 2026-05-10 21:50:14 -07:00
Compare commits
727 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| d936629ead | |||
| 9bd1a66208 | |||
| 1a0c029ee8 | |||
| e7be228703 | |||
| 0ab4dc972f | |||
| 5f1ca8954f | |||
| 3ec1b033ce | |||
| 0caf27af9b | |||
| bd67e14fa4 | |||
| 494c3b542c | |||
| 8e0884eb64 | |||
| 73c4dc4ac8 | |||
| d77274058d | |||
| 0c8460419b | |||
| eabb589390 | |||
| 62f860ae93 | |||
| 605aae873c | |||
| 62e9ee5b05 | |||
| d686f5d143 | |||
| 3922f232ae | |||
| 6735b438d3 | |||
| fb1e30ab32 | |||
| 0ec06edb57 | |||
| 2a52673c56 | |||
| cc20d00d8a | |||
| 3d9201f7dc | |||
| 176732a6c0 | |||
| 39815b3af3 | |||
| bcce517089 | |||
| a4b50d0d97 | |||
| 2a124e7588 | |||
| a85556ab5b | |||
| cef93d6084 | |||
| 207e257778 | |||
| 12203daa22 | |||
| 27f8e9248d | |||
| 51384dc984 | |||
| bc76cbb5ad | |||
| 5a1ca83f6d | |||
| c9f585f808 | |||
| 9f559e1dbf | |||
| e458bca1a7 | |||
| 43d2226019 | |||
| 122bc34701 | |||
| e01358e268 | |||
| 847c84c3e6 | |||
| b11cac4328 | |||
| f617b06109 | |||
| 345ccf3369 | |||
| d111b8af62 | |||
| 8f964c5c49 | |||
| b6f3f4538f | |||
| f6dd30d5d8 | |||
| af8b79f849 | |||
| 0cfccc423b | |||
| f9a5d582d4 | |||
| 684e00d594 | |||
| 3cd2df0b50 | |||
| 02197f4ee6 | |||
| f9049a3fea | |||
| 462bddc271 | |||
| f79000cf39 | |||
| 1d95273f4d | |||
| 6c4579f434 | |||
| 4ef56ade21 | |||
| 7c1369d6e9 | |||
| 533d54b106 | |||
| cce0ca6560 | |||
| e87ce2593c | |||
| 431dc8b667 | |||
| 5caf614bf7 | |||
| ecf9703570 | |||
| e7641393a0 | |||
| 2201f6ff5a | |||
| 557e1ce293 | |||
| cbe9a6b9a5 | |||
| 9bbcb038d4 | |||
| 3602204420 | |||
| 6f485e5589 | |||
| 2f46a3dfaf | |||
| 267845bba3 | |||
| 6f33a8eebf | |||
| b0d2b09a2e | |||
| c699b6b16b | |||
| 1789bac28d | |||
| 60c05f615f | |||
| bd84523671 | |||
| eb21b9c770 | |||
| ff3ac11afb | |||
| 1ef8c3d02b | |||
| 2ebaf6279b | |||
| a5ee40e184 | |||
| b17a97eac7 | |||
| 63908bfaf6 | |||
| 3f9a419a19 | |||
| bae691e33e | |||
| 91539346ee | |||
| 4842ca81b3 | |||
| 9c77a1a4ab | |||
| 4af284be42 | |||
| 6aec68bb3c | |||
| d4e2b0834f | |||
| 24c2702f96 | |||
| 4691fc9bad | |||
| 8c6c60b6f1 | |||
| bc482407fe | |||
| ff05593db8 | |||
| 3d304d9374 | |||
| 1734f0c2f1 | |||
| 1b25e5df85 | |||
| ea8eb32b0b | |||
| 614a1f95de | |||
| d36bc28914 | |||
| deec48d7c1 | |||
| b318ec8d39 | |||
| b4b0e2befc | |||
| 51d3fe13da | |||
| 58220216d3 | |||
| cac75cca42 | |||
| 47f247907f | |||
| 81e04b7322 | |||
| 56a964b700 | |||
| 458341d79f | |||
| d1d212b075 | |||
| 59c9996489 | |||
| bf8221a2f1 | |||
| 787a11a040 | |||
| 05d114be2f | |||
| 3c04a7dbac | |||
| 1673e1148d | |||
| de416b035d | |||
| 08aaf2989d | |||
| a50964060c | |||
| 54b6108719 | |||
| 585e5c254a | |||
| 477808c9bb | |||
| 6c58a2b688 | |||
| c9854bf30f | |||
| cfed4bbd41 | |||
| 2dd6485b0e | |||
| bf1dd5b860 | |||
| 765c373f7d | |||
| 32d752e82b | |||
| 4623e438fa | |||
| 8a44ff396f | |||
| 086d7ecae4 | |||
| d6adebb711 | |||
| 8325fe7b3c | |||
| 7cf83f878b | |||
| 597ba26424 | |||
| 7bccea47f5 | |||
| 5770116779 | |||
| 0679144f69 | |||
| c9fd288b52 | |||
| 9ae449fcfd | |||
| 249f67f796 | |||
| e91c0bb554 | |||
| 5e306d9598 | |||
| 42ebc81cbb | |||
| f624c37db5 | |||
| 22b6f42936 | |||
| 760c66cac8 | |||
| 1d91e9da03 | |||
| 7eac409ec6 | |||
| 128558420c | |||
| ca3e664690 | |||
| 7eb37462d7 | |||
| 31e02c2d39 | |||
| 003a68b9b8 | |||
| f418708389 | |||
| d23a564035 | |||
| 7fe586244c | |||
| f1a597cdc6 | |||
| 9b68c8f58c | |||
| be5b57ea71 | |||
| 425c82f26d | |||
| 942421c1fb | |||
| b1184f6928 | |||
| ffeb6d1b98 | |||
| b2718b56b7 | |||
| 455f834957 | |||
| 8a14c80ff8 | |||
| e268e733c7 | |||
| 8933a8dfb3 | |||
| 9796cc525c | |||
| cdbf9a9190 | |||
| c26792292d | |||
| 4698e0ee03 | |||
| 68afcb2f4b | |||
| e8f61e46e3 | |||
| 317bb2b7c8 | |||
| d1b3c6b468 | |||
| b35eccc984 | |||
| a780c92047 | |||
| 5fc65698ba | |||
| c923b5ec4c | |||
| 609b2b9a7b | |||
| a257278004 | |||
| 273daed634 | |||
| a6862d8c58 | |||
| 323e5b4ea7 | |||
| 89217a5308 | |||
| a45e995d2f | |||
| 8700b4c8ca | |||
| 1f7f5fb488 | |||
| afde8ee864 | |||
| 3884c26b15 | |||
| 24dce7eae9 | |||
| 1db4e9b771 | |||
| b2ed7eae00 | |||
| 3169fd67e8 | |||
| 773ceb1396 | |||
| 8c62ee1720 | |||
| 5fa1f52922 | |||
| d2180c010c | |||
| b73df7b2ce | |||
| 971f737846 | |||
| a393353907 | |||
| 751f609554 | |||
| e8cd5c6552 | |||
| 86e387b270 | |||
| 32f15aa621 | |||
| bfc889a9e5 | |||
| bd907625a8 | |||
| 60004926d7 | |||
| ac751dfd1a | |||
| 6828eee17f | |||
| 19c97f397b | |||
| 0167a8bdd8 | |||
| 93e5044603 | |||
| 024d930677 | |||
| 98873446a8 | |||
| 5318b7a406 | |||
| 4a6c3cbcd2 | |||
| ac0a39c202 | |||
| 88d39345a5 | |||
| 7aa9cb07b2 | |||
| ef30c8d28d | |||
| 2727f690b4 | |||
| 5945c24301 | |||
| 7b6aff1f95 | |||
| cb0fe3aadd | |||
| 4f9d69f9c2 | |||
| c18aeabe06 | |||
| 550742323a | |||
| c71f789a08 | |||
| a9b4b195bf | |||
| 52e8177f42 | |||
| b0743efc48 | |||
| 6dfd652dac | |||
| 3f93cb2e6d | |||
| 8f7b9b7f19 | |||
| abff89ab6b | |||
| d4f03f743a | |||
| c3714f6651 | |||
| 9b4d0ddf2f | |||
| 2c9ac2f549 | |||
| c1292de2a0 | |||
| 21d5e4cd29 | |||
| a9495a3e15 | |||
| bff5b3d765 | |||
| a4ff37eecc | |||
| 460209f486 | |||
| 96c68c86a4 | |||
| 8b152fdff8 | |||
| 25c9a52873 | |||
| 44302d903c | |||
| c7b8668609 | |||
| 7d60df6266 | |||
| b7f898a5e5 | |||
| 04c4dbe4b8 | |||
| 8d04c494df | |||
| a6aadf76f3 | |||
| a685ef97bf | |||
| d46c29689f | |||
| 65ce07395b | |||
| cc1542fe95 | |||
| b70d57d878 | |||
| 5aa857362b | |||
| c92fc34051 | |||
| b01e66f12a | |||
| a88d20784a | |||
| 63486ed6cf | |||
| 3ceec773f2 | |||
| 817fa56ec4 | |||
| 088fb21a90 | |||
| 79c755a469 | |||
| a091d3f011 | |||
| c7c01a5d7c | |||
| cdc0f48973 | |||
| e884f6b962 | |||
| 485a9bea71 | |||
| f3c3b667ca | |||
| 3b0c4f31b6 | |||
| 5e54600766 | |||
| c3e54f69b7 | |||
| c4022d1c9b | |||
| 6e13a78a24 | |||
| c7cacd9727 | |||
| a77110f704 | |||
| 83a6069de5 | |||
| e9a1890e54 | |||
| bf928aa06e | |||
| b2dc50590c | |||
| 229e53ac32 | |||
| 51e8a47615 | |||
| e80b58a412 | |||
| 48ced8b079 | |||
| c07e2aea1e | |||
| f3194aa30e | |||
| cb3e4cd951 | |||
| f5d8d029ea | |||
| 7c946c4126 | |||
| ded4ea0d69 | |||
| c180c549fe | |||
| 1f30f1168f | |||
| 9446f15922 | |||
| e13b2c9cd9 | |||
| e9e14e0292 | |||
| added19656 | |||
| 4fa3c4d479 | |||
| 690738de9a | |||
| cb31d27e68 | |||
| e6658df123 | |||
| 0b7154a14c | |||
| 02c1838de5 | |||
| fc455fceb8 | |||
| 8d40cdd234 | |||
| 40145c669a | |||
| 34d2fc233f | |||
| 670ec0381a | |||
| 2128f255fe | |||
| b717bd9a9a | |||
| 8aab9311f5 | |||
| ff3e16ea67 | |||
| 1de039c315 | |||
| d05e1786d7 | |||
| e34b5a7372 | |||
| a1b3d1b508 | |||
| 1ebccdf420 | |||
| e5f674509c | |||
| 197a4ae5c0 | |||
| 64d2dcf39c | |||
| caf54c736b | |||
| 423c2cce28 | |||
| a1af51efcb | |||
| ffc1bf9c58 | |||
| a54bfdb342 | |||
| 03861d2dbd | |||
| 8c080da6bf | |||
| a8c98056b6 | |||
| 78e663f955 | |||
| 70546a5039 | |||
| 30f78b33cb | |||
| 712e8c1f16 | |||
| 933dfdfb53 | |||
| 9ce86b029f | |||
| 13580cc69d | |||
| a7dee0002d | |||
| c84b2df3fa | |||
| d9471a8684 | |||
| ef630c2272 | |||
| e188c71652 | |||
| 910260c2c8 | |||
| 22752abc38 | |||
| 92bc3a5d64 | |||
| 1383752cc1 | |||
| 66af16fb81 | |||
| fc019d7b46 | |||
| ac4f0fcb12 | |||
| a6c2bc663d | |||
| e62ffa02e9 | |||
| a003600839 | |||
| ea73feb06d | |||
| 3bdf69e1b7 | |||
| 590fe78bd1 | |||
| 76187ba0e7 | |||
| 5eba375f4d | |||
| 8fa6a8251f | |||
| 75fa046f30 | |||
| 08a8cd1430 | |||
| 3afbb78a39 | |||
| fca6ccd816 | |||
| 8d351822c1 | |||
| 7d274a31fe | |||
| e36dde0d25 | |||
| 51cc6e5ae5 | |||
| 28d911c617 | |||
| b1e9fe58fb | |||
| 16ba014ade | |||
| e9d5a20c1a | |||
| 6e0036f9c4 | |||
| d7e189aa1c | |||
| ea2b444fb2 | |||
| cd1efaf26e | |||
| e47f0e5d43 | |||
| 5284d37984 | |||
| 1bf6fa0e4d | |||
| fc294c82f1 | |||
| 7b1dc49dda | |||
| d15ddeea24 | |||
| eaac213859 | |||
| 02c1460351 | |||
| 2fff35b7d9 | |||
| c5b9072bde | |||
| 8a570e912a | |||
| 1dcc40afb8 | |||
| c2092f8035 | |||
| 886c4b411e | |||
| 8888fd40cd | |||
| 31cd01bccf | |||
| c59b221004 | |||
| cb3cc3e74c | |||
| 9e90015fcc | |||
| 95e0517056 | |||
| 2b2f47915f | |||
| 9acd178ce1 | |||
| f381f80184 | |||
| c83be61343 | |||
| f6e49d31ec | |||
| cc0429a362 | |||
| b35901d94c | |||
| c0df1a23f4 | |||
| 495619af2c | |||
| 72dfadf106 | |||
| 5825909e45 | |||
| d3f6d87ee0 | |||
| c4f4c5ddad | |||
| 2921d7ca27 | |||
| 2021cbc988 | |||
| e9e29861b2 | |||
| 8e6da36059 | |||
| 5e1469e12e | |||
| bd7465f8b1 | |||
| 570397a616 | |||
| b3b5f1daef | |||
| 25ec3ae47c | |||
| 5ba5e3da58 | |||
| 9296c14ca0 | |||
| 310b5d3422 | |||
| 1c5967112e | |||
| 49a3d8ee71 | |||
| cf8b61e8d9 | |||
| 967ae5723e | |||
| 03421acf2f | |||
| d43896cc5a | |||
| b283124a2f | |||
| 8c39be01f8 | |||
| fb2bd4ccd8 | |||
| 5b826ffc45 | |||
| 0b2ab365d3 | |||
| 93fc54992c | |||
| 60b7326deb | |||
| d6e6139244 | |||
| 0892911ddc | |||
| 30267ac50c | |||
| ffef0ef31d | |||
| fc047087ce | |||
| 81d4966535 | |||
| 004d63fda1 | |||
| 23e2dbb354 | |||
| 28e9899b97 | |||
| 7441d41550 | |||
| 99e2d5597a | |||
| 74321a44ca | |||
| 7b664affb7 | |||
| c411835f3b | |||
| 7b62c946a5 | |||
| 252e2fcd29 | |||
| aa2731bccc | |||
| 282c439963 | |||
| e99459aba0 | |||
| 4c35cbbe34 | |||
| cab3537ae2 | |||
| c3f211389b | |||
| a4de84a842 | |||
| 2bf1eaaa0f | |||
| 7f5ddd1629 | |||
| ed798fec65 | |||
| 90386f5794 | |||
| 8ada8811bf | |||
| b24badabef | |||
| 4e20cb12cd | |||
| 245da9e6cc | |||
| a800b34aa7 | |||
| 50c3941f42 | |||
| 6e5d5ee36c | |||
| 2c8b713ff3 | |||
| 8162a6706d | |||
| 952f6883fa | |||
| 373f3e3698 | |||
| 17791f631f | |||
| 6987c67cc7 | |||
| 65a59e5d2d | |||
| 05b9a6d412 | |||
| 6608f4f164 | |||
| 93378ad6b0 | |||
| bd4a60203e | |||
| c9e6a62641 | |||
| 68d797fa99 | |||
| 08011d8cf2 | |||
| 2f91bf7f52 | |||
| d5182c05c1 | |||
| 8e0947a833 | |||
| 1d88fc37b0 | |||
| 46bd8e6f4d | |||
| b95b427331 | |||
| 9b574c60eb | |||
| a8b39cc0a4 | |||
| cdbc7dd9b8 | |||
| 08dfec4fcf | |||
| 7f4553225b | |||
| f37e65a91e | |||
| c022f8a68c | |||
| daa7a506a5 | |||
| f3dcf39c15 | |||
| 06cbef16d4 | |||
| ab31bcd3e3 | |||
| 563a99864f | |||
| 39b8abc2c6 | |||
| f3dd837076 | |||
| d6b3a5259d | |||
| 9fea1d5c64 | |||
| 0adb5355c7 | |||
| 01d807b61e | |||
| 285bb812d0 | |||
| d897155d6e | |||
| 7154426279 | |||
| 4526084e4d | |||
| 0c5c786ae3 | |||
| 8a2c4aa356 | |||
| 4cba819edd | |||
| 4db42a2b29 | |||
| fc0ee5b698 | |||
| 2c0c3f1c70 | |||
| 3f3976b73c | |||
| 82d5dcced5 | |||
| f4eaed694a | |||
| 05d9869326 | |||
| 2675934ff8 | |||
| fb6e19d3ea | |||
| f1151d54e1 | |||
| 6a0fa4f9fa | |||
| 20d96fffc8 | |||
| ad6c06308a | |||
| 84ee4171a4 | |||
| 6bc4f8a1e4 | |||
| 8876aa0866 | |||
| 691d2e7228 | |||
| 7a12755de9 | |||
| 8573f56d03 | |||
| 8f3e683321 | |||
| 64867b0b67 | |||
| e42d060e57 | |||
| 2ca9ab8a0c | |||
| fdc0c6b371 | |||
| 8f8779c3cd | |||
| 851877ad8b | |||
| 8df74529ad | |||
| 353f51ebf0 | |||
| 6c5cb08118 | |||
| 54fee92b15 | |||
| 776431c801 | |||
| 9f893ce999 | |||
| 820447670c | |||
| b43c49cd64 | |||
| f9c3558975 | |||
| 1b75250824 | |||
| 3fa3bb5d03 | |||
| ef0ff55f1f | |||
| 66aa8ed177 | |||
| 519286bc69 | |||
| 9882f45fd2 | |||
| 634f6216a0 | |||
| 69574a7d1c | |||
| eddd4b9be8 | |||
| 9a9c31ff53 | |||
| 41ee7a1c85 | |||
| 2f9bbf373c | |||
| d662e10ebb | |||
| cd31092333 | |||
| 1eae7ab6f3 | |||
| ba378f852f | |||
| 5cfd1e557d | |||
| df31d79eaf | |||
| 12d7304325 | |||
| 41424cbdfd | |||
| 05dda519a2 | |||
| 120d39282e | |||
| 8e7d0fd13b | |||
| 3d979fdfbb | |||
| 6ab47ae3cb | |||
| a4977b4924 | |||
| bac9c692b8 | |||
| 6ab15356e1 | |||
| 73cc7121c3 | |||
| 1aeef06f49 | |||
| 3b16bcf01d | |||
| f6351fda41 | |||
| 007e91480d | |||
| 163ef9296e | |||
| fa042f7d68 | |||
| 8a11040dde | |||
| a88971d557 | |||
| 5867e5fcc5 | |||
| 20e587d6d3 | |||
| 6bfa8471cd | |||
| 5c10bce2f4 | |||
| f1663f6668 | |||
| 90c27e0e74 | |||
| b5eac0d907 | |||
| 4eabe2cb3a | |||
| a1c0d30a06 | |||
| 63c9f65040 | |||
| f58a066bff | |||
| 952ea6357a | |||
| 6695973035 | |||
| 3dc28635f4 | |||
| 0bde01a85f | |||
| b9840c2074 | |||
| 8228a76875 | |||
| 46b370a2e3 | |||
| 820e9513ba | |||
| bd71d64db3 | |||
| 9d4baf827c | |||
| d6843d7fcf | |||
| babb1dd962 | |||
| aa32e396a7 | |||
| 1068efcb49 | |||
| 576c7f1458 | |||
| 37c857b055 | |||
| 794dfb44d1 | |||
| 929bb6dc66 | |||
| 28337e31eb | |||
| 09a38c0e4b | |||
| 645b8fb9cd | |||
| 541588948c | |||
| bdd6fcfbbc | |||
| 9e50286c66 | |||
| 418e4649dc | |||
| 4a70f20f4a | |||
| d6eabfcb6d | |||
| d88889d760 | |||
| 85146d8af0 | |||
| 9612572f07 | |||
| 4bb1dddf4d | |||
| b066a86962 | |||
| 6086455782 | |||
| 9020b3cbad | |||
| 5822dea270 | |||
| c445f59664 | |||
| 737e4cb4f9 | |||
| dba7368d01 | |||
| 314c4cd8cc | |||
| 3e46f99e14 | |||
| e0cc552b8d | |||
| 6b5be403af | |||
| 269d5989bc | |||
| edfe3d9b65 | |||
| ffb2c42a26 | |||
| b7de19b020 | |||
| 77cd659b39 | |||
| fbba9d8357 | |||
| f464a9efdf | |||
| 7ec4290582 | |||
| 3f887a1d3a | |||
| ffd76dc587 | |||
| d7f3594a73 | |||
| 32fa5f206b | |||
| 70d2c09e81 | |||
| 17f03806d0 | |||
| b6aba0efa4 | |||
| 65a5e06935 | |||
| f459cb9805 | |||
| f5470aca5d | |||
| c26af97fe7 | |||
| 766ec458a2 | |||
| 48ff78580c | |||
| 396f7524d7 | |||
| da19ef42f5 | |||
| 91abe5aa43 | |||
| 682435321b | |||
| 76f0d60224 | |||
| 628b88ef9f | |||
| 6a792814ce | |||
| 05ce15d677 | |||
| 4a9d0d4f8e | |||
| 16f0552682 | |||
| 9e3819b9c7 | |||
| 233a0d4b35 | |||
| bd95b808ae | |||
| 96132c4585 | |||
| 3edacef572 | |||
| 36889c1695 | |||
| cd2c6c1d8f | |||
| d8c78b1a00 | |||
| 74a22c26cf | |||
| f742eba4c1 | |||
| 36c2812157 | |||
| d353fc4c63 | |||
| 98bd3d6da0 | |||
| cd5ec8d295 | |||
| f91c6456bd | |||
| 67af87968e | |||
| 58ea3e1b35 | |||
| a9435c10e8 | |||
| a86860fe76 | |||
| a3d707f78a | |||
| c502426249 | |||
| 2fb5ffcecf | |||
| 6d995c1253 | |||
| a860c1c490 | |||
| 481d9cc745 | |||
| 7e53a7bc2b | |||
| 4df10e3782 | |||
| 396da65178 | |||
| 87e8faf383 | |||
| 9eb3e6d398 | |||
| 332c4fdf82 | |||
| 4d247344d5 | |||
| 4e4738d4f6 | |||
| dbfa7b0932 | |||
| e90231d58d | |||
| 9bc7d40425 | |||
| d2d5c0621b | |||
| e41d57c914 | |||
| 7c5336cba3 |
@@ -0,0 +1,49 @@
|
||||
---
|
||||
description: Sync OpenRouter API implementation with latest upstream documentation
|
||||
argument-hint: specific feature to check
|
||||
---
|
||||
|
||||
Review the OpenRouter implementation:
|
||||
- Models list: `src/modules/llms/server/openai/openrouter.wiretypes.ts` (list API response schema)
|
||||
- Chat wire types: `src/modules/aix/server/dispatch/wiretypes/openai.wiretypes.ts` (OpenAI-compatible)
|
||||
- Request adapter: `src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts` ('openrouter' dialect)
|
||||
- Response parser: `src/modules/aix/server/dispatch/chatGenerate/parsers/openai.parser.ts` (shared OpenAI parser)
|
||||
- Vendor config: `src/modules/llms/vendors/openrouter/openrouter.vendor.ts`
|
||||
|
||||
GOAL: Ensure complete support for OpenRouter's API including advanced features like reasoning/thinking tokens, tool use, search integration, and multi-modal capabilities. OpenRouter is OpenAI-compatible but has important extensions and differences.
|
||||
|
||||
Use Task tool with subagent_type=Explore and thoroughness="very thorough" to discover:
|
||||
1. Map API structure - all endpoints, parameters, capabilities from https://openrouter.ai/docs
|
||||
2. **Advanced features** - How to use: reasoning/thinking tokens (o1, DeepSeek R1), tool use/function calling, search integration, multi-modal (vision/audio)
|
||||
3. Changelog location - How does OpenRouter communicate API updates and breaking changes?
|
||||
4. Model metadata - What capabilities are exposed in the models list API? How to detect feature support?
|
||||
5. OpenAI deviations - Extensions, special headers (HTTP-Referer, X-Title), response fields, streaming differences
|
||||
|
||||
Then check the latest API information. Try these sources (be creative if blocked):
|
||||
|
||||
**Primary Sources:**
|
||||
- API Reference: https://openrouter.ai/docs/api-reference
|
||||
- Chat Completions: https://openrouter.ai/docs/api-reference#chat-completions
|
||||
- Models List: https://openrouter.ai/docs/api-reference#models-list
|
||||
- Parameters Guide: https://openrouter.ai/docs/parameters
|
||||
- Announcements: https://openrouter.ai/announcements (feature launches, API updates, new models)
|
||||
- Models Directory: https://openrouter.ai/models (check metadata for capabilities)
|
||||
|
||||
**Alternative Sources:**
|
||||
- GitHub: https://github.com/OpenRouterTeam (SDKs, examples, issues for recent changes)
|
||||
- Web Search: "openrouter api changelog" or "openrouter reasoning tokens" or "openrouter tool use"
|
||||
|
||||
**If blocked:** Ask user to provide documentation.
|
||||
|
||||
$ARGUMENTS
|
||||
Focus on discrepancies and gaps:
|
||||
- **Request/Response structure**: New fields, changed requirements, streaming event types
|
||||
- **Feature support**: Thinking tokens format, tool calling protocol, search parameters
|
||||
- **Model capabilities**: How to detect and enable advanced features per model
|
||||
- **OpenRouter extensions**: Headers, routing, fallbacks, rate limiting (free vs paid)
|
||||
- **Breaking changes**: Protocol updates, deprecated fields, new required parameters
|
||||
|
||||
Report differences in wire types, adapter logic, parser handling, or dialect-specific quirks.
|
||||
Prioritize new capabilities that improve user experience (reasoning visibility, better tool use, etc.).
|
||||
|
||||
When making changes, add comments with date: `// [OpenRouter, 2026-MM-DD]: explanation`
|
||||
@@ -0,0 +1,63 @@
|
||||
---
|
||||
description: Sync LLM parameter options between full model dialog and chat side panel
|
||||
---
|
||||
|
||||
Audit and sync LLM parameter configurations between the two UI editors. Goal: identical `value` fields in option arrays + equivalent onChange logic. Labels/descriptions can differ for UI space.
|
||||
|
||||
**Files to Compare:**
|
||||
1. **Full Model Dialog**: `src/modules/llms/models-modal/LLMParametersEditor.tsx` (main branch)
|
||||
2. **Chat Side Panel**: `src/apps/chat/components/layout-panel/ChatPanelModelParameters.tsx` (main derived branches only)
|
||||
|
||||
**Reference Documentation:**
|
||||
- Parameter system: `kb/systems/LLM-parameters-system.md`
|
||||
- Parameter registry: `src/common/stores/llms/llms.parameters.ts`
|
||||
|
||||
**Task: Perform a comprehensive audit**
|
||||
|
||||
1. **Read both files** and extract all option arrays (e.g., `_reasoningEffortOptions`, `_antEffortOptions`, `_geminiThinkingLevelOptions`, etc.)
|
||||
|
||||
2. **Check for missing parameters:**
|
||||
- Parameters handled in `LLMParametersEditor.tsx` but NOT in `ChatPanelModelParameters.tsx`
|
||||
- Parameters in `ChatPanelModelParameters.tsx`'s `_interestingParameters` array but missing UI controls
|
||||
- Note: The side panel intentionally shows only "interesting" parameters - focus on those listed in `_interestingParameters`
|
||||
|
||||
3. **Check for value mismatches** between corresponding option arrays:
|
||||
- Different number of options (e.g., 3 vs 4 options)
|
||||
- Same label but different `value` (this causes the bug in issue #926)
|
||||
- Different labels for the same `value`
|
||||
- Missing `_UNSPECIFIED`/Default option in one but not the other
|
||||
|
||||
4. **Check onChange handler consistency:**
|
||||
- Both should remove parameter on `_UNSPECIFIED` selection
|
||||
- Both should set explicit values the same way
|
||||
- Watch for conditions like `value === 'high'` that may differ
|
||||
|
||||
**Output Format:**
|
||||
|
||||
```
|
||||
## Parameter Sync Audit Report
|
||||
|
||||
### Missing Parameters
|
||||
- [ ] `llmVndXyz` - In full dialog, missing from side panel
|
||||
|
||||
### Value Mismatches
|
||||
- [ ] `_xyzOptions`:
|
||||
- Full dialog: [values...]
|
||||
- Side panel: [values...]
|
||||
- Issue: [description]
|
||||
|
||||
### Handler Inconsistencies
|
||||
- [ ] `llmVndXyz` onChange differs: [explanation]
|
||||
|
||||
### Recommended Fixes
|
||||
1. [Specific fix with code snippet if needed]
|
||||
```
|
||||
|
||||
**Fix Direction:** The full dialog is the source of truth. Update the side panel to match its values when mismatched.
|
||||
|
||||
**Notes:**
|
||||
- Side panel uses shorter descriptions (space-constrained) - that's fine
|
||||
- Variable names may differ (e.g., `_anthropicEffortOptions` vs `_antEffortOptions`) - that's fine, but same is better
|
||||
- `value` fields must be identical sets
|
||||
- `_UNSPECIFIED` must mean the same thing in both
|
||||
- onChange: remove on `_UNSPECIFIED`, set explicit value otherwise
|
||||
@@ -4,7 +4,7 @@ description: Update Alibaba model definitions with latest pricing and capabiliti
|
||||
|
||||
Update `src/modules/llms/server/openai/models/alibaba.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models & Pricing: https://www.alibabacloud.com/help/en/model-studio/models
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update Anthropic model definitions with latest pricing and capabili
|
||||
|
||||
Update `src/modules/llms/server/anthropic/anthropic.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.claude.com/en/docs/about-claude/models/overview
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update DeepSeek model definitions with latest pricing and capabilit
|
||||
|
||||
Update `src/modules/llms/server/openai/models/deepseek.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Pricing: https://api-docs.deepseek.com/quick_start/pricing
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update Gemini model definitions with latest pricing and capabilitie
|
||||
|
||||
Update `src/modules/llms/server/gemini/gemini.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.types.ts`, `src/modules/llms/server/llm.server.types.ts`, and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.types.ts`, `src/modules/llms/server/llm.server.types.ts`, and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://ai.google.dev/gemini-api/docs/models
|
||||
|
||||
@@ -4,13 +4,13 @@ description: Update Groq model definitions with latest pricing and capabilities
|
||||
|
||||
Update `src/modules/llms/server/openai/models/groq.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://console.groq.com/docs/models
|
||||
**Primary Source:**
|
||||
- Fetch https://console.groq.com/docs/models.md directly (markdown format, no search needed)
|
||||
- Pricing: https://groq.com/pricing/
|
||||
|
||||
**Fallbacks if blocked:** Search "groq models latest pricing", "groq latest models", "groq api models", or search GitHub for latest model prices and context windows
|
||||
**Do NOT use web search.** The `.md` endpoint provides structured markdown content directly.
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
---
|
||||
description: Update Kimi model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/moonshot.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources (fetch directly, no search needed):**
|
||||
- Pricing: https://platform.moonshot.ai/docs/pricing/chat
|
||||
- API Reference: https://platform.moonshot.ai/docs/api/chat
|
||||
|
||||
**Do NOT use web search.** Fetch the URLs directly, or ask the user to provide data if the URLs are inaccessible.
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -4,7 +4,7 @@ description: Update Mistral model definitions with latest pricing and capabiliti
|
||||
|
||||
Update `src/modules/llms/server/openai/models/mistral.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.mistral.ai/getting-started/models/models_overview/
|
||||
|
||||
@@ -4,12 +4,12 @@ description: Update Ollama model definitions with latest featured models
|
||||
|
||||
Update `src/modules/llms/server/ollama/ollama.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Automated Workflow:**
|
||||
```bash
|
||||
# 1. Fetch the HTML
|
||||
curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html
|
||||
# 1. Fetch the HTML (sorted by newest for stable ordering)
|
||||
curl -s "https://ollama.com/library?sort=newest" -o /tmp/ollama-newest.html
|
||||
|
||||
# 2. Parse it with the script
|
||||
node .claude/scripts/parse-ollama-models.js > /tmp/ollama-parsed.txt 2>&1
|
||||
@@ -22,14 +22,18 @@ The parser outputs: `modelName|pulls|capabilities|sizes`
|
||||
- Example: `deepseek-r1|66200000|tools,thinking|1.5b,7b,8b,14b,32b,70b,671b`
|
||||
|
||||
**Primary Sources:**
|
||||
- Model Library: https://ollama.com/library?sort=featured
|
||||
- Model Library: https://ollama.com/library?sort=newest
|
||||
- Parser script: `.claude/scripts/parse-ollama-models.js`
|
||||
|
||||
**Fallbacks if blocked:** Check https://github.com/ollama/ollama, search "ollama featured models", "ollama latest models", or search GitHub for latest model info
|
||||
|
||||
**Important:**
|
||||
- Skip models below 50,000 pulls (parser does this automatically)
|
||||
- Sort them in the EXACT same order as the source (featured models)
|
||||
- Parser filtering rules:
|
||||
- Top 30 newest models are always included (regardless of pull count)
|
||||
- After top 30, only models with 50K+ pulls are included
|
||||
- Models with 'cloud' capability are automatically excluded
|
||||
- Models with 'embedding' capability are automatically excluded
|
||||
- Sort them in the EXACT same order as the source (newest first, for stable ordering)
|
||||
- Extract tags: 'tools' → hasTools, 'vision' → hasVision, 'embedding' → isEmbeddings (note the 's'), 'thinking' → tags only
|
||||
- Extract 'b' tags (1.5b, 7b, 32b) to tags field
|
||||
- Set today's date (YYYYMMDD format) for newly added models only
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update OpenAI model definitions with latest pricing and capabilitie
|
||||
|
||||
Update `src/modules/llms/server/openai/models/openai.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Manual hint:** For pricing page, expand all tables before copying content.
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update OpenPipe model definitions with latest pricing and capabilit
|
||||
|
||||
Update `src/modules/llms/server/openai/models/openpipe.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Base Models: https://docs.openpipe.ai/base-models
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update Perplexity model definitions with latest pricing and capabil
|
||||
|
||||
Update `src/modules/llms/server/openai/models/perplexity.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.perplexity.ai/getting-started/models
|
||||
|
||||
@@ -4,7 +4,7 @@ description: Update xAI model definitions with latest pricing and capabilities
|
||||
|
||||
Update `src/modules/llms/server/openai/models/xai.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models & Pricing: https://docs.x.ai/docs/models?cluster=us-east-1#detailed-pricing-for-all-grok-models
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
---
|
||||
description: Generate changelog bullets for big-agi.com/changes
|
||||
argument-hint: date like "2026-01-10" or empty for auto-detect
|
||||
---
|
||||
|
||||
Generate changelog bullets for a single entry in https://big-agi.com/changes
|
||||
|
||||
**Step 1: Find the starting date**
|
||||
|
||||
IMPORTANT: This repo rebases frequently, so commits are INTERLEAVED throughout history.
|
||||
New commits can appear at line 10, 500, or 1800. Use AUTHOR DATE (`%ad`) to filter - it's preserved during rebases.
|
||||
|
||||
If `$ARGUMENTS` provided, use it as the cutoff date.
|
||||
|
||||
If NO argument:
|
||||
1. Fetch https://big-agi.com/changes to get the most recent changelog date
|
||||
2. Use that date as the cutoff
|
||||
|
||||
**Step 2: Get commits by author date**
|
||||
|
||||
Filter commits by author date to catch ALL new commits regardless of position in history:
|
||||
|
||||
```bash
|
||||
# For commits after Jan 10, 2026 (adjust date pattern as needed)
|
||||
git log --oneline --no-merges --format="%h %ad %s" --date=short | grep "2026-01-1[1-9]\|2026-01-2\|2026-02"
|
||||
|
||||
# Verify interleaving by checking line numbers
|
||||
git log --oneline --no-merges --format="%h %ad %s" --date=short | grep -n "2026-01-1[1-9]"
|
||||
```
|
||||
|
||||
The line numbers prove commits are scattered (e.g., lines 14, 638, 1156, 1803 = interleaved).
|
||||
|
||||
**Step 3: Write bullets**
|
||||
|
||||
Real examples from big-agi.com/changes:
|
||||
- "Gemini 3 Flash support with 4-level thinking: high, medium, low, minimal"
|
||||
- "Cloud Sync launched! - long awaited and top requested"
|
||||
- "Deepseek V3.2 Speciale comes with almost Gemini 3 Pro performance but 20 times cheaper"
|
||||
- "Anthropic Opus 4.5 with controls for effort (speed tradeoff), thinking budget, search"
|
||||
- "Login with email, via magic link"
|
||||
- "Mobile UX fixes for popups drag/interaction"
|
||||
|
||||
**Rules:**
|
||||
|
||||
1. **Order by importance** - most significant changes first, minor fixes last
|
||||
2. **Feature-first, no verb prefixes** - "Gemini 3 support" not "Add Gemini 3 support"
|
||||
3. **Model names lead** when it's about LLMs
|
||||
4. **Specific details** - "4-level thinking: high, medium, low, minimal" not "multiple thinking levels"
|
||||
5. **One-liners** - short, no fluff
|
||||
6. **Consolidate commits** - 10 persona editor commits = 1 bullet
|
||||
7. **No corporate speak** - no "enhanced", "streamlined", "robust", "leverage"
|
||||
|
||||
**Skip:** WIP, internal refactors, KB docs, automation, review cleanups, trivial fixes, deps bumps, CI changes.
|
||||
|
||||
**Output:** Just bullets, ready to paste. 2-5 bullets but adapt depending on scope, especially
|
||||
in relation to the usual https://big-agi.com/changes entries.
|
||||
@@ -1,23 +1,36 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Parse Ollama featured models from HTML
|
||||
* Parse Ollama models from HTML (sorted by newest for stable ordering)
|
||||
*
|
||||
* Usage:
|
||||
* 1. Fetch HTML: curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html
|
||||
* 1. Fetch HTML: curl -s "https://ollama.com/library?sort=newest" -o /tmp/ollama-newest.html
|
||||
* 2. Parse: node .claude/scripts/parse-ollama-models.js
|
||||
*
|
||||
* Outputs: pipe-delimited format: modelName|pulls|capabilities|sizes
|
||||
* Example: deepseek-r1|66200000|tools,thinking|1.5b,7b,8b,14b,32b,70b,671b
|
||||
*
|
||||
* Filtering rules:
|
||||
* - Top 30 newest models are always included (regardless of pull count)
|
||||
* - After top 30, only models with 50K+ pulls are included
|
||||
* - Models with 'cloud' capability are always excluded
|
||||
* - Models with 'embedding' capability are always excluded
|
||||
*
|
||||
* Pull counts are rounded to significant figures for stable diffs:
|
||||
* - >=10M: round to 100K (e.g., 109,123,456 -> 109,100,000)
|
||||
* - >=1M: round to 10K (e.g., 5,432,100 -> 5,430,000)
|
||||
* - <1M: round to 1K (e.g., 88,700 -> 89,000)
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
const htmlPath = process.argv[2] || '/tmp/ollama-featured.html';
|
||||
const htmlPath = process.argv[2] || '/tmp/ollama-newest.html';
|
||||
const TOP_N_ALWAYS_INCLUDE = 30;
|
||||
const MIN_PULLS_THRESHOLD = 50000;
|
||||
|
||||
if (!fs.existsSync(htmlPath)) {
|
||||
console.error(`Error: HTML file not found at ${htmlPath}`);
|
||||
console.error('Please fetch it first with:');
|
||||
console.error(' curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html');
|
||||
console.error(' curl -s "https://ollama.com/library?sort=newest" -o /tmp/ollama-newest.html');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -25,7 +38,7 @@ const html = fs.readFileSync(htmlPath, 'utf8');
|
||||
|
||||
// Split into model sections - each starts with <a href="/library/
|
||||
const modelSections = html.split(/<a href="\/library\//);
|
||||
const models = [];
|
||||
const allParsedModels = [];
|
||||
|
||||
for (let i = 1; i < modelSections.length; i++) {
|
||||
const section = modelSections[i].substring(0, 5000); // Large enough window to capture all data
|
||||
@@ -65,10 +78,27 @@ for (let i = 1; i < modelSections.length; i++) {
|
||||
sizes.push(sizeMatch[1].trim());
|
||||
}
|
||||
|
||||
// Only include models with 50K+ pulls
|
||||
if (pulls >= 50000) {
|
||||
models.push({ name, pulls, capabilities, sizes });
|
||||
// Skip models with 'cloud' or 'embedding' capability
|
||||
if (capabilities.includes('cloud') || capabilities.includes('embedding')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
allParsedModels.push({ name, pulls: roundPulls(pulls), capabilities, sizes });
|
||||
}
|
||||
|
||||
// Apply filtering: top 30 always included, rest need 50K+ pulls
|
||||
const models = allParsedModels.filter((model, index) => {
|
||||
return index < TOP_N_ALWAYS_INCLUDE || model.pulls >= MIN_PULLS_THRESHOLD;
|
||||
});
|
||||
|
||||
/**
|
||||
* Round pulls to significant figures for stable output.
|
||||
* This reduces churn from daily fluctuations while preserving magnitude.
|
||||
*/
|
||||
function roundPulls(pulls) {
|
||||
if (pulls >= 10000000) return Math.round(pulls / 100000) * 100000; // >=10M: round to 100K
|
||||
if (pulls >= 1000000) return Math.round(pulls / 10000) * 10000; // >=1M: round to 10K
|
||||
return Math.round(pulls / 1000) * 1000; // <1M: round to 1K
|
||||
}
|
||||
|
||||
// Output in pipe-delimited format (in the order they appear on the page)
|
||||
@@ -78,4 +108,6 @@ models.forEach(m => {
|
||||
console.log(`${m.name}|${m.pulls}|${caps}|${tags}`);
|
||||
});
|
||||
|
||||
console.error(`\nTotal models with 50K+ pulls: ${models.length}`);
|
||||
const topNCount = Math.min(TOP_N_ALWAYS_INCLUDE, allParsedModels.length);
|
||||
const thresholdCount = models.length - topNCount;
|
||||
console.error(`\nTotal models: ${models.length} (top ${topNCount} newest + ${thresholdCount} with ${MIN_PULLS_THRESHOLD / 1000}K+ pulls)`);
|
||||
|
||||
@@ -3,8 +3,17 @@
|
||||
"allow": [
|
||||
"Bash(cat:*)",
|
||||
"Bash(cp:*)",
|
||||
"Bash(curl:*)",
|
||||
"Bash(find:*)",
|
||||
"Bash(gh issue list:*)",
|
||||
"Bash(gh issue view:*)",
|
||||
"Bash(git branch:*)",
|
||||
"Bash(git cherry-pick:*)",
|
||||
"Bash(git describe:*)",
|
||||
"Bash(git grep:*)",
|
||||
"Bash(git log:*)",
|
||||
"Bash(git ls-tree:*)",
|
||||
"Bash(git show:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(ls:*)",
|
||||
"Bash(mkdir:*)",
|
||||
@@ -12,10 +21,14 @@
|
||||
"Bash(npm install)",
|
||||
"Bash(npm install:*)",
|
||||
"Bash(npm run:*)",
|
||||
"Bash(npx eslint:*)",
|
||||
"Bash(npx tsc:*)",
|
||||
"Bash(rg:*)",
|
||||
"Bash(rm:*)",
|
||||
"Bash(sed:*)",
|
||||
"Bash(tree:*)",
|
||||
"Read(//tmp/**)",
|
||||
"Skill(llms:update-models*)",
|
||||
"WebFetch",
|
||||
"WebFetch(domain:big-agi.com)",
|
||||
"WebSearch",
|
||||
|
||||
+15
-40
@@ -1,43 +1,18 @@
|
||||
# big-AGI non-code files
|
||||
/docs/
|
||||
/dist/
|
||||
README.md
|
||||
*
|
||||
|
||||
# Ignore build and log files
|
||||
Dockerfile
|
||||
/.dockerignore
|
||||
!app/
|
||||
!kb/
|
||||
!pages/
|
||||
!public/
|
||||
!src/
|
||||
!tools/
|
||||
|
||||
# Node build artifacts
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
!*.mjs
|
||||
!middleware_BASIC_AUTH.ts
|
||||
!middleware.ts
|
||||
!next.config.ts
|
||||
!package*.json
|
||||
!tsconfig.json
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# versioning
|
||||
.git/
|
||||
.github/
|
||||
|
||||
# IDEs
|
||||
.idea/
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# vercel
|
||||
.vercel
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
!LICENSE
|
||||
!README.md
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
"extends": "next/core-web-vitals"
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
name: 🔥 Make AI Fix This
|
||||
description: Bug, question, or feedback - AI analyzes and changes Big-AGI appropriately
|
||||
labels: [ 'claude-triage' ]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for opening an issue! Our AI will analyze it and change Big-AGI appropriately.
|
||||
|
||||
**What happens next:**
|
||||
- AI searches the codebase and documentation
|
||||
- You get a response, typically within 30 minutes
|
||||
- Ticket gets follow-up and community votes
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What's happening?
|
||||
description: Describe the bug, feature request, or question. Be as detailed as you can.
|
||||
placeholder: |
|
||||
Bug example: "In Beam, Anthropic models seem to have search off..."
|
||||
Model request: "Add Claude Opus 4.5 out today, see https://..."
|
||||
Feature example: "Add the option to to save frequent prompt templates for reuse..."
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Where does this happen?
|
||||
description: If this is a bug or issue, where are you experiencing it?
|
||||
options:
|
||||
- Big-AGI Pro (big-agi.com)
|
||||
- Self-deployed from GitHub
|
||||
- Docker deployment
|
||||
- Local development
|
||||
- Not applicable (question/feedback)
|
||||
- Other
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Impact on your workflow
|
||||
description: How does this affect your use of Big-AGI?
|
||||
options:
|
||||
- Blocking - Can't use Big-AGI
|
||||
- High - Major feature broken
|
||||
- Medium - Workaround exists
|
||||
- Low - Minor inconvenience
|
||||
- None - Just a question/suggestion
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Environment (if applicable)
|
||||
description: Device, OS, browser - only if reporting a bug
|
||||
placeholder: |
|
||||
Device: Macbook Pro M3
|
||||
OS: macOS 15.2
|
||||
Browser: Chrome 131
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Screenshots, error messages, or anything else that helps
|
||||
placeholder: Paste screenshots or error messages here
|
||||
validations:
|
||||
required: false
|
||||
@@ -0,0 +1,69 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: docker
|
||||
directory: /
|
||||
schedule:
|
||||
interval: weekly
|
||||
commit-message:
|
||||
prefix: "chore(deps)"
|
||||
ignore:
|
||||
- dependency-name: "node"
|
||||
versions: [">=25", "<26"] # Node 25 breaks the build because of a dummy localStorage object
|
||||
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
schedule:
|
||||
interval: weekly
|
||||
commit-message:
|
||||
prefix: "chore(deps)"
|
||||
|
||||
# Disabled npm updates for now - will need precise package pinning, as some packages changed behavior upstream
|
||||
# - package-ecosystem: npm
|
||||
# directory: /
|
||||
# schedule:
|
||||
# interval: weekly
|
||||
# commit-message:
|
||||
# prefix: "chore(deps)"
|
||||
# cooldown:
|
||||
# semver-patch: 3
|
||||
# semver-minor: 7
|
||||
# semver-major: 14
|
||||
# # Ignore packages intentionally pinned due to upstream issues
|
||||
# ignore:
|
||||
# # Issue #857: v11.6+ breaks streaming; tried 11.4.4/11.6/11.7, only 11.5.1 works
|
||||
# - dependency-name: "@trpc/*"
|
||||
# versions: [">=11.5.1", "<12"]
|
||||
# # Pinned during tRPC #857 debugging - may be safe to unpin, test first
|
||||
# - dependency-name: "@tanstack/react-query"
|
||||
# versions: [">=5.90.10", "<6"]
|
||||
# # Pinned because 5.0.8 changes signatures so return set({ .. }) != void;
|
||||
# - dependency-name: "zustand"
|
||||
# versions: [">=5.0.7", "<6"]
|
||||
# groups:
|
||||
# next:
|
||||
# patterns:
|
||||
# - "@next/*"
|
||||
# - "eslint-config-next"
|
||||
# - "next"
|
||||
# react:
|
||||
# patterns:
|
||||
# - "react"
|
||||
# - "react-dom"
|
||||
# - "@types/react"
|
||||
# - "@types/react-dom"
|
||||
# emotion:
|
||||
# patterns:
|
||||
# - "@emotion/*"
|
||||
# mui:
|
||||
# patterns:
|
||||
# - "@mui/*"
|
||||
# dnd-kit:
|
||||
# patterns:
|
||||
# - "@dnd-kit/*"
|
||||
# prisma:
|
||||
# patterns:
|
||||
# - "@prisma/*"
|
||||
# - "prisma"
|
||||
# vercel:
|
||||
# patterns:
|
||||
# - "@vercel/*"
|
||||
@@ -19,7 +19,7 @@ jobs:
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude'))
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -30,7 +30,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -51,7 +51,8 @@ jobs:
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
# claude_args: '--allowed-tools Bash(gh pr:*)'
|
||||
# disabling opus for now claude-opus-4-1-20250805
|
||||
# former: claude-sonnet-4-5-20250929
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--model claude-opus-4-5-20251101
|
||||
--max-turns 100
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
|
||||
@@ -12,17 +12,18 @@ jobs:
|
||||
!contains(github.event.issue.body, '@claude')
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: read
|
||||
pull-requests: write
|
||||
id-token: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -35,6 +36,10 @@ jobs:
|
||||
allowed_non_write_users: '*'
|
||||
# track_progress: true # Enables tracking comments
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
prompt: |
|
||||
REPO: ${{ github.repository }}
|
||||
ISSUE NUMBER: #${{ github.event.issue.number }}
|
||||
@@ -61,11 +66,12 @@ jobs:
|
||||
- Link duplicates if found
|
||||
|
||||
If you're uncertain, say so and suggest next steps.
|
||||
If you write any code make sure that it compiles and that you push it.
|
||||
Be welcoming, helpful, professional, solution-focused and no-BS.
|
||||
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--max-turns 60
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
--model claude-opus-4-5-20251101
|
||||
--max-turns 75
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -72,6 +72,6 @@ jobs:
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--model claude-opus-4-5-20251101
|
||||
--max-turns 100
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools"
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools"
|
||||
|
||||
@@ -20,28 +20,122 @@ env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
# Build job: runs on native runners for each platform (no QEMU emulation)
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
runs-on: ${{ matrix.runner }}
|
||||
name: Build ${{ matrix.platform }}
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Big-AGI Open
|
||||
org.opencontainers.image.description=Big-AGI Open - Multi-model AI workspace for experts who need to think broader, decide smarter, and build with confidence.
|
||||
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
|
||||
org.opencontainers.image.documentation=https://big-agi.com
|
||||
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LC }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
NEXT_PUBLIC_BUILD_HASH=${{ github.sha }}
|
||||
NEXT_PUBLIC_BUILD_REF_NAME=${{ github.ref_name }}
|
||||
outputs: type=image,push-by-digest=true,name-canonical=true,push=true,oci-mediatypes=true
|
||||
provenance: false
|
||||
cache-from: type=gha,scope=${{ github.repository }}-${{ matrix.platform }}
|
||||
cache-to: type=gha,scope=${{ github.repository }}-${{ matrix.platform }},mode=max
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_PAIR }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
# Merge job: combines platform-specific images into a unified multi-arch manifest
|
||||
merge:
|
||||
name: Merge manifests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
needs: build
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -49,7 +143,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@@ -65,28 +159,18 @@ jobs:
|
||||
# Version tags (v2.0.0, 2.0.0)
|
||||
type=ref,event=tag
|
||||
type=semver,pattern={{version}}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Big-AGI Open
|
||||
org.opencontainers.image.description=Big-AGI Open - Multi-model AI workspace for experts who need to think broader, decide smarter, and build with confidence.
|
||||
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
|
||||
org.opencontainers.image.documentation=https://big-agi.com
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
NEXT_PUBLIC_BUILD_HASH=${{ github.sha }}
|
||||
NEXT_PUBLIC_BUILD_REF_NAME=${{ github.ref_name }}
|
||||
# Enable build cache (future)
|
||||
#cache-from: type=gha
|
||||
#cache-to: type=gha,mode=max
|
||||
# Enable provenance and SBOM (future)
|
||||
#provenance: true
|
||||
#sbom: true
|
||||
- name: Create manifest list and push
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
--annotation='index:org.opencontainers.image.title=Big-AGI Open' \
|
||||
--annotation='index:org.opencontainers.image.description=Big-AGI Open - Multi-model AI workspace for experts who need to think broader, decide smarter, and build with confidence.' \
|
||||
--annotation='index:org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}' \
|
||||
--annotation='index:org.opencontainers.image.documentation=https://big-agi.com' \
|
||||
$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LC }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LC }}:${{ steps.meta.outputs.version }}
|
||||
@@ -53,3 +53,6 @@ next-env.d.ts
|
||||
.env*.local
|
||||
/.run/dev (ENV).run.xml
|
||||
/src/modules/3rdparty/aider/scratch*
|
||||
|
||||
# Ignore temporary CC files
|
||||
/tmpclaude*
|
||||
@@ -1,3 +0,0 @@
|
||||
overrides=@mui/material@^5.0.0:
|
||||
dependencies:
|
||||
@mui/material: replaced-by=@mui/joy
|
||||
@@ -117,6 +117,7 @@ Located in `/src/common/layout/optima/`
|
||||
- `store-chats`: Conversations and messages
|
||||
- `store-llms`: Model configurations
|
||||
- `store-ux-labs`: UI preferences and labs features
|
||||
- **Zustand pattern**: Always wrap multi-property selectors with `useShallow` from `zustand/react/shallow` to prevent re-renders on reference changes
|
||||
|
||||
2. **Per-Instance Stores** (Vanilla Zustand)
|
||||
- `store-beam_vanilla`: Beam scatter/gather state
|
||||
@@ -227,7 +228,7 @@ The server uses a split architecture with two tRPC routers:
|
||||
Distributed edge runtime for low-latency AI operations:
|
||||
- **AIX** - AI streaming and communication
|
||||
- **LLM Routers** - Direct vendor integrations (OpenAI, Anthropic, Gemini, Ollama)
|
||||
- **External Services** - ElevenLabs (TTS), Google Search, YouTube transcripts
|
||||
- **External Services** - ElevenLabs (TTS), Inworld (TTS), Google Search, YouTube transcripts
|
||||
|
||||
Located at `/src/server/trpc/trpc.router-edge.ts`
|
||||
|
||||
|
||||
+19
-10
@@ -1,5 +1,8 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
# check=skip=CopyIgnoredFile
|
||||
|
||||
# Base
|
||||
FROM node:22-alpine AS base
|
||||
FROM node:24-alpine AS base
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
|
||||
# Dependencies
|
||||
@@ -39,19 +42,20 @@ ENV NEXT_PUBLIC_GA4_MEASUREMENT_ID=${NEXT_PUBLIC_GA4_MEASUREMENT_ID}
|
||||
ARG NEXT_PUBLIC_POSTHOG_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_KEY=${NEXT_PUBLIC_POSTHOG_KEY}
|
||||
|
||||
# Optional argument to configure Google Drive Picker at build time (can reuse AUTH_GOOGLE_ID value)
|
||||
ARG NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID
|
||||
ENV NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID=${NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID}
|
||||
|
||||
# Copy development deps and source
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
|
||||
# link ssl3 for latest Alpine
|
||||
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
|
||||
|
||||
# Build the application
|
||||
ENV NODE_ENV=production
|
||||
RUN npm run build
|
||||
|
||||
# Reduce installed packages to production-only
|
||||
RUN npm prune --production
|
||||
RUN npm prune --omit=dev
|
||||
|
||||
|
||||
# Runner
|
||||
@@ -59,18 +63,23 @@ FROM base AS runner
|
||||
WORKDIR /app
|
||||
|
||||
# As user
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 nextjs \
|
||||
&& apk add --no-cache openssl
|
||||
|
||||
# Copy Built app
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next ./.next
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/src/server/prisma ./src/server/prisma
|
||||
# Instead of `COPY --from=builder --chown=nextjs:nodejs /app/.next ./.next`, we only extract some parts, excluding .next/cache which is build time only:
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/BUILD_ID ./.next/
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/server ./.next/server
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/types ./.next/types
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/*.json ./.next/
|
||||
|
||||
# Minimal ENV for production
|
||||
ENV NODE_ENV=production
|
||||
ENV PATH=$PATH:/app/node_modules/.bin
|
||||
|
||||
# Run as non-root user
|
||||
USER nextjs
|
||||
@@ -79,4 +88,4 @@ USER nextjs
|
||||
EXPOSE 3000
|
||||
|
||||
# Start the application
|
||||
CMD ["next", "start"]
|
||||
CMD ["/app/node_modules/.bin/next", "start"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023-2025 Enrico Ros
|
||||
Copyright (c) 2023-2026 Enrico Ros
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
@@ -1,37 +1,124 @@
|
||||
<div align="center">
|
||||
|
||||
<img width="256" height="256" alt="Big-AGI Logo" src="https://big-agi.com/assets/logo-bright-github.svg" />
|
||||
|
||||
<h1><a href="https://big-agi.com">Big-AGI</a></h1>
|
||||
|
||||
[](https://big-agi.com)
|
||||
[](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
|
||||
[](https://vercel.com/new/clone?repository-url=https://github.com/enricoros/big-agi)
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
<br/>
|
||||
[](https://github.com/enricoros/big-agi/commits)
|
||||
[](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
|
||||
[](https://github.com/enricoros/big-AGI/graphs/contributors)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
<br/>
|
||||
|
||||
[](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
|
||||
|
||||
[//]: # ([](https://stats.uptimerobot.com/59MXcnmjrM))
|
||||
[//]: # ([](https://github.com/enricoros/big-AGI/releases/latest))
|
||||
[//]: # ()
|
||||
[//]: # ([](#))
|
||||
[//]: # ([](https://x.com/enricoros))
|
||||
|
||||
</div>
|
||||
|
||||
<br/>
|
||||
|
||||
# Big-AGI Open 🧠
|
||||
|
||||
This is the open-source foundation for **Big-AGI**.
|
||||
This is the open-source foundation of **Big-AGI**, ___the multi-model AI workspace for experts___.
|
||||
|
||||
Big-AGI is the multi-model AI workspace for experts: Engineers architecting systems. Founders making decisions. Researchers validating hypotheses.
|
||||
If you need to think broader, decide faster, and build with confidence, then you need Big-AGI.
|
||||
|
||||
Big-AGI is the multi-model AI workspace for experts who need to think broader, decide smarter, and build with confidence.
|
||||
It comes packed with **world-class features** like Beam, and is praised for its **best-in-class AI chat UX**.
|
||||
**As an independent, non-VC-funded project, Pro subscriptions at $10.99/mo fund development for everyone, including the free and open-source tiers.**
|
||||
|
||||
**What makes Big-AGI different:**
|
||||
**Intelligence**, with [Beam](https://big-agi.com/beam) & Merge for multi-model reasoning and bleeding-edge AI models like Nano Banana, GPT-5 Pro, Sonnet 4.5 -
|
||||
**Control** with personas, data ownership, requests inspection, unlimited usage with API keys, and *no vendor lock-in* -
|
||||
and **Speed** with a local-first, over-powered, zero-latency, madly optimized web app.
|
||||

|
||||
[](https://big-agi.com/beam)
|
||||
[](https://big-agi.com/inspector)
|
||||
|
||||
**Who uses Big-AGI:**
|
||||
### What makes Big-AGI different:
|
||||
|
||||
**Intelligence**: with [Beam & Merge](https://big-agi.com/beam) for multi-model de-hallucination, native search, and bleeding-edge AI models like Opus 4.5, Nano Banana Pro, Kimi K2.5 or GPT 5.2 -
|
||||
**Control**: with personas, data ownership, requests inspection, unlimited usage with API keys, and *no vendor lock-in* -
|
||||
and **Speed**: with a local-first, over-powered, zero-latency, madly optimized web app.
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center" width="25%">
|
||||
<b>🧠 Intelligence</b><br/>
|
||||
<img src="https://img.shields.io/badge/Multi--Model-Trust-4285F4?style=for-the-badge" alt="Multi-Model"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>✨ Experience</b><br/>
|
||||
<img src="https://img.shields.io/badge/Clean-UX-34A853?style=for-the-badge" alt="Clean UX"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>⚡ Performance</b><br/>
|
||||
<img src="https://img.shields.io/badge/Zero-Latency-EA4335?style=for-the-badge" alt="Zero Latency"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>🔒 Control</b><br/>
|
||||
<img src="https://img.shields.io/badge/No-Lock--in-FBBC04?style=for-the-badge" alt="No Lock-in"/>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" valign="top">
|
||||
Beam & Merge<br/>
|
||||
No context junk<br/>
|
||||
Purest AI outputs
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
Flow-state interface<br/>
|
||||
Highly customizable<br/>
|
||||
Best-in-class UX
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
Local-first<br/>
|
||||
Highly parallel<br/>
|
||||
Madly optimized
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
No vendor lock-in<br/>
|
||||
Your API keys<br/>
|
||||
AI Inspector
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
### Who uses Big-AGI:
|
||||
Loved by engineers, founders, researchers, self-hosters, and IT departments for its power, reliability, and transparency.
|
||||
|
||||
<img width="830" height="370" alt="image" src="https://github.com/user-attachments/assets/513c4f77-0970-4a56-b23b-1416c8246174" />
|
||||
|
||||
Choose Big-AGI because you don't need another clone or slop - you need an AI tool that scales with you.
|
||||
|
||||
### Show me a screenshot:
|
||||
Sure - here is a real-world screengrab as I'm writing this, while running a Beam to extract SVG from an image with Sonnet 4.5, Opus 4.1, GPT 5.1, Gemini 2.5 Pro, Nano Banana, etc.
|
||||
<img alt="Real-world screen capture as of Nov 15 2025, 2am" src="https://github.com/user-attachments/assets/853f4160-27cb-4ac9-826b-402f1e63d4af" />
|
||||
|
||||
|
||||
## Get Started
|
||||
|
||||
**Most users: [big-agi.com](https://big-agi.com)** (fastest, zero setup, support the project)
|
||||
Free tier with all core features and more, Pro tier with Cloud Sync.
|
||||
| Tier | Best For | What You Get | Setup |
|
||||
|------------------------------------------------------|-------------------|---------------------------------------------------------------|-------------|
|
||||
| Big-AGI Open (self-host) | **IT** | First to get new models support. Maximum control and privacy. | 5-30 min |
|
||||
| [big-agi.com](https://big-agi.com) Free | **Everyone** | Full core experience, improved Beam, new Personas, best UX. | **2 min**\* |
|
||||
| **[big-agi.com](https://big-agi.com) Pro** $10.99/mo | **Professionals** | Everything + **Sync** across unlimited devices + 1GB storage | **2 min**\* |
|
||||
|
||||
<a href="https://big-agi.com">
|
||||
<img width="210" height="68" alt="image" src="https://github.com/user-attachments/assets/b2f8a7b8-415f-4c92-b228-4f5a54fe2bdd" />
|
||||
</a>
|
||||
\*: **Configuration requires your API keys**. *Big-AGI does not charge for model usage or limit your access*.
|
||||
**Why Pro?** As an independent project, Pro subscriptions fund all development. Early subscribers shape the roadmap directly.
|
||||
|
||||
[](https://big-agi.com)
|
||||
|
||||
**Self-host and developers** (full control)
|
||||
Develop locally or self-host with Docker on your own infrastructure – [guide](docs/installation.md)
|
||||
|
||||
Or fork & run on Vercel:
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
- Develop locally or self-host with Docker on your own infrastructure – [guide](docs/installation.md)
|
||||
- Or fork & run on Vercel:
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
[//]: # (**For the latest Big-AGI:**)
|
||||
|
||||
@@ -41,7 +128,26 @@ Or fork & run on Vercel:
|
||||
|
||||
---
|
||||
|
||||
## What's New in 2.0 · Oct 6, 2025 · Open
|
||||
## Our Philosophy
|
||||
|
||||
We're an independent, non-VC-funded project with a simple belief: **AI should elevate you, not replace you**.
|
||||
|
||||
This is why we built Big-AGI to be **local-first**, madly optimized to 0-latency, launched multi-model first to
|
||||
defeat hallucinations, designed Beam around the **humans in the loop**, re-wrote frameworks and abstractions
|
||||
so you **are not vendor locked-in**, and obsessed over a powerful UI that works, just works.
|
||||
|
||||
NOTE: this is a powerful tool - if you need a toy UI or clone, this ain't it.
|
||||
|
||||
|
||||
---
|
||||
|
||||
## Release Notes
|
||||
|
||||
👉 **[See the Live Release Notes](https://big-agi.com/changes)**
|
||||
- Open 2.0.3: **Red Carpet** **Kimi K2.5**, **Gemini 3 Flash**, **GPT 5.2**, Google Drive, Inworld, Novita.ai, Speech/UX improvements
|
||||
- Open 2.0.2: **Speex** multi-vendor speech synthesis, **Opus 4.5**, **Gemini 3 Pro**, **Nano Banana Pro**, **Grok 4.1**, **GPT-5.1**, **Kimi K2** + 280 fixes
|
||||
|
||||
### What's New in 2.0 · Oct 31, 2025 · Open
|
||||
|
||||
- **Big-AGI Open** is ready and more productive and faster than ever, with:
|
||||
- **Beam 2**: multi-modal, program-based, follow-ups, save presets
|
||||
@@ -54,7 +160,9 @@ Or fork & run on Vercel:
|
||||
|
||||
<img width="830" height="385" alt="image" src="https://github.com/user-attachments/assets/ad52761d-7e3f-44d8-b41e-947ce8b4faa1" />
|
||||
|
||||
### Open links: 👉 [installation](docs/installation.md) 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
|
||||
#### **Open** links: 👉 [changelog](https://big-agi.com/changes) 👉 [installation](docs/installation.md) 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
|
||||
|
||||
**For teams and institutions:** Need shared prompts, SSO, or managed deployments? Reach out at enrico@big-agi.com. We're actively collecting requirements from research groups and IT departments.
|
||||
|
||||
<details>
|
||||
<summary>5,000 Commits Milestone</summary>
|
||||
@@ -200,98 +308,85 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
|
||||
</details>
|
||||
|
||||
For full details and former releases, check out the [changelog](docs/changelog.md).
|
||||
For full details and former releases, check out the [archived versions changelog](docs/changelog.md).
|
||||
|
||||
## 👉 Key Features
|
||||
## 👉 Supported Models & Integrations
|
||||
|
||||
|  |  |  |  |  |
|
||||
Delightful UX with latest models exclusive features like Beam for **multi-model AI validation**.
|
||||
> 
|
||||
> [](https://big-agi.com/beam)
|
||||
|
||||
|  |  |  |  |  |
|
||||
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
|
||||
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
|
||||
|
||||

|
||||
|
||||
You can easily configure 100s of AI models in big-AGI:
|
||||
### AI Models & Vendors
|
||||
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Anthropic](https://anthropic.com) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| Language services | [Alibaba](https://www.alibabacloud.com/en/product/modelstudio) · [DeepSeek](https://deepseek.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) · [xAI](https://x.ai/) |
|
||||
| Image services | OpenAI · Google Gemini |
|
||||
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
|
||||
Configure 100s of AI models from 18+ providers:
|
||||
|
||||
Add extra functionality with these integrations:
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) · [Ollama](https://ollama.com/) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) (non-open) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Anthropic](https://anthropic.com) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| LLM services | [Alibaba](https://www.alibabacloud.com/en/product/modelstudio) · [DeepSeek](https://deepseek.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [Moonshot](https://www.moonshot.cn/) · [OpenPipe](https://openpipe.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) · [xAI](https://x.ai/) |
|
||||
| Image services | OpenAI · Google Gemini |
|
||||
| Speech services | [ElevenLabs](https://elevenlabs.io) · [Inworld](https://inworld.ai) · [OpenAI TTS](https://platform.openai.com/docs/guides/text-to-speech) · LocalAI · Browser (Web Speech API) |
|
||||
|
||||
| **More** | _integrations_ |
|
||||
|:-------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
|
||||
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
|
||||
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
|
||||
| Tracking | [Helicone](https://www.helicone.ai) (LLM Observability) |
|
||||
### Additional Integrations
|
||||
|
||||
[//]: # (- [x] **Flow-state UX** for uncompromised productivity)
|
||||
|
||||
[//]: # (- [x] **AI Personas**: Tailor your AI interactions with customizable personas)
|
||||
|
||||
[//]: # (- [x] **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface)
|
||||
|
||||
[//]: # (- [x] **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads)
|
||||
|
||||
[//]: # (- [x] **Privacy First**: Self-host and use your own API keys for full control)
|
||||
|
||||
[//]: # (- [x] **Advanced Tools**: Execute code, import PDFs, and summarize documents)
|
||||
|
||||
[//]: # (- [x] **Seamless Integrations**: Enhance functionality with various third-party services)
|
||||
|
||||
[//]: # (- [x] **Open Roadmap**: Contribute to the progress of big-AGI)
|
||||
|
||||
<br/>
|
||||
|
||||
## 🚀 Installation
|
||||
|
||||
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
|
||||
The guide covers various installation options, whether you're spinning it up on
|
||||
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
|
||||
through Docker.
|
||||
|
||||
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
|
||||
to set up big-AGI quickly and easily.
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
|
||||
|
||||
<br/>
|
||||
|
||||
# 🌟 Get Involved!
|
||||
|
||||
[//]: # ([](https://discord.gg/MkH4qj2Jp9))
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
|
||||
- [ ] 📢️ [**Chat with us** on Discord](https://discord.gg/MkH4qj2Jp9)
|
||||
- [ ] ⭐ **Give us a star** on GitHub 👆
|
||||
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
|
||||
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
|
||||
|
||||
<br/>
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/stargazers))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/network))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/pulls))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/LICENSE))
|
||||
|
||||
## 📜 Licensing
|
||||
|
||||
Big-AGI incorporates third-party software components that are subject
|
||||
to separate license terms. For detailed information about these
|
||||
components and their respective licenses, please refer to
|
||||
the [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md).
|
||||
| **More** | _integrations_ |
|
||||
|:--------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
|
||||
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
|
||||
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
|
||||
| Observability | [Helicone](https://www.helicone.ai) |
|
||||
|
||||
---
|
||||
|
||||
2023-2025 · Enrico Ros x [Big-AGI](https://big-agi.com) · Like this project? Leave a star! 💫⭐
|
||||
## 🚀 Installation
|
||||
|
||||
Self-host with Docker, deploy on Vercel, or develop locally. Full setup guide:
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or use the hosted version at [big-agi.com](https://big-agi.com) with your API keys.
|
||||
|
||||
---
|
||||
|
||||
## 👋 Community & Contributing
|
||||
|
||||
### Connect
|
||||
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
|
||||
⭐ [Star the repo](https://github.com/enricoros/big-agi) if Big-AGI is useful to you
|
||||
|
||||
### Contribute
|
||||
|
||||
**🤖 AI-Powered Issue Assistance**
|
||||
|
||||
When you open an issue, our custom AI triage system (powered by [Claude Code](https://github.com/anthropics/claude-code-action) with Big-AGI architecture documentation) analyzes it, searches the codebase, and provides solutions - typically within 30 minutes. We've trained the system on our modules and subsystems so it handles most issues effectively. Your feedback drives development!
|
||||
|
||||
[](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
|
||||
[](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
|
||||
[](https://github.com/users/enricoros/projects/4/views/4)
|
||||
[](docs/customizations.md)
|
||||
[](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
#### Contributors
|
||||
|
||||
<a href="https://github.com/enricoros/big-agi/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=enricoros/big-agi&max=48&columns=12" />
|
||||
</a>
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
MIT License · [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md)
|
||||
|
||||
**2023-2026** · Enrico Ros × [Big-AGI](https://big-agi.com)
|
||||
|
||||
@@ -2,7 +2,7 @@ import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterCloud } from '~/server/trpc/trpc.router-cloud';
|
||||
import { createTRPCFetchContext } from '~/server/trpc/trpc.server';
|
||||
import { posthogCaptureServerException } from '~/server/posthog/posthog.server';
|
||||
import { posthogServerSendException } from '~/server/posthog/posthog.server';
|
||||
|
||||
const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
|
||||
endpoint: '/api/cloud',
|
||||
@@ -16,15 +16,15 @@ const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
|
||||
console.error(`❌ tRPC-cloud failed on ${path ?? 'unk-path'}: ${error.message}`);
|
||||
|
||||
// -> Capture node errors
|
||||
await posthogCaptureServerException(error, {
|
||||
await posthogServerSendException(error, undefined, {
|
||||
domain: 'trpc-onerror',
|
||||
runtime: 'nodejs',
|
||||
endpoint: path ?? 'unknown',
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
additionalProperties: {
|
||||
errorCode: error.code,
|
||||
errorType: type,
|
||||
error_code: error.code,
|
||||
error_type: type,
|
||||
},
|
||||
});
|
||||
},
|
||||
@@ -33,7 +33,7 @@ const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
|
||||
|
||||
// NOTE: the following statement breaks the build on non-pro deployments, and conditionals don't work either
|
||||
// so we resorted to raising the timeout from 10s to 60s in the vercel.json file instead
|
||||
export const maxDuration = 60;
|
||||
// export const maxDuration = 60;
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
|
||||
@@ -10,9 +10,11 @@ const handlerEdgeRoutes = (req: Request) => fetchRequestHandler({
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
? ({ path, error }) => console.error(`\n❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
// NOTE: we don't set maxDuration explicitly here - however we set it in the Vercel project settings, raising to the limit of 300s
|
||||
// export const maxDuration = 60;
|
||||
export const runtime = 'edge';
|
||||
export { handlerEdgeRoutes as GET, handlerEdgeRoutes as POST };
|
||||
+1
-3
@@ -2,8 +2,6 @@
|
||||
#
|
||||
# For more examples, such running big-AGI alongside a web browsing service, see the `docs/docker` folder.
|
||||
|
||||
version: '3.9'
|
||||
|
||||
services:
|
||||
big-agi:
|
||||
image: ghcr.io/enricoros/big-agi:latest
|
||||
@@ -11,4 +9,4 @@ services:
|
||||
- "3000:3000"
|
||||
env_file:
|
||||
- .env
|
||||
command: [ "next", "start", "-p", "3000" ]
|
||||
command: [ "next", "start", "-p", "3000" ]
|
||||
|
||||
+4
-1
@@ -2,6 +2,8 @@
|
||||
|
||||
Information you need to get started, configure, and use big-AGI productively.
|
||||
|
||||
👉 **[Changelog](https://big-agi.com/changes)** - See what's new
|
||||
|
||||
## Getting Started
|
||||
|
||||
Essential guides:
|
||||
@@ -41,7 +43,7 @@ How to set up AI models and features in big-AGI.
|
||||
- **[Web Browsing](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud
|
||||
- **Web Search**: Google Search API (see '[Environment Variables](environment-variables.md)')
|
||||
- **Image Generation**: GPT Image (gpt-image-1), DALL·E 3 and 2
|
||||
- **Voice Synthesis**: ElevenLabs API for voice generation
|
||||
- **Voice Synthesis**: ElevenLabs, Inworld, OpenAI TTS, LocalAI, or browser Web Speech API
|
||||
|
||||
## Deployment & Customization
|
||||
|
||||
@@ -64,6 +66,7 @@ For deploying a custom big-AGI instance:
|
||||
|
||||
## Community & Support
|
||||
|
||||
- Check the [changelog](https://big-agi.com/changes) for the latest updates
|
||||
- Visit our [GitHub repository](https://github.com/enricoros/big-AGI) for source code and issue tracking
|
||||
- Join our [Discord](https://discord.gg/MkH4qj2Jp9) for discussions and help
|
||||
|
||||
|
||||
+14
-4
@@ -1,14 +1,24 @@
|
||||
## Changelog
|
||||
## Archived Versions - Changelog
|
||||
|
||||
This is a high-level changelog. It calls out some of the high-level features batched
|
||||
by release.
|
||||
|
||||
- For the live changelog, see [big-agi.com/changes](https://big-agi.com/changes)
|
||||
- For the live roadmap, please see [the GitHub project](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
### 1.17.0 - Jun 2024
|
||||
> NOTE: with the release of 2.0.0 we are switching to [big-agi.com/changes](https://big-agi.com/changes) for the
|
||||
> continuously updated changelog.
|
||||
|
||||
- milestone: [1.17.0](https://github.com/enricoros/big-agi/milestone/17)
|
||||
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
|
||||
### What's New in 2 · Oct 31, 2025 · Open
|
||||
|
||||
- **Big-AGI Open** is ready and more productive and faster than ever, with:
|
||||
- **Beam 2**: multi-modal, program-based, follow-ups, save presets
|
||||
- Top-notch AI models support including **agentic models** and **reasoning models**
|
||||
- **Image Generation** and editing with Nano Banana and gpt-image-1
|
||||
- **Web Search** with citations for supported models
|
||||
- **UI** & Mobile UI overhaul with peeking and side panels
|
||||
- And all of the [Big-AGI 2 changes](https://github.com/enricoros/big-AGI/issues/567#issuecomment-2262187617) and more
|
||||
- Built for the future, madly optimized
|
||||
|
||||
### What's New in 1.16.1...1.16.9 · Jan 21, 2025 (patch releases)
|
||||
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
# Google Drive Integration
|
||||
|
||||
Attach files from Google Drive directly in the chat composer.
|
||||
|
||||
## Setup
|
||||
|
||||
### 1. Enable APIs
|
||||
|
||||
In [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services > Library**
|
||||
2. Enable **Google Drive API** and **Google Picker API**
|
||||
|
||||
### 2. Configure OAuth
|
||||
|
||||
1. Go to **APIs & Services > OAuth consent screen**
|
||||
2. Create consent screen (External or Internal)
|
||||
3. Add scope: `https://www.googleapis.com/auth/drive.file`
|
||||
4. Add test users if in testing mode
|
||||
|
||||
### 3. Create Credentials
|
||||
|
||||
1. Go to **APIs & Services > Credentials**
|
||||
2. Create **OAuth client ID** (Web application)
|
||||
3. Add JavaScript origins:
|
||||
- `http://localhost:3000` (dev)
|
||||
- `https://your-domain.com` (prod)
|
||||
|
||||
### 4. Set Environment Variable
|
||||
|
||||
```bash
|
||||
NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID=your-client-id.apps.googleusercontent.com
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- Click **Drive** button in attachment menu
|
||||
|
||||
## Supported Files
|
||||
|
||||
| Type | Export Format |
|
||||
|-----------------|---------------------|
|
||||
| Regular files | Downloaded directly |
|
||||
| Google Docs | Markdown (.md) |
|
||||
| Google Sheets | CSV (.csv) |
|
||||
| Google Slides | PDF (.pdf) |
|
||||
| Google Drawings | SVG (.svg) |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Picker won't open**: Check `NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID` is set and APIs are enabled.
|
||||
|
||||
**OAuth errors**: Verify your domain is in authorized JavaScript origins. Add yourself as test user if app is in testing mode.
|
||||
|
||||
**Download fails**: Check file permissions and that Drive API is enabled.
|
||||
@@ -54,7 +54,7 @@ If the running LocalAI instance is configured with a [Model Gallery](https://loc
|
||||
|
||||
At the time of writing, LocalAI does not publish the model `context window size`.
|
||||
Every model is assumed to be capable of chatting, and with a context window of 4096 tokens.
|
||||
Please update the [src/modules/llms/transports/server/openai/models/models.data.ts](../src/modules/llms/server/openai/models/models.data.ts)
|
||||
Please update the [src/modules/llms/server/models.mappings.ts](../src/modules/llms/server/models.mappings.ts)
|
||||
file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
|
||||
|
||||
# 🤝 Support
|
||||
|
||||
@@ -35,6 +35,7 @@ GROQ_API_KEY=
|
||||
LOCALAI_API_HOST=
|
||||
LOCALAI_API_KEY=
|
||||
MISTRAL_API_KEY=
|
||||
MOONSHOT_API_KEY=
|
||||
OLLAMA_API_HOST=
|
||||
OPENPIPE_API_KEY=
|
||||
OPENROUTER_API_KEY=
|
||||
@@ -65,8 +66,9 @@ HTTP_BASIC_AUTH_PASSWORD=
|
||||
# Frontend variables
|
||||
NEXT_PUBLIC_MOTD=
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=
|
||||
NEXT_PUBLIC_POSTHOG_KEY=
|
||||
NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID=
|
||||
NEXT_PUBLIC_PLANTUML_SERVER_URL=
|
||||
NEXT_PUBLIC_POSTHOG_KEY=
|
||||
```
|
||||
|
||||
## Backend Variables
|
||||
@@ -105,6 +107,7 @@ requiring the user to enter an API key
|
||||
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
|
||||
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
|
||||
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
|
||||
| `MOONSHOT_API_KEY` | The API key for Moonshot AI | Optional |
|
||||
| `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md) | |
|
||||
| `OPENPIPE_API_KEY` | The API key for OpenPipe | Optional |
|
||||
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
|
||||
@@ -130,10 +133,11 @@ Enable the app to Talk, Draw, and Google things up.
|
||||
|
||||
| Variable | Description |
|
||||
|:---------------------------|:------------------------------------------------------------------------------------------------------------------------|
|
||||
| **Text-To-Speech** | [ElevenLabs](https://elevenlabs.io/) is a high quality speech synthesis service |
|
||||
| **Text-To-Speech** | ElevenLabs, Inworld, OpenAI TTS, LocalAI, and browser Web Speech API are supported |
|
||||
| `ELEVENLABS_API_KEY` | ElevenLabs API Key - used for calls, etc. |
|
||||
| `ELEVENLABS_API_HOST` | Custom host for ElevenLabs |
|
||||
| `ELEVENLABS_VOICE_ID` | Default voice ID for ElevenLabs |
|
||||
| | *Note: OpenAI TTS and LocalAI TTS reuse credentials from your configured LLM services (no separate env vars needed)* |
|
||||
| **Google Custom Search** | [Google Programmable Search Engine](https://programmablesearchengine.google.com/about/) produces links to pages |
|
||||
| `GOOGLE_CLOUD_API_KEY` | Google Cloud API Key, used with the '/react' command - [Link to GCP](https://console.cloud.google.com/apis/credentials) |
|
||||
| `GOOGLE_CSE_ID` | Google Custom/Programmable Search Engine ID - [Link to PSE](https://programmablesearchengine.google.com/) |
|
||||
@@ -152,8 +156,9 @@ The value of these variables are passed to the frontend (Web UI) - make sure the
|
||||
| `NEXT_PUBLIC_DEBUG_BREAKS` | (optional, development) When set to 'true', enables automatic debugger breaks on DEV/error/critical logs in development builds |
|
||||
| `NEXT_PUBLIC_MOTD` | Message of the Day - displays a dismissible banner at the top of the app (see [customizations](customizations.md) for the template variables). Example: 🔔 Welcome to our deployment! Version {{app_build_pkgver}} built on {{app_build_time}}. |
|
||||
| `NEXT_PUBLIC_GA4_MEASUREMENT_ID` | (optional) The measurement ID for Google Analytics 4. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_POSTHOG_KEY` | (optional) Key for PostHog analytics. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_GOOGLE_DRIVE_CLIENT_ID` | (optional) Google OAuth Client ID for Drive Picker. Can reuse `AUTH_GOOGLE_ID`. See [Google Drive](config-feature-google-drive.md) |
|
||||
| `NEXT_PUBLIC_PLANTUML_SERVER_URL` | The URL of the PlantUML server, used for rendering UML diagrams. Allows using custom local servers. |
|
||||
| `NEXT_PUBLIC_POSTHOG_KEY` | (optional) Key for PostHog analytics. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
|
||||
> Important: these variables must be set at build time, which is required by Next.js to pass them to the frontend.
|
||||
> This is in contrast to the backend variables, which can be set when starting the local server/container.
|
||||
|
||||
@@ -136,11 +136,6 @@ Deploy big-AGI on a Kubernetes cluster for enhanced scalability and management.
|
||||
|
||||
For more detailed instructions on Kubernetes deployment, including updating and troubleshooting, refer to our [Kubernetes Deployment Guide](deploy-k8s.md).
|
||||
|
||||
### Midori AI Subsystem for Docker Deployment
|
||||
|
||||
Follow the instructions found on [Midori AI Subsystem Site](https://io.midori-ai.xyz/subsystem/manager/)
|
||||
for your host OS. After completing the setup process, install the Big-AGI docker backend to the Midori AI Subsystem.
|
||||
|
||||
## Enterprise-Grade Installation
|
||||
|
||||
For businesses seeking a fully-managed, scalable solution, consider our managed installations.
|
||||
|
||||
@@ -28,6 +28,7 @@ stringData:
|
||||
LOCALAI_API_HOST: ""
|
||||
LOCALAI_API_KEY: ""
|
||||
MISTRAL_API_KEY: ""
|
||||
MOONSHOT_API_KEY: ""
|
||||
OLLAMA_API_HOST: ""
|
||||
OPENPIPE_API_KEY: ""
|
||||
OPENROUTER_API_KEY: ""
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
import { defineConfig } from "eslint/config";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import js from "@eslint/js";
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all
|
||||
});
|
||||
|
||||
export default defineConfig([{
|
||||
extends: compat.extends("next/core-web-vitals"),
|
||||
}]);
|
||||
@@ -14,6 +14,9 @@ Internal documentation for Big-AGI architecture and systems, for use by AI agent
|
||||
- **[AIX.md](modules/AIX.md)** - AIX streaming architecture documentation
|
||||
- **[AIX-callers-analysis.md](modules/AIX-callers-analysis.md)** - Analysis of AIX entry points, call chains, common and different rendering, error handling, etc.
|
||||
|
||||
#### CSF - Client-Side Fetch
|
||||
- **[CSF.md](systems/client-side-fetch.md)** - Direct browser-to-API communication for LLM requests
|
||||
|
||||
### Systems Documentation
|
||||
|
||||
#### Core Platform Systems
|
||||
|
||||
@@ -60,7 +60,7 @@ Shows only parameters that are:
|
||||
|
||||
The AIX client transforms DLLM parameters to wire protocol format. This layer handles parameter precedence rules and name transformations:
|
||||
|
||||
```typescript
|
||||
```
|
||||
// Parameter precedence: newer 4-value version takes priority over 3-value
|
||||
...((llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort) ?
|
||||
{ vndOaiReasoningEffort: llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort } : {})
|
||||
@@ -105,7 +105,7 @@ When a model is loaded:
|
||||
The system maintains type safety through:
|
||||
- `DModelParameterId` union from registry keys
|
||||
- `DModelParameterValue<T>` conditional types for values
|
||||
- `DModelParameterSpec<T>` interfaces for specifications
|
||||
- `DModelParameterSpecAny` interfaces for specifications
|
||||
- Runtime validation via Zod schemas at API boundaries
|
||||
|
||||
## Model Variant Pattern
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
# CSF - Client-Side Fetch
|
||||
|
||||
Client-Side Fetch (CSF) enables direct browser-to-API communication, bypassing the server for LLM requests. When enabled, the browser makes requests directly to vendor APIs (e.g., `api.openai.com`, `api.groq.com`) instead of routing through the Next.js server. This reduces latency, decreases server load, and is particularly useful for local models where the browser can communicate directly with Ollama or LM Studio.
|
||||
|
||||
## Implementation
|
||||
|
||||
CSF is implemented as an opt-in setting stored as `csf: boolean` in each vendor's service settings. The vendor interface exposes `csfAvailable?: (setup) => boolean` to determine if CSF can be enabled (typically checking if an API key or host is configured). The actual execution happens in `aix.client.direct-chatGenerate.ts` which dynamically imports when CSF is active, making direct fetch calls using the same wire protocols as the server.
|
||||
|
||||
All 16 supported vendors (OpenAI, Anthropic, Gemini, Ollama, LocalAI, Deepseek, Groq, Mistral, xAI, OpenRouter, Perplexity, Together AI, Alibaba, Moonshot, OpenPipe, LM Studio) support CSF. Cloud vendors require CORS support from the API provider (all tested vendors return `access-control-allow-origin: *`). Local vendors (Ollama, LocalAI, LM Studio) require CORS to be enabled on the local server.
|
||||
|
||||
## UI
|
||||
|
||||
The CSF toggle appears in each vendor's setup panel under "Advanced" settings, labeled "Direct Connection". It becomes visible when the prerequisites are met (API key present for cloud vendors, host configured for local vendors). The setting is managed through `useModelServiceClientSideFetch` hook which provides `csfAvailable`, `csfActive`, `csfToggle`, and `csfReset` for UI consumption.
|
||||
+28
-7
@@ -1,4 +1,5 @@
|
||||
import type { NextConfig } from 'next';
|
||||
import type { WebpackConfigContext } from 'next/dist/server/config-shared';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
|
||||
@@ -29,7 +30,7 @@ buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
let nextConfig: NextConfig = {
|
||||
reactStrictMode: true,
|
||||
reactStrictMode: !process.env.NO_STRICT_MODE, // default: enabled
|
||||
|
||||
// [exports] https://nextjs.org/docs/advanced-features/static-html-export
|
||||
...(buildType && {
|
||||
@@ -47,7 +48,7 @@ let nextConfig: NextConfig = {
|
||||
// NOTE: we may not be needing this anymore, as we use '@cloudflare/puppeteer'
|
||||
serverExternalPackages: ['puppeteer-core'],
|
||||
|
||||
webpack: (config: any, { isServer }: { isServer: boolean }) => {
|
||||
webpack: (config: any, { isServer, webpack /*, dev, nextRuntime*/ }: WebpackConfigContext) => {
|
||||
// @mui/joy: anything material gets redirected to Joy
|
||||
config.resolve.alias['@mui/material'] = '@mui/joy';
|
||||
|
||||
@@ -57,8 +58,28 @@ let nextConfig: NextConfig = {
|
||||
layers: true,
|
||||
};
|
||||
|
||||
// fix warnings for async functions in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
// client-side bundling
|
||||
if (!isServer) {
|
||||
/**
|
||||
* AIX client-side
|
||||
* We replace certain server-only modules with client-side mocks, to reuse the exact same imports
|
||||
* while avoiding importing server-only code which would break the build or break at runtime.
|
||||
*/
|
||||
const serverToClientMocks: ReadonlyArray<[RegExp, string]> = [
|
||||
[/\/posthog\.server/, '/posthog.client-mock'],
|
||||
[/\/env\.server/, '/env.client-mock'],
|
||||
];
|
||||
config.plugins = [
|
||||
...config.plugins,
|
||||
...serverToClientMocks.map(([pattern, replacement]) =>
|
||||
new webpack.NormalModuleReplacementPlugin(pattern, (resource: any) => {
|
||||
// console.log(' 🧠 [WEBPACK REPLACEMENT]:', resource.request, '->', resource.request.replace(pattern, replacement));
|
||||
resource.request = resource.request.replace(pattern, replacement);
|
||||
}),
|
||||
),
|
||||
];
|
||||
|
||||
// cosmetic: fix warnings for (absent!) top-level awaits in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
config.output.environment = { ...config.output.environment, asyncFunction: true };
|
||||
}
|
||||
|
||||
@@ -108,9 +129,9 @@ let nextConfig: NextConfig = {
|
||||
// },
|
||||
};
|
||||
|
||||
// Validate environment variables, if set at build time. Will be actually read and used at runtime.
|
||||
import { verifyBuildTimeVars } from '~/server/env';
|
||||
verifyBuildTimeVars();
|
||||
// Validate environment variables at build time, if required. Server env vars will be actually read and used at runtime (cloud/edge).
|
||||
import { env as validateEnv } from '~/server/env.server';
|
||||
void validateEnv; // Triggers env validation - throws if required vars are missing
|
||||
|
||||
// PostHog error reporting with source maps for production builds
|
||||
import { withPostHogConfig } from '@posthog/nextjs-config';
|
||||
@@ -120,7 +141,7 @@ if (process.env.POSTHOG_API_KEY && process.env.POSTHOG_ENV_ID) {
|
||||
personalApiKey: process.env.POSTHOG_API_KEY,
|
||||
envId: process.env.POSTHOG_ENV_ID,
|
||||
host: 'https://us.i.posthog.com', // backtrace upload host
|
||||
verbose: false,
|
||||
logLevel: 'error', // lowered, too noisy
|
||||
sourcemaps: {
|
||||
enabled: process.env.NODE_ENV === 'production',
|
||||
project: 'big-agi',
|
||||
|
||||
Generated
+2188
-1302
File diff suppressed because it is too large
Load Diff
+32
-29
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "big-agi",
|
||||
"version": "2.0.0",
|
||||
"version": "2.0.3",
|
||||
"private": true,
|
||||
"author": "Enrico Ros <enrico.ros@gmail.com>",
|
||||
"repository": "https://github.com/enricoros/big-agi",
|
||||
@@ -14,7 +14,8 @@
|
||||
"postinstall": "prisma generate --no-hints",
|
||||
"db:push": "prisma db push",
|
||||
"db:studio": "prisma studio",
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local"
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local",
|
||||
"sharp:win32_x64": "npm install --os=win32 --cpu=x64 sharp"
|
||||
},
|
||||
"prisma": {
|
||||
"schema": "src/server/prisma/schema.prisma"
|
||||
@@ -28,66 +29,68 @@
|
||||
"@emotion/react": "^11.14.0",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
"@googleworkspace/drive-picker-react": "^0.2.0",
|
||||
"@mui/icons-material": "^5.18.0",
|
||||
"@mui/joy": "^5.0.0-beta.52",
|
||||
"@next/bundle-analyzer": "~15.1.8",
|
||||
"@next/bundle-analyzer": "~15.1.12",
|
||||
"@prisma/client": "~5.22.0",
|
||||
"@tanstack/react-query": "5.90.3",
|
||||
"@tanstack/react-query": "5.90.10",
|
||||
"@tanstack/react-virtual": "^3.13.18",
|
||||
"@trpc/client": "11.5.1",
|
||||
"@trpc/next": "11.5.1",
|
||||
"@trpc/react-query": "11.5.1",
|
||||
"@trpc/server": "11.5.1",
|
||||
"@vercel/analytics": "^1.5.0",
|
||||
"@vercel/speed-insights": "^1.2.0",
|
||||
"@vercel/analytics": "^1.6.1",
|
||||
"@vercel/speed-insights": "^1.3.1",
|
||||
"browser-fs-access": "^0.38.0",
|
||||
"cheerio": "^1.1.2",
|
||||
"csv-stringify": "^6.6.0",
|
||||
"dexie": "^4.0.11",
|
||||
"dexie-react-hooks": "^1.1.7",
|
||||
"diff": "^8.0.2",
|
||||
"eventemitter3": "^5.0.1",
|
||||
"dexie": "~4.0.11",
|
||||
"dexie-react-hooks": "~1.1.7",
|
||||
"diff": "^8.0.3",
|
||||
"eventemitter3": "^5.0.4",
|
||||
"idb-keyval": "^6.2.2",
|
||||
"mammoth": "^1.11.0",
|
||||
"nanoid": "^5.1.6",
|
||||
"next": "~15.1.8",
|
||||
"next": "~15.1.12",
|
||||
"nprogress": "^0.2.0",
|
||||
"pdfjs-dist": "5.4.54",
|
||||
"posthog-js": "^1.275.3",
|
||||
"posthog-node": "^5.10.0",
|
||||
"posthog-js": "^1.336.4",
|
||||
"posthog-node": "^5.24.7",
|
||||
"prismjs": "^1.30.0",
|
||||
"puppeteer-core": "^24.25.0",
|
||||
"puppeteer-core": "^24.36.1",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-hook-form": "^7.65.0",
|
||||
"react-hook-form": "^7.71.1",
|
||||
"react-markdown": "^10.1.0",
|
||||
"react-player": "^3.3.3",
|
||||
"react-player": "^3.4.0",
|
||||
"react-resizable-panels": "^3.0.6",
|
||||
"react-timeago": "^8.3.0",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"remark-gfm": "^4.0.1",
|
||||
"remark-mark-highlight": "^0.1.1",
|
||||
"remark-math": "^6.0.0",
|
||||
"sharp": "^0.33.5",
|
||||
"superjson": "^2.2.2",
|
||||
"tesseract.js": "^6.0.1",
|
||||
"sharp": "^0.34.5",
|
||||
"superjson": "^2.2.6",
|
||||
"tesseract.js": "^7.0.0",
|
||||
"tiktoken": "^1.0.22",
|
||||
"turndown": "^7.2.1",
|
||||
"zod": "^4.1.12",
|
||||
"turndown": "^7.2.2",
|
||||
"zod": "^4.3.6",
|
||||
"zustand": "5.0.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@posthog/nextjs-config": "^1.3.2",
|
||||
"@types/node": "^24.7.2",
|
||||
"@posthog/nextjs-config": "~1.6.4",
|
||||
"@types/node": "^25.1.0",
|
||||
"@types/nprogress": "^0.2.3",
|
||||
"@types/prismjs": "^1.26.5",
|
||||
"@types/react": "^19.2.2",
|
||||
"@types/react": "^19.2.10",
|
||||
"@types/react-csv": "^1.1.10",
|
||||
"@types/react-dom": "^19.2.2",
|
||||
"@types/turndown": "^5.0.5",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@types/turndown": "^5.0.6",
|
||||
"cross-env": "^10.1.0",
|
||||
"eslint": "^9.37.0",
|
||||
"eslint-config-next": "~15.1.8",
|
||||
"prettier": "^3.6.2",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-config-next": "~15.1.12",
|
||||
"prettier": "^3.8.1",
|
||||
"prisma": "~5.22.0",
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
|
||||
+9
-4
@@ -1,12 +1,17 @@
|
||||
import * as React from 'react';
|
||||
import Head from 'next/head';
|
||||
import dynamic from 'next/dynamic';
|
||||
import { MyAppProps } from 'next/app';
|
||||
import { Analytics as VercelAnalytics } from '@vercel/analytics/next';
|
||||
import { SpeedInsights as VercelSpeedInsights } from '@vercel/speed-insights/next';
|
||||
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { apiQuery } from '~/common/util/trpc.client';
|
||||
|
||||
|
||||
// [server-client-safe] dynamic imports to avoid webpack bundling issues with next/navigation
|
||||
const VercelAnalytics = dynamic(() => import('@vercel/analytics/next').then(mod => mod.Analytics), { ssr: false });
|
||||
const VercelSpeedInsights = dynamic(() => import('@vercel/speed-insights/next').then(mod => mod.SpeedInsights), { ssr: false });
|
||||
|
||||
|
||||
import 'katex/dist/katex.min.css';
|
||||
import '~/common/styles/CodePrism.css';
|
||||
import '~/common/styles/GithubMarkdown.css';
|
||||
@@ -55,10 +60,10 @@ const Big_AGI_App = ({ Component, emotionCache, pageProps }: MyAppProps) => {
|
||||
</ProviderSingleTab>
|
||||
</ProviderTheming>
|
||||
|
||||
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
|
||||
{hasPostHogAnalytics && <OptionalPostHogAnalytics />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
|
||||
</>;
|
||||
};
|
||||
|
||||
@@ -111,7 +111,6 @@ MyDocument.getInitialProps = async (ctx: DocumentContext) => {
|
||||
<style
|
||||
data-emotion={`${style.key} ${style.ids.join(' ')}`}
|
||||
key={style.key}
|
||||
// eslint-disable-next-line react/no-danger
|
||||
dangerouslySetInnerHTML={{ __html: style.css }}
|
||||
/>
|
||||
));
|
||||
|
||||
@@ -18,7 +18,7 @@ import { ROUTE_APP_CHAT, ROUTE_INDEX } from '~/common/app.routes';
|
||||
import { Release } from '~/common/app.release';
|
||||
|
||||
// capabilities access
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs, useCapabilityTextToImage } from '~/common/components/useCapabilities';
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityTextToImage } from '~/common/components/useCapabilities';
|
||||
|
||||
// stores access
|
||||
import { getLLMsDebugInfo } from '~/common/stores/llms/store-llms';
|
||||
@@ -95,7 +95,6 @@ function AppDebug() {
|
||||
const cProduct = {
|
||||
capabilities: {
|
||||
mic: useCapabilityBrowserSpeechRecognition(),
|
||||
elevenLabs: useCapabilityElevenLabs(),
|
||||
textToImage: useCapabilityTextToImage(),
|
||||
},
|
||||
models: getLLMsDebugInfo(),
|
||||
|
||||
@@ -20,7 +20,7 @@ function initTestConversation(): DConversation {
|
||||
return conversation;
|
||||
}
|
||||
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi = createBeamVanillaStore()): BeamStoreApi {
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi): BeamStoreApi {
|
||||
beamStore.getState().open(messages, null, false, (content) => alert(content));
|
||||
return beamStore;
|
||||
}
|
||||
|
||||
@@ -6,13 +6,15 @@ import ChatIcon from '@mui/icons-material/Chat';
|
||||
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
|
||||
|
||||
import { useSpeexGlobalEngine } from '~/modules/speex/store-module-speex';
|
||||
|
||||
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
|
||||
import { animationColorRainbow } from '~/common/util/animUtils';
|
||||
import { navigateBack } from '~/common/app.routes';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useCapabilityBrowserSpeechRecognition } from '~/common/components/useCapabilities';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useUICounter } from '~/common/stores/store-ui';
|
||||
|
||||
@@ -45,7 +47,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
// external state
|
||||
const recognition = useCapabilityBrowserSpeechRecognition();
|
||||
const synthesis = useCapabilityElevenLabs();
|
||||
const speexGlobalEngine = useSpeexGlobalEngine();
|
||||
const chatIsEmpty = useChatStore(state => {
|
||||
if (!props.conversationId)
|
||||
return false;
|
||||
@@ -56,17 +58,18 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
// derived state
|
||||
const outOfTheBlue = !props.conversationId;
|
||||
const overriddenEmptyChat = chatEmptyOverride || !chatIsEmpty;
|
||||
const overriddenEmptyChat = outOfTheBlue || chatEmptyOverride || !chatIsEmpty;
|
||||
const overriddenRecognition = recognitionOverride || recognition.mayWork;
|
||||
const allGood = overriddenEmptyChat && overriddenRecognition && synthesis.mayWork;
|
||||
const fatalGood = overriddenRecognition && synthesis.mayWork;
|
||||
const synthesisShallWork = !!speexGlobalEngine;
|
||||
const allGood = overriddenEmptyChat && overriddenRecognition && synthesisShallWork;
|
||||
const fatalGood = overriddenRecognition && synthesisShallWork;
|
||||
|
||||
|
||||
const handleOverrideChatEmpty = React.useCallback(() => setChatEmptyOverride(true), []);
|
||||
|
||||
const handleOverrideRecognition = React.useCallback(() => setRecognitionOverride(true), []);
|
||||
|
||||
const handleConfigureElevenLabs = React.useCallback(() => optimaOpenPreferences('voice'), []);
|
||||
const handleConfigureVoice = React.useCallback(() => optimaOpenPreferences('voice'), []);
|
||||
|
||||
const handleFinishButton = React.useCallback(() => {
|
||||
if (!allGood)
|
||||
@@ -128,17 +131,17 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
{/* Text to Speech status */}
|
||||
<StatusCard
|
||||
icon={<RecordVoiceOverTwoToneIcon />}
|
||||
icon={<PhVoice />}
|
||||
text={
|
||||
(synthesis.mayWork ? 'Voice synthesis should be ready.' : 'There might be an issue with ElevenLabs voice synthesis.')
|
||||
+ (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
|
||||
(synthesisShallWork ? 'Voice synthesis should be ready.' : 'There might be an issue with voice synthesis.')
|
||||
// + (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
|
||||
}
|
||||
button={synthesis.mayWork ? undefined : (
|
||||
<Button variant='outlined' onClick={handleConfigureElevenLabs} sx={{ mx: 1 }}>
|
||||
button={synthesisShallWork ? undefined : (
|
||||
<Button variant='outlined' onClick={handleConfigureVoice} sx={{ mx: 1 }}>
|
||||
Configure
|
||||
</Button>
|
||||
)}
|
||||
hasIssue={!synthesis.mayWork}
|
||||
hasIssue={!synthesisShallWork}
|
||||
/>
|
||||
|
||||
{/*<Typography>*/}
|
||||
|
||||
@@ -317,7 +317,7 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
issue={354}
|
||||
text='Call App: Support thread and compatibility matrix'
|
||||
note={<>
|
||||
Voice input uses the HTML Web Speech API, and speech output requires an ElevenLabs API Key.
|
||||
Voice input uses the HTML Web Speech API.
|
||||
</>}
|
||||
// note2='Please report any issues you encounter'
|
||||
sx={{
|
||||
|
||||
+20
-32
@@ -7,23 +7,24 @@ import CallEndIcon from '@mui/icons-material/CallEnd';
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import MicNoneIcon from '@mui/icons-material/MicNone';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { useChatLLMDropdown } from '../chat/components/layout-bar/useLLMDropdown';
|
||||
|
||||
import { SystemPurposeId, SystemPurposes } from '../../data';
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
|
||||
|
||||
import { aixChatGenerateContent_DMessage_FromConversation, AixChatGenerateContent_DMessageGuts } from '~/modules/aix/client/aix.client';
|
||||
import { speakText } from '~/modules/speex/speex.client';
|
||||
|
||||
import type { OptimaBarControlMethods } from '~/common/layout/optima/bar/OptimaBarDropdown';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { Link } from '~/common/components/Link';
|
||||
import { OptimaPanelGroupedList } from '~/common/layout/optima/panel/OptimaPanelGroupedList';
|
||||
import { OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/speechrecognition/useSpeechRecognition';
|
||||
import { clipboardInterceptCtrlCForCleanup } from '~/common/util/clipboardUtils';
|
||||
import { conversationTitle, remapMessagesSysToUsr } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, messageFragmentsReduceText, messageWasInterruptedAtStart } from '~/common/stores/chat/chat.message';
|
||||
import { createErrorContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
@@ -43,18 +44,13 @@ import { useAppCallStore } from './state/store-app-call';
|
||||
function CallMenu(props: {
|
||||
pushToTalk: boolean,
|
||||
setPushToTalk: (pushToTalk: boolean) => void,
|
||||
override: boolean,
|
||||
setOverride: (overridePersonaVoice: boolean) => void,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const { grayUI, toggleGrayUI } = useAppCallStore();
|
||||
const { voicesDropdown } = useElevenLabsVoiceDropdown(false, !props.override);
|
||||
|
||||
const handlePushToTalkToggle = () => props.setPushToTalk(!props.pushToTalk);
|
||||
|
||||
const handleChangeVoiceToggle = () => props.setOverride(!props.override);
|
||||
|
||||
return <OptimaPanelGroupedList title='Call'>
|
||||
|
||||
<MenuItem onClick={handlePushToTalkToggle}>
|
||||
@@ -63,17 +59,6 @@ function CallMenu(props: {
|
||||
<Switch checked={props.pushToTalk} onChange={handlePushToTalkToggle} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={handleChangeVoiceToggle}>
|
||||
<ListItemDecorator><RecordVoiceOverTwoToneIcon /></ListItemDecorator>
|
||||
Change Voice
|
||||
<Switch checked={props.override} onChange={handleChangeVoiceToggle} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem>
|
||||
<ListItemDecorator>{' '}</ListItemDecorator>
|
||||
{voicesDropdown}
|
||||
</MenuItem>
|
||||
|
||||
<ListDivider />
|
||||
|
||||
<MenuItem onClick={toggleGrayUI}>
|
||||
@@ -98,7 +83,6 @@ export function Telephone(props: {
|
||||
const [avatarClickCount, setAvatarClickCount] = React.useState<number>(0);// const [micMuted, setMicMuted] = React.useState(false);
|
||||
const [callElapsedTime, setCallElapsedTime] = React.useState<string>('00:00');
|
||||
const [callMessages, setCallMessages] = React.useState<DMessage[]>([]);
|
||||
const [overridePersonaVoice, setOverridePersonaVoice] = React.useState<boolean>(false);
|
||||
const [personaTextInterim, setPersonaTextInterim] = React.useState<string | null>(null);
|
||||
const [pushToTalk, setPushToTalk] = React.useState(true);
|
||||
const [stage, setStage] = React.useState<'ring' | 'declined' | 'connected' | 'ended'>('ring');
|
||||
@@ -118,7 +102,7 @@ export function Telephone(props: {
|
||||
}));
|
||||
const persona = SystemPurposes[props.callIntent.personaId as SystemPurposeId] ?? undefined;
|
||||
const personaCallStarters = persona?.call?.starters ?? undefined;
|
||||
const personaVoiceId = overridePersonaVoice ? undefined : (persona?.voices?.elevenLabs?.voiceId ?? undefined);
|
||||
// const personaVoiceSelector = React.useMemo(() => personaGetVoiceSelector(persona), [persona]);
|
||||
const personaSystemMessage = persona?.systemMessage ?? undefined;
|
||||
|
||||
// hooks and speech
|
||||
@@ -165,7 +149,6 @@ export function Telephone(props: {
|
||||
};
|
||||
|
||||
// [E] pickup -> seed message and call timer
|
||||
// FIXME: Overriding the voice will reset the call - not a desired behavior
|
||||
React.useEffect(() => {
|
||||
if (!isConnected) return;
|
||||
|
||||
@@ -185,11 +168,14 @@ export function Telephone(props: {
|
||||
|
||||
setCallMessages([createDMessageTextContent('assistant', firstMessage)]); // [state] set assistant:hello message
|
||||
|
||||
// fire/forget
|
||||
void elevenLabsSpeakText(firstMessage, personaVoiceId, true, true);
|
||||
// fire/forget - use 'fast' priority for real-time conversation
|
||||
void speakText(firstMessage,
|
||||
undefined,
|
||||
{ label: 'Call', priority: 'fast' },
|
||||
);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [isConnected, personaCallStarters, personaVoiceId]);
|
||||
}, [isConnected, personaCallStarters]);
|
||||
|
||||
// [E] persona streaming response - upon new user message
|
||||
React.useEffect(() => {
|
||||
@@ -254,7 +240,7 @@ export function Telephone(props: {
|
||||
'call',
|
||||
callMessages[0].id,
|
||||
{ abortSignal: responseAbortController.current.signal },
|
||||
(update: AixChatGenerateContent_DMessage, _isDone: boolean) => {
|
||||
(update: AixChatGenerateContent_DMessageGuts, _isDone: boolean) => {
|
||||
const updatedText = messageFragmentsReduceText(update.fragments).trim();
|
||||
if (updatedText)
|
||||
setPersonaTextInterim(finalText = updatedText);
|
||||
@@ -270,9 +256,12 @@ export function Telephone(props: {
|
||||
fullMessage.generator = status.lastDMessage.generator;
|
||||
setCallMessages(messages => [...messages, fullMessage]); // [state] append assistant:call_response
|
||||
|
||||
// fire/forget
|
||||
// fire/forget - use 'fast' priority for real-time conversation
|
||||
if (status.outcome === 'success' && finalText?.length >= 1)
|
||||
void elevenLabsSpeakText(finalText, personaVoiceId, true, true);
|
||||
void speakText(finalText,
|
||||
undefined,
|
||||
{ label: 'Call', priority: 'fast' },
|
||||
);
|
||||
|
||||
}).catch((err: DOMException) => {
|
||||
if (err?.name !== 'AbortError') {
|
||||
@@ -288,7 +277,7 @@ export function Telephone(props: {
|
||||
responseAbortController.current?.abort();
|
||||
responseAbortController.current = null;
|
||||
};
|
||||
}, [isConnected, callMessages, modelId, personaVoiceId, personaSystemMessage, reMessages]);
|
||||
}, [callMessages, isConnected, modelId, personaSystemMessage, reMessages]);
|
||||
|
||||
// [E] Message interrupter
|
||||
const abortTrigger = isConnected && recognitionState.hasSpeech;
|
||||
@@ -325,7 +314,6 @@ export function Telephone(props: {
|
||||
<OptimaPanelIn>
|
||||
<CallMenu
|
||||
pushToTalk={pushToTalk} setPushToTalk={setPushToTalk}
|
||||
override={overridePersonaVoice} setOverride={setOverridePersonaVoice}
|
||||
/>
|
||||
</OptimaPanelIn>
|
||||
|
||||
@@ -373,7 +361,7 @@ export function Telephone(props: {
|
||||
|
||||
<ScrollToBottom stickToBottomInitial>
|
||||
|
||||
<Box sx={{ minHeight: '100%', p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
<Box onCopy={clipboardInterceptCtrlCForCleanup} sx={{ minHeight: '100%', p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
|
||||
{/* Call Messages [] */}
|
||||
{callMessages.map((message) =>
|
||||
|
||||
@@ -10,7 +10,6 @@ import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
import type { TradeConfig } from '~/modules/trade/TradeModal';
|
||||
import { downloadSingleChat, importConversationsFromFilesAtRest, openConversationsAtRestPicker } from '~/modules/trade/trade.client';
|
||||
import { imaginePromptFromTextOrThrow } from '~/modules/aifn/imagine/imaginePromptFromText';
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { useAreBeamsOpen } from '~/modules/beam/store-beam.hooks';
|
||||
import { useCapabilityTextToImage } from '~/modules/t2i/t2i.client';
|
||||
|
||||
@@ -21,7 +20,7 @@ import { ConversationsManager } from '~/common/chat-overlay/ConversationsManager
|
||||
import { ErrorBoundary } from '~/common/components/ErrorBoundary';
|
||||
import { getLLMContextTokens, LLM_IF_ANT_PromptCaching, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { OptimaDrawerIn, OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
|
||||
import { PanelResizeInset } from '~/common/components/PanelResizeInset';
|
||||
import { Release } from '~/common/app.release';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
@@ -186,6 +185,7 @@ export function AppChat() {
|
||||
const beamOpenStoreInFocusedPane = focusedPaneIndex === null ? null
|
||||
: !beamsOpens?.[focusedPaneIndex] ? null
|
||||
: paneBeamStores?.[focusedPaneIndex] ?? null;
|
||||
const focusedChatBeamOpen = focusedPaneIndex !== null && !!beamsOpens?.[focusedPaneIndex];
|
||||
|
||||
const {
|
||||
// focused
|
||||
@@ -345,11 +345,6 @@ export function AppChat() {
|
||||
});
|
||||
}, [handleExecuteAndOutcome]);
|
||||
|
||||
const handleTextSpeak = React.useCallback(async (text: string): Promise<void> => {
|
||||
await elevenLabsSpeakText(text, undefined, true, true);
|
||||
}, []);
|
||||
|
||||
|
||||
// Chat actions
|
||||
|
||||
const handleConversationNewInFocusedPane = React.useCallback((forceNoRecycle: boolean, isIncognito: boolean) => {
|
||||
@@ -479,7 +474,7 @@ export function AppChat() {
|
||||
);
|
||||
|
||||
|
||||
// Disabled by default, as it lags the opening of the drawer and immediatly vanishes during the closing animation
|
||||
// Disabled by default, as it lags the opening of the drawer and immediately vanishes during the closing animation
|
||||
const isDrawerOpen = true; // useOptimaDrawerOpen();
|
||||
|
||||
const drawerContent = React.useMemo(() => !isDrawerOpen ? null :
|
||||
@@ -489,6 +484,7 @@ export function AppChat() {
|
||||
activeFolderId={activeFolderId}
|
||||
chatPanesConversationIds={paneUniqueConversationIds}
|
||||
disableNewButton={disableNewButton}
|
||||
focusedChatBeamOpen={focusedChatBeamOpen}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationNew={handleConversationNewInFocusedPane}
|
||||
@@ -497,7 +493,7 @@ export function AppChat() {
|
||||
onConversationsImportDialog={handleConversationImportDialog}
|
||||
setActiveFolderId={setActiveFolderId}
|
||||
/>,
|
||||
[activeFolderId, disableNewButton, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
|
||||
[activeFolderId, disableNewButton, focusedChatBeamOpen, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
|
||||
);
|
||||
|
||||
const focusedChatPanelContent = React.useMemo(() => !focusedPaneConversationId ? null :
|
||||
@@ -523,7 +519,7 @@ export function AppChat() {
|
||||
React.useEffect(() => {
|
||||
// Debug: open a null chat
|
||||
if (Release.IsNodeDevBuild && intent.initialConversationId === 'null')
|
||||
openConversationInFocusedPane(null! /* for debugging purporse */);
|
||||
openConversationInFocusedPane(null! /* for debugging purpose */);
|
||||
// Open the initial conversation if set
|
||||
else if (intent.initialConversationId)
|
||||
openConversationInFocusedPane(intent.initialConversationId);
|
||||
@@ -651,7 +647,7 @@ export function AppChat() {
|
||||
setFocusedPaneIndex(idx);
|
||||
}}
|
||||
onCollapse={() => {
|
||||
// NOTE: despite the delay to try to let the draggin settle, there seems to be an issue with the Pane locking the screen
|
||||
// NOTE: despite the delay to try to let the dragging settle, there seems to be an issue with the Pane locking the screen
|
||||
// setTimeout(() => removePane(idx), 50);
|
||||
// more than 2 will result in an assertion from the framework
|
||||
if (chatPanes.length === 2) removePane(idx);
|
||||
@@ -678,7 +674,7 @@ export function AppChat() {
|
||||
// NOTE: this is a workaround for the 'stuck-after-collapse-close' issue. We will collapse the 'other' pane, which
|
||||
// will get it removed (onCollapse), and somehow this pane will be stuck with a pointerEvents: 'none' style, which de-facto
|
||||
// disables further interaction with the chat. This is a workaround to re-enable the pointer events.
|
||||
// The root cause seems to be a Dragstate not being reset properly, however the pointerEvents has been set since 0.0.56 while
|
||||
// The root cause seems to be a Drag state not being reset properly, however the pointerEvents has been set since 0.0.56 while
|
||||
// it was optional before: https://github.com/bvaughn/react-resizable-panels/issues/241
|
||||
pointerEvents: 'auto',
|
||||
}),
|
||||
@@ -723,7 +719,6 @@ export function AppChat() {
|
||||
onConversationNew={handleConversationNewInFocusedPane}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={handleImagineFromText}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
sx={chatMessageListSx}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -7,18 +7,18 @@ import { Box, List } from '@mui/joy';
|
||||
import type { SystemPurposeExample } from '../../../data';
|
||||
|
||||
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
import { speakText } from '~/modules/speex/speex.client';
|
||||
|
||||
import type { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
|
||||
import type { DLLMContextTokens } from '~/common/stores/llms/llms.types';
|
||||
import { DConversationId, excludeSystemMessages } from '~/common/stores/chat/chat.conversation';
|
||||
import { ShortcutKey, useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { clipboardInterceptCtrlCForCleanup } from '~/common/util/clipboardUtils';
|
||||
import { convertFilesToDAttachmentFragments } from '~/common/attachment-drafts/attachment.pipeline';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP, messageHasUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { createTextContentFragment, DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
|
||||
import { openFileForAttaching } from '~/common/components/ButtonAttachFiles';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
|
||||
import { useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useChatOverlayStore } from '~/common/chat-overlay/store-perchat_vanilla';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useScrollToBottom } from '~/common/scroll-to-bottom/useScrollToBottom';
|
||||
@@ -51,7 +51,6 @@ export function ChatMessageList(props: {
|
||||
onConversationNew: (forceNoRecycle: boolean, isIncognito: boolean) => void,
|
||||
onTextDiagram: (diagramConfig: DiagramConfig | null) => void,
|
||||
onTextImagine: (conversationId: DConversationId, selectedText: string) => Promise<void>,
|
||||
onTextSpeak: (selectedText: string) => Promise<void>,
|
||||
setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
|
||||
sx?: SxProps,
|
||||
}) {
|
||||
@@ -65,7 +64,6 @@ export function ChatMessageList(props: {
|
||||
const { notifyBooting } = useScrollToBottom();
|
||||
const danger_experimentalHtmlWebUi = useChatAutoSuggestHTMLUI();
|
||||
const [showSystemMessages] = useChatShowSystemMessages();
|
||||
const optionalTranslationWarning = useBrowserTranslationWarning();
|
||||
const { conversationMessages, historyTokenCount } = useChatStore(useShallow(({ conversations }) => {
|
||||
const conversation = conversations.find(conversation => conversation.id === props.conversationId);
|
||||
return {
|
||||
@@ -77,10 +75,9 @@ export function ChatMessageList(props: {
|
||||
_composerInReferenceToCount: state.inReferenceTo?.length ?? 0,
|
||||
ephemerals: state.ephemerals?.length ? state.ephemerals : null,
|
||||
})));
|
||||
const { mayWork: isSpeakable } = useCapabilityElevenLabs();
|
||||
|
||||
// derived state
|
||||
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
|
||||
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine } = props;
|
||||
const composerCanAddInReferenceTo = _composerInReferenceToCount < 5;
|
||||
const composerHasInReferenceto = _composerInReferenceToCount > 0;
|
||||
|
||||
@@ -214,12 +211,15 @@ export function ChatMessageList(props: {
|
||||
}, [capabilityHasT2I, conversationId, onTextImagine]);
|
||||
|
||||
const handleTextSpeak = React.useCallback(async (text: string) => {
|
||||
if (!isSpeakable)
|
||||
return optimaOpenPreferences('voice');
|
||||
// sandwich the speaking with the indicator
|
||||
setIsSpeaking(true);
|
||||
await onTextSpeak(text);
|
||||
const result = await speakText(text, undefined, { label: 'Chat speak' });
|
||||
setIsSpeaking(false);
|
||||
}, [isSpeakable, onTextSpeak]);
|
||||
|
||||
// open voice preferences
|
||||
if (!result.success && (result.errorType === 'tts-no-engine' || result.errorType === 'tts-unconfigured'))
|
||||
optimaOpenPreferences('voice');
|
||||
}, []);
|
||||
|
||||
|
||||
// operate on the local selection set
|
||||
@@ -324,9 +324,7 @@ export function ChatMessageList(props: {
|
||||
);
|
||||
|
||||
return (
|
||||
<List role='chat-messages-list' sx={listSx}>
|
||||
|
||||
{optionalTranslationWarning}
|
||||
<List role='chat-messages-list' sx={listSx} onCopy={clipboardInterceptCtrlCForCleanup}>
|
||||
|
||||
{props.isMessageSelectionMode && (
|
||||
<MessagesSelectionHeader
|
||||
@@ -381,7 +379,7 @@ export function ChatMessageList(props: {
|
||||
onMessageTruncate={handleMessageTruncate}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={capabilityHasT2I ? handleTextImagine : undefined}
|
||||
onTextSpeak={isSpeakable ? handleTextSpeak : undefined}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
/>
|
||||
|
||||
);
|
||||
|
||||
@@ -220,7 +220,7 @@ export function CameraCaptureModal(props: {
|
||||
backdropFilter: 'none', // using none because this is heavy
|
||||
// backdropFilter: 'blur(4px)',
|
||||
// backgroundColor: 'rgba(11 13 14 / 0.75)',
|
||||
backgroundColor: 'rgba(var(--joy-palette-neutral-darkChannel) / 0.5)',
|
||||
backgroundColor: 'rgba(var(--joy-palette-neutral-darkChannel) / 0.67)',
|
||||
},
|
||||
},
|
||||
}}
|
||||
|
||||
@@ -17,7 +17,7 @@ import { useChatAutoSuggestAttachmentPrompts, useChatMicTimeoutMsValue } from '.
|
||||
import { useAgiAttachmentPrompts } from '~/modules/aifn/agiattachmentprompts/useAgiAttachmentPrompts';
|
||||
import { useBrowseCapability } from '~/modules/browse/store-module-browsing';
|
||||
|
||||
import { DLLM, getLLMContextTokens, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { DLLM, getLLMContextTokens, getLLMPricing, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { AudioGenerator } from '~/common/util/audio/AudioGenerator';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { ButtonAttachFilesMemo, openFileForAttaching } from '~/common/components/ButtonAttachFiles';
|
||||
@@ -63,8 +63,10 @@ import { chatExecuteModeCanAttach, useChatExecuteMode } from '../../execute-mode
|
||||
|
||||
import { ButtonAttachCameraMemo, useCameraCaptureModalDialog } from './buttons/ButtonAttachCamera';
|
||||
import { ButtonAttachClipboardMemo } from './buttons/ButtonAttachClipboard';
|
||||
import { ButtonAttachGoogleDriveMemo } from './buttons/ButtonAttachGoogleDrive';
|
||||
import { ButtonAttachScreenCaptureMemo } from './buttons/ButtonAttachScreenCapture';
|
||||
import { ButtonAttachWebMemo } from './buttons/ButtonAttachWeb';
|
||||
import { hasGoogleDriveCapability, useGoogleDrivePicker } from '~/common/attachment-drafts/useGoogleDrivePicker';
|
||||
import { ButtonBeamMemo } from './buttons/ButtonBeam';
|
||||
import { ButtonCallMemo } from './buttons/ButtonCall';
|
||||
import { ButtonGroupDrawRepeat } from './buttons/ButtonGroupDrawRepeat';
|
||||
@@ -197,7 +199,7 @@ export function Composer(props: {
|
||||
const showChatAttachments = chatExecuteModeCanAttach(chatExecuteMode, props.capabilityHasT2IEdit);
|
||||
const {
|
||||
/* items */ attachmentDrafts,
|
||||
/* append */ attachAppendClipboardItems, attachAppendDataTransfer, attachAppendEgoFragments, attachAppendFile, attachAppendUrl,
|
||||
/* append */ attachAppendClipboardItems, attachAppendCloudFile, attachAppendDataTransfer, attachAppendEgoFragments, attachAppendFile, attachAppendUrl,
|
||||
/* take */ attachmentsRemoveAll, attachmentsTakeAllFragments, attachmentsTakeFragmentsByType,
|
||||
} = useAttachmentDrafts(conversationOverlayStore, enableLoadURLsInComposer, chatLLMSupportsImages, handleFilterAGIFile, showChatAttachments === 'only-images');
|
||||
|
||||
@@ -233,7 +235,7 @@ export function Composer(props: {
|
||||
const tokensHistory = _historyTokenCount;
|
||||
const tokensResponseMax = getModelParameterValueOrThrow('llmResponseTokens', props.chatLLM?.initialParameters, props.chatLLM?.userParameters, 0) ?? 0;
|
||||
const tokenLimit = getLLMContextTokens(props.chatLLM) ?? 0;
|
||||
const tokenChatPricing = props.chatLLM?.pricing?.chat;
|
||||
const tokenChatPricing = getLLMPricing(props.chatLLM)?.chat;
|
||||
|
||||
|
||||
// Effect: load initial text if queued up (e.g. by /link/share_targetF)
|
||||
@@ -545,6 +547,9 @@ export function Composer(props: {
|
||||
|
||||
// Enter: primary action
|
||||
if (e.key === 'Enter') {
|
||||
// Skip if composing (e.g., CJK input methods) - issue #784
|
||||
if (e.nativeEvent.isComposing)
|
||||
return;
|
||||
|
||||
// Alt (Windows) or Option (Mac) + Enter: append the message instead of sending it
|
||||
if (e.altKey && !e.metaKey && !e.ctrlKey) {
|
||||
@@ -620,6 +625,8 @@ export function Composer(props: {
|
||||
|
||||
const { openWebInputDialog, webInputDialogComponent } = useWebInputModal(handleAttachWebLinks, composeText);
|
||||
|
||||
const { openGoogleDrivePicker, googleDrivePickerComponent } = useGoogleDrivePicker(attachAppendCloudFile, isMobile);
|
||||
|
||||
|
||||
// Attachments Down
|
||||
|
||||
@@ -799,6 +806,11 @@ export function Composer(props: {
|
||||
<ButtonAttachWebMemo disabled={!hasComposerBrowseCapability} onOpenWebInput={openWebInputDialog} />
|
||||
</MenuItem>
|
||||
|
||||
{/* Responsive Google Drive button */}
|
||||
{hasGoogleDriveCapability && <MenuItem>
|
||||
<ButtonAttachGoogleDriveMemo onOpenGoogleDrivePicker={openGoogleDrivePicker} fullWidth />
|
||||
</MenuItem>}
|
||||
|
||||
{/* Responsive Paste button */}
|
||||
{supportsClipboardRead() && <MenuItem>
|
||||
<ButtonAttachClipboardMemo onAttachClipboard={attachAppendClipboardItems} />
|
||||
@@ -828,6 +840,9 @@ export function Composer(props: {
|
||||
{/* Responsive Web button */}
|
||||
{showChatAttachments !== 'only-images' && <ButtonAttachWebMemo color={showTint} disabled={!hasComposerBrowseCapability} onOpenWebInput={openWebInputDialog} />}
|
||||
|
||||
{/* Responsive Google Drive button */}
|
||||
{hasGoogleDriveCapability && showChatAttachments !== 'only-images' && <ButtonAttachGoogleDriveMemo color={showTint} onOpenGoogleDrivePicker={openGoogleDrivePicker} />}
|
||||
|
||||
{/* Responsive Paste button */}
|
||||
{supportsClipboardRead() && showChatAttachments !== 'only-images' && <ButtonAttachClipboardMemo color={showTint} onAttachClipboard={attachAppendClipboardItems} />}
|
||||
|
||||
@@ -859,7 +874,7 @@ export function Composer(props: {
|
||||
<Textarea
|
||||
variant='outlined'
|
||||
color={isDraw ? 'warning' : isReAct ? 'success' : undefined}
|
||||
autoFocus
|
||||
autoFocus={isDesktop}
|
||||
minRows={isMobile ? 3.5 : isDraw ? 4 : agiAttachmentPrompts.hasData ? 3 : showChatInReferenceTo ? 4 : 5}
|
||||
maxRows={isMobile ? 8 : 10}
|
||||
placeholder={textPlaceholder}
|
||||
@@ -905,7 +920,7 @@ export function Composer(props: {
|
||||
)}
|
||||
|
||||
{!showChatInReferenceTo && !isDraw && tokenLimit > 0 && (
|
||||
<TokenBadgeMemo hideBelowDollars={0.0001} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
|
||||
<TokenBadgeMemo hideBelowDollars={0.01} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
|
||||
)}
|
||||
|
||||
</Box>
|
||||
@@ -1123,6 +1138,9 @@ export function Composer(props: {
|
||||
{/* Camera (when open) */}
|
||||
{cameraCaptureComponent}
|
||||
|
||||
{/* Google Drive Picker (when open) */}
|
||||
{googleDrivePickerComponent}
|
||||
|
||||
{/* Web Input Dialog (when open) */}
|
||||
{webInputDialogComponent}
|
||||
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import AddToDriveRoundedIcon from '@mui/icons-material/AddToDriveRounded';
|
||||
|
||||
import { buttonAttachSx } from '~/common/components/ButtonAttachFiles';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
|
||||
|
||||
export const ButtonAttachGoogleDriveMemo = React.memo(ButtonAttachGoogleDrive);
|
||||
|
||||
function ButtonAttachGoogleDrive(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
noToolTip?: boolean,
|
||||
onOpenGoogleDrivePicker: () => void,
|
||||
}) {
|
||||
|
||||
const button = props.isMobile ? (
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={props.onOpenGoogleDrivePicker}>
|
||||
<AddToDriveRoundedIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
<Button
|
||||
variant={props.color ? 'soft' : 'plain'}
|
||||
color={props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
startDecorator={<AddToDriveRoundedIcon />}
|
||||
onClick={props.onOpenGoogleDrivePicker}
|
||||
sx={buttonAttachSx.desktop}
|
||||
>
|
||||
Drive
|
||||
</Button>
|
||||
);
|
||||
|
||||
return (props.noToolTip || props.isMobile) ? button : (
|
||||
<Tooltip arrow disableInteractive placement='top-start' title={
|
||||
<Box sx={buttonAttachSx.tooltip}>
|
||||
<b>Add from Google Drive</b><br />
|
||||
Attach files from your Drive
|
||||
</Box>
|
||||
}>
|
||||
{button}
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
@@ -91,16 +91,22 @@ function InputErrorIndicator() {
|
||||
|
||||
const converterTypeToIconMap: { [key in AttachmentDraftConverterType]: React.ComponentType<any> | null } = {
|
||||
'text': TextFieldsIcon,
|
||||
'text-cleaner': CodeIcon,
|
||||
'text-markdown': TextFieldsIcon,
|
||||
'rich-text': CodeIcon,
|
||||
'rich-text-cleaner': CodeIcon,
|
||||
'rich-text-markdown': TextFieldsIcon,
|
||||
'rich-text-table': PivotTableChartIcon,
|
||||
'image-original': ImageOutlinedIcon,
|
||||
'image-resized-high': PhotoSizeSelectLargeOutlinedIcon,
|
||||
'image-resized-low': PhotoSizeSelectSmallOutlinedIcon,
|
||||
'image-to-default': ImageOutlinedIcon,
|
||||
'image-caption': AbcIcon,
|
||||
'image-ocr': AbcIcon,
|
||||
'pdf-auto': PictureAsPdfIcon,
|
||||
'pdf-text': PictureAsPdfIcon,
|
||||
'pdf-images': PermMediaOutlinedIcon,
|
||||
'pdf-images-ocr': AbcIcon,
|
||||
'pdf-text-and-images': PermMediaOutlinedIcon,
|
||||
'docx-to-html': DescriptionOutlinedIcon,
|
||||
'url-page-text': TextFieldsIcon, // was LanguageIcon
|
||||
@@ -198,13 +204,21 @@ function attachmentIcons(attachmentDraft: AttachmentDraft, noTooltips: boolean,
|
||||
|
||||
function attachmentLabelText(attachmentDraft: AttachmentDraft): string {
|
||||
const converter = attachmentDraft.converters.find(c => c.isActive) ?? null;
|
||||
if (converter && attachmentDraft.label === 'Rich Text') {
|
||||
if (converter.id === 'rich-text-table')
|
||||
return 'Rich Table';
|
||||
if (converter.id === 'rich-text-cleaner')
|
||||
if (converter && attachmentDraft.label === 'Text') {
|
||||
if (converter.id === 'text-markdown')
|
||||
return 'Markdown';
|
||||
if (converter.id === 'text-cleaner')
|
||||
return 'Clean HTML';
|
||||
}
|
||||
if (converter && attachmentDraft.label === 'Rich Text') {
|
||||
if (converter.id === 'rich-text')
|
||||
return 'Rich HTML';
|
||||
if (converter.id === 'rich-text-markdown')
|
||||
return 'Markdown';
|
||||
if (converter.id === 'rich-text-cleaner')
|
||||
return 'Clean HTML';
|
||||
if (converter.id === 'rich-text-table')
|
||||
return 'Rich Table';
|
||||
}
|
||||
return ellipsizeFront(attachmentDraft.label, 22);
|
||||
}
|
||||
@@ -227,9 +241,10 @@ function LLMAttachmentButton(props: {
|
||||
const isUnconvertible = !draft.converters.length;
|
||||
const isOutputLoading = draft.outputsConverting;
|
||||
const isOutputMissing = !draft.outputFragments.length;
|
||||
const isOutputWarned = !!draft.outputWarnings?.length;
|
||||
const hasLiveFiles = draft.outputFragments.some(_f => _f.liveFileId);
|
||||
|
||||
const showWarning = isUnconvertible || (isOutputMissing || !llmSupportsAllFragments);
|
||||
const showWarning = isUnconvertible || (isOutputMissing || !llmSupportsAllFragments) || isOutputWarned;
|
||||
|
||||
|
||||
// handlers
|
||||
@@ -256,6 +271,17 @@ function LLMAttachmentButton(props: {
|
||||
if (isInputLoading)
|
||||
return <InputLoadingPlaceholder label={draft.label} />;
|
||||
|
||||
// tooltip for truncated filenames (only show when menu is closed)
|
||||
const displayedLabel = attachmentLabelText(draft);
|
||||
const showFilenameTooltip = !props.menuShown && !isOutputLoading && displayedLabel !== draft.label;
|
||||
|
||||
// label element (reused with/without tooltip)
|
||||
const labelElement = (
|
||||
<Typography level='title-sm' sx={{ whiteSpace: 'nowrap' }}>
|
||||
{isOutputLoading ? 'Converting... ' : displayedLabel}
|
||||
</Typography>
|
||||
);
|
||||
|
||||
return (
|
||||
<Button
|
||||
size='sm'
|
||||
@@ -279,10 +305,11 @@ function LLMAttachmentButton(props: {
|
||||
{/* Icons: Web Page Screenshot, Converter[s] */}
|
||||
{attachmentIcons(draft, props.menuShown, props.onViewImageRefPart)}
|
||||
|
||||
{/* Label */}
|
||||
<Typography level='title-sm' sx={{ whiteSpace: 'nowrap' }}>
|
||||
{isOutputLoading ? 'Converting... ' : attachmentLabelText(draft)}
|
||||
</Typography>
|
||||
{/* Label (with tooltip for truncated filenames) */}
|
||||
{showFilenameTooltip
|
||||
? <TooltipOutlined title={<span style={{ wordBreak: 'break-all' }}>{draft.label}</span>}>{labelElement}</TooltipOutlined>
|
||||
: labelElement
|
||||
}
|
||||
|
||||
{/* Is Converting icon */}
|
||||
{isOutputLoading && <CircularProgress color='success' size='sm' />}
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Checkbox, Chip, CircularProgress, LinearProgress, ListDivider, ListItem, ListItemDecorator, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
import AttachmentIcon from '@mui/icons-material/Attachment';
|
||||
import { Box, Button, ButtonGroup, Checkbox, Chip, CircularProgress, Divider, LinearProgress, ListDivider, ListItem, ListItemDecorator, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
|
||||
import DeleteForeverIcon from '@mui/icons-material/DeleteForever';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
import KeyboardArrowLeftIcon from '@mui/icons-material/KeyboardArrowLeft';
|
||||
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
|
||||
import ReadMoreIcon from '@mui/icons-material/ReadMore';
|
||||
import VerticalAlignBottomIcon from '@mui/icons-material/VerticalAlignBottom';
|
||||
import VisibilityIcon from '@mui/icons-material/Visibility';
|
||||
|
||||
@@ -18,6 +17,7 @@ import { CloseablePopup } from '~/common/components/CloseablePopup';
|
||||
import { DMessageAttachmentFragment, DMessageDocPart, DMessageImageRefPart, isDocPart, isImageRefPart, isZyncAssetImageReferencePartWithLegacyDBlob } from '~/common/stores/chat/chat.fragments';
|
||||
import { LiveFileIcon } from '~/common/livefile/liveFile.icons';
|
||||
import { copyToClipboard } from '~/common/util/clipboardUtils';
|
||||
import { humanReadableBytes } from '~/common/util/textUtils';
|
||||
import { themeZIndexOverMobileDrawer } from '~/common/app.theme';
|
||||
import { useUIPreferencesStore } from '~/common/stores/store-ui';
|
||||
|
||||
@@ -32,12 +32,20 @@ const DEFAULT_DETAILS_OPEN = true;
|
||||
const SHOW_INLINING_OPERATIONS = false;
|
||||
|
||||
|
||||
const indicatorSx = {
|
||||
fontSize: '1rem',
|
||||
} as const;
|
||||
// const indicatorSx = {
|
||||
// fontSize: '1rem',
|
||||
// } as const;
|
||||
//
|
||||
// const indicatorGapSx: SxProps = {
|
||||
// paddingLeft: '1.375rem',
|
||||
// };
|
||||
|
||||
const indicatorGapSx: SxProps = {
|
||||
paddingLeft: '1.375rem',
|
||||
const actionButtonsSx: SxProps = {
|
||||
ml: 'auto',
|
||||
minHeight: 0,
|
||||
borderRadius: '1rem',
|
||||
backgroundColor: 'background.surface',
|
||||
'& button': { fontSize: 'xs', fontWeight: 'md', py: 0, minWidth: 0, minHeight: 0 },
|
||||
};
|
||||
|
||||
|
||||
@@ -82,9 +90,10 @@ export function LLMAttachmentMenu(props: {
|
||||
const isUnconvertible = !draft.converters.length;
|
||||
const isOutputMissing = !draft.outputFragments.length;
|
||||
const isOutputMultiple = draft.outputFragments.length > 1;
|
||||
const isOutputWarned = !!draft.outputWarnings?.length;
|
||||
const hasLiveFiles = draft.outputFragments.some(_f => _f.liveFileId);
|
||||
|
||||
const showWarning = isUnconvertible || isOutputMissing || !llmSupportsAllFragments;
|
||||
const showWarning = isUnconvertible || isOutputMissing || !llmSupportsAllFragments || isOutputWarned;
|
||||
|
||||
|
||||
// hooks
|
||||
@@ -157,6 +166,8 @@ export function LLMAttachmentMenu(props: {
|
||||
minWidth={260}
|
||||
noTopPadding
|
||||
placement='top'
|
||||
placementOffset={[0, 15]}
|
||||
boxShadow='lg'
|
||||
zIndex={themeZIndexOverMobileDrawer /* was not set, but the Attachment Menu can be used from the Personas Modal */}
|
||||
>
|
||||
|
||||
@@ -187,9 +198,10 @@ export function LLMAttachmentMenu(props: {
|
||||
<ListItem sx={{ fontSize: 'sm', my: 0.75 }}>
|
||||
Attach {draftSource.media === 'url' ? 'web page'
|
||||
: draftSource.media === 'file' ? 'file'
|
||||
: draftSource.media === 'text'
|
||||
? (draftSource.method === 'drop' ? 'drop' : draftSource.method === 'clipboard-read' ? 'clipboard' : draftSource.method === 'paste' ? 'paste' : '')
|
||||
: ''} as:
|
||||
: draftSource.media === 'cloud' ? 'cloud file'
|
||||
: draftSource.media === 'text'
|
||||
? (draftSource.method === 'drop' ? 'drop' : draftSource.method === 'clipboard-read' ? 'clipboard' : draftSource.method === 'paste' ? 'paste' : '')
|
||||
: ''} as:
|
||||
{uiComplexityMode === 'extra' && (
|
||||
<Chip component='span' size='sm' color='neutral' variant='outlined' startDecorator={<ContentCopyIcon />} onClick={(event) => handleCopyLabelToClipboard(event, draft.label)} sx={{ ml: 'auto' }}>
|
||||
copy name
|
||||
@@ -197,6 +209,17 @@ export function LLMAttachmentMenu(props: {
|
||||
)}
|
||||
</ListItem>
|
||||
)}
|
||||
{/* Auto-heuristics message, with explanation */}
|
||||
{!!draft.outputsHeuristic?.isAuto && (
|
||||
<ListItem color={draft.outputsHeuristic.isAuto ? 'primary' : undefined} sx={{ fontSize: 'sm', fontWeight: 'lg', mb: 0.5 }}>
|
||||
{draft.outputsHeuristic.isAuto ? 'Auto: ' : ''}
|
||||
{draft.outputsHeuristic.actualConverterId === 'pdf-text' && 'Text'}
|
||||
{draft.outputsHeuristic.actualConverterId === 'pdf-images-ocr' && 'OCR'}
|
||||
{draft.outputsHeuristic.actualConverterId === 'pdf-images' && 'Images'}
|
||||
{draft.outputsHeuristic.actualConverterId === 'pdf-text-and-images' && 'Text + Images'}
|
||||
{draft.outputsHeuristic.explain && ` (${draft.outputsHeuristic.explain})`}
|
||||
</ListItem>
|
||||
)}
|
||||
{!isUnconvertible && draft.converters.map((c, idx) =>
|
||||
<MenuItem
|
||||
disabled={c.disabled || isConverting}
|
||||
@@ -213,7 +236,9 @@ export function LLMAttachmentMenu(props: {
|
||||
</ListItemDecorator>
|
||||
{c.unsupported
|
||||
? <Box>Unsupported 🤔 <Typography level='body-xs'>{c.name}</Typography></Box>
|
||||
: c.name}
|
||||
: (/* auto-converted */ draft.outputsHeuristic?.isAuto && c.id === draft.outputsHeuristic.actualConverterId)
|
||||
? <Box component='span' sx={{ fontWeight: 'lg', color: 'primary.softColor' }}>{c.name}</Box>
|
||||
: c.name}
|
||||
</MenuItem>,
|
||||
)}
|
||||
{/*{!isUnconvertible && <ListDivider sx={{ mb: 0 }} />}*/}
|
||||
@@ -261,11 +286,19 @@ export function LLMAttachmentMenu(props: {
|
||||
<Typography color={isInputError ? 'danger' : 'warning'} level='title-sm'>
|
||||
{isInputError ? 'Loading Issue' : 'Warning'}
|
||||
</Typography>
|
||||
|
||||
{/* Only show 1 warning, excluding lower priorities */}
|
||||
{isInputError ? <div>{draft.inputError}</div>
|
||||
: isUnconvertible ? <div>Attachments of type {draft.input?.mimeType} are not supported yet. You can request this on GitHub.</div>
|
||||
: isOutputMissing ? <div>File not supported. Please try another format.</div>
|
||||
: !llmSupportsAllFragments ? <div>May not be compatible with the current model. Please try another format.</div>
|
||||
: <>Unknown warning</>}
|
||||
: draft.outputWarnings?.length ? '' /* printed below */
|
||||
: <>Unknown warning</>}
|
||||
|
||||
{/* Explicit output warnings */}
|
||||
{!!draft.outputWarnings?.length && draft.outputWarnings.map((w, widx) =>
|
||||
<Box key={'ow-' + widx} sx={{ fontSize: 'sm', color: 'warning.softColor', py: 1 }}>⚠️ {w}</Box>)
|
||||
}
|
||||
</Box>
|
||||
</MenuItem>
|
||||
</Box>
|
||||
@@ -294,24 +327,24 @@ export function LLMAttachmentMenu(props: {
|
||||
Details
|
||||
</Typography>
|
||||
) : (
|
||||
<Box sx={{ my: 0.5 }}>
|
||||
<Box sx={{ my: 1 }}>
|
||||
|
||||
{/* <- inputs */}
|
||||
{showInputs && !!draftInput && (
|
||||
<Typography level='body-sm' textColor='text.primary' startDecorator={<AttachmentIcon sx={indicatorSx} />}>
|
||||
{draftInput.mimeType}{typeof draftInput.dataSize === 'number' ? ` · ${draftInput.dataSize.toLocaleString()} bytes` : ''}
|
||||
<Typography level='body-sm' textColor='success.softColor'>
|
||||
Input: {draftInput.mimeType}{typeof draftInput.dataSize === 'number' ? ` · ${humanReadableBytes(draftInput.dataSize)}` : ''}
|
||||
</Typography>
|
||||
)}
|
||||
{showInputs && !!draftInput?.altMimeType && (
|
||||
<Typography level='body-sm' sx={indicatorGapSx}>
|
||||
{draftInput.altMimeType} · {draftInput.altData?.length.toLocaleString()}
|
||||
<Typography level='body-sm' textColor='success.softColor'>
|
||||
Input: {draftInput.altMimeType}{!draftInput.altData?.length ? '' : ` · ${humanReadableBytes(draftInput.altData.length)}`}
|
||||
</Typography>
|
||||
)}
|
||||
{showInputs && !!draftInput?.urlImage && (
|
||||
<Typography level='body-sm' sx={indicatorGapSx}>
|
||||
{draftInput.urlImage.mimeType} · {draftInput.urlImage.width} x {draftInput.urlImage.height} · {draftInput.urlImage.imgDataUrl?.length.toLocaleString()}
|
||||
{' · '}
|
||||
<Chip component='span' size='sm' color='primary' variant='outlined' startDecorator={<VisibilityIcon />} onClick={(event) => {
|
||||
<Typography level='body-sm' textColor='success.softColor' sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
Input: {draftInput.urlImage.mimeType} · {draftInput.urlImage.width}x{draftInput.urlImage.height}{!draftInput.urlImage.imgDataUrl?.length ? '' : ` · ${humanReadableBytes(draftInput.urlImage.imgDataUrl.length)}`}
|
||||
|
||||
<Chip component='span' size='sm' color='success' variant='soft' startDecorator={<VisibilityIcon />} onClick={(event) => {
|
||||
if (draftInput?.urlImage?.imgDataUrl) {
|
||||
// Invoke the viewer but with a virtual 'temp' part description to see this preview image
|
||||
handleViewImageRefPart(event, {
|
||||
@@ -325,8 +358,8 @@ export function LLMAttachmentMenu(props: {
|
||||
height: draftInput.urlImage.height || undefined,
|
||||
});
|
||||
}
|
||||
}}>
|
||||
view
|
||||
}} sx={{ ml: 'auto' }}>
|
||||
view input
|
||||
</Chip>
|
||||
</Typography>
|
||||
)}
|
||||
@@ -335,45 +368,79 @@ export function LLMAttachmentMenu(props: {
|
||||
{/* Converters: {draft.converters.map(((converter, idx) => ` ${converter.id}${converter.isActive ? '*' : ''}`)).join(', ')}*/}
|
||||
{/*</Typography>*/}
|
||||
|
||||
{/* Downward arrow */}
|
||||
<Divider color='success'>
|
||||
<KeyboardArrowDownIcon color='success' />
|
||||
</Divider>
|
||||
|
||||
{/* -> Outputs */}
|
||||
<Box sx={{ mt: 1 }}>
|
||||
<Box>
|
||||
{isOutputMissing ? (
|
||||
<Typography level='body-sm' startDecorator={<ReadMoreIcon sx={indicatorSx} />}>...</Typography>
|
||||
<Typography level='body-sm' color={isConverting ? 'primary' : 'danger'}>{isConverting ? '...' : '... nothing ...'}</Typography>
|
||||
) : (
|
||||
draft.outputFragments.map(({ part }, index) => {
|
||||
if (isDocPart(part)) {
|
||||
return (
|
||||
<Typography key={index} level='body-sm' sx={{ color: 'text.primary' }} startDecorator={<ReadMoreIcon sx={indicatorSx} />}>
|
||||
<span>{part.data.mimeType /* part.type: big-agi type, not source mime */} · {part.data.text.length.toLocaleString()} bytes · </span>
|
||||
<Chip component='span' size='sm' color='primary' variant='outlined' startDecorator={<VisibilityIcon />} onClick={(event) => handleViewDocPart(event, part)}>
|
||||
view
|
||||
</Chip>
|
||||
<Chip component='span' size='sm' color='success' variant='outlined' startDecorator={<ContentCopyIcon />} onClick={(event) => handleCopyToClipboard(event, part.data.text)}>
|
||||
copy
|
||||
</Chip>
|
||||
<Typography key={index} component='div' level='body-sm' textColor='primary.softColor' sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
<span>{part.data.mimeType /* part.type: big-agi type, not source mime */} · {humanReadableBytes(part.data.text.length)} </span>
|
||||
{/*<Chip component='span' size='sm' color='primary' variant='outlined' startDecorator={<VisibilityIcon />} onClick={(event) => handleViewDocPart(event, part)} sx={{ ml: 'auto' }}>*/}
|
||||
{/* view*/}
|
||||
{/*</Chip>*/}
|
||||
{/*<Chip component='span' size='sm' color='primary' variant='outlined' startDecorator={<ContentCopyIcon />} onClick={(event) => handleCopyToClipboard(event, part.data.text)}>*/}
|
||||
{/* copy*/}
|
||||
{/*</Chip>*/}
|
||||
<ButtonGroup size='sm' color='primary' variant='outlined' sx={actionButtonsSx}>
|
||||
<Button startDecorator={<VisibilityIcon sx={{ fontSize: 'md' }} />} onClick={(event) => handleViewDocPart(event, part)}>
|
||||
view
|
||||
</Button>
|
||||
<Button onClick={(event) => handleCopyToClipboard(event, part.data.text)}/* endDecorator={<ContentCopyIcon />} */>
|
||||
copy
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
</Typography>
|
||||
);
|
||||
} else if (isZyncAssetImageReferencePartWithLegacyDBlob(part) || isImageRefPart(part)) {
|
||||
// Unified Image Reference handling (both Zync Asset References with legacy fallback and legacy image_ref)
|
||||
const legacyImageRefPart = isZyncAssetImageReferencePartWithLegacyDBlob(part) ? part._legacyImageRefPart! : part;
|
||||
const { dataRef, width, height } = legacyImageRefPart;
|
||||
const resolution = width && height ? `${width} x ${height}` : 'no resolution';
|
||||
const resolution = width && height ? `${width}x${height}` : 'no resolution';
|
||||
const mime = dataRef.reftype === 'dblob' ? dataRef.mimeType : 'unknown image';
|
||||
return (
|
||||
<Typography key={index} level='body-sm' sx={{ color: 'text.primary' }} startDecorator={<ReadMoreIcon sx={indicatorSx} />}>
|
||||
<span>{mime /*.replace('image/', 'img: ')*/} · {resolution} · {dataRef.reftype === 'dblob' ? (dataRef.bytesSize?.toLocaleString() || 'no size') : '(remote)'} · </span>
|
||||
<Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='primary' variant='outlined' startDecorator={<VisibilityIcon />}
|
||||
onClick={(event) => handleViewImageRefPart(event, legacyImageRefPart)}>
|
||||
view
|
||||
</Chip>
|
||||
{isOutputMultiple && <Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='danger' variant='outlined' startDecorator={<DeleteForeverIcon />} onClick={(event) => handleDeleteOutputFragment(event, index)}>
|
||||
del
|
||||
</Chip>}
|
||||
<Typography key={index} component='div' level='body-sm' textColor='primary.softColor' sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
<span>{mime /*.replace('image/', 'img: ')*/} · {resolution} · {
|
||||
dataRef.reftype !== 'dblob' ? '(remote)'
|
||||
: !dataRef.bytesSize ? 'no size'
|
||||
: humanReadableBytes(dataRef.bytesSize)} </span>
|
||||
{/*<Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='primary' variant='outlined' startDecorator={<VisibilityIcon />}*/}
|
||||
{/* onClick={(event) => handleViewImageRefPart(event, legacyImageRefPart)}>*/}
|
||||
{/* view*/}
|
||||
{/*</Chip>*/}
|
||||
{/*{isOutputMultiple && <Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='danger' variant='outlined' startDecorator={<DeleteForeverIcon />} onClick={(event) => handleDeleteOutputFragment(event, index)}>*/}
|
||||
{/* del*/}
|
||||
{/*</Chip>}*/}
|
||||
<ButtonGroup size='sm' color='primary' variant='outlined' sx={actionButtonsSx}>
|
||||
<Button
|
||||
startDecorator={<VisibilityIcon sx={{ fontSize: 'md' }} />}
|
||||
onClick={(event) => handleViewImageRefPart(event, legacyImageRefPart)}
|
||||
>
|
||||
view
|
||||
</Button>
|
||||
{isOutputMultiple && (
|
||||
<Button
|
||||
color='warning'
|
||||
endDecorator={<DeleteOutlineIcon sx={{ fontSize: 'md' }} />}
|
||||
onClick={(event) => handleDeleteOutputFragment(event, index)}
|
||||
// sx={{ width: 48 }}
|
||||
>
|
||||
del
|
||||
</Button>
|
||||
)}
|
||||
</ButtonGroup>
|
||||
</Typography>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<Typography key={index} level='body-sm' sx={{ color: 'text.primary' }} startDecorator={<ReadMoreIcon sx={indicatorSx} />}>
|
||||
<Typography key={index} level='body-sm' textColor='primary.softColor'>
|
||||
{(part as DMessageAttachmentFragment['part']).pt}: (other)
|
||||
</Typography>
|
||||
);
|
||||
@@ -381,8 +448,8 @@ export function LLMAttachmentMenu(props: {
|
||||
})
|
||||
)}
|
||||
{!!llmTokenCountApprox && (
|
||||
<Typography level='body-xs' mt={0.5} sx={indicatorGapSx}>
|
||||
~{llmTokenCountApprox.toLocaleString()} tokens
|
||||
<Typography level='body-xs' mt={0.5} textColor='primary.softColor'>
|
||||
~ {llmTokenCountApprox.toLocaleString()} tokens
|
||||
</Typography>
|
||||
)}
|
||||
</Box>
|
||||
|
||||
@@ -47,9 +47,9 @@ function TokenBadge(props: {
|
||||
const showAltCosts = !!props.showCost && !!costMax && costMin !== undefined;
|
||||
if (showAltCosts) {
|
||||
// Note: switched to 'min cost (>= ...)' on mobile as well, to restore the former behavior, just uncomment the !props.enableHover (a proxy for isMobile)
|
||||
badgeValue = (/*!props.enableHover ||*/ isHovering)
|
||||
? '< ' + formatModelsCost(costMax)
|
||||
: '> ' + formatModelsCost(costMin);
|
||||
badgeValue =
|
||||
// (/*!props.enableHover ||*/ isHovering) ? '< ' + formatModelsCost(costMax) :
|
||||
'> ' + formatModelsCost(costMin);
|
||||
} else {
|
||||
|
||||
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
|
||||
@@ -77,7 +77,7 @@ function TokenBadge(props: {
|
||||
slotProps={{
|
||||
root: {
|
||||
sx: {
|
||||
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
|
||||
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: '1rem' }),
|
||||
cursor: 'help',
|
||||
...(shallInvisible && {
|
||||
opacity: 0,
|
||||
@@ -92,6 +92,13 @@ function TokenBadge(props: {
|
||||
fontFamily: 'code',
|
||||
fontSize: 'xs',
|
||||
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
|
||||
// make it transparent over text
|
||||
// backgroundColor: `rgb(var(--joy-palette-${color}-lightChannel) / 15%)`, // similar to success.50
|
||||
background: 'transparent',
|
||||
boxShadow: 'none', // outline
|
||||
'&:hover': {
|
||||
backgroundColor: `${color}.softHoverBg`,
|
||||
},
|
||||
},
|
||||
},
|
||||
}}
|
||||
|
||||
@@ -66,6 +66,7 @@ function ChatDrawer(props: {
|
||||
activeFolderId: string | null,
|
||||
chatPanesConversationIds: DConversationId[],
|
||||
disableNewButton: boolean,
|
||||
focusedChatBeamOpen: boolean,
|
||||
onConversationActivate: (conversationId: DConversationId) => void,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string | null, addSplitPane: boolean) => void,
|
||||
onConversationNew: (forceNoRecycle: boolean, isIncognito: boolean) => void,
|
||||
@@ -456,7 +457,7 @@ function ChatDrawer(props: {
|
||||
{/*<OpenAIIcon sx={{ ml: 'auto' }} />*/}
|
||||
</ListItemButton>
|
||||
|
||||
<ListItemButton disabled={filteredChatsAreEmpty} onClick={handleConversationsExport} sx={{ flex: 1 }}>
|
||||
<ListItemButton disabled={filteredChatsAreEmpty || props.focusedChatBeamOpen} onClick={handleConversationsExport} sx={{ flex: 1 }}>
|
||||
<ListItemDecorator>
|
||||
<FileUploadOutlinedIcon />
|
||||
</ListItemDecorator>
|
||||
|
||||
@@ -5,7 +5,7 @@ import { useModuleBeamStore } from '~/modules/beam/store-module-beam';
|
||||
import type { DFolder } from '~/common/stores/folders/store-chat-folders';
|
||||
import { DMessage, DMessageUserFlag, MESSAGE_FLAG_STARRED, messageFragmentsReduceText, messageHasUserFlag, messageUserFlagToEmoji } from '~/common/stores/chat/chat.message';
|
||||
import { conversationTitle, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { getLocalMidnightInUTCTimestamp, getTimeBucketEn } from '~/common/util/timeUtils';
|
||||
import { createTimeBucketClassifierEn } from '~/common/util/timeUtils';
|
||||
import { isAttachmentFragment, isContentOrAttachmentFragment, isDocPart, isImageRefPart, isZyncAssetImageReferencePart } from '~/common/stores/chat/chat.fragments';
|
||||
import { shallowEquals } from '~/common/util/hooks/useShallowObject';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
@@ -235,14 +235,14 @@ export function useChatDrawerRenderItems(
|
||||
break;
|
||||
}
|
||||
|
||||
const midnightTime = getLocalMidnightInUTCTimestamp();
|
||||
const getTimeBucket = createTimeBucketClassifierEn();
|
||||
const grouped = chatNavItems.reduce((acc, item) => {
|
||||
|
||||
// derive the bucket name
|
||||
let bucket: string;
|
||||
switch (grouping) {
|
||||
case 'date':
|
||||
bucket = getTimeBucketEn(item.updatedAt || midnightTime, midnightTime);
|
||||
bucket = getTimeBucket(item.updatedAt || Date.now());
|
||||
break;
|
||||
case 'persona':
|
||||
bucket = item.systemPurposeId;
|
||||
|
||||
@@ -4,7 +4,8 @@ import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Button, ColorPaletteProp } from '@mui/joy';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import { DMessageContentFragment, DMessageTextPart, isTextContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
import type { InterleavedFragment } from '~/common/stores/chat/hooks/useFragmentBuckets';
|
||||
import { DMessageTextPart, isTextContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
|
||||
|
||||
// configuration
|
||||
@@ -54,7 +55,7 @@ const optionSx: SxProps = {
|
||||
};
|
||||
|
||||
|
||||
export function optionsExtractFromFragments_dangerModifyFragment(enabled: boolean, fragments: DMessageContentFragment[]): { fragments: DMessageContentFragment[], options: string[], } {
|
||||
export function optionsExtractFromFragments_dangerModifyFragment(enabled: boolean, fragments: InterleavedFragment[]): { fragments: InterleavedFragment[], options: string[] } {
|
||||
if (enabled && fragments.length) {
|
||||
const fragment = fragments[fragments.length - 1];
|
||||
if (isTextContentFragment(fragment)) {
|
||||
|
||||
@@ -21,7 +21,6 @@ import InsertLinkIcon from '@mui/icons-material/InsertLink';
|
||||
import MoreVertIcon from '@mui/icons-material/MoreVert';
|
||||
import NotificationsActiveIcon from '@mui/icons-material/NotificationsActive';
|
||||
import NotificationsOutlinedIcon from '@mui/icons-material/NotificationsOutlined';
|
||||
import RecordVoiceOverOutlinedIcon from '@mui/icons-material/RecordVoiceOverOutlined';
|
||||
import ReplayIcon from '@mui/icons-material/Replay';
|
||||
import ReplyAllRoundedIcon from '@mui/icons-material/ReplyAllRounded';
|
||||
import ReplyRoundedIcon from '@mui/icons-material/ReplyRounded';
|
||||
@@ -40,11 +39,12 @@ import { CloseablePopup } from '~/common/components/CloseablePopup';
|
||||
import { DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP, MESSAGE_FLAG_NOTIFY_COMPLETE, MESSAGE_FLAG_STARRED, MESSAGE_FLAG_VND_ANT_CACHE_AUTO, MESSAGE_FLAG_VND_ANT_CACHE_USER, messageFragmentsReduceText, messageHasUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { MarkHighlightIcon } from '~/common/components/icons/MarkHighlightIcon';
|
||||
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
|
||||
import { Release } from '~/common/app.release';
|
||||
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
|
||||
import { adjustContentScaling, themeScalingMap, themeZIndexChatBubble } from '~/common/app.theme';
|
||||
import { avatarIconSx, makeMessageAvatarIcon, messageBackground, useMessageAvatarLabel } from '~/common/util/dMessageUtils';
|
||||
import { copyToClipboard } from '~/common/util/clipboardUtils';
|
||||
import { clipboardCopyDOMSelectionOrFallback } from '~/common/util/clipboardUtils';
|
||||
import { createTextContentFragment, DMessageFragment, DMessageFragmentId, updateFragmentWithEditedText } from '~/common/stores/chat/chat.fragments';
|
||||
import { useFragmentBuckets } from '~/common/stores/chat/hooks/useFragmentBuckets';
|
||||
import { useUIPreferencesStore } from '~/common/stores/store-ui';
|
||||
@@ -69,7 +69,7 @@ const ENABLE_BUBBLE = true;
|
||||
export const BUBBLE_MIN_TEXT_LENGTH = 3;
|
||||
|
||||
// Enable the hover button to copy the whole message. The Copy button is also available in Blocks, or in the Avatar Menu.
|
||||
const ENABLE_COPY_MESSAGE_OVERLAY: boolean = false;
|
||||
// const ENABLE_COPY_MESSAGE_OVERLAY: boolean = false;
|
||||
|
||||
|
||||
const messageBodySx: SxProps = {
|
||||
@@ -217,15 +217,15 @@ export function ChatMessage(props: {
|
||||
const isVndAndCacheUser = !!props.showAntPromptCaching && messageHasUserFlag(props.message, MESSAGE_FLAG_VND_ANT_CACHE_USER);
|
||||
|
||||
const {
|
||||
annotationFragments, // Web Citations, References (rendered at top)
|
||||
interleavedFragments, // Reasoning, Placeholders, Text, Code, Tools (interleaved in temporal order)
|
||||
imageAttachments, // Stamp-sized Images
|
||||
voidFragments, // Model-Aux, Placeholders
|
||||
contentFragments, // Text (Markdown + Code + ... blocks), Errors, (large) Images
|
||||
nonImageAttachments, // Document Attachments, likely the User dropped them in
|
||||
lastFragmentIsError,
|
||||
} = useFragmentBuckets(messageFragments);
|
||||
|
||||
const fragmentFlattenedText = React.useMemo(() => messageFragmentsReduceText(messageFragments), [messageFragments]);
|
||||
const handleHighlightSelText = useSelHighlighterMemo(messageId, selText, contentFragments, fromAssistant, props.onMessageFragmentReplace);
|
||||
const handleHighlightSelText = useSelHighlighterMemo(messageId, selText, interleavedFragments.filter(f => f.ft === 'content'), fromAssistant, props.onMessageFragmentReplace);
|
||||
|
||||
const textSubject = selText ? selText : fragmentFlattenedText;
|
||||
const isSpecialT2I = textSubject.startsWith('/draw ') || textSubject.startsWith('/imagine ') || textSubject.startsWith('/img ');
|
||||
@@ -315,8 +315,8 @@ export function ChatMessage(props: {
|
||||
const handleCloseOpsMenu = React.useCallback(() => setOpsMenuAnchor(null), []);
|
||||
|
||||
const handleOpsCopy = (e: React.MouseEvent) => {
|
||||
copyToClipboard(textSubject, 'Text');
|
||||
e.preventDefault();
|
||||
clipboardCopyDOMSelectionOrFallback(blocksRendererRef.current, textSubject, 'Message');
|
||||
handleCloseOpsMenu();
|
||||
closeContextMenu();
|
||||
closeBubble();
|
||||
@@ -579,9 +579,9 @@ export function ChatMessage(props: {
|
||||
|
||||
const lookForOptions = props.onMessageContinue !== undefined && props.isBottom === true && messageGenerator?.tokenStopReason !== 'out-of-tokens' && fromAssistant && !messagePendingIncomplete && !isEditingText && uiComplexityMode !== 'minimal' && false;
|
||||
|
||||
const { fragments: renderContentFragments, options: continuationOptions } = React.useMemo(() => {
|
||||
return optionsExtractFromFragments_dangerModifyFragment(lookForOptions, contentFragments);
|
||||
}, [contentFragments, lookForOptions]);
|
||||
const { fragments: renderInterleavedFragments, options: continuationOptions } = React.useMemo(() => {
|
||||
return optionsExtractFromFragments_dangerModifyFragment(lookForOptions, interleavedFragments);
|
||||
}, [interleavedFragments, lookForOptions]);
|
||||
|
||||
|
||||
// style
|
||||
@@ -589,7 +589,7 @@ export function ChatMessage(props: {
|
||||
|
||||
const listItemSx: SxProps = React.useMemo(() => ({
|
||||
// vars
|
||||
'--AGI-overlay-start-opacity': uiComplexityMode === 'extra' ? 0.1 : 0,
|
||||
// '--AGI-overlay-start-opacity': uiComplexityMode === 'extra' ? 0.1 : 0, // disabled - looks worse
|
||||
|
||||
// style
|
||||
backgroundColor: backgroundColor,
|
||||
@@ -773,20 +773,23 @@ export function ChatMessage(props: {
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Void Fragments */}
|
||||
{voidFragments.length >= 1 && (
|
||||
{/* Annotation Fragments (absolute top: citations, references) */}
|
||||
{annotationFragments.length >= 1 && (
|
||||
<VoidFragments
|
||||
voidFragments={voidFragments}
|
||||
nonVoidFragmentsCount={renderContentFragments.length}
|
||||
voidFragments={annotationFragments}
|
||||
nonVoidFragmentsCount={interleavedFragments.filter(f => f.ft === 'content').length}
|
||||
contentScaling={adjContentScaling}
|
||||
uiComplexityMode={uiComplexityMode}
|
||||
messageRole={messageRole}
|
||||
messagePendingIncomplete={messagePendingIncomplete}
|
||||
onFragmentDelete={!props.onMessageFragmentDelete ? undefined : handleFragmentDelete}
|
||||
onFragmentReplace={!props.onMessageFragmentReplace ? undefined : handleFragmentReplace}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Content Fragments */}
|
||||
{/* Interleaved Fragments (reasoning + content in temporal order) */}
|
||||
<ContentFragments
|
||||
contentFragments={renderContentFragments}
|
||||
contentFragments={renderInterleavedFragments}
|
||||
showEmptyNotice={!messageFragments.length && !messagePendingIncomplete}
|
||||
|
||||
contentScaling={adjContentScaling}
|
||||
@@ -794,6 +797,8 @@ export function ChatMessage(props: {
|
||||
fitScreen={props.fitScreen}
|
||||
isMobile={props.isMobile}
|
||||
messageRole={messageRole}
|
||||
messageGeneratorLlmId={messageGenerator?.mgt === 'aix' ? messageGenerator.aix?.mId : undefined}
|
||||
messagePendingIncomplete={messagePendingIncomplete}
|
||||
optiAllowSubBlocksMemo={!!messagePendingIncomplete}
|
||||
disableMarkdownText={disableMarkdown || fromUser /* User messages are edited as text. Try to have them in plain text. NOTE: This may bite. */}
|
||||
showUnsafeHtmlCode={props.showUnsafeHtmlCode}
|
||||
@@ -888,18 +893,18 @@ export function ChatMessage(props: {
|
||||
|
||||
|
||||
{/* Overlay copy icon */}
|
||||
{ENABLE_COPY_MESSAGE_OVERLAY && !fromSystem && !isEditingText && (
|
||||
<Tooltip title={messagePendingIncomplete ? null : (fromAssistant ? 'Copy message' : 'Copy input')} variant='solid'>
|
||||
<IconButton
|
||||
variant='outlined' onClick={handleOpsCopy}
|
||||
sx={{
|
||||
position: 'absolute', ...(fromAssistant ? { right: { xs: 12, md: 28 } } : { left: { xs: 12, md: 28 } }), zIndex: 10,
|
||||
opacity: 0, transition: 'opacity 0.16s cubic-bezier(.17,.84,.44,1)',
|
||||
}}>
|
||||
<ContentCopyIcon />
|
||||
</IconButton>
|
||||
</Tooltip>
|
||||
)}
|
||||
{/*{ENABLE_COPY_MESSAGE_OVERLAY && !fromSystem && !isEditingText && (*/}
|
||||
{/* <Tooltip title={messagePendingIncomplete ? null : (fromAssistant ? 'Copy message' : 'Copy input')} variant='solid'>*/}
|
||||
{/* <IconButton*/}
|
||||
{/* variant='outlined' onClick={handleOpsCopy}*/}
|
||||
{/* sx={{*/}
|
||||
{/* position: 'absolute', ...(fromAssistant ? { right: { xs: 12, md: 28 } } : { left: { xs: 12, md: 28 } }), zIndex: 10,*/}
|
||||
{/* opacity: 0, transition: 'opacity 0.16s cubic-bezier(.17,.84,.44,1)',*/}
|
||||
{/* }}>*/}
|
||||
{/* <ContentCopyIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/* </Tooltip>*/}
|
||||
{/*)}*/}
|
||||
|
||||
|
||||
{/* Message Operations Menu (3 dots) */}
|
||||
@@ -1022,7 +1027,7 @@ export function ChatMessage(props: {
|
||||
)}
|
||||
{!!props.onTextSpeak && (
|
||||
<MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
|
||||
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
|
||||
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <PhVoice />}</ListItemDecorator>
|
||||
Speak
|
||||
</MenuItem>
|
||||
)}
|
||||
@@ -1150,7 +1155,7 @@ export function ChatMessage(props: {
|
||||
</Tooltip>}
|
||||
{!!props.onTextSpeak && <Tooltip disableInteractive arrow placement='top' title='Speak'>
|
||||
<IconButton color='success' onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
|
||||
{!props.isSpeaking ? <RecordVoiceOverOutlinedIcon /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}
|
||||
{!props.isSpeaking ? <PhVoice /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}
|
||||
</IconButton>
|
||||
</Tooltip>}
|
||||
{(!!props.onTextDiagram || !!props.onTextImagine || !!props.onTextSpeak) && <Divider />}
|
||||
@@ -1190,7 +1195,7 @@ export function ChatMessage(props: {
|
||||
Auto-Draw
|
||||
</MenuItem>}
|
||||
{!!props.onTextSpeak && <MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
|
||||
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
|
||||
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <PhVoice />}</ListItemDecorator>
|
||||
Speak
|
||||
</MenuItem>}
|
||||
</CloseablePopup>
|
||||
|
||||
+21
-7
@@ -7,13 +7,13 @@ import CodeIcon from '@mui/icons-material/Code';
|
||||
import EditRoundedIcon from '@mui/icons-material/EditRounded';
|
||||
import ImageOutlinedIcon from '@mui/icons-material/ImageOutlined';
|
||||
import PictureAsPdfIcon from '@mui/icons-material/PictureAsPdf';
|
||||
import RecordVoiceOverOutlinedIcon from '@mui/icons-material/RecordVoiceOverOutlined';
|
||||
import TextFieldsIcon from '@mui/icons-material/TextFields';
|
||||
import TextureIcon from '@mui/icons-material/Texture';
|
||||
|
||||
import { ContentScaling, themeScalingMap } from '~/common/app.theme';
|
||||
import { DMessageAttachmentFragment, DMessageFragmentId, DVMimeType, isDocPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { LiveFileIcon } from '~/common/livefile/liveFile.icons';
|
||||
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
|
||||
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
|
||||
import { ellipsizeMiddle } from '~/common/util/textUtils';
|
||||
import { useLiveFileMetadata } from '~/common/livefile/useLiveFileMetadata';
|
||||
@@ -24,6 +24,15 @@ export const DocSelColor: ColorPaletteProp = 'primary';
|
||||
const DocUnselColor: ColorPaletteProp = 'primary';
|
||||
|
||||
|
||||
const _styles = {
|
||||
label: {
|
||||
whiteSpace: 'nowrap',
|
||||
fontWeight: 'md',
|
||||
minWidth: 48,
|
||||
},
|
||||
} as const;
|
||||
|
||||
|
||||
export function buttonIconForFragment(part: DMessageAttachmentFragment['part']): React.ComponentType<any> {
|
||||
const pt = part.pt;
|
||||
switch (pt) {
|
||||
@@ -41,7 +50,7 @@ export function buttonIconForFragment(part: DMessageAttachmentFragment['part']):
|
||||
case 'image':
|
||||
return ImageOutlinedIcon;
|
||||
case 'audio':
|
||||
return RecordVoiceOverOutlinedIcon;
|
||||
return PhVoice;
|
||||
default:
|
||||
const _exhaustiveCheck: never = assetType;
|
||||
return TextureIcon; // missing zync asset type
|
||||
@@ -146,10 +155,14 @@ export function DocAttachmentFragmentButton(props: {
|
||||
if (!isDocPart(fragment.part))
|
||||
return 'Unexpected: ' + fragment.part.pt;
|
||||
|
||||
const buttonText = ellipsizeMiddle(fragment.part.l1Title || fragment.title || 'Document', 28 /* totally arbitrary length */);
|
||||
|
||||
const Icon = isSelected ? EditRoundedIcon : buttonIconForFragment(fragment.part);
|
||||
|
||||
const fullTitle = fragment.part.l1Title || fragment.title || 'Document';
|
||||
const buttonText = ellipsizeMiddle(fullTitle, 28 /* totally arbitrary length */);
|
||||
const showFilenameTooltip = fullTitle !== buttonText;
|
||||
|
||||
const labelContent = <Box sx={_styles.label}>{buttonText}</Box>;
|
||||
|
||||
return (
|
||||
<Button
|
||||
size={props.contentScaling === 'md' ? 'md' : 'sm'}
|
||||
@@ -171,9 +184,10 @@ export function DocAttachmentFragmentButton(props: {
|
||||
</Box>
|
||||
)}
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column', alignItems: 'flex-start', paddingX: '0.5rem' }}>
|
||||
<Box sx={{ whiteSpace: 'nowrap', fontWeight: 'md', minWidth: 48 }}>
|
||||
{buttonText}
|
||||
</Box>
|
||||
{showFilenameTooltip
|
||||
? <TooltipOutlined title={<span style={{ wordBreak: 'break-all' }}>{fullTitle}</span>}>{labelContent}</TooltipOutlined>
|
||||
: labelContent
|
||||
}
|
||||
{/*<Box sx={{ fontSize: 'xs', fontWeight: 'sm' }}>*/}
|
||||
{/* {fragment.caption}*/}
|
||||
{/*</Box>*/}
|
||||
|
||||
+2
-2
@@ -53,7 +53,7 @@ function _inferInitialViewAsCode(attachmentFragment: DMessageAttachmentFragment)
|
||||
}
|
||||
|
||||
|
||||
export function DocAttachmentFragment(props: {
|
||||
export const DocAttachmentFragmentPane = React.memo(function DocAttachmentFragment(props: {
|
||||
fragment: DMessageAttachmentFragment,
|
||||
controlledEditor: boolean,
|
||||
editedText?: string,
|
||||
@@ -400,4 +400,4 @@ export function DocAttachmentFragment(props: {
|
||||
|
||||
</RenderCodePanelFrame>
|
||||
);
|
||||
}
|
||||
});
|
||||
+37
-6
@@ -1,5 +1,5 @@
|
||||
import * as React from 'react';
|
||||
import { Box } from '@mui/joy';
|
||||
import { Box, Button } from '@mui/joy';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import type { DMessageRole } from '~/common/stores/chat/chat.message';
|
||||
@@ -7,7 +7,7 @@ import { DMessageAttachmentFragment, DMessageFragmentId, isDocPart, updateFragme
|
||||
|
||||
import type { ChatMessageTextPartEditState } from '../ChatMessage';
|
||||
import { DocAttachmentFragmentButton } from './DocAttachmentFragmentButton';
|
||||
import { DocAttachmentFragment } from './DocAttachmentFragment';
|
||||
import { DocAttachmentFragmentPane } from './DocAttachmentFragmentPane';
|
||||
|
||||
|
||||
/**
|
||||
@@ -15,7 +15,7 @@ import { DocAttachmentFragment } from './DocAttachmentFragment';
|
||||
* When one is active, there is a content part just right under (with the collapse mechanism in case it's a user role).
|
||||
* If one is clicked the content part (use ContentPartText) is displayed.
|
||||
*/
|
||||
export function DocumentAttachmentFragments(props: {
|
||||
export const DocumentAttachmentFragments = React.memo(function DocumentAttachmentFragments(props: {
|
||||
attachmentFragments: DMessageAttachmentFragment[],
|
||||
messageRole: DMessageRole,
|
||||
contentScaling: ContentScaling,
|
||||
@@ -30,6 +30,7 @@ export function DocumentAttachmentFragments(props: {
|
||||
// state
|
||||
const [_activeFragmentId, setActiveFragmentId] = React.useState<DMessageFragmentId | null>(null);
|
||||
const [editState, setEditState] = React.useState<ChatMessageTextPartEditState | null>(null);
|
||||
const [showAllAttachments, setShowAllAttachments] = React.useState<boolean>(false);
|
||||
|
||||
|
||||
// derived state
|
||||
@@ -92,6 +93,20 @@ export function DocumentAttachmentFragments(props: {
|
||||
}, []);
|
||||
|
||||
|
||||
// pagination logic
|
||||
const SHOW_LIMIT = 49;
|
||||
const totalAttachments = props.attachmentFragments.length;
|
||||
const hasMoreThanLimit = totalAttachments > SHOW_LIMIT + 1; // +1 to account for "show more" button
|
||||
const visibleAttachments = hasMoreThanLimit && !showAllAttachments
|
||||
? props.attachmentFragments.slice(0, SHOW_LIMIT)
|
||||
: props.attachmentFragments;
|
||||
const remainingCount = totalAttachments - SHOW_LIMIT;
|
||||
|
||||
const handleToggleShowAll = React.useCallback(() => {
|
||||
setShowAllAttachments(prev => !prev);
|
||||
}, []);
|
||||
|
||||
|
||||
// memos
|
||||
const buttonsSx = React.useMemo(() => ({
|
||||
// layout
|
||||
@@ -112,7 +127,7 @@ export function DocumentAttachmentFragments(props: {
|
||||
|
||||
{/* Document buttons */}
|
||||
<Box sx={buttonsSx}>
|
||||
{props.attachmentFragments.map((attachmentFragment) =>
|
||||
{visibleAttachments.map((attachmentFragment) =>
|
||||
<DocAttachmentFragmentButton
|
||||
key={attachmentFragment.fId}
|
||||
fragment={attachmentFragment}
|
||||
@@ -122,11 +137,27 @@ export function DocumentAttachmentFragments(props: {
|
||||
toggleSelected={handleToggleSelectedId}
|
||||
/>,
|
||||
)}
|
||||
|
||||
{/* Show more/less button */}
|
||||
{hasMoreThanLimit && (
|
||||
<Button
|
||||
size={props.contentScaling === 'md' ? 'md' : 'sm'}
|
||||
variant='soft'
|
||||
onClick={handleToggleShowAll}
|
||||
sx={{
|
||||
minHeight: props.contentScaling === 'md' ? 40 : props.contentScaling === 'sm' ? 38 : 36,
|
||||
minWidth: '64px',
|
||||
fontWeight: 'md',
|
||||
}}
|
||||
>
|
||||
{showAllAttachments ? `Show fewer docs...` : `Show ${remainingCount} more...`}
|
||||
</Button>
|
||||
)}
|
||||
</Box>
|
||||
|
||||
{/* Document Viewer & Editor */}
|
||||
{!!selectedFragment && isDocPart(selectedFragment.part) && (
|
||||
<DocAttachmentFragment
|
||||
<DocAttachmentFragmentPane
|
||||
key={selectedFragment.fId /* this is here for the useLiveFile hook which otherwise would migrate state across fragments */}
|
||||
fragment={selectedFragment}
|
||||
controlledEditor={controlledEditor}
|
||||
@@ -144,4 +175,4 @@ export function DocumentAttachmentFragments(props: {
|
||||
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
|
||||
import { BlocksTextarea } from '~/modules/blocks/BlocksContainers';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
@@ -96,6 +98,8 @@ export function BlockEdit_TextFragment(props: {
|
||||
|
||||
const handleEditKeyDown = React.useCallback((e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (e.key === 'Enter') {
|
||||
if (e.nativeEvent.isComposing)
|
||||
return;
|
||||
const withControl = e.ctrlKey;
|
||||
if (enterIsNewline ? e.shiftKey : !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
@@ -120,6 +124,32 @@ export function BlockEdit_TextFragment(props: {
|
||||
{ key: ShortcutKey.Esc, description: 'Cancel', level: 3, action: onEscapePressed },
|
||||
], [isControlled, isEdited, isFocused, onEscapePressed, onSubmit, props.enableRestart]));
|
||||
|
||||
|
||||
// memo style
|
||||
const sx = React.useMemo((): SxProps | undefined => {
|
||||
// check sources of custom, and early outs
|
||||
const isXS = props.contentScaling === 'xs';
|
||||
const isSquareTop = !!props.squareTopBorder;
|
||||
if (!isXS && !isSquareTop) return undefined;
|
||||
if (isSquareTop && !isXS) return _styles.squareTop;
|
||||
|
||||
return {
|
||||
// scaling note: in Chat, this can go xs/sm/md, while in Beam, this is xs/xs/sm
|
||||
...(isXS && {
|
||||
fontSize: 'xs',
|
||||
lineHeight: 'md', // was 1.75 on all
|
||||
// '--Textarea-paddingBlock': 'calc(0.25rem - 0.5px - var(--variant-borderWidth, 0px))', // not used, overridden in BlocksTextarea
|
||||
'--Textarea-paddingInline': '6px',
|
||||
'--Textarea-minHeight': '1.75rem', // was 2rem on 'sm'
|
||||
'--Icon-fontSize': 'lg', // was 'xl' on 'sm'
|
||||
'--Textarea-focusedThickness': '1px',
|
||||
boxShadow: 'none', // too small to show this
|
||||
}),
|
||||
...(isSquareTop && _styles.squareTop),
|
||||
};
|
||||
}, [props.contentScaling, props.squareTopBorder]);
|
||||
|
||||
|
||||
return (
|
||||
<BlocksTextarea
|
||||
variant={/*props.invertedColors ? 'plain' :*/ 'soft'}
|
||||
@@ -140,7 +170,7 @@ export function BlockEdit_TextFragment(props: {
|
||||
onKeyDown={handleEditKeyDown}
|
||||
slotProps={enterIsNewline ? _textAreaSlotPropsEnter : _textAreaSlotPropsDone}
|
||||
// endDecorator={props.endDecorator}
|
||||
sx={!props.squareTopBorder ? undefined : _styles.squareTop}
|
||||
sx={sx}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3,15 +3,44 @@ import * as React from 'react';
|
||||
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import type { DMessageErrorPart } from '~/common/stores/chat/chat.fragments';
|
||||
import type { DMessageRole } from '~/common/stores/chat/chat.message';
|
||||
|
||||
import { BlockPartError_NetDisconnected } from './BlockPartError_NetDisconnected';
|
||||
import { BlockPartError_RequestExceeded } from './BlockPartError_RequestExceeded';
|
||||
|
||||
|
||||
export function BlockPartError(props: {
|
||||
errorText: string,
|
||||
errorHint?: DMessageErrorPart['hint'],
|
||||
messageRole: DMessageRole,
|
||||
messageGeneratorLlmId?: string | null,
|
||||
contentScaling: ContentScaling,
|
||||
}) {
|
||||
|
||||
// special error presentation, based on hints
|
||||
switch (props.errorHint) {
|
||||
case 'aix-net-disconnected':
|
||||
// determine the 2 'kinds' of disconnection errors in aix.client.ts
|
||||
const kind =
|
||||
props.errorText.includes('**network error**') ? 'net-client-closed'
|
||||
: props.errorText.includes('**connection terminated**') ? 'net-server-closed'
|
||||
: 'net-unknown-closed';
|
||||
|
||||
// For client-side error, we don't show the _NetDisconnected component
|
||||
if (kind === 'net-client-closed')
|
||||
break;
|
||||
|
||||
return <BlockPartError_NetDisconnected disconnectionKind={kind} messageGeneratorLlmId={props.messageGeneratorLlmId} contentScaling={props.contentScaling} />;
|
||||
|
||||
case 'aix-request-exceeded':
|
||||
return <BlockPartError_RequestExceeded messageGeneratorLlmId={props.messageGeneratorLlmId} contentScaling={props.contentScaling} />;
|
||||
|
||||
default:
|
||||
// continue rendering generic error
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if the errorText starts with '**' and has a closing '**' following Markdown rules
|
||||
let textToRender = props.errorText;
|
||||
let renderAsMarkdown = false;
|
||||
|
||||
+103
@@ -0,0 +1,103 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Alert, Box, FormHelperText, Switch } from '@mui/joy';
|
||||
import WifiOffRoundedIcon from '@mui/icons-material/WifiOffRounded';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import { useLLM } from '~/common/stores/llms/llms.hooks';
|
||||
import { useModelServiceClientSideFetch } from '~/common/stores/llms/hooks/useModelServiceClientSideFetch';
|
||||
|
||||
|
||||
/**
|
||||
* Error recovery component for "Connection terminated" errors.
|
||||
*/
|
||||
export function BlockPartError_NetDisconnected(props: {
|
||||
disconnectionKind: 'net-client-closed' | 'net-server-closed' | 'net-unknown-closed';
|
||||
messageGeneratorLlmId?: string | null;
|
||||
contentScaling: ContentScaling;
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const model = useLLM(props.messageGeneratorLlmId) ?? null;
|
||||
const isServerSideClosed = props.disconnectionKind === 'net-server-closed'; // do not show CSF option for non-server-side
|
||||
const { csfAvailable, csfActive, csfToggle, vendorName } = useModelServiceClientSideFetch(isServerSideClosed, model);
|
||||
|
||||
return (
|
||||
<Alert
|
||||
size={props.contentScaling === 'xs' ? 'sm' : 'md'}
|
||||
color='danger'
|
||||
variant='plain'
|
||||
sx={{ display: 'flex', alignItems: 'flex-start', gap: 1 }}
|
||||
>
|
||||
|
||||
|
||||
<Box sx={{ flex: 1, display: 'flex', flexDirection: 'column', gap: 0.5, alignItems: 'flex-start' }}>
|
||||
|
||||
{/* Header */}
|
||||
<Box sx={{ display: 'flex', gap: 2 }}>
|
||||
<WifiOffRoundedIcon sx={{ flexShrink: 0, mt: 0.5 }} />
|
||||
<div>
|
||||
<Box fontSize='larger'>
|
||||
Connection Terminated
|
||||
</Box>
|
||||
<div>
|
||||
The connection was unexpectedly closed before the response completed.
|
||||
</div>
|
||||
</div>
|
||||
</Box>
|
||||
|
||||
|
||||
{/* Recovery options */}
|
||||
{csfAvailable ? <>
|
||||
|
||||
{/* Explanation */}
|
||||
<Box color='text.tertiary' fontSize='sm' my={2}>
|
||||
<strong>Experimental:</strong> enable direct connection to {vendorName} to bypass server timeouts - then try again.
|
||||
</Box>
|
||||
|
||||
{/* Toggle */}
|
||||
<Box
|
||||
sx={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 2,
|
||||
p: 2,
|
||||
borderRadius: 'sm',
|
||||
bgcolor: 'background.popup',
|
||||
boxShadow: 'md',
|
||||
// border: '1px solid',
|
||||
// borderColor: 'divider',
|
||||
}}
|
||||
>
|
||||
|
||||
<Box sx={{ flex: 1 }}>
|
||||
<Box color={!csfActive ? undefined : 'primary.solidBg'} fontWeight='lg' mb={0.5}>
|
||||
Direct Connection {csfActive && '- Now Try Again'}
|
||||
</Box>
|
||||
<FormHelperText>
|
||||
Connect directly from this client -> {vendorName || 'AI service'}
|
||||
</FormHelperText>
|
||||
</Box>
|
||||
|
||||
<Switch
|
||||
checked={csfActive}
|
||||
onChange={(e) => csfToggle(e.target.checked)}
|
||||
/>
|
||||
</Box>
|
||||
|
||||
</> : (
|
||||
<div>
|
||||
<Box sx={{ color: 'text.secondary', my: 1 }}>
|
||||
Suggestions:
|
||||
</Box>
|
||||
<Box component='ul' sx={{ color: 'text.secondary' }}>
|
||||
<li>Check your internet connection and try again</li>
|
||||
<li>The AI service may be experiencing issues - wait a moment and retry</li>
|
||||
<li>If the issue persists, please let us know promptly on Discord or GitHib</li>
|
||||
</Box>
|
||||
</div>
|
||||
)}
|
||||
</Box>
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
+107
@@ -0,0 +1,107 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Alert, Box, FormHelperText, Switch } from '@mui/joy';
|
||||
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import { useLLM } from '~/common/stores/llms/llms.hooks';
|
||||
import { useModelServiceClientSideFetch } from '~/common/stores/llms/hooks/useModelServiceClientSideFetch';
|
||||
|
||||
|
||||
/**
|
||||
* Error recovery component for "Request too large" errors.
|
||||
*/
|
||||
export function BlockPartError_RequestExceeded(props: {
|
||||
messageGeneratorLlmId?: string | null;
|
||||
contentScaling: ContentScaling;
|
||||
onRegenerate?: () => void;
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const model = useLLM(props.messageGeneratorLlmId) ?? null;
|
||||
const { csfAvailable, csfActive, csfToggle, vendorName } = useModelServiceClientSideFetch(true, model);
|
||||
|
||||
return (
|
||||
<Alert
|
||||
size={props.contentScaling === 'xs' ? 'sm' : 'md'}
|
||||
color='warning'
|
||||
sx={{ display: 'flex', alignItems: 'flex-start', gap: 1, border: '1px solid', borderColor: 'warning.outlinedBorder' }}
|
||||
>
|
||||
|
||||
<WarningRoundedIcon sx={{ flexShrink: 0, mt: 0.25 }} />
|
||||
|
||||
<Box sx={{ flex: 1, display: 'flex', flexDirection: 'column', gap: 0.5 }}>
|
||||
|
||||
<Box fontSize='larger'>
|
||||
Request Too Large
|
||||
</Box>
|
||||
<div>
|
||||
Your message or attachments exceed the limit of the Vercel edge network
|
||||
</div>
|
||||
|
||||
{/* Recovery options */}
|
||||
{csfAvailable ? <>
|
||||
|
||||
{/* Explanation */}
|
||||
<Box color='text.secondary' fontSize='sm' my={2}>
|
||||
<strong>Experimental:</strong> enable Direct Connection to {vendorName} to work around size limitations.
|
||||
</Box>
|
||||
|
||||
{/* Toggle */}
|
||||
<Box
|
||||
sx={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 2,
|
||||
p: 2,
|
||||
borderRadius: 'sm',
|
||||
bgcolor: 'background.popup',
|
||||
boxShadow: 'md',
|
||||
}}
|
||||
>
|
||||
|
||||
<Box sx={{ flex: 1 }}>
|
||||
<Box color={!csfActive ? undefined : 'primary.solidBg'} fontWeight='lg' mb={0.5}>
|
||||
Direct Connection {csfActive && '- Now Try Again'}
|
||||
</Box>
|
||||
<FormHelperText>
|
||||
Connect directly from this client -> {vendorName || 'AI service'}
|
||||
</FormHelperText>
|
||||
</Box>
|
||||
|
||||
<Switch
|
||||
checked={csfActive}
|
||||
onChange={(e) => csfToggle(e.target.checked)}
|
||||
/>
|
||||
</Box>
|
||||
|
||||
{/* Regenerate button */}
|
||||
{/*{props.onRegenerate && (*/}
|
||||
{/* <Button*/}
|
||||
{/* size='sm'*/}
|
||||
{/* variant={csfActive ? 'solid' : 'outlined'}*/}
|
||||
{/* color={csfActive ? 'success' : 'neutral'}*/}
|
||||
{/* startDecorator={<RefreshIcon />}*/}
|
||||
{/* onClick={props.onRegenerate}*/}
|
||||
{/* sx={{ alignSelf: 'flex-start' }}*/}
|
||||
{/* >*/}
|
||||
{/* {csfActive ? 'Regenerate with Direct Connection' : 'Regenerate'}*/}
|
||||
{/* </Button>*/}
|
||||
{/*)}*/}
|
||||
|
||||
</> : (
|
||||
<Box>
|
||||
<Box sx={{ color: 'text.secondary', my: 1 }}>
|
||||
Suggestions:
|
||||
</Box>
|
||||
<Box component='ul' sx={{ color: 'text.secondary' }}>
|
||||
<li>Use the cleanup button in the right pane to hide old messages</li>
|
||||
<li>Remove large attachments from the conversation</li>
|
||||
{/*<li>Reduce conversation length before sending</li>*/}
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
@@ -1,20 +1,25 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { ColorPaletteProp, SxProps, VariantProp } from '@mui/joy/styles/types';
|
||||
import { Sheet } from '@mui/joy';
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, IconButton, Sheet, Typography } from '@mui/joy';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
|
||||
|
||||
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
|
||||
import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import type { DMessageToolInvocationPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
|
||||
|
||||
import { humanReadableFunctionName } from './BlockPartToolInvocation.utils';
|
||||
|
||||
|
||||
const keyValueGridSx = {
|
||||
border: '1px solid',
|
||||
borderRadius: 'sm',
|
||||
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
|
||||
p: 1.5,
|
||||
// border: '1px solid',
|
||||
// borderRadius: 'sm',
|
||||
// boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
|
||||
// p: 1.5,
|
||||
|
||||
// Grid layout with 2 columns
|
||||
display: 'grid',
|
||||
@@ -30,36 +35,49 @@ const keyValueGridSx = {
|
||||
// },
|
||||
} as const;
|
||||
|
||||
const _styleKeyValueGrid: SxProps = {
|
||||
border: 'none',
|
||||
boxShadow: 'none',
|
||||
p: 0,
|
||||
fontSize: '0.875em',
|
||||
opacity: 0.9,
|
||||
} as const;
|
||||
|
||||
|
||||
export type KeyValueData = { label: string, value: React.ReactNode, asCode?: boolean }[];
|
||||
|
||||
export function KeyValueGrid(props: {
|
||||
data: KeyValueData,
|
||||
contentScaling: ContentScaling,
|
||||
color?: ColorPaletteProp,
|
||||
variant?: VariantProp,
|
||||
stableSx?: SxProps,
|
||||
// contentScaling: ContentScaling,
|
||||
// color?: ColorPaletteProp,
|
||||
// variant?: VariantProp,
|
||||
// stableSx?: SxProps,
|
||||
}) {
|
||||
|
||||
const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
|
||||
// const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
|
||||
|
||||
const gridSx = React.useMemo(() => ({
|
||||
...keyValueGridSx,
|
||||
// fontWeight,
|
||||
fontSize,
|
||||
lineHeight,
|
||||
...props.stableSx,
|
||||
}), [fontSize, lineHeight, props.stableSx]);
|
||||
// fontSize,
|
||||
// lineHeight,
|
||||
// ...props.stableSx,
|
||||
_styleKeyValueGrid,
|
||||
}), [/*props.stableSx*/]);
|
||||
|
||||
return (
|
||||
<Sheet color={props.color} variant={props.variant || 'soft'} sx={gridSx}>
|
||||
<Box
|
||||
// color={props.color}
|
||||
// variant={props.variant || 'soft'}
|
||||
sx={gridSx}
|
||||
>
|
||||
{props.data.map(({ label, value }, index) => (
|
||||
<React.Fragment key={index}>
|
||||
<div>{label}</div>
|
||||
<div>{value}</div>
|
||||
</React.Fragment>
|
||||
))}
|
||||
</Sheet>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -70,35 +88,124 @@ export function BlockPartToolInvocation(props: {
|
||||
onDoubleClick?: (event: React.MouseEvent) => void;
|
||||
}) {
|
||||
|
||||
const part = props.toolInvocationPart;
|
||||
// state
|
||||
const [expanded, setExpanded] = React.useState(false);
|
||||
|
||||
const kvData: KeyValueData = React.useMemo(() => {
|
||||
switch (part.invocation.type) {
|
||||
// external state
|
||||
const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
|
||||
|
||||
|
||||
// memo name
|
||||
|
||||
const { id: iId, invocation } = props.toolInvocationPart;
|
||||
|
||||
const { humanName, originalName } = React.useMemo(() => {
|
||||
const invocationType = invocation.type;
|
||||
const originalName = invocationType === 'function_call' ? invocation.name : 'code_execution';
|
||||
const humanName = humanReadableFunctionName(originalName, invocationType, 'invocation');
|
||||
return { humanName, originalName };
|
||||
}, [invocation]);
|
||||
|
||||
|
||||
// memo details
|
||||
|
||||
const detailsData: KeyValueData = React.useMemo(() => {
|
||||
switch (invocation.type) {
|
||||
case 'function_call':
|
||||
return [
|
||||
{ label: 'Name', value: <strong>{part.invocation.name}</strong> },
|
||||
{ label: 'Args', value: part.invocation.args || 'None', asCode: true },
|
||||
{ label: 'Id', value: part.id },
|
||||
{ label: 'Name', value: invocation.name },
|
||||
{ label: 'Args', value: invocation.args || 'None', asCode: true },
|
||||
{ label: 'ID', value: iId },
|
||||
];
|
||||
case 'code_execution':
|
||||
return [
|
||||
{ label: 'Language', value: part.invocation.language },
|
||||
{ label: 'Author', value: part.invocation.author },
|
||||
{ label: 'Language', value: invocation.language },
|
||||
{ label: 'Author', value: invocation.author },
|
||||
{
|
||||
label: 'Code',
|
||||
value: <div style={{ whiteSpace: 'pre-wrap' }}>{part.invocation.code.trim()}</div>,
|
||||
value: <div style={{ whiteSpace: 'pre-wrap' }}>{invocation.code.trim()}</div>,
|
||||
},
|
||||
{ label: 'Id', value: part.id },
|
||||
{ label: 'ID', value: iId },
|
||||
];
|
||||
}
|
||||
}, [part]);
|
||||
}, [invocation, iId]);
|
||||
|
||||
|
||||
const toggleExpanded = React.useCallback((event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
setExpanded(prev => !prev);
|
||||
}, []);
|
||||
|
||||
|
||||
return (
|
||||
<BlocksContainer onDoubleClick={props.onDoubleClick}>
|
||||
<KeyValueGrid
|
||||
data={kvData}
|
||||
contentScaling={props.contentScaling}
|
||||
/>
|
||||
</BlocksContainer>
|
||||
<BlocksContainer onDoubleClick={props.onDoubleClick}><Box /*sx={{ px: 1.5 }}*/>
|
||||
|
||||
<Sheet
|
||||
variant='soft'
|
||||
sx={{
|
||||
borderLeft: '3px solid',
|
||||
borderLeftColor: 'primary.softBg',
|
||||
borderRadius: 'sm',
|
||||
pl: 1,
|
||||
pr: 2,
|
||||
py: 0.75,
|
||||
fontSize,
|
||||
lineHeight,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
...(expanded ? {
|
||||
border: '1px solid',
|
||||
borderColor: 'primary.outlinedBorder',
|
||||
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
|
||||
} : {}),
|
||||
}}
|
||||
>
|
||||
|
||||
{/* Compact header */}
|
||||
<Box
|
||||
sx={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 1,
|
||||
cursor: 'pointer',
|
||||
'&:hover': { '& .expand-icon': { opacity: 1 } },
|
||||
}}
|
||||
onClick={toggleExpanded}
|
||||
>
|
||||
<IconButton
|
||||
size='sm'
|
||||
className='expand-icon'
|
||||
sx={{
|
||||
minWidth: 'auto',
|
||||
minHeight: 'auto',
|
||||
padding: 0,
|
||||
opacity: expanded ? 1 : 0.5,
|
||||
transition: 'opacity 0.2s',
|
||||
}}
|
||||
>
|
||||
{expanded ? <KeyboardArrowDownIcon fontSize='small' /> : <KeyboardArrowRightIcon fontSize='small' />}
|
||||
</IconButton>
|
||||
|
||||
{/*<Tooltip title={humanName !== originalName ? `Original: ${originalName}` : undefined} placement='top'>*/}
|
||||
<Typography level='body-sm' sx={{ fontWeight: 'md' }}>
|
||||
{humanName}
|
||||
</Typography>
|
||||
{/*</Tooltip>*/}
|
||||
</Box>
|
||||
|
||||
{/* Expanded details */}
|
||||
<ExpanderControlledBox expanded={expanded}>
|
||||
{expanded && <Box sx={{ mt: 1, ml: 2.625, pl: 1 }}>
|
||||
<KeyValueGrid
|
||||
data={detailsData}
|
||||
// contentScaling={props.contentScaling}
|
||||
// stableSx={_styleKeyValueGrid}
|
||||
/>
|
||||
</Box>}
|
||||
</ExpanderControlledBox>
|
||||
|
||||
</Sheet>
|
||||
|
||||
</Box></BlocksContainer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
//
|
||||
// Utilities for rendering tool invocations
|
||||
//
|
||||
|
||||
/**
|
||||
* [EDITORIAL] Known hosted tool name translations
|
||||
*
|
||||
* This mapping provides human-readable names for actual hosted tools
|
||||
* from AI model providers. Only add entries for confirmed provider-hosted tools.
|
||||
*
|
||||
* Note: Tool calls != Function calls
|
||||
* - Tool calls: Provider-hosted tools (e.g., Anthropic's computer use, Gemini's code execution)
|
||||
* - Function calls: User/app-defined functions that the model can invoke
|
||||
*/
|
||||
const KNOWN_TOOL_TRANSLATIONS: Record<string, string> = {
|
||||
// Anthropic Computer Use Tools (hosted)
|
||||
'computer': 'Computer Use',
|
||||
'computer_20241022': 'Computer Use',
|
||||
'bash': 'Bash',
|
||||
'bash_20241022': 'Bash',
|
||||
'text_editor': 'Text Editor',
|
||||
'text_editor_20241022': 'Text Editor',
|
||||
|
||||
// Gemini Tools (hosted)
|
||||
'code_execution': 'Code Execution',
|
||||
'google_search_retrieval': 'Google Search',
|
||||
|
||||
// Add other confirmed provider-hosted tools here as discovered
|
||||
} as const;
|
||||
|
||||
|
||||
/**
|
||||
* Translate a function/tool name to a human-readable format
|
||||
*
|
||||
* First checks for known hosted tools, then applies heuristics for function names
|
||||
*/
|
||||
export function humanReadableFunctionName(name: string, invocationType: 'function_call' | 'code_execution', phase: 'invocation' | 'response'): string {
|
||||
if (invocationType === 'code_execution')
|
||||
return phase === 'invocation' ? 'Generated code' : 'Executed code';
|
||||
|
||||
// check for known hosted tools
|
||||
if (KNOWN_TOOL_TRANSLATIONS[name])
|
||||
return KNOWN_TOOL_TRANSLATIONS[name];
|
||||
|
||||
// apply heuristics for user-defined function names
|
||||
if (name.startsWith('get_'))
|
||||
return _toTitleCase(name.substring(4));
|
||||
if (name.startsWith('fetch_'))
|
||||
return _toTitleCase(name.substring(6));
|
||||
if (name.startsWith('search_'))
|
||||
return _toTitleCase(name.substring(7)) + ' Search';
|
||||
|
||||
return _toTitleCase(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get function display name and color
|
||||
*/
|
||||
export function functionNameAppearance(environment: 'upstream' | 'server' | 'client'): {
|
||||
label: string;
|
||||
color: 'primary' | 'neutral' | 'success';
|
||||
} {
|
||||
switch (environment) {
|
||||
case 'upstream':
|
||||
return { label: 'Hosted', color: 'primary' };
|
||||
case 'server':
|
||||
return { label: 'Server', color: 'neutral' };
|
||||
case 'client':
|
||||
return { label: 'Client', color: 'success' };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function _toTitleCase(fName: string): string {
|
||||
// snake_case -> Title Case
|
||||
if (fName.includes('_'))
|
||||
return fName
|
||||
.split('_')
|
||||
.map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
|
||||
.join(' ');
|
||||
|
||||
// camelCase -> Title Case
|
||||
const withSpaces = fName.replace(/([A-Z])/g, ' $1').trim();
|
||||
return withSpaces.charAt(0).toUpperCase() + withSpaces.slice(1);
|
||||
}
|
||||
@@ -1,10 +1,17 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Chip, IconButton, Sheet, Typography } from '@mui/joy';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
|
||||
|
||||
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
|
||||
import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
|
||||
|
||||
import type { ContentScaling } from '~/common/app.theme';
|
||||
import type { DMessageToolResponsePart } from '~/common/stores/chat/chat.fragments';
|
||||
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
|
||||
|
||||
import { functionNameAppearance, humanReadableFunctionName } from './BlockPartToolInvocation.utils';
|
||||
import { KeyValueData, KeyValueGrid } from './BlockPartToolInvocation';
|
||||
|
||||
|
||||
@@ -14,36 +21,148 @@ export function BlockPartToolResponse(props: {
|
||||
onDoubleClick?: (event: React.MouseEvent) => void;
|
||||
}) {
|
||||
|
||||
const part = props.toolResponsePart;
|
||||
// state
|
||||
const [expanded, setExpanded] = React.useState(false);
|
||||
|
||||
const kvData: KeyValueData = React.useMemo(() => {
|
||||
switch (part.response.type) {
|
||||
// external state
|
||||
const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
|
||||
|
||||
|
||||
// memo name
|
||||
|
||||
const { id: rId, response, environment, error: rError } = props.toolResponsePart;
|
||||
|
||||
const { humanName, originalName, envInfo } = React.useMemo(() => {
|
||||
const invocationType = response.type;
|
||||
const originalName = invocationType === 'function_call' ? response.name : 'code_execution';
|
||||
const humanName = humanReadableFunctionName(originalName, invocationType, 'response');
|
||||
const envInfo = functionNameAppearance(environment);
|
||||
return { humanName, originalName, envInfo };
|
||||
}, [response, environment]);
|
||||
|
||||
// memo details data
|
||||
|
||||
const detailsData: KeyValueData = React.useMemo(() => {
|
||||
switch (response.type) {
|
||||
case 'function_call':
|
||||
return [
|
||||
{ label: 'Id', value: part.id },
|
||||
{ label: 'Name', value: <strong>{part.response.name}</strong> },
|
||||
{ label: 'Response', value: part.response.result, asCode: true },
|
||||
...(!part.error ? [] : [{ label: 'Error', value: part.error }]),
|
||||
{ label: 'Environment', value: part.environment },
|
||||
{ label: 'Function', value: response.name },
|
||||
{ label: 'Result', value: response.result, asCode: true },
|
||||
...(!rError ? [] : [{ label: 'Error', value: String(rError) }]),
|
||||
{ label: 'Environment', value: envInfo.label },
|
||||
{ label: 'ID', value: rId },
|
||||
];
|
||||
case 'code_execution':
|
||||
return [
|
||||
{ label: 'Id', value: part.id },
|
||||
{ label: 'Response', value: part.response.result, asCode: true },
|
||||
...(!part.error ? [] : [{ label: 'Error', value: part.error }]),
|
||||
{ label: 'Executor', value: part.response.executor },
|
||||
{ label: 'Environment', value: part.environment },
|
||||
{ label: 'Result', value: response.result, asCode: true },
|
||||
...(!rError ? [] : [{ label: 'Error', value: String(rError) }]),
|
||||
{ label: 'Executor', value: response.executor },
|
||||
{ label: 'Environment', value: envInfo.label },
|
||||
{ label: 'ID', value: rId },
|
||||
];
|
||||
}
|
||||
}, [part]);
|
||||
}, [envInfo.label, rError, rId, response]);
|
||||
|
||||
// memo border color
|
||||
|
||||
const borderColor = React.useMemo(() => {
|
||||
if (rError) return 'danger.softBg';
|
||||
switch (environment) {
|
||||
case 'upstream':
|
||||
return 'primary.softBg'; // Hosted - blue
|
||||
case 'server':
|
||||
return 'neutral.softBg'; // Server - gray
|
||||
case 'client':
|
||||
return 'success.softBg'; // Client - green
|
||||
}
|
||||
}, [rError, environment]);
|
||||
|
||||
|
||||
const toggleExpanded = React.useCallback((event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
setExpanded(prev => !prev);
|
||||
}, []);
|
||||
|
||||
|
||||
return (
|
||||
<BlocksContainer onDoubleClick={props.onDoubleClick}>
|
||||
<KeyValueGrid
|
||||
data={kvData}
|
||||
contentScaling={props.contentScaling}
|
||||
color={part.error ? 'danger' : 'primary'}
|
||||
/>
|
||||
</BlocksContainer>
|
||||
<BlocksContainer onDoubleClick={props.onDoubleClick}><Box /*sx={{ px: 1.5 }}*/>
|
||||
<Sheet
|
||||
variant='soft'
|
||||
color={rError ? 'danger' : undefined}
|
||||
sx={{
|
||||
borderLeft: '3px solid',
|
||||
borderLeftColor: borderColor,
|
||||
borderRadius: 'sm',
|
||||
pl: 1,
|
||||
pr: 2,
|
||||
py: 0.75,
|
||||
fontSize,
|
||||
lineHeight,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
...(expanded ? {
|
||||
border: '1px solid',
|
||||
borderColor: 'primary.outlinedBorder',
|
||||
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
|
||||
} : {}),
|
||||
}}
|
||||
>
|
||||
|
||||
{/* Compact header */}
|
||||
<Box
|
||||
sx={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 1,
|
||||
cursor: 'pointer',
|
||||
'&:hover': { '& .expand-icon': { opacity: 1 } },
|
||||
}}
|
||||
onClick={toggleExpanded}
|
||||
>
|
||||
<IconButton
|
||||
size='sm'
|
||||
className='expand-icon'
|
||||
sx={{
|
||||
minWidth: 'auto',
|
||||
minHeight: 'auto',
|
||||
padding: 0,
|
||||
opacity: expanded ? 1 : 0.5,
|
||||
transition: 'opacity 0.2s',
|
||||
}}
|
||||
>
|
||||
{expanded ? <KeyboardArrowDownIcon fontSize='small' /> : <KeyboardArrowRightIcon fontSize='small' />}
|
||||
</IconButton>
|
||||
|
||||
{/*<Tooltip title={humanName !== originalName ? `Original: ${originalName}` : undefined} placement='top'>*/}
|
||||
<Typography level='body-sm' sx={{ fontWeight: 'md' }}>
|
||||
{humanName}
|
||||
</Typography>
|
||||
{/*</Tooltip>*/}
|
||||
|
||||
{rError && (
|
||||
<Chip size='sm' color='danger' variant='soft'>
|
||||
Error
|
||||
</Chip>
|
||||
)}
|
||||
|
||||
<Chip size='sm' color={envInfo.color} variant='soft' sx={{ ml: 'auto' }}>
|
||||
{envInfo.label}
|
||||
</Chip>
|
||||
</Box>
|
||||
|
||||
{/* Expanded details */}
|
||||
<ExpanderControlledBox expanded={expanded}>
|
||||
{expanded && <Box sx={{ mt: 1, ml: 2.625, pl: 1 }}>
|
||||
<KeyValueGrid
|
||||
data={detailsData}
|
||||
// contentScaling={props.contentScaling}
|
||||
// stableSx={_styleKeyValueGrid}
|
||||
/>
|
||||
</Box>}
|
||||
</ExpanderControlledBox>
|
||||
|
||||
</Sheet>
|
||||
|
||||
</Box></BlocksContainer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,16 +6,21 @@ import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRendere
|
||||
|
||||
import type { ContentScaling, UIComplexityMode } from '~/common/app.theme';
|
||||
import type { DMessageRole } from '~/common/stores/chat/chat.message';
|
||||
import { DMessageContentFragment, DMessageFragmentId, isTextPart } from '~/common/stores/chat/chat.fragments';
|
||||
import type { InterleavedFragment } from '~/common/stores/chat/hooks/useFragmentBuckets';
|
||||
import { DMessageContentFragment, DMessageFragmentId, isTextContentFragment, isTextPart, isVoidPlaceholderFragment } from '~/common/stores/chat/chat.fragments';
|
||||
import { Release } from '~/common/app.release';
|
||||
|
||||
import type { ChatMessageTextPartEditState } from '../ChatMessage';
|
||||
import { BlockEdit_TextFragment } from './BlockEdit_TextFragment';
|
||||
import { BlockOpEmpty } from './BlockOpEmpty';
|
||||
import { BlockPartError } from './BlockPartError';
|
||||
import { BlockPartImageRef } from './BlockPartImageRef';
|
||||
import { BlockPartModelAux } from '../fragments-void/BlockPartModelAux';
|
||||
import { BlockPartPlaceholder } from '../fragments-void/BlockPartPlaceholder';
|
||||
import { BlockPartText_AutoBlocks } from './BlockPartText_AutoBlocks';
|
||||
import { BlockPartToolInvocation } from './BlockPartToolInvocation';
|
||||
import { BlockPartToolResponse } from './BlockPartToolResponse';
|
||||
import { humanReadableFunctionName } from './BlockPartToolInvocation.utils';
|
||||
|
||||
|
||||
const _editLayoutSx: SxProps = {
|
||||
@@ -42,7 +47,7 @@ const _endLayoutSx: SxProps = {
|
||||
|
||||
export function ContentFragments(props: {
|
||||
|
||||
contentFragments: DMessageContentFragment[]
|
||||
contentFragments: InterleavedFragment[]
|
||||
showEmptyNotice: boolean,
|
||||
|
||||
contentScaling: ContentScaling,
|
||||
@@ -50,6 +55,8 @@ export function ContentFragments(props: {
|
||||
fitScreen: boolean,
|
||||
isMobile: boolean,
|
||||
messageRole: DMessageRole,
|
||||
messagePendingIncomplete?: boolean,
|
||||
messageGeneratorLlmId?: string | null,
|
||||
optiAllowSubBlocksMemo?: boolean,
|
||||
disableMarkdownText: boolean,
|
||||
enhanceCodeBlocks: boolean,
|
||||
@@ -76,10 +83,30 @@ export function ContentFragments(props: {
|
||||
const isEditingText = !!props.textEditsState;
|
||||
const enableRestartFromEdit = !fromAssistant && props.messageRole !== 'system';
|
||||
|
||||
|
||||
// solo placeholder - dataStreamViz trigger
|
||||
const showDataStreamViz =
|
||||
!Release.Features.LIGHTER_ANIMATIONS
|
||||
&& props.uiComplexityMode !== 'minimal'
|
||||
&& props.contentFragments.length === 1
|
||||
// && props.noVoidFragments // not needed, we have all the interleaved fragments here
|
||||
&& isVoidPlaceholderFragment(props.contentFragments[0]);
|
||||
|
||||
|
||||
// Content Fragments Edit Zero-State: button to create a new TextContentFragment
|
||||
if (isEditingText && isEmpty)
|
||||
if (isEditingText && !props.contentFragments.some(isTextContentFragment))
|
||||
return !props.onFragmentAddBlank ? null : (
|
||||
<Button aria-label='message body empty' variant='plain' color='neutral' onClick={props.onFragmentAddBlank} sx={{ justifyContent: 'flex-start' }}>
|
||||
<Button
|
||||
aria-label='message body empty'
|
||||
color={fromAssistant ? 'neutral' : 'primary'}
|
||||
variant='outlined'
|
||||
onClick={props.onFragmentAddBlank}
|
||||
sx={{
|
||||
justifyContent: 'flex-start',
|
||||
backgroundColor: fromAssistant ? 'neutral.softBg' : 'primary.softBg',
|
||||
'&:hover': { backgroundColor: fromAssistant ? 'neutral.softHoverBg' : 'primary.softHoverBg' },
|
||||
}}
|
||||
>
|
||||
add text ...
|
||||
</Button>
|
||||
);
|
||||
@@ -92,7 +119,7 @@ export function ContentFragments(props: {
|
||||
if (!props.showEmptyNotice && isEmpty)
|
||||
return null;
|
||||
|
||||
return <Box aria-label='message body' sx={isEditingText ? _editLayoutSx : fromAssistant ? _startLayoutSx : _endLayoutSx}>
|
||||
return <Box aria-label='message body' sx={(showDataStreamViz || isEditingText) ? _editLayoutSx : fromAssistant ? _startLayoutSx : _endLayoutSx}>
|
||||
|
||||
{/* Empty Message Block - if empty */}
|
||||
{props.showEmptyNotice && (
|
||||
@@ -103,35 +130,107 @@ export function ContentFragments(props: {
|
||||
/>
|
||||
)}
|
||||
|
||||
{props.contentFragments.map((fragment) => {
|
||||
{props.contentFragments.map((fragment, fragmentIndex) => {
|
||||
|
||||
// simplify
|
||||
const { fId, part } = fragment;
|
||||
const { fId, ft } = fragment;
|
||||
|
||||
// Determine the text to edit based on the part type
|
||||
let editText = '';
|
||||
let editLabel;
|
||||
if (isTextPart(part))
|
||||
editText = part.text;
|
||||
else if (part.pt === 'error')
|
||||
editText = part.error;
|
||||
else if (part.pt === 'tool_invocation') {
|
||||
if (part.invocation.type === 'function_call') {
|
||||
editText = part.invocation.args /* string | null */ || '';
|
||||
editLabel = `[Invocation] Function Call: \`${part.invocation.name}\``;
|
||||
} else {
|
||||
editText = part.invocation.code;
|
||||
editLabel = `[Invocation] Code Execution: \`${part.invocation.language}\``;
|
||||
}
|
||||
} else if (part.pt === 'tool_response') {
|
||||
if (!part.error) {
|
||||
editText = part.response.result;
|
||||
editLabel = `[Response]: ${part.response.type === 'function_call' ? 'Function Call' : 'Code Execution'}: \`${part.id}\``;
|
||||
// VOID FRAGMENTS (reasoning, placeholders - interleaved with content)
|
||||
if (ft === 'void') {
|
||||
const { part } = fragment;
|
||||
switch (part.pt) {
|
||||
|
||||
// Handled by VoidFragments
|
||||
// case 'annotations':
|
||||
// console.warn('[DEV] ContentFragments: annotations fragment found in interleaved list');
|
||||
// return null;
|
||||
|
||||
case 'ma':
|
||||
return (
|
||||
<BlockPartModelAux
|
||||
key={fId}
|
||||
fragmentId={fId}
|
||||
auxType={part.aType}
|
||||
auxText={part.aText}
|
||||
auxHasSignature={part.textSignature !== undefined}
|
||||
auxRedactedDataCount={part.redactedData?.length ?? 0}
|
||||
messagePendingIncomplete={!!props.messagePendingIncomplete}
|
||||
zenMode={props.uiComplexityMode === 'minimal'}
|
||||
contentScaling={props.contentScaling}
|
||||
isLastFragment={fragmentIndex === props.contentFragments.length - 1}
|
||||
onFragmentDelete={props.onFragmentDelete}
|
||||
onFragmentReplace={props.onFragmentReplace}
|
||||
/>
|
||||
);
|
||||
|
||||
case 'ph':
|
||||
return (
|
||||
<BlockPartPlaceholder
|
||||
key={fId}
|
||||
placeholderText={part.pText}
|
||||
placeholderType={part.pType}
|
||||
placeholderModelOp={part.modelOp}
|
||||
placeholderAixControl={part.aixControl}
|
||||
messageRole={props.messageRole}
|
||||
contentScaling={props.contentScaling}
|
||||
showAsItalic
|
||||
showAsDataStreamViz={showDataStreamViz}
|
||||
/>
|
||||
);
|
||||
|
||||
case '_pt_sentinel':
|
||||
return null;
|
||||
|
||||
default:
|
||||
const _exhaustiveVoidCheck: never = part;
|
||||
// fallthrough - we don't handle these here anymore
|
||||
case 'annotations':
|
||||
return (
|
||||
<ScaledTextBlockRenderer
|
||||
key={fId}
|
||||
text={`Unknown Void Fragment: ${(part as any)?.pt}`}
|
||||
contentScaling={props.contentScaling}
|
||||
textRenderVariant='text'
|
||||
showAsDanger
|
||||
/>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// CONTENT FRAGMENTS (text, code, tool calls, images, errors)
|
||||
const { part } = fragment;
|
||||
|
||||
// editing for text parts, tool invocations, or tool responses
|
||||
if (props.textEditsState && !!props.setEditedText && (isTextPart(part) || part.pt === 'error' || part.pt === 'tool_invocation' || part.pt === 'tool_response')) {
|
||||
if (props.textEditsState && !!props.setEditedText && (
|
||||
isTextPart(part) || part.pt === 'error' || part.pt === 'tool_invocation' || part.pt === 'tool_response'
|
||||
)) {
|
||||
|
||||
// Determine the text to edit based on the part type
|
||||
let editText = '';
|
||||
let editLabel;
|
||||
if (isTextPart(part)) {
|
||||
editText = part.text;
|
||||
} else if (part.pt === 'error') {
|
||||
editText = part.error;
|
||||
} else if (part.pt === 'tool_invocation') {
|
||||
if (part.invocation.type === 'function_call') {
|
||||
editText = part.invocation.args /* string | null */ || '';
|
||||
const humanName = humanReadableFunctionName(part.invocation.name, 'function_call', 'invocation');
|
||||
editLabel = `[Invocation] ${humanName} · \`${part.invocation.name}\``;
|
||||
} else {
|
||||
editText = part.invocation.code;
|
||||
const humanName = humanReadableFunctionName('code_execution', 'code_execution', 'invocation');
|
||||
editLabel = `[Invocation] ${humanName} · \`${part.invocation.language}\``;
|
||||
}
|
||||
} else if (part.pt === 'tool_response') {
|
||||
if (!part.error) {
|
||||
editText = part.response.result;
|
||||
const responseName = part.response.type === 'function_call' ? part.response.name : 'code_execution';
|
||||
const humanName = humanReadableFunctionName(responseName, part.response.type, 'response');
|
||||
editLabel = `[Response] ${humanName} · \`${part.id}\``;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<BlockEdit_TextFragment
|
||||
key={'edit-' + fId}
|
||||
@@ -155,7 +254,9 @@ export function ContentFragments(props: {
|
||||
<BlockPartError
|
||||
key={fId}
|
||||
errorText={part.error}
|
||||
errorHint={part.hint}
|
||||
messageRole={props.messageRole}
|
||||
messageGeneratorLlmId={props.messageGeneratorLlmId}
|
||||
contentScaling={props.contentScaling}
|
||||
/>
|
||||
);
|
||||
@@ -165,7 +266,7 @@ export function ContentFragments(props: {
|
||||
const rt = part.rt;
|
||||
switch (rt) {
|
||||
case 'zync':
|
||||
const zt = part.zType
|
||||
const zt = part.zType;
|
||||
switch (zt) {
|
||||
case 'asset':
|
||||
// TODO: [ASSET] future: implement rendering for the real Reference to Zync Asset
|
||||
|
||||
@@ -170,7 +170,10 @@ export function BlockPartModelAnnotations(props: {
|
||||
return null;
|
||||
|
||||
return (
|
||||
<Box>
|
||||
<Box
|
||||
data-agi-no-copy // do not copy these buttons: has its own copy functionality
|
||||
sx={{ mx: 1.5 }}
|
||||
>
|
||||
|
||||
{/* Row of favicons */}
|
||||
<Button
|
||||
|
||||
@@ -3,6 +3,7 @@ import * as React from 'react';
|
||||
import type { ColorPaletteProp } from '@mui/joy/styles/types';
|
||||
import { Box, Chip, Typography } from '@mui/joy';
|
||||
import AllInclusiveIcon from '@mui/icons-material/AllInclusive';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import TextFieldsIcon from '@mui/icons-material/TextFields';
|
||||
|
||||
import { RenderMarkdown } from '~/modules/blocks/markdown/RenderMarkdown';
|
||||
@@ -11,6 +12,7 @@ import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
|
||||
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
|
||||
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
|
||||
import { adjustContentScaling, ContentScaling } from '~/common/app.theme';
|
||||
import { animationSpinHalfPause } from '~/common/util/animUtils';
|
||||
import { createTextContentFragment, DMessageContentFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
|
||||
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
|
||||
|
||||
@@ -32,17 +34,29 @@ const _styles = {
|
||||
chip: {
|
||||
px: 1.5,
|
||||
py: 0.375,
|
||||
my: '1px', // to not crop the outline on mobile
|
||||
my: '1px', // to not crop the outline on mobile, or on beam
|
||||
outline: '1px solid',
|
||||
outlineColor: `${REASONING_COLOR}.solidBg`, // .outlinedBorder
|
||||
boxShadow: `1px 2px 4px -3px var(--joy-palette-${REASONING_COLOR}-solidBg)`,
|
||||
} as const,
|
||||
|
||||
chipDisabled: {
|
||||
px: 1.5,
|
||||
py: 0.375,
|
||||
my: '1px', // to not crop the outline on mobile, or on beam
|
||||
} as const,
|
||||
|
||||
chipIcon: {
|
||||
fontSize: '1rem',
|
||||
mr: 0.5,
|
||||
} as const,
|
||||
|
||||
chipIconPending: {
|
||||
fontSize: '1rem',
|
||||
mr: 0.5,
|
||||
animation: `${animationSpinHalfPause} 2s ease-in-out infinite`,
|
||||
} as const,
|
||||
|
||||
chipExpanded: {
|
||||
mt: '1px', // need to copy the `chip` mt
|
||||
px: 1.5,
|
||||
@@ -93,8 +107,11 @@ export function BlockPartModelAux(props: {
|
||||
auxText: string,
|
||||
auxHasSignature: boolean,
|
||||
auxRedactedDataCount: number,
|
||||
messagePendingIncomplete: boolean,
|
||||
zenMode: boolean,
|
||||
contentScaling: ContentScaling,
|
||||
isLastFragment: boolean,
|
||||
onFragmentDelete?: (fragmentId: DMessageFragmentId) => void,
|
||||
onFragmentReplace?: (fragmentId: DMessageFragmentId, newFragment: DMessageContentFragment) => void,
|
||||
}) {
|
||||
|
||||
@@ -115,7 +132,8 @@ export function BlockPartModelAux(props: {
|
||||
|
||||
// handlers
|
||||
|
||||
const { onFragmentReplace } = props;
|
||||
const { onFragmentDelete, onFragmentReplace } = props;
|
||||
const showDelete = !!onFragmentDelete;
|
||||
const showInline = !!onFragmentReplace;
|
||||
|
||||
const handleToggleExpanded = React.useCallback(() => {
|
||||
@@ -123,6 +141,23 @@ export function BlockPartModelAux(props: {
|
||||
setExpanded(on => !on);
|
||||
}, []);
|
||||
|
||||
const handleDelete = React.useCallback(() => {
|
||||
if (!onFragmentDelete) return;
|
||||
showPromisedOverlay('chat-message-delete-aux', {}, ({ onResolve, onUserReject }) =>
|
||||
<ConfirmationModal
|
||||
open onClose={onUserReject} onPositive={() => onResolve(true)}
|
||||
confirmationText={<>
|
||||
Delete this {typeText.toLowerCase()} completely?
|
||||
<br />
|
||||
This action cannot be undone.
|
||||
</>}
|
||||
positiveActionText='Delete'
|
||||
/>,
|
||||
).then(() => {
|
||||
onFragmentDelete(props.fragmentId);
|
||||
}).catch(() => null /* ignore closure */);
|
||||
}, [onFragmentDelete, props.fragmentId, showPromisedOverlay, typeText]);
|
||||
|
||||
const handleInline = React.useCallback(() => {
|
||||
if (!onFragmentReplace) return;
|
||||
showPromisedOverlay('chat-message-inline-aux', {}, ({ onResolve, onUserReject }) =>
|
||||
@@ -147,31 +182,54 @@ export function BlockPartModelAux(props: {
|
||||
return <Box sx={_styles.block}>
|
||||
|
||||
{/* Chip to expand/collapse */}
|
||||
<Box sx={{ display: 'flex', flexWrap: 'wrap', gap: 1, alignItems: 'center', justifyContent: 'space-between' }}>
|
||||
<Box data-agi-no-copy /* do not copy these buttons */ sx={{ display: 'flex', flexWrap: 'wrap', gap: 1, alignItems: 'center', justifyContent: 'space-between' }}>
|
||||
<Chip
|
||||
color={REASONING_COLOR}
|
||||
color={props.isLastFragment ? REASONING_COLOR : 'neutral'}
|
||||
variant={expanded ? 'solid' : 'soft'}
|
||||
size='sm'
|
||||
onClick={handleToggleExpanded}
|
||||
sx={expanded ? _styles.chipExpanded : _styles.chip}
|
||||
startDecorator={<AllInclusiveIcon sx={_styles.chipIcon} /* sx={{ color: expanded ? undefined : REASONING_COLOR }} */ />}
|
||||
sx={expanded ? _styles.chipExpanded : props.isLastFragment ? _styles.chip : _styles.chipDisabled}
|
||||
startDecorator={
|
||||
<AllInclusiveIcon
|
||||
sx={(props.messagePendingIncomplete && !expanded && props.isLastFragment) ? _styles.chipIconPending : _styles.chipIcon}
|
||||
/* sx={{ color: expanded ? undefined : REASONING_COLOR }} */
|
||||
/>
|
||||
}
|
||||
// startDecorator='🧠'
|
||||
>
|
||||
Show {typeText}
|
||||
</Chip>
|
||||
|
||||
{expanded && showInline && !!props.auxText && (
|
||||
<Chip
|
||||
color={REASONING_COLOR}
|
||||
variant='soft'
|
||||
size='sm'
|
||||
disabled={!onFragmentReplace}
|
||||
onClick={!onFragmentReplace ? undefined : handleInline}
|
||||
endDecorator={<TextFieldsIcon />}
|
||||
sx={_styles.chip}
|
||||
>
|
||||
Make Regular Text
|
||||
</Chip>
|
||||
{expanded && (showInline || showDelete) && !!props.auxText && (
|
||||
<Box sx={{ display: 'flex', gap: 1 }}>
|
||||
|
||||
{/* Make inline */}
|
||||
{showInline && <Chip
|
||||
color={REASONING_COLOR}
|
||||
variant='soft'
|
||||
size='sm'
|
||||
disabled={!onFragmentReplace || props.messagePendingIncomplete}
|
||||
onClick={!onFragmentReplace ? undefined : handleInline}
|
||||
endDecorator={<TextFieldsIcon />}
|
||||
sx={(!onFragmentReplace || props.messagePendingIncomplete) ? _styles.chipDisabled : _styles.chip}
|
||||
>
|
||||
Make Regular Text
|
||||
</Chip>}
|
||||
|
||||
{/* Delete */}
|
||||
{showDelete && <Chip
|
||||
color={REASONING_COLOR}
|
||||
variant='soft'
|
||||
size='sm'
|
||||
disabled={!onFragmentDelete || props.messagePendingIncomplete}
|
||||
onClick={!onFragmentDelete ? undefined : handleDelete}
|
||||
endDecorator={<DeleteOutlineIcon />}
|
||||
sx={(!onFragmentDelete || props.messagePendingIncomplete) ? _styles.chipDisabled : _styles.chip}
|
||||
>
|
||||
Delete
|
||||
</Chip>}
|
||||
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
|
||||
|
||||
@@ -5,13 +5,14 @@ import { Box, Chip } from '@mui/joy';
|
||||
import BrushRoundedIcon from '@mui/icons-material/BrushRounded';
|
||||
import CodeIcon from '@mui/icons-material/Code';
|
||||
import HourglassEmptyIcon from '@mui/icons-material/HourglassEmpty';
|
||||
import RepeatIcon from '@mui/icons-material/Repeat';
|
||||
import SearchRoundedIcon from '@mui/icons-material/SearchRounded';
|
||||
|
||||
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
|
||||
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
|
||||
|
||||
import type { DMessageRole } from '~/common/stores/chat/chat.message';
|
||||
import type { DVoidPlaceholderModelOp } from '~/common/stores/chat/chat.fragments';
|
||||
import type { DVoidPlaceholderModelOp, DVoidPlaceholderPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { adjustContentScaling, ContentScaling, themeScalingMap } from '~/common/app.theme';
|
||||
import { DataStreamViz } from '~/common/components/DataStreamViz';
|
||||
import { animationSpinHalfPause } from '~/common/util/animUtils';
|
||||
@@ -31,6 +32,10 @@ const _styles = {
|
||||
outline: '1px solid',
|
||||
outlineColor: 'primary.solidBg', // .outlinedBorder
|
||||
boxShadow: `1px 2px 4px -3px var(--joy-palette-primary-solidBg)`,
|
||||
|
||||
// wrap text if needed - introduced for retry error messages
|
||||
whiteSpace: 'normal',
|
||||
wordBreak: 'break-word',
|
||||
} as const,
|
||||
|
||||
followUpChipIcon: {
|
||||
@@ -113,8 +118,9 @@ function ModelOperationChip(props: {
|
||||
|
||||
export function BlockPartPlaceholder(props: {
|
||||
placeholderText: string,
|
||||
placeholderType?: 'chat-gen-follow-up',
|
||||
placeholderType?: DVoidPlaceholderPart['pType'],
|
||||
placeholderModelOp?: DVoidPlaceholderModelOp,
|
||||
placeholderAixControl?: DVoidPlaceholderPart['aixControl'],
|
||||
messageRole: DMessageRole,
|
||||
contentScaling: ContentScaling,
|
||||
showAsItalic?: boolean,
|
||||
@@ -146,7 +152,8 @@ export function BlockPartPlaceholder(props: {
|
||||
|
||||
|
||||
// Type-based visualization
|
||||
if (props.placeholderType === 'chat-gen-follow-up') return (
|
||||
const isFollowUp = props.placeholderType === 'chat-gen-follow-up';
|
||||
if (isFollowUp) return (
|
||||
<Chip
|
||||
color='primary'
|
||||
variant='soft'
|
||||
@@ -158,6 +165,34 @@ export function BlockPartPlaceholder(props: {
|
||||
</Chip>
|
||||
);
|
||||
|
||||
// AIX Control renderer (e.g., error correction retry)
|
||||
if (props.placeholderAixControl?.ctl === 'ec-retry') {
|
||||
const { rScope, rCauseHttp, rCauseConn } = props.placeholderAixControl;
|
||||
const color = rScope === 'srv-dispatch' ? 'primary' : rScope === 'srv-op' ? 'warning' : 'danger';
|
||||
return (
|
||||
<Chip
|
||||
// size='sm'
|
||||
color={color}
|
||||
variant='soft'
|
||||
startDecorator={<div style={{ opacity: 0.75 }}>{rCauseHttp || rCauseConn || rScope}</div>}
|
||||
endDecorator={<RepeatIcon style={{ opacity: 0.5 }} />}
|
||||
onClick={() => console.log({ props })}
|
||||
sx={{
|
||||
gap: 1.5,
|
||||
px: 1.5,
|
||||
py: 0.375,
|
||||
my: '1px', // to not crop the outline on mobile, or on beam
|
||||
boxShadow: `1px 2px 4px -3px var(--joy-palette-${color}-solidBg)`,
|
||||
// wrap text if needed - introduced for retry error messages
|
||||
whiteSpace: 'normal',
|
||||
wordBreak: 'break-word',
|
||||
}}
|
||||
>
|
||||
{props.placeholderText}
|
||||
</Chip>
|
||||
);
|
||||
}
|
||||
|
||||
// Model operation renderer
|
||||
if (props.placeholderModelOp)
|
||||
return (
|
||||
|
||||
@@ -6,29 +6,16 @@ import { Box } from '@mui/joy';
|
||||
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
|
||||
|
||||
import type { ContentScaling, UIComplexityMode } from '~/common/app.theme';
|
||||
import type { DMessageContentFragment, DMessageFragmentId, DMessageVoidFragment } from '~/common/stores/chat/chat.fragments';
|
||||
import type { DMessageRole } from '~/common/stores/chat/chat.message';
|
||||
import { DMessageContentFragment, DMessageFragmentId, DMessageVoidFragment, isPlaceholderPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { Release } from '~/common/app.release';
|
||||
|
||||
import { BlockPartModelAux } from './BlockPartModelAux';
|
||||
import { BlockPartPlaceholder } from './BlockPartPlaceholder';
|
||||
import { BlockPartModelAnnotations } from './BlockPartModelAnnotations';
|
||||
|
||||
|
||||
const editLayoutSx: SxProps = {
|
||||
const startLayoutSx: SxProps = {
|
||||
display: 'grid',
|
||||
gap: 1.5, // see why we give more space on ChatMessage
|
||||
|
||||
// horizontal separator between messages (second part+ and before)
|
||||
// '& > *:not(:first-of-type)': {
|
||||
// borderTop: '1px solid',
|
||||
// borderTopColor: 'background.level3',
|
||||
// },
|
||||
};
|
||||
|
||||
const startLayoutSx: SxProps = {
|
||||
...editLayoutSx,
|
||||
|
||||
// NOTE: we used to have 'flex-start' here, but it was causing the Annotation fragment to not be able to
|
||||
// stretch to the full with of this 'void fragments' container.
|
||||
// So now we don't have 'flex-start' anymore, and we may expect issues with other Fragment kinds?
|
||||
@@ -36,7 +23,7 @@ const startLayoutSx: SxProps = {
|
||||
};
|
||||
|
||||
const endLayoutSx: SxProps = {
|
||||
...editLayoutSx,
|
||||
...startLayoutSx,
|
||||
justifyContent: 'flex-end',
|
||||
};
|
||||
|
||||
@@ -47,6 +34,7 @@ const endLayoutSx: SxProps = {
|
||||
*
|
||||
* In the future we can revisit this decision in case Content fragments and *Void Fragments** are
|
||||
* interleaved - but for now, Void fragments will be grouped together at the top.
|
||||
* ^ 2025-11-20: NOTE: Lol, yes we did
|
||||
*/
|
||||
export function VoidFragments(props: {
|
||||
|
||||
@@ -56,21 +44,16 @@ export function VoidFragments(props: {
|
||||
contentScaling: ContentScaling,
|
||||
uiComplexityMode: UIComplexityMode,
|
||||
messageRole: DMessageRole,
|
||||
messagePendingIncomplete?: boolean,
|
||||
|
||||
onFragmentDelete?: (fragmentId: DMessageFragmentId) => void,
|
||||
onFragmentReplace?: (fragmentId: DMessageFragmentId, newFragment: DMessageContentFragment) => void,
|
||||
|
||||
}) {
|
||||
|
||||
const showDataStreamViz =
|
||||
!Release.Features.LIGHTER_ANIMATIONS
|
||||
&& props.uiComplexityMode !== 'minimal'
|
||||
&& props.voidFragments.length === 1 && props.nonVoidFragmentsCount === 0
|
||||
&& isPlaceholderPart(props.voidFragments[0].part);
|
||||
|
||||
const fromAssistant = props.messageRole === 'assistant';
|
||||
|
||||
|
||||
return <Box aria-label='message void' sx={showDataStreamViz ? editLayoutSx : fromAssistant ? startLayoutSx : endLayoutSx}>
|
||||
return <Box aria-label='message void' sx={fromAssistant ? startLayoutSx : endLayoutSx}>
|
||||
|
||||
{props.voidFragments.map(({ fId, part }) => {
|
||||
switch (part.pt) {
|
||||
@@ -84,41 +67,15 @@ export function VoidFragments(props: {
|
||||
/>
|
||||
);
|
||||
|
||||
case 'ma':
|
||||
return (
|
||||
<BlockPartModelAux
|
||||
key={fId}
|
||||
fragmentId={fId}
|
||||
auxType={part.aType}
|
||||
auxText={part.aText}
|
||||
auxHasSignature={part.textSignature !== undefined}
|
||||
auxRedactedDataCount={part.redactedData?.length ?? 0}
|
||||
zenMode={props.uiComplexityMode === 'minimal'}
|
||||
contentScaling={props.contentScaling}
|
||||
onFragmentReplace={props.onFragmentReplace}
|
||||
/>
|
||||
);
|
||||
|
||||
case 'ph':
|
||||
return (
|
||||
<BlockPartPlaceholder
|
||||
key={fId}
|
||||
placeholderText={part.pText}
|
||||
placeholderType={part.pType}
|
||||
placeholderModelOp={part.modelOp}
|
||||
messageRole={props.messageRole}
|
||||
contentScaling={props.contentScaling}
|
||||
showAsItalic
|
||||
showAsDataStreamViz={showDataStreamViz}
|
||||
/>
|
||||
);
|
||||
|
||||
case '_pt_sentinel':
|
||||
return null;
|
||||
|
||||
default:
|
||||
// noinspection JSUnusedLocalSymbols
|
||||
const _exhaustiveVoidFragmentCheck: never = part;
|
||||
// fallthrough - we don't handle these here anymore
|
||||
case 'ma':
|
||||
case 'ph':
|
||||
return (
|
||||
<ScaledTextBlockRenderer
|
||||
key={fId}
|
||||
|
||||
@@ -7,6 +7,14 @@ import { wrapWithMarkdownSyntax } from '~/modules/blocks/markdown/markdown.wrapp
|
||||
import { BUBBLE_MIN_TEXT_LENGTH } from './ChatMessage';
|
||||
|
||||
|
||||
/**
|
||||
* Text matching strategy for selection highlighting:
|
||||
* - 'exact': Direct substring match in source (former behavior)
|
||||
* - 'md-approx': Markdown-approximate match - finds rendered text in decorated source (new behavior)
|
||||
*/
|
||||
const MATCH_METHOD: 'exact' | 'md-approx' = 'md-approx';
|
||||
|
||||
|
||||
/* Note: future evolution of Marking:
|
||||
* 'data-purpose'?: 'review' | 'important' | 'note'; // Purpose of the highlight
|
||||
* 'data-user-id'?: string; // Unique user identifier
|
||||
@@ -27,6 +35,112 @@ const APPLY_CUT = (_text: string) => ''; // Cut removes the text entirely
|
||||
|
||||
type HighlightTool = 'highlight' | 'strike' | 'strong' | 'cut';
|
||||
|
||||
|
||||
// -- Matcher algorithms --
|
||||
|
||||
/**
|
||||
* Result from text matching: the source substring and the inner text to apply tools to
|
||||
*/
|
||||
interface MatchResult {
|
||||
sourceText: string; // Text in source (may include decorators)
|
||||
selText: string; // Text to apply tool to (decorators stripped)
|
||||
leadingDecorators: string;
|
||||
trailingDecorators: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds text using exact substring matching.
|
||||
*/
|
||||
function findExactMatch(needle: string, haystack: string): MatchResult | null {
|
||||
const firstIndex = haystack.indexOf(needle);
|
||||
if (firstIndex === -1) return null;
|
||||
|
||||
// Ensure uniqueness - only one occurrence
|
||||
if (haystack.indexOf(needle, firstIndex + 1) !== -1) return null;
|
||||
|
||||
return {
|
||||
sourceText: needle,
|
||||
selText: needle,
|
||||
leadingDecorators: '',
|
||||
trailingDecorators: '',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds text in source markdown by stripping decorators and tracking positions.
|
||||
* Returns the source substring (including decorators) that renders to the needle text.
|
||||
*/
|
||||
function findInMarkdownSource(needle: string, haystack: string): MatchResult | null {
|
||||
|
||||
// 1. strip markdown decorators while tracking positions
|
||||
let stripped = '';
|
||||
const posMap: number[] = []; // stripped char index -> haystack char index
|
||||
|
||||
let i = 0;
|
||||
while (i < haystack.length) {
|
||||
const char = haystack[i];
|
||||
|
||||
// skip common markdown decorator characters
|
||||
if (char === '*' || char === '_' || char === '~' || char === '`') {
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// regular character - track position
|
||||
stripped += char;
|
||||
posMap.push(i);
|
||||
i++;
|
||||
}
|
||||
|
||||
// if the needle is empty after stripping -- nothing we can do here
|
||||
const idx = stripped.indexOf(needle);
|
||||
if (idx === -1) {
|
||||
// not found - need a different approach
|
||||
return null;
|
||||
}
|
||||
|
||||
// ensure uniqueness - only one occurrence
|
||||
if (stripped.indexOf(needle, idx + 1) !== -1) {
|
||||
// multiple occurrences - need a different approach
|
||||
return null;
|
||||
}
|
||||
|
||||
// map back to source positions
|
||||
const startPos = posMap[idx];
|
||||
const endIdx = idx + needle.length - 1;
|
||||
const endPos = endIdx < posMap.length ? posMap[endIdx] + 1 : haystack.length;
|
||||
|
||||
// expand to include surrounding markdown decorators
|
||||
let actualStart = startPos;
|
||||
let actualEnd = endPos;
|
||||
|
||||
// walk backwards to include opening decorators
|
||||
while (actualStart > 0) {
|
||||
const prevChar = haystack[actualStart - 1];
|
||||
if (prevChar === '*' || prevChar === '_' || prevChar === '~' || prevChar === '`')
|
||||
actualStart--;
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
// walk forwards to include closing decorators
|
||||
while (actualEnd < haystack.length) {
|
||||
const nextChar = haystack[actualEnd];
|
||||
if (nextChar === '*' || nextChar === '_' || nextChar === '~' || nextChar === '`')
|
||||
actualEnd++;
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
const sourceText = haystack.substring(actualStart, actualEnd);
|
||||
const leadingDecorators = sourceText.match(/^[*_~`]+/)?.[0] || '';
|
||||
const trailingDecorators = sourceText.match(/[*_~`]+$/)?.[0] || '';
|
||||
const selText = sourceText.slice(leadingDecorators.length, sourceText.length - trailingDecorators.length);
|
||||
|
||||
return { sourceText, selText, leadingDecorators, trailingDecorators };
|
||||
}
|
||||
|
||||
|
||||
export function useSelHighlighterMemo(
|
||||
messageId: DMessageId,
|
||||
selText: string | null,
|
||||
@@ -44,31 +158,35 @@ export function useSelHighlighterMemo(
|
||||
const highlightFunction = fragments.reduce((acc: false /* not found */ | ((tool: HighlightTool) => void) | true /* more than one */, fragment) => {
|
||||
if (!acc && isTextContentFragment(fragment)) {
|
||||
const fragmentText = fragment.part.text;
|
||||
let index = fragmentText.indexOf(selText);
|
||||
const match = MATCH_METHOD === 'md-approx'
|
||||
? findInMarkdownSource(selText, fragmentText)
|
||||
: findExactMatch(selText, fragmentText);
|
||||
|
||||
while (index !== -1) {
|
||||
|
||||
// If we've found more than one occurrence, we can stop
|
||||
if (match) {
|
||||
// If we already found one, this is a duplicate
|
||||
if (acc) return true;
|
||||
|
||||
index = fragmentText.indexOf(selText, index + 1);
|
||||
const { sourceText, selText, leadingDecorators, trailingDecorators } = match;
|
||||
|
||||
// Tool application function
|
||||
acc = (tool: HighlightTool) => {
|
||||
|
||||
// Apply the tool
|
||||
const highlighted =
|
||||
// Apply the tool to the inner text
|
||||
const selProcessed =
|
||||
tool === 'highlight' ? APPLY_HTML_HIGHLIGHT(selText)
|
||||
: tool === 'strike' ? APPLY_HTML_STRIKE(selText)
|
||||
: tool === 'strong' ? APPLY_MD_STRONG(selText)
|
||||
: tool === 'cut' ? APPLY_CUT(selText)
|
||||
: selText;
|
||||
|
||||
// Reconstruct with original decorators
|
||||
const reconstructed = leadingDecorators + selProcessed + trailingDecorators;
|
||||
|
||||
// Toggle, if the tooled text is already present (except for cut which always removes)
|
||||
const newFragmentText =
|
||||
tool === 'cut' ? fragmentText.replace(selText, highlighted) // Cut always removes text
|
||||
: fragmentText.includes(highlighted) ? fragmentText.replace(highlighted, selText) // toggles selection
|
||||
: fragmentText.replace(selText, highlighted);
|
||||
tool === 'cut' ? fragmentText.replace(sourceText, reconstructed) // Cut always removes text
|
||||
: fragmentText.includes(reconstructed) ? fragmentText.replace(reconstructed, sourceText) // toggles selection
|
||||
: fragmentText.replace(sourceText, reconstructed);
|
||||
|
||||
// Replace the whole fragment within the message
|
||||
onMessageFragmentReplace(messageId, fragment.fId, createTextContentFragment(newFragmentText));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { AixChatGenerateContent_DMessageGuts, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { autoChatFollowUps } from '~/modules/aifn/auto-chat-follow-ups/autoChatFollowUps';
|
||||
import { autoConversationTitle } from '~/modules/aifn/autotitle/autoTitle';
|
||||
|
||||
@@ -19,7 +19,7 @@ export const CHATGENERATE_RESPONSE_PLACEHOLDER = '...'; // 💫 ..., 🖊️ ...
|
||||
|
||||
|
||||
export interface PersonaProcessorInterface {
|
||||
handleMessage(accumulatedMessage: AixChatGenerateContent_DMessage, messageComplete: boolean): void;
|
||||
handleMessage(accumulatedMessage: AixChatGenerateContent_DMessageGuts, messageComplete: boolean): void;
|
||||
}
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ export async function runPersonaOnConversationHead(
|
||||
'conversation',
|
||||
conversationId,
|
||||
{ abortSignal: abortController.signal, throttleParallelThreads: parallelViewCount },
|
||||
(messageOverwrite: AixChatGenerateContent_DMessage, messageComplete: boolean) => {
|
||||
(messageOverwrite: AixChatGenerateContent_DMessageGuts, messageComplete: boolean) => {
|
||||
|
||||
// Note: there was an abort check here, but it removed the last packet, which contained the cause and final text.
|
||||
// if (abortController.signal.aborted)
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import type { AixChatGenerateContent_DMessageGuts } from '~/modules/aix/client/aix.client';
|
||||
import { speakText } from '~/modules/speex/speex.client';
|
||||
|
||||
import { isTextContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
|
||||
import type { AixChatGenerateContent_DMessage } from '~/modules/aix/client/aix.client';
|
||||
|
||||
import type { PersonaProcessorInterface } from '../chat-persona';
|
||||
|
||||
|
||||
@@ -16,7 +15,7 @@ export class PersonaChatMessageSpeak implements PersonaProcessorInterface {
|
||||
constructor(private autoSpeakType: AutoSpeakType) {
|
||||
}
|
||||
|
||||
handleMessage(accumulatedMessage: Partial<AixChatGenerateContent_DMessage>, messageComplete: boolean) {
|
||||
handleMessage(accumulatedMessage: Partial<AixChatGenerateContent_DMessageGuts>, messageComplete: boolean) {
|
||||
if (this.autoSpeakType === 'off' || this.spokenLine) return;
|
||||
|
||||
// Require a Content.Text first fragment
|
||||
@@ -58,7 +57,7 @@ export class PersonaChatMessageSpeak implements PersonaProcessorInterface {
|
||||
#speak(text: string) {
|
||||
console.log('📢 TTS:', text);
|
||||
this.spokenLine = true;
|
||||
// fire/forget: we don't want to stall this loop
|
||||
void elevenLabsSpeakText(text, undefined, false, true);
|
||||
// fire/forget: we don't want to stall streaming
|
||||
void speakText(text, undefined, { label: 'Chat message' });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -101,6 +101,10 @@ export function PromptComposer(props: {
|
||||
if (e.key !== 'Enter')
|
||||
return;
|
||||
|
||||
// Skip if composing (e.g., CJK input methods) - issue #784
|
||||
if (e.nativeEvent.isComposing)
|
||||
return;
|
||||
|
||||
// Shift: toggles the 'enter is newline'
|
||||
if (enterIsNewline ? e.shiftKey : !e.shiftKey) {
|
||||
if (userHasText)
|
||||
|
||||
@@ -2,7 +2,7 @@ import * as React from 'react';
|
||||
import NextImage from 'next/image';
|
||||
import TimeAgo from 'react-timeago';
|
||||
|
||||
import { AspectRatio, Box, Button, Card, CardContent, CardOverflow, ColorPaletteProp, Container, Grid, Sheet, Typography, VariantProp } from '@mui/joy';
|
||||
import { AspectRatio, Box, Button, Card, CardContent, CardOverflow, ColorPaletteProp, Container, Divider, Grid, ListDivider, Sheet, Typography, VariantProp } from '@mui/joy';
|
||||
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
|
||||
import ArrowOutwardRoundedIcon from '@mui/icons-material/ArrowOutwardRounded';
|
||||
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
|
||||
@@ -107,7 +107,10 @@ function NewsCard(props: {
|
||||
const { addPadding, idx, newsItem: ni } = props;
|
||||
|
||||
return (
|
||||
<Card color={props.color} variant={props.variant ?? 'plain'} sx={{ mb: 3, minHeight: 32, gap: 1 }}>
|
||||
<Card color={props.color} variant={props.variant ?? 'plain'} sx={{
|
||||
mb: 3, minHeight: 32, gap: 1,
|
||||
boxShadow: !idx ? 'md' : undefined,
|
||||
}}>
|
||||
<CardContent sx={{ position: 'relative', pr: addPadding ? 4 : 0 }}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between' }}>
|
||||
<Typography level='title-sm' component='div'>
|
||||
@@ -190,6 +193,7 @@ export function AppNews() {
|
||||
overflowY: 'auto',
|
||||
display: 'flex', justifyContent: 'center',
|
||||
p: { xs: 3, md: 6 },
|
||||
// backgroundColor: 'background.level2'
|
||||
}}>
|
||||
|
||||
<Box sx={{
|
||||
@@ -231,7 +235,7 @@ export function AppNews() {
|
||||
}}
|
||||
>
|
||||
Big-AGI Pro
|
||||
{/*✨*/}
|
||||
✨
|
||||
</Button>
|
||||
)}
|
||||
</Box>
|
||||
@@ -242,8 +246,13 @@ export function AppNews() {
|
||||
|
||||
<Container disableGutters maxWidth='sm'>
|
||||
|
||||
{/* Inject the callout item here*/}
|
||||
{/*<Box sx={{ mb: 3 }}>*/}
|
||||
{/* <BigAgiProNewsCallout />*/}
|
||||
{/*</Box>*/}
|
||||
|
||||
{/* Development Notices */}
|
||||
{Release.TenantSlug === 'open' && <NewsCard variant='soft' color='warning' newsItem={DevNewsItem} idx={0} addPadding={false} />}
|
||||
{Release.TenantSlug === 'open' && Release.IsNodeDevBuild && <NewsCard variant='soft' color='warning' newsItem={DevNewsItem} idx={0} addPadding={false} />}
|
||||
|
||||
{news?.map((ni, idx) => {
|
||||
// const firstCard = idx === 0;
|
||||
@@ -274,6 +283,8 @@ export function AppNews() {
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{idx === 1 && <Divider sx={{ my: 6, mx: 6 }}/>}
|
||||
|
||||
</React.Fragment>;
|
||||
})}
|
||||
|
||||
|
||||
@@ -59,24 +59,51 @@ const _frontendPkgVersion = _frontendBuild.pkgVersion ?? 'open/main';
|
||||
export const newsFrontendTimestamp = _frontendBuild.timestamp;
|
||||
|
||||
export const DevNewsItem: NewsItem = {
|
||||
versionCode: _frontendPkgVersion,
|
||||
versionCode: 'DEV',
|
||||
versionName: 'Development Build',
|
||||
versionDate: undefined,
|
||||
items: [
|
||||
{ text: <>You're running an unsupported <B>developer build</B> of Big-AGI Open {_frontendPkgVersion}</> },
|
||||
{ text: <>This branch carries breaking features that are subject to change</> },
|
||||
{ text: <>Please report screenshots of breakages and console error messages</> },
|
||||
{ text: <>Do not use, no cloud backups</> },
|
||||
{ text: <ExternalLink href='https://big-agi.com'>Back to the official version</ExternalLink> },
|
||||
],
|
||||
} as const;
|
||||
|
||||
// news and feature surfaces
|
||||
export const NewsItems: NewsItem[] = [
|
||||
{
|
||||
versionCode: '2.0.3',
|
||||
versionName: 'Red Carpet',
|
||||
versionDate: new Date('2026-02-03T12:00:00Z'),
|
||||
items: [
|
||||
{ text: <><B>Kimi K2.5</B>, <B>Gemini 3 Flash</B>, <B>GPT Image 1.5</B>, <B>GPT 5.2 Codex</B>, <B issue={921}>Novita.ai</B> models, and xAI search and code execution</> },
|
||||
{ text: <><B issue={943}>Google Drive</B>: attach docs, sheets, images with optimal LLM conversion</> },
|
||||
{ text: <>Speech: new <B href='https://inworld.ai'>Inworld</B> support, cancelable, unlimited length</> },
|
||||
{ text: <>Copy as-seen, reorder messages, AI Injector, PDF auto-OCR</> },
|
||||
{ text: <>Models: <B issue={941}>duplication</B>, improved parameters, cleaner UI</> },
|
||||
{ text: <>Fixes, security patches, CJK/IME input</> },
|
||||
{ text: <>Developers: new Docker build, faster, and smaller containers, AI request injection capabilities in the inspector</>, dev: true },
|
||||
],
|
||||
},
|
||||
{
|
||||
versionCode: '2.0.2',
|
||||
versionName: 'Heavy Critters',
|
||||
versionDate: new Date('2025-12-01T06:00:00Z'), // 2.0.2
|
||||
// versionDate: new Date('2025-11-24T23:30:00Z'), // 2.0.1
|
||||
items: [
|
||||
{ text: <><B>New in 2.0.2</B> Speech synthesis with Web Speech, LocalAI, OpenAI and more</> },
|
||||
{ text: <><B>Opus 4.5</B>, <B>Gemini 3 Pro</B>, <B>Nano Banana Pro</B>, <B>Grok 4.1</B>, <B>GPT-5.1</B>, <B>Kimi K2</B></> },
|
||||
{ text: <><B>Image Generation</B> with Azure and LocalAI providers, in addition to OpenAI</> },
|
||||
{ text: <>Enhanced <B>OpenRouter</B> integration with auto-capabilities and reasoning</> },
|
||||
{ text: <>Call transcripts, generate persona images, search button in beams</> },
|
||||
{ text: <>Starred models, errors resilience, 278 fixes</> },
|
||||
{ text: <ExternalLink href='https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml'>AI-Automatic feature development</ExternalLink> },
|
||||
],
|
||||
},
|
||||
{
|
||||
versionCode: '2.0.0',
|
||||
versionName: 'Open',
|
||||
versionDate: new Date('2025-10-06T15:00:00Z'),
|
||||
versionDate: new Date('2025-10-31T15:00:00Z'),
|
||||
versionCoverImage: coverV200,
|
||||
items: [
|
||||
{ text: <><B wow>Big-AGI Open</B> is ready and more productive and faster than ever, with:</> },
|
||||
|
||||
@@ -14,6 +14,37 @@ import { InlineError } from '~/common/components/InlineError';
|
||||
import type { SimplePersonaProvenance } from '../store-app-personas';
|
||||
|
||||
|
||||
// configuration
|
||||
const TEMP_DISABLE_YOUTUBE_TRANSCRIPT = true;
|
||||
|
||||
|
||||
function YouTubeDisabledCard() {
|
||||
return (
|
||||
<Card
|
||||
variant='soft'
|
||||
color='primary'
|
||||
invertedColors
|
||||
sx={{
|
||||
p: 3,
|
||||
textAlign: 'center',
|
||||
border: '1px solid',
|
||||
borderColor: 'primary.solidBg',
|
||||
}}
|
||||
>
|
||||
<Typography level='title-sm' sx={{ mb: 1 }}>
|
||||
Temporarily Disabled
|
||||
</Typography>
|
||||
<Typography level='body-sm' sx={{ mb: 2 }}>
|
||||
YouTube transcript extraction is currently unavailable due to API changes.
|
||||
</Typography>
|
||||
<Typography level='body-xs' color='neutral'>
|
||||
Download transcripts manually and use the "From Text" option instead.
|
||||
</Typography>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
function YouTubeVideoTranscriptCard(props: { transcript: YTVideoTranscript, onClose: () => void, sx?: SxProps }) {
|
||||
const { transcript } = props;
|
||||
return (
|
||||
@@ -109,6 +140,13 @@ export function FromYouTube(props: {
|
||||
setVideoID(videoId);
|
||||
};
|
||||
|
||||
if (TEMP_DISABLE_YOUTUBE_TRANSCRIPT)
|
||||
return <>
|
||||
<Typography level='title-md' startDecorator={<YouTubeIcon sx={{ color: '#f00' }} />} sx={{ mb: 3 }}>
|
||||
YouTube -> Persona
|
||||
</Typography>
|
||||
<YouTubeDisabledCard />
|
||||
</>;
|
||||
|
||||
return <>
|
||||
|
||||
|
||||
@@ -129,6 +129,13 @@ export function AppChatSettingsAI() {
|
||||
</>}
|
||||
/>
|
||||
|
||||
<FormControlDomainModel
|
||||
domainId='imageCaption'
|
||||
title='Vision model'
|
||||
description='Image captioning'
|
||||
tooltip='Vision model used to generate text descriptions of images when the Caption (Text) attachment option is selected.'
|
||||
/>
|
||||
|
||||
{labsDevMode && (
|
||||
<FormControlDomainModel
|
||||
domainId='primaryChat'
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
import * as React from 'react';
|
||||
import { Accordion, AccordionDetails, accordionDetailsClasses, AccordionGroup, AccordionSummary, accordionSummaryClasses, Avatar, Box, Button, ListItemContent, styled, Tab, TabList, TabPanel, Tabs } from '@mui/joy';
|
||||
|
||||
import { Accordion, AccordionDetails, AccordionGroup, AccordionSummary, accordionSummaryClasses, Avatar, Box, Button, ListItemContent, styled, Tab, TabList, TabPanel, Tabs, Typography } from '@mui/joy';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import AutoAwesomeIcon from '@mui/icons-material/AutoAwesome';
|
||||
import KeyboardCommandKeyOutlinedIcon from '@mui/icons-material/KeyboardCommandKeyOutlined';
|
||||
import LanguageRoundedIcon from '@mui/icons-material/LanguageRounded';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
|
||||
import ScienceIcon from '@mui/icons-material/Science';
|
||||
import SearchIcon from '@mui/icons-material/Search';
|
||||
import TerminalOutlinedIcon from '@mui/icons-material/TerminalOutlined';
|
||||
|
||||
import { BrowseSettings } from '~/modules/browse/BrowseSettings';
|
||||
import { DallESettings } from '~/modules/t2i/dalle/DallESettings';
|
||||
import { ElevenlabsSettings } from '~/modules/elevenlabs/ElevenlabsSettings';
|
||||
import { GoogleSearchSettings } from '~/modules/google/GoogleSearchSettings';
|
||||
import { T2ISettings } from '~/modules/t2i/T2ISettings';
|
||||
|
||||
@@ -20,14 +19,15 @@ import type { PreferencesTabId } from '~/common/layout/optima/store-layout-optim
|
||||
import { AppBreadcrumbs } from '~/common/components/AppBreadcrumbs';
|
||||
import { DarkModeToggleButton, darkModeToggleButtonSx } from '~/common/components/DarkModeToggleButton';
|
||||
import { GoodModal } from '~/common/components/modals/GoodModal';
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
|
||||
import { optimaActions } from '~/common/layout/optima/useOptima';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
|
||||
import { AppChatSettingsAI } from './AppChatSettingsAI';
|
||||
import { AppChatSettingsUI } from './settings-ui/AppChatSettingsUI';
|
||||
import { UxLabsSettings } from './UxLabsSettings';
|
||||
import { VoiceSettings } from './VoiceSettings';
|
||||
import { VoiceInSettings } from './VoiceInSettings';
|
||||
import { VoiceOutSettings } from './VoiceOutSettings';
|
||||
|
||||
|
||||
// configuration
|
||||
@@ -44,7 +44,11 @@ const Topics = styled(AccordionGroup)({
|
||||
|
||||
// larger summary, with a spinning icon
|
||||
[`& .${accordionSummaryClasses.button}`]: {
|
||||
minHeight: 64,
|
||||
minHeight: '52px',
|
||||
border: 'none',
|
||||
paddingRight: '0.75rem',
|
||||
backgroundColor: 'rgba(var(--joy-palette-primary-lightChannel) / 0.2)',
|
||||
gap: '1rem',
|
||||
},
|
||||
[`& .${accordionSummaryClasses.indicator}`]: {
|
||||
transition: '0.2s',
|
||||
@@ -52,11 +56,6 @@ const Topics = styled(AccordionGroup)({
|
||||
[`& [aria-expanded="true"] .${accordionSummaryClasses.indicator}`]: {
|
||||
transform: 'rotate(45deg)',
|
||||
},
|
||||
|
||||
// larger padded block
|
||||
[`& .${accordionDetailsClasses.content}.${accordionDetailsClasses.expanded}`]: {
|
||||
paddingBlock: '1rem',
|
||||
},
|
||||
});
|
||||
|
||||
function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode, startCollapsed?: boolean, children?: React.ReactNode }) {
|
||||
@@ -92,9 +91,9 @@ function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode
|
||||
>
|
||||
{!!props.icon && (
|
||||
<Avatar
|
||||
size='sm'
|
||||
color={COLOR_TOPIC_ICON}
|
||||
variant={expanded ? 'plain' /* was: soft */ : 'plain'}
|
||||
// size='sm'
|
||||
>
|
||||
{props.icon}
|
||||
</Avatar>
|
||||
@@ -109,7 +108,7 @@ function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode
|
||||
slotProps={{
|
||||
content: {
|
||||
sx: {
|
||||
px: { xs: 1.5, md: 2 },
|
||||
p: { xs: 1.5, md: 2.5 },
|
||||
},
|
||||
},
|
||||
}}
|
||||
@@ -131,6 +130,7 @@ const _styles = {
|
||||
|
||||
// modal: undefined,
|
||||
modal: {
|
||||
flexGrow: 1,
|
||||
backgroundColor: 'background.level1',
|
||||
} as const,
|
||||
|
||||
@@ -153,6 +153,7 @@ const _styles = {
|
||||
tabsListTab: {
|
||||
// borderRadius: '2rem',
|
||||
borderRadius: 'sm',
|
||||
fontSize: 'sm',
|
||||
flex: 1,
|
||||
p: 0,
|
||||
'&[aria-selected="true"]': {
|
||||
@@ -209,7 +210,7 @@ export function SettingsModal(props: {
|
||||
<GoodModal
|
||||
// title='Preferences' strongerTitle
|
||||
title={
|
||||
<AppBreadcrumbs size='md' rootTitle='App'>
|
||||
<AppBreadcrumbs size='md' rootTitle={isMobile ? 'App' : 'Application'}>
|
||||
<AppBreadcrumbs.Leaf><b>Preferences</b></AppBreadcrumbs.Leaf>
|
||||
</AppBreadcrumbs>
|
||||
}
|
||||
@@ -251,7 +252,7 @@ export function SettingsModal(props: {
|
||||
<Tab value='tools' disableIndicator sx={_styles.tabsListTab}>Tools</Tab>
|
||||
</TabList>
|
||||
|
||||
<TabPanel value='chat' variant='outlined' sx={_styles.tabPanel}>
|
||||
<TabPanel value='chat' color='primary' variant='outlined' sx={_styles.tabPanel}>
|
||||
<Topics>
|
||||
<Topic>
|
||||
<AppChatSettingsUI />
|
||||
@@ -268,18 +269,18 @@ export function SettingsModal(props: {
|
||||
</Topics>
|
||||
</TabPanel>
|
||||
|
||||
<TabPanel value='voice' variant='outlined' sx={_styles.tabPanel}>
|
||||
<TabPanel value='voice' color='primary' variant='outlined' sx={_styles.tabPanel}>
|
||||
<Topics>
|
||||
<Topic icon={/*'🎙️'*/ <MicIcon />} title='Microphone'>
|
||||
<VoiceSettings />
|
||||
<VoiceInSettings isMobile={isMobile} />
|
||||
</Topic>
|
||||
<Topic icon={/*'📢'*/ <RecordVoiceOverIcon />} title='ElevenLabs API'>
|
||||
<ElevenlabsSettings />
|
||||
<Topic icon={/*'📢'*/ <PhVoice />} title={'Speech'/*<>Voices <GoodBadge badge='New' /></>*/}>
|
||||
<VoiceOutSettings isMobile={isMobile} />
|
||||
</Topic>
|
||||
</Topics>
|
||||
</TabPanel>
|
||||
|
||||
<TabPanel value='draw' variant='outlined' sx={_styles.tabPanel}>
|
||||
<TabPanel value='draw' color='primary' variant='outlined' sx={_styles.tabPanel}>
|
||||
<Topics>
|
||||
<Topic>
|
||||
<T2ISettings />
|
||||
@@ -290,7 +291,45 @@ export function SettingsModal(props: {
|
||||
</Topics>
|
||||
</TabPanel>
|
||||
|
||||
<TabPanel value='tools' variant='outlined' sx={_styles.tabPanel}>
|
||||
<TabPanel value='tools' color='primary' variant='outlined' sx={_styles.tabPanel}>
|
||||
|
||||
{/* Search Modifier Info */}
|
||||
<Box sx={{
|
||||
p: 2,
|
||||
borderRadius: 'calc(var(--joy-radius-md) - 1px)',
|
||||
// backgroundColor: 'background.level1',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 2,
|
||||
}}>
|
||||
<Button
|
||||
variant='soft'
|
||||
color='success'
|
||||
startDecorator={<SearchIcon />}
|
||||
sx={{
|
||||
// this is copied frmo ButtonSearchControl._styles.desktop
|
||||
minWidth: 100,
|
||||
justifyContent: 'flex-start',
|
||||
borderRadius: '18px',
|
||||
pointerEvents: 'none',
|
||||
'[data-joy-color-scheme="light"] &': {
|
||||
bgcolor: '#d5ec31',
|
||||
},
|
||||
boxShadow: 'inset 0 2px 4px -1px rgba(0,0,0,0.15)',
|
||||
textWrap: 'nowrap',
|
||||
}}
|
||||
>
|
||||
Search
|
||||
</Button>
|
||||
<Box sx={{ flex: 1 }}>
|
||||
<Typography level='body-sm' sx={{ fontWeight: 'md', mb: 0.5 }}>
|
||||
Use the Search button
|
||||
</Typography>
|
||||
<Typography level='body-xs' sx={{ color: 'text.secondary' }}>
|
||||
Modern AI models have native search built-in. Click the Search button when chatting to enable real-time web search.
|
||||
</Typography>
|
||||
</Box>
|
||||
</Box>
|
||||
<Topics>
|
||||
<Topic icon={<LanguageRoundedIcon />} title='Load Web Pages (with images)' startCollapsed>
|
||||
<BrowseSettings />
|
||||
|
||||
@@ -38,6 +38,7 @@ const shortcutsMd = platformAwareKeystrokes(`
|
||||
| Ctrl + , | ⚙️ Preferences |
|
||||
| Ctrl + Shift + M | 🧠 Models |
|
||||
| Ctrl + Shift + O | 💬 Options (current Chat Model) |
|
||||
| Ctrl + Shift + A | Toggle AI Request Inspector |
|
||||
| Ctrl + Shift + + | Increase Text Size |
|
||||
| Ctrl + Shift + - | Decrease Text Size |
|
||||
| Ctrl + Shift + / | Shortcuts |
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { FormControl } from '@mui/joy';
|
||||
|
||||
import { useChatMicTimeoutMs } from '../chat/store-app-chat';
|
||||
|
||||
import type { FormRadioOption } from '~/common/components/forms/FormRadioControl';
|
||||
import { FormChipControl } from '~/common/components/forms/FormChipControl';
|
||||
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
|
||||
import { LanguageSelect } from '~/common/components/LanguageSelect';
|
||||
|
||||
|
||||
const _minTimeouts: ReadonlyArray<FormRadioOption<string>> = [
|
||||
{ value: '600', label: '0.6s', description: 'Best for quick calls' },
|
||||
{ value: '2000', label: '2s', description: 'Standard' },
|
||||
{ value: '5000', label: '5s', description: 'Breathe' },
|
||||
{ value: '15000', label: '15s', description: 'Best for thinking' },
|
||||
] as const;
|
||||
|
||||
|
||||
export function VoiceInSettings(props: { isMobile: boolean }) {
|
||||
|
||||
// external state
|
||||
const [chatTimeoutMs, setChatTimeoutMs] = useChatMicTimeoutMs();
|
||||
|
||||
// derived - converts from string keys to numbers and vice versa
|
||||
const chatTimeoutValue: string = '' + chatTimeoutMs;
|
||||
const setChatTimeoutValue = React.useCallback((value: string) => {
|
||||
value && setChatTimeoutMs(parseInt(value));
|
||||
}, [setChatTimeoutMs]);
|
||||
|
||||
return <>
|
||||
|
||||
<FormControl orientation='horizontal' sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<FormLabelStart
|
||||
title='Language'
|
||||
description='Mic and voice'
|
||||
// tooltip='For Microphone input and Voice output. Microphone support varies by browser (iPhone/Safari lacks speech input).'
|
||||
/>
|
||||
<LanguageSelect />
|
||||
</FormControl>
|
||||
|
||||
{!props.isMobile && (
|
||||
<FormChipControl
|
||||
title='Timeout'
|
||||
// color='primary'
|
||||
options={_minTimeouts}
|
||||
value={chatTimeoutValue}
|
||||
onChange={setChatTimeoutValue}
|
||||
/>
|
||||
)}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
import { SpeexConfigureEngines } from '~/modules/speex/components/SpeexConfigureEngines';
|
||||
import { useSpeexEngines, useSpeexTtsCharLimit } from '~/modules/speex/store-module-speex';
|
||||
|
||||
import { ChatAutoSpeakType, useChatAutoAI } from '../chat/store-app-chat';
|
||||
|
||||
import { FormChipControl } from '~/common/components/forms/FormChipControl';
|
||||
import { FormRadioOption } from '~/common/components/forms/FormRadioControl';
|
||||
import { FormSwitchControl } from '~/common/components/forms/FormSwitchControl';
|
||||
|
||||
|
||||
const _autoSpeakOptions: FormRadioOption<ChatAutoSpeakType>[] = [
|
||||
{ value: 'off', label: 'No', description: 'Off' },
|
||||
{ value: 'firstLine', label: 'Start', description: 'First paragraph' },
|
||||
{ value: 'all', label: 'Full', description: 'Complete response' },
|
||||
] as const;
|
||||
|
||||
|
||||
/**
|
||||
* Voice output settings - Auto-speak mode and TTS engine configuration
|
||||
*/
|
||||
export function VoiceOutSettings(props: { isMobile: boolean }) {
|
||||
|
||||
// external state
|
||||
const { autoSpeak, setAutoSpeak } = useChatAutoAI();
|
||||
const { ttsCharLimit, setTtsCharLimit } = useSpeexTtsCharLimit();
|
||||
|
||||
// external state - module
|
||||
const hasEngines = useSpeexEngines().length > 0;
|
||||
|
||||
return <>
|
||||
|
||||
{/* Auto-speak setting */}
|
||||
<FormChipControl
|
||||
title='Speak Chats'
|
||||
size='md'
|
||||
// color='primary'
|
||||
tooltip={!hasEngines ? 'No voice engines available. Configure a TTS service or use system voice.' : undefined}
|
||||
disabled={!hasEngines}
|
||||
options={_autoSpeakOptions}
|
||||
value={autoSpeak}
|
||||
onChange={setAutoSpeak}
|
||||
/>
|
||||
|
||||
{/* TTS character limit toggle */}
|
||||
<FormSwitchControl
|
||||
title='Speak Cost Guard'
|
||||
description={ttsCharLimit !== null ? 'Max ~3 min' : 'Unlimited'}
|
||||
tooltip='Limits text sent to TTS providers, helping prevent unexpected costs with cloud services. By default the limit is 4096 characters (~3 minutes of speech).'
|
||||
checked={ttsCharLimit !== null}
|
||||
onChange={(checked) => setTtsCharLimit(checked ? 4096 : null)}
|
||||
/>
|
||||
|
||||
{/* Engine configuration */}
|
||||
<SpeexConfigureEngines isMobile={props.isMobile} />
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,47 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { FormControl } from '@mui/joy';
|
||||
|
||||
import { useChatMicTimeoutMs } from '../chat/store-app-chat';
|
||||
|
||||
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
|
||||
import { FormRadioControl } from '~/common/components/forms/FormRadioControl';
|
||||
import { LanguageSelect } from '~/common/components/LanguageSelect';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
|
||||
|
||||
export function VoiceSettings() {
|
||||
|
||||
// external state
|
||||
const isMobile = useIsMobile();
|
||||
const [chatTimeoutMs, setChatTimeoutMs] = useChatMicTimeoutMs();
|
||||
|
||||
|
||||
// this converts from string keys to numbers and vice versa
|
||||
const chatTimeoutValue: string = '' + chatTimeoutMs;
|
||||
const setChatTimeoutValue = (value: string) => value && setChatTimeoutMs(parseInt(value));
|
||||
|
||||
return <>
|
||||
|
||||
{/* LanguageSelect: moved from the UI settings (where it logically belongs), just to group things better from an UX perspective */}
|
||||
<FormControl orientation='horizontal' sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<FormLabelStart title='Language'
|
||||
description='ASR and TTS'
|
||||
tooltip='Currently for Microphone input and Voice output. Microphone support varies by browser (iPhone/Safari lacks speech input). We will use the ElevenLabs MultiLanguage model if a language other than English is selected.' />
|
||||
<LanguageSelect />
|
||||
</FormControl>
|
||||
|
||||
{!isMobile && <FormRadioControl
|
||||
title='Mic Timeout'
|
||||
description={chatTimeoutMs < 1000 ? 'Best for quick calls' : chatTimeoutMs > 5000 ? 'Best for thinking' : 'Standard'}
|
||||
options={[
|
||||
{ value: '600', label: '.6s' },
|
||||
{ value: '2000', label: '2s' },
|
||||
{ value: '5000', label: '5s' },
|
||||
{ value: '15000', label: '15s' },
|
||||
]}
|
||||
value={chatTimeoutValue} onChange={setChatTimeoutValue}
|
||||
/>}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/**
|
||||
* Copyright (c)2024-2025 Enrico Ros
|
||||
* Copyright (c)2024-2026 Enrico Ros
|
||||
*
|
||||
* This file is include by both the frontend and backend, however depending on the time
|
||||
* of the build, the values may be different.
|
||||
@@ -23,13 +23,13 @@ export const Release = {
|
||||
|
||||
// this is here to trigger revalidation of data, e.g. models refresh
|
||||
Monotonics: {
|
||||
Aix: 36,
|
||||
NewsVersion: 192,
|
||||
Aix: 54,
|
||||
NewsVersion: 203,
|
||||
},
|
||||
|
||||
// Frontend: pretty features
|
||||
TechLevels: {
|
||||
AIX: '1.1', Apply: '0.8', Beam: '2.0', LFS: '0.5', /*Precog: '0.1',*/ React: '1.5',
|
||||
AIX: '2', Apply: '0.8', Beam: '2', LFS: '0.9', /*Precog: '0.1',*/ React: '1.6',
|
||||
},
|
||||
AiFunctions: [
|
||||
// from `ContextChatGenerate_schema`
|
||||
@@ -59,6 +59,14 @@ export const Release = {
|
||||
|
||||
|
||||
export const BaseProduct = {
|
||||
ProductName: 'Big-AGI',
|
||||
ProductURL: 'https://big-agi.com',
|
||||
PrivacyPolicy: 'https://big-agi.com/privacy',
|
||||
TermsOfService: 'https://big-agi.com/terms',
|
||||
// ecosystem
|
||||
DocsBaseSite: 'https://big-agi.com/docs',
|
||||
OpenSupportDiscord: 'https://discord.gg/MkH4qj2Jp9',
|
||||
OpenSourceRepo: 'https://github.com/enricoros/big-agi',
|
||||
ReleaseNotes: '',
|
||||
SupportForm: (_userId?: string) => 'https://github.com/enricoros/big-agi/issues',
|
||||
SupportForm: (_userId?: string) => 'https://github.com/enricoros/big-AGI/issues/new?template=ai-triage.yml',
|
||||
} as const;
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
/**
|
||||
* Attachment Cloud Files
|
||||
*
|
||||
* For future refresh capability, the output fragments should preserve:
|
||||
* - provider, fileId: to identify the file
|
||||
* - mimeType: the original cloud MIME type
|
||||
* - the converter used (stored in outputsHeuristic.actualConverterId)
|
||||
*
|
||||
* Google Workspace files (Docs, Sheets, Slides) are auto-exported during
|
||||
* input loading to standard formats (HTML, CSV, PDF) and then processed
|
||||
* by standard converters.
|
||||
*/
|
||||
|
||||
import type { AttachmentCloudProviderId } from './attachment.types';
|
||||
|
||||
|
||||
// Error handling
|
||||
|
||||
export class CloudFetchError extends Error {
|
||||
constructor(public readonly code: _CloudFetchErrorCode, public readonly details?: string) {
|
||||
super(`Cloud fetch error: ${code}${details ? ` - ${details}` : ''}`);
|
||||
this.name = 'CloudFetchError';
|
||||
}
|
||||
}
|
||||
|
||||
type _CloudFetchErrorCode = 'AUTH_EXPIRED' | 'NOT_FOUND' | 'FORBIDDEN' | 'RATE_LIMITED' | 'NETWORK_ERROR' | 'NOT_IMPLEMENTED' | 'FETCH_FAILED';
|
||||
|
||||
|
||||
// Utility functions
|
||||
|
||||
|
||||
/**
|
||||
* Google Workspace files can't be downloaded directly - they must be exported.
|
||||
* We prioritize AI-friendly formats (text > binary).
|
||||
*
|
||||
* Docs: md, docx, pdf, txt, rtf, odt, epub, html.zip
|
||||
* Sheets: xlsx, pdf, csv (1st sheet), tsv, ods
|
||||
* Slides: pptx, pdf, txt, png/jpg/svg (1st slide)
|
||||
* Drawings: png, pdf, jpg, svg
|
||||
*
|
||||
* Regular files: we'll return no conversion
|
||||
*
|
||||
* @see https://developers.google.com/workspace/drive/api/guides/ref-export-formats
|
||||
*/
|
||||
const _GOOGLE_WORKSPACE_EXPORT: Record<string, { mimeType: string; ext: string, converter: string }> = {
|
||||
'application/vnd.google-apps.document': { mimeType: 'text/markdown', ext: '.md', converter: 'Doc -> ' },
|
||||
'application/vnd.google-apps.spreadsheet': { mimeType: 'text/csv', ext: '.csv', converter: 'Sheet -> ' },
|
||||
'application/vnd.google-apps.presentation': { mimeType: 'application/pdf', ext: '.pdf', converter: 'Slides -> ' },
|
||||
'application/vnd.google-apps.drawing': { mimeType: 'image/svg+xml', ext: '.svg', converter: 'Drawing -> ' },
|
||||
};
|
||||
|
||||
export function attachmentCloudGoogleWorkspaceExportMIME(cloudMimeType: string): string | undefined {
|
||||
return _GOOGLE_WORKSPACE_EXPORT[cloudMimeType]?.mimeType;
|
||||
}
|
||||
|
||||
export function attachmentCloudConverterPrefix(cloudMimeType: string): string {
|
||||
return _GOOGLE_WORKSPACE_EXPORT[cloudMimeType]?.converter || 'Drive -> ';
|
||||
}
|
||||
|
||||
|
||||
// Fetcher
|
||||
|
||||
/**
|
||||
* Fetch a file from a cloud provider.
|
||||
*
|
||||
* @param provider - The cloud provider ID
|
||||
* @param fileId - The file ID in the provider's system
|
||||
* @param accessToken - OAuth access token
|
||||
* @param exportMimeType - For native formats (Docs/Sheets), the export format
|
||||
* @returns The file content as a Blob
|
||||
*/
|
||||
export async function attachmentCloudFetchFile(
|
||||
provider: AttachmentCloudProviderId,
|
||||
fileId: string,
|
||||
accessToken: string,
|
||||
exportMimeType?: string,
|
||||
): Promise<Blob> {
|
||||
switch (provider) {
|
||||
case 'gdrive':
|
||||
return _fetchGoogleDriveFile(fileId, accessToken, exportMimeType);
|
||||
|
||||
case 'onedrive':
|
||||
case 'dropbox':
|
||||
throw new CloudFetchError('NOT_IMPLEMENTED', `${provider} support coming soon`);
|
||||
|
||||
default:
|
||||
throw new CloudFetchError('NOT_IMPLEMENTED', `Unknown provider: ${provider}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Google Drive API - Fetch file content
|
||||
* https://developers.google.com/drive/api/reference/rest/v3/files/get
|
||||
* https://developers.google.com/drive/api/reference/rest/v3/files/export
|
||||
*/
|
||||
async function _fetchGoogleDriveFile(
|
||||
fileId: string,
|
||||
accessToken: string,
|
||||
exportMimeType?: string,
|
||||
): Promise<Blob> {
|
||||
|
||||
// for native Google Workspace files, use export endpoint
|
||||
const url = exportMimeType
|
||||
? `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=${encodeURIComponent(exportMimeType)}`
|
||||
: `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media`;
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`,
|
||||
},
|
||||
}).catch((error) => {
|
||||
console.log('[DEV] Network error while fetching Google Drive file:', { error });
|
||||
throw new CloudFetchError('NETWORK_ERROR', error?.message || String(error));
|
||||
});
|
||||
|
||||
// NOTE: we shall consider moving this to use fetchResponseOrTRPCThrow instead of this custom small impl..
|
||||
if (!response.ok) {
|
||||
const errorCode = _mapHttpStatusToErrorCode(response.status);
|
||||
let details = `${response.status}: ${response.statusText}`;
|
||||
try {
|
||||
const errorBody = await response.text();
|
||||
if (errorBody) details += ` - ${errorBody.slice(0, 200)}`;
|
||||
} catch { /* ignore */
|
||||
}
|
||||
throw new CloudFetchError(errorCode, details);
|
||||
}
|
||||
|
||||
return response.blob();
|
||||
}
|
||||
|
||||
|
||||
function _mapHttpStatusToErrorCode(status: number): _CloudFetchErrorCode {
|
||||
switch (status) {
|
||||
case 401:
|
||||
return 'AUTH_EXPIRED';
|
||||
case 403:
|
||||
return 'FORBIDDEN';
|
||||
case 404:
|
||||
return 'NOT_FOUND';
|
||||
case 429:
|
||||
return 'RATE_LIMITED';
|
||||
default:
|
||||
return 'FETCH_FAILED';
|
||||
}
|
||||
}
|
||||
@@ -59,17 +59,35 @@ export async function imageDataToImageAttachmentFragmentViaDBlob(
|
||||
origin: { // User originated
|
||||
ot: 'user',
|
||||
source: 'attachment',
|
||||
media: source.media === 'file' ? source.origin : source.media === 'url' ? 'url' : 'unknown',
|
||||
url: source.media === 'url' ? source.url : undefined,
|
||||
fileName: source.media === 'file' ? source.refPath : undefined,
|
||||
media:
|
||||
source.media === 'file' ? source.origin
|
||||
: source.media === 'url' ? 'url'
|
||||
: source.media === 'cloud' ? source.provider
|
||||
: 'unknown',
|
||||
url:
|
||||
source.media === 'url' ? source.url
|
||||
: source.media === 'cloud' ? source.webViewLink
|
||||
: undefined,
|
||||
fileName:
|
||||
source.media === 'file' ? source.refPath
|
||||
: source.media === 'cloud' ? source.fileName
|
||||
: undefined,
|
||||
},
|
||||
});
|
||||
|
||||
// use title if available, otherwise use the source refPath/refUrl/fileName
|
||||
const refTextSummary = title || (
|
||||
source.media === 'file' ? source.refPath
|
||||
: source.media === 'url' ? source.refUrl
|
||||
: source.media === 'cloud' ? source.fileName
|
||||
: undefined
|
||||
);
|
||||
|
||||
// Future-proof: create a Zync Image Asset reference attachment fragment, with the legacy image_ref part for compatibility for the time being
|
||||
return createZyncAssetReferenceAttachmentFragment(
|
||||
title, caption,
|
||||
nanoidToUuidV4(dblobAssetId, 'convert-dblob-to-dasset'),
|
||||
title || (source.media === 'file' ? source.refPath : source.media === 'url' ? source.refUrl : undefined), // use title if available, otherwise use the source refPath or refUrl
|
||||
refTextSummary,
|
||||
'image',
|
||||
{
|
||||
pt: 'image_ref' as const,
|
||||
@@ -77,7 +95,7 @@ export async function imageDataToImageAttachmentFragmentViaDBlob(
|
||||
...(title ? { altText: title } : {}),
|
||||
...(imageWidth ? { width: imageWidth } : {}),
|
||||
...(imageHeight ? { height: imageHeight } : {}),
|
||||
}
|
||||
},
|
||||
);
|
||||
} catch (error) {
|
||||
console.error('imageAttachment: Error processing image:', error);
|
||||
|
||||
@@ -2,33 +2,38 @@ import type { FileWithHandle } from 'browser-fs-access';
|
||||
|
||||
import { callBrowseFetchPageOrThrow } from '~/modules/browse/browse.client';
|
||||
import { extractYoutubeVideoIDFromURL } from '~/modules/youtube/youtube.utils';
|
||||
import { imageCaptionFromImageOrThrow } from '~/modules/aifn/image-caption/imageCaptionFromImage';
|
||||
import { youTubeGetVideoData } from '~/modules/youtube/useYouTubeTranscript';
|
||||
|
||||
import type { CommonImageMimeTypes } from '~/common/util/imageUtils';
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
import { PLATFORM_IMAGE_MIMETYPE } from '~/common/util/imageUtils';
|
||||
import { agiCustomId, agiUuid } from '~/common/util/idUtils';
|
||||
import { convert_Base64DataURL_To_Base64WithMimeType, convert_Base64WithMimeType_To_Blob } from '~/common/util/blobUtils';
|
||||
import { getDomainModelConfiguration } from '~/common/stores/llms/hooks/useModelDomain';
|
||||
import { htmlTableToMarkdown } from '~/common/util/htmlTableToMarkdown';
|
||||
import { humanReadableHyphenated } from '~/common/util/textUtils';
|
||||
import { ocrImageWithProgress, ocrPdfPagesWithProgress } from '~/common/util/ocrUtils';
|
||||
import { pdfToImageDataURLs, pdfToText } from '~/common/util/pdfUtils';
|
||||
|
||||
import { createDMessageDataInlineText, createDocAttachmentFragment, DMessageAttachmentFragment, DMessageDataInline, DMessageDocPart, DVMimeType, isContentOrAttachmentFragment, isDocPart, specialContentPartToDocAttachmentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
|
||||
import type { AttachmentCreationOptions, AttachmentDraft, AttachmentDraftConverter, AttachmentDraftId, AttachmentDraftInput, AttachmentDraftSource, AttachmentDraftSourceOriginFile, DraftEgoFragmentsInputData, DraftWebInputData, DraftYouTubeInputData } from './attachment.types';
|
||||
import type { AttachmentsDraftsStore } from './store-attachment-drafts_slice';
|
||||
import { attachmentCloudConverterPrefix, attachmentCloudFetchFile, attachmentCloudGoogleWorkspaceExportMIME, CloudFetchError } from './attachment.cloud';
|
||||
import { attachmentGetLiveFileId, attachmentSourceSupportsLiveFile } from './attachment.livefile';
|
||||
import { guessInputContentTypeFromMime, heuristicMimeTypeFixup, mimeTypeIsDocX, mimeTypeIsPDF, mimeTypeIsPlainText, mimeTypeIsSupportedImage, reverseLookupMimeType } from './attachment.mimetypes';
|
||||
import { imageDataToImageAttachmentFragmentViaDBlob } from './attachment.dblobs';
|
||||
|
||||
|
||||
// configuration
|
||||
export const DEFAULT_ADRAFT_IMAGE_MIMETYPE: CommonImageMimeTypes = !Is.Browser.Safari ? 'image/webp' : 'image/jpeg';
|
||||
export const DEFAULT_ADRAFT_IMAGE_QUALITY = 0.96;
|
||||
const PDF_IMAGE_PAGE_SCALE = 1.5;
|
||||
const PDF_IMAGE_QUALITY = 0.5;
|
||||
const ENABLE_TEXT_AND_IMAGES = false; // [PROD] ?
|
||||
const DOCPART_DEFAULT_VERSION = 1;
|
||||
|
||||
// PDF text extraction quality thresholds
|
||||
const IMAGE_LOW_TEXT_THRESHOLD = 80; // chars per image - below this, consider the image as low-text (photo-like) rather than document-like
|
||||
const PDF_LOW_TEXT_THRESHOLD = 160; // chars per page - below this, consider the PDF as scanned/image-based
|
||||
const PDF_FALLBACK_MAX_IMAGES = 32; // max pages to convert to images when auto-falling back (to respect LLM limits)
|
||||
|
||||
|
||||
// internal mimes, only used to route data within us (source -> input -> converters)
|
||||
const INT_MIME_VND_AGI_EGO_FRAGMENTS = 'application/vnd.agi.ego.fragments';
|
||||
@@ -65,7 +70,8 @@ export function attachmentCreate(source: AttachmentDraftSource): AttachmentDraft
|
||||
export async function attachmentLoadInputAsync(source: Readonly<AttachmentDraftSource>, edit: (changes: Partial<Omit<AttachmentDraft, 'outputFragments'>>) => void) {
|
||||
edit({ inputLoading: true });
|
||||
|
||||
switch (source.media) {
|
||||
const sourceMedia = source.media;
|
||||
switch (sourceMedia) {
|
||||
|
||||
// Download URL (page, file, ..) and attach as input
|
||||
case 'url':
|
||||
@@ -143,6 +149,7 @@ export async function attachmentLoadInputAsync(source: Readonly<AttachmentDraftS
|
||||
} else
|
||||
edit({ inputError: 'No content or file found at this link' });
|
||||
} catch (error: any) {
|
||||
console.log('[DEV] Issue downloading page for attachment:', { error });
|
||||
edit({ inputError: `Issue downloading page: ${error?.message || (typeof error === 'string' ? error : JSON.stringify(error))}` });
|
||||
}
|
||||
break;
|
||||
@@ -223,6 +230,34 @@ export async function attachmentLoadInputAsync(source: Readonly<AttachmentDraftS
|
||||
}
|
||||
break;
|
||||
|
||||
case 'cloud':
|
||||
const cloudLabel = source.fileName || 'Cloud File';
|
||||
const cloudRef = source.webViewLink || `${source.provider}:${source.fileId}`;
|
||||
edit({ label: cloudLabel, ref: cloudRef });
|
||||
|
||||
try {
|
||||
// fetch / export to the destination mime
|
||||
const exportMime = attachmentCloudGoogleWorkspaceExportMIME(source.mimeType);
|
||||
const cloudBlob = await attachmentCloudFetchFile(source.provider, source.fileId, source.accessToken, exportMime);
|
||||
|
||||
// use export mime if we exported, otherwise use source or detected mime
|
||||
const resultMime = exportMime || source.mimeType /* provided outside */ || cloudBlob.type /* connection */ || 'application/octet-stream';
|
||||
|
||||
edit({
|
||||
input: {
|
||||
mimeType: resultMime,
|
||||
data: cloudBlob,
|
||||
dataSize: cloudBlob.size,
|
||||
},
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof CloudFetchError
|
||||
? `${error.code}: ${error.details || error.message}`
|
||||
: `Failed to download: ${error instanceof Error ? error.message : String(error)}`;
|
||||
edit({ inputError: errorMessage });
|
||||
}
|
||||
break;
|
||||
|
||||
case 'ego':
|
||||
edit({
|
||||
label: source.label,
|
||||
@@ -233,6 +268,10 @@ export async function attachmentLoadInputAsync(source: Readonly<AttachmentDraftS
|
||||
},
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
const _exhaustiveCheck: never = sourceMedia;
|
||||
break;
|
||||
}
|
||||
|
||||
edit({ inputLoading: false });
|
||||
@@ -253,6 +292,7 @@ export function attachmentDefineConverters(source: AttachmentDraftSource, input:
|
||||
const converters: AttachmentDraftConverter[] = [];
|
||||
|
||||
const autoAddImages = ENABLE_TEXT_AND_IMAGES && !!options?.hintAddImages;
|
||||
const fromCloud = source.media === 'cloud';
|
||||
|
||||
switch (true) {
|
||||
|
||||
@@ -260,6 +300,7 @@ export function attachmentDefineConverters(source: AttachmentDraftSource, input:
|
||||
case mimeTypeIsPlainText(input.mimeType):
|
||||
// handle a secondary layer of HTML 'text' origins: drop, paste, and clipboard-read
|
||||
const textOriginHtml = source.media === 'text' && input.altMimeType === 'text/html' && !!input.altData;
|
||||
const textOriginClipboard = source.media === 'text' && ['clipboard-read', 'paste'].includes(source.method);
|
||||
const isHtmlTable = !!input.altData?.startsWith('<table');
|
||||
|
||||
// p1: Tables
|
||||
@@ -267,32 +308,45 @@ export function attachmentDefineConverters(source: AttachmentDraftSource, input:
|
||||
converters.push({ id: 'rich-text-table', name: 'Markdown Table' });
|
||||
|
||||
// p2: Text
|
||||
converters.push({ id: 'text', name: attachmentSourceSupportsLiveFile(source) ? 'Text (Live)' : 'Text' });
|
||||
if (fromCloud && input.mimeType === 'text/markdown') {
|
||||
converters.push({ id: 'text', name: 'Markdown' });
|
||||
} else {
|
||||
converters.push({ id: 'text', name: attachmentSourceSupportsLiveFile(source) ? 'Text (Live)' : 'Text' });
|
||||
if (!textOriginHtml && textOriginClipboard) {
|
||||
converters.push({ id: 'text-markdown', name: 'Text -> Markdown' });
|
||||
converters.push({ id: 'text-cleaner', name: 'Text -> Clean HTML' });
|
||||
}
|
||||
}
|
||||
|
||||
// p3: Html
|
||||
// p3: Html -> Markdown, and Html
|
||||
if (textOriginHtml) {
|
||||
converters.push({ id: 'rich-text-cleaner', name: 'Cleaner HTML' });
|
||||
converters.push({ id: 'rich-text', name: 'HTML · Heavy' });
|
||||
converters.push({ id: 'rich-text-markdown', name: 'HTML -> Markdown' });
|
||||
converters.push({ id: 'rich-text-cleaner', name: 'HTML -> Clean HTML' });
|
||||
}
|
||||
break;
|
||||
|
||||
// Images (Known/Unknown)
|
||||
case input.mimeType.startsWith('image/'):
|
||||
const inputImageMimeSupported = mimeTypeIsSupportedImage(input.mimeType);
|
||||
const visionModelMissing = !getDomainModelConfiguration('imageCaption', true, true);
|
||||
converters.push({ id: 'image-resized-high', name: 'Image (high detail)', disabled: !inputImageMimeSupported });
|
||||
converters.push({ id: 'image-resized-low', name: 'Image (low detail)', disabled: !inputImageMimeSupported });
|
||||
converters.push({ id: 'image-original', name: 'Image (original quality)', disabled: !inputImageMimeSupported });
|
||||
if (!inputImageMimeSupported)
|
||||
converters.push({ id: 'image-to-default', name: `As Image (${DEFAULT_ADRAFT_IMAGE_MIMETYPE})` });
|
||||
converters.push({ id: 'image-to-default', name: `As Image (${PLATFORM_IMAGE_MIMETYPE})` });
|
||||
converters.push({ id: 'image-caption', name: 'AI Caption (Text)', disabled: visionModelMissing });
|
||||
converters.push({ id: 'unhandled', name: 'No Image' });
|
||||
converters.push({ id: 'image-ocr', name: 'Add Text (OCR)', isCheckbox: true });
|
||||
break;
|
||||
|
||||
// PDF
|
||||
case mimeTypeIsPDF(input.mimeType):
|
||||
converters.push({ id: 'pdf-text', name: 'PDF To Text', isActive: !autoAddImages || undefined });
|
||||
converters.push({ id: 'pdf-images', name: 'PDF To Images' });
|
||||
converters.push({ id: 'pdf-text-and-images', name: 'PDF Text & Images (best)', isActive: autoAddImages });
|
||||
converters.push({ id: 'pdf-auto', name: 'Auto', isActive: !autoAddImages });
|
||||
converters.push({ id: 'pdf-text', name: 'PDF Text' });
|
||||
converters.push({ id: 'pdf-images-ocr', name: 'PDF -> OCR (for scans)' });
|
||||
converters.push({ id: 'pdf-images', name: 'PDF -> Images' });
|
||||
converters.push({ id: 'pdf-text-and-images', name: 'PDF -> Text + Images', isActive: autoAddImages });
|
||||
break;
|
||||
|
||||
// DOCX
|
||||
@@ -337,6 +391,12 @@ export function attachmentDefineConverters(source: AttachmentDraftSource, input:
|
||||
break;
|
||||
}
|
||||
|
||||
// cosmetic for cloud: prepend cloud label prefixes
|
||||
const cloudLabelPrefix = source.media === 'cloud' ? attachmentCloudConverterPrefix(source.mimeType) : '';
|
||||
if (cloudLabelPrefix)
|
||||
for (const converter of converters)
|
||||
converter.name = cloudLabelPrefix + converter.name;
|
||||
|
||||
edit({ converters });
|
||||
}
|
||||
|
||||
@@ -380,7 +440,8 @@ function _prepareDocData(source: AttachmentDraftSource, input: Readonly<Attachme
|
||||
srcFileSize: source.fileWithHandle.size || input.dataSize,
|
||||
};
|
||||
|
||||
switch (source.origin) {
|
||||
const sourceOrigin = source.origin;
|
||||
switch (sourceOrigin) {
|
||||
case 'camera':
|
||||
fileTitle = source.refPath || _lowCollisionRefString('Camera Photo', 6);
|
||||
break;
|
||||
@@ -398,6 +459,10 @@ function _prepareDocData(source: AttachmentDraftSource, input: Readonly<Attachme
|
||||
case 'drop':
|
||||
fileTitle = source.refPath || _lowCollisionRefString('Dropped File', 6);
|
||||
break;
|
||||
default:
|
||||
const _exhaustiveCheck: never = sourceOrigin;
|
||||
fileTitle = 'File';
|
||||
break;
|
||||
}
|
||||
return {
|
||||
title: fileTitle,
|
||||
@@ -415,6 +480,25 @@ function _prepareDocData(source: AttachmentDraftSource, input: Readonly<Attachme
|
||||
refString: humanReadableHyphenated(textRef),
|
||||
};
|
||||
|
||||
// Cloud files
|
||||
case 'cloud':
|
||||
const cloudFileName = source.fileName || 'Cloud File';
|
||||
const cloudProviderLabel = source.provider === 'gdrive' ? 'Google Drive'
|
||||
: source.provider === 'onedrive' ? 'OneDrive'
|
||||
: source.provider === 'dropbox' ? 'Dropbox'
|
||||
: 'Cloud';
|
||||
const cloudRef = `${source.provider}-${source.fileName || _lowCollisionRefString('file', 6)}`;
|
||||
return {
|
||||
title: cloudFileName,
|
||||
caption: `From ${cloudProviderLabel}`,
|
||||
refString: humanReadableHyphenated(cloudRef),
|
||||
// TODO: expand this to allow future redownload - or other location but for the same purpose
|
||||
docMeta: {
|
||||
srcFileName: source.fileName,
|
||||
srcFileSize: source.fileSize || input.dataSize,
|
||||
},
|
||||
};
|
||||
|
||||
// The application attaching pieces of itself
|
||||
case 'ego':
|
||||
const egoKind = source.method === 'ego-fragments' ? 'Chat Message' : '';
|
||||
@@ -478,6 +562,8 @@ export async function attachmentPerformConversion(
|
||||
edit(attachment.id, {
|
||||
outputsConverting: true,
|
||||
outputsConversionProgress: null,
|
||||
outputWarnings: undefined,
|
||||
outputsHeuristic: undefined,
|
||||
});
|
||||
|
||||
// apply converter to the input
|
||||
@@ -490,35 +576,69 @@ export async function attachmentPerformConversion(
|
||||
|
||||
switch (converter.id) {
|
||||
|
||||
// text as-is
|
||||
// text
|
||||
case 'text':
|
||||
case 'text-cleaner':
|
||||
case 'text-markdown':
|
||||
const possibleLiveFileId = await attachmentGetLiveFileId(source);
|
||||
const textContent = await _inputDataToString(input.data, 'text');
|
||||
const textualInlineData = createDMessageDataInlineText(textContent, input.mimeType);
|
||||
let textContent = await _inputDataToString(input.data, 'text');
|
||||
let textContentMime = input.mimeType || 'text/plain';
|
||||
|
||||
switch (converter.id) {
|
||||
case 'text-cleaner':
|
||||
textContent = _cleanPossibleHtmlText(textContent);
|
||||
break;
|
||||
case 'text-markdown':
|
||||
try {
|
||||
const { convertHtmlToMarkdown } = await import('./file-converters/HtmlToMarkdown');
|
||||
textContent = convertHtmlToMarkdown(textContent);
|
||||
textContentMime = 'text/markdown';
|
||||
} catch (error) {
|
||||
console.log('[DEV] Error converting Text (HTML) to Markdown:', error);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
const textualInlineData = createDMessageDataInlineText(textContent, textContentMime);
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, _guessDocVDT(input.mimeType), textualInlineData, refString, DOCPART_DEFAULT_VERSION, docMeta, possibleLiveFileId));
|
||||
break;
|
||||
|
||||
// html as-is
|
||||
// html
|
||||
case 'rich-text':
|
||||
case 'rich-text-cleaner':
|
||||
case 'rich-text-markdown':
|
||||
let richText: string;
|
||||
if (input.altData)
|
||||
richText = input.altData;
|
||||
else if (input.mimeType === 'text/html')
|
||||
richText = await _inputDataToString(input.data, 'rich-text');
|
||||
else
|
||||
richText = '';
|
||||
let richTextMimeType = 'text/html';
|
||||
|
||||
// html -> cleaner/html or markdown
|
||||
switch (converter.id) {
|
||||
case 'rich-text-cleaner':
|
||||
richText = _cleanPossibleHtmlText(richText);
|
||||
richTextMimeType = 'text/html';
|
||||
break;
|
||||
case 'rich-text-markdown':
|
||||
try {
|
||||
const { convertHtmlToMarkdown } = await import('./file-converters/HtmlToMarkdown');
|
||||
richText = convertHtmlToMarkdown(richText);
|
||||
richTextMimeType = 'text/markdown';
|
||||
} catch (error) {
|
||||
console.log('[DEV] Error converting HTML to Markdown:', error);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// NOTE: before we had the following: createTextAttachmentFragment(ref || '\n<!DOCTYPE html>', input.altData!), which
|
||||
// was used to wrap the HTML in a code block to facilitate AutoRenderBlocks's parser. Historic note, for future debugging.
|
||||
const richTextData = createDMessageDataInlineText(input.altData || '', input.altMimeType);
|
||||
const richTextData = createDMessageDataInlineText(richText, richTextMimeType);
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.VndAgiCode, richTextData, refString, DOCPART_DEFAULT_VERSION, docMeta));
|
||||
break;
|
||||
|
||||
// html cleaned
|
||||
case 'rich-text-cleaner':
|
||||
const cleanerHtml = (input.altData || '')
|
||||
// remove class and style attributes
|
||||
.replace(/<[^>]+>/g, (tag) =>
|
||||
tag.replace(/ class="[^"]*"/g, '').replace(/ style="[^"]*"/g, ''),
|
||||
)
|
||||
// remove svg elements
|
||||
.replace(/<svg[^>]*>.*?<\/svg>/g, '');
|
||||
const cleanedHtmlData = createDMessageDataInlineText(cleanerHtml, 'text/html');
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.VndAgiCode, cleanedHtmlData, refString, DOCPART_DEFAULT_VERSION, docMeta));
|
||||
break;
|
||||
|
||||
// html to markdown table
|
||||
case 'rich-text-table':
|
||||
let tableData: DMessageDataInline;
|
||||
@@ -561,7 +681,7 @@ export async function attachmentPerformConversion(
|
||||
// image converted (potentially unsupported mime)
|
||||
case 'image-to-default':
|
||||
if (!_expectBlob(input.data, 'image-to-default')) return;
|
||||
const imageCastF = await imageDataToImageAttachmentFragmentViaDBlob(input.mimeType, input.data, source, title, caption, DEFAULT_ADRAFT_IMAGE_MIMETYPE, false);
|
||||
const imageCastF = await imageDataToImageAttachmentFragmentViaDBlob(input.mimeType, input.data, source, title, caption, PLATFORM_IMAGE_MIMETYPE, false);
|
||||
if (imageCastF)
|
||||
newFragments.push(imageCastF);
|
||||
break;
|
||||
@@ -570,47 +690,152 @@ export async function attachmentPerformConversion(
|
||||
case 'image-ocr':
|
||||
if (!_expectBlob(input.data, 'Image OCR converter')) break;
|
||||
try {
|
||||
let lastProgress = -1;
|
||||
const { recognize } = await import('tesseract.js');
|
||||
const result = await recognize(input.data, undefined, {
|
||||
errorHandler: e => console.error(e),
|
||||
logger: (message) => {
|
||||
if (message.status === 'recognizing text') {
|
||||
if (message.progress > lastProgress + 0.01) {
|
||||
lastProgress = message.progress;
|
||||
edit(attachment.id, { outputsConversionProgress: lastProgress });
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
const imageText = result.data.text;
|
||||
// Image -> OCR -> Inline text doc
|
||||
const imageText = await ocrImageWithProgress(input.data, (progress) => edit(attachment.id, { outputsConversionProgress: progress }));
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(imageText, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'image' }));
|
||||
// warn if very little text was extracted (likely a photo/diagram rather than text)
|
||||
if (imageText.trim().length < IMAGE_LOW_TEXT_THRESHOLD)
|
||||
edit(attachment.id, { outputWarnings: ['Very little text extracted - this image may not contain readable text.'] });
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
console.error('[Image OCR Error]', error);
|
||||
}
|
||||
break;
|
||||
|
||||
// image to caption
|
||||
case 'image-caption':
|
||||
if (!_expectBlob(input.data, 'Image captioning converter')) break;
|
||||
try {
|
||||
const abortController = new AbortController();
|
||||
const captionText = await imageCaptionFromImageOrThrow(
|
||||
input.data,
|
||||
input.mimeType,
|
||||
attachment.id,
|
||||
abortController.signal,
|
||||
progress => edit(attachment.id, { outputsConversionProgress: progress / 100 }),
|
||||
);
|
||||
// if we're here we shall have valid text
|
||||
newFragments.push(createDocAttachmentFragment(
|
||||
title,
|
||||
caption + ' (Caption)',
|
||||
DVMimeType.TextPlain,
|
||||
createDMessageDataInlineText(captionText || 'This image could not be described', 'text/plain'),
|
||||
refString,
|
||||
DOCPART_DEFAULT_VERSION,
|
||||
{ ...docMeta, srcOcrFrom: 'image-caption' },
|
||||
));
|
||||
} catch (error: any) {
|
||||
console.log('[DEV] Failed to caption image:', error);
|
||||
const errorText = `[Captioning failed: ${error?.message || String(error)}]`;
|
||||
edit(attachment.id, { outputWarnings: [errorText] });
|
||||
newFragments.push(createDocAttachmentFragment(title, caption + ' (Error)', DVMimeType.TextPlain, createDMessageDataInlineText(errorText, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'image-caption' }));
|
||||
}
|
||||
break;
|
||||
|
||||
|
||||
// pdf to text
|
||||
case 'pdf-text':
|
||||
if (!_expectBlob(input.data, 'PDF text converter')) break;
|
||||
// Convert Blob to ArrayBuffer for PDF.js
|
||||
const pdfText = await pdfToText(await input.data.arrayBuffer(), (progress: number) => {
|
||||
edit(attachment.id, { outputsConversionProgress: progress });
|
||||
});
|
||||
if (pdfText.trim().length < 2) {
|
||||
// Warn the user if no text is extracted
|
||||
// edit(attachment.id, { inputError: 'No text found in the PDF file.' });
|
||||
} else
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(pdfText, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' }));
|
||||
// pdf-auto: intelligent conversion with fallback chain (text → OCR → images)
|
||||
case 'pdf-auto':
|
||||
if (!_expectBlob(input.data, 'PDF auto converter')) break;
|
||||
try {
|
||||
// Phase 1: Try text extraction (0-20% progress)
|
||||
const pdfArrayBuffer = await input.data.arrayBuffer();
|
||||
|
||||
// [pdf-text] Extract text with quality metadata
|
||||
const pdfTextResult = await pdfToText(pdfArrayBuffer, (progress: number) => {
|
||||
// Reserve 0-20% for text extraction attempt, 20-100% for potential image fallback
|
||||
edit(attachment.id, { outputsConversionProgress: progress * 0.2 });
|
||||
});
|
||||
|
||||
// Check text density to detect scanned/image-based PDFs
|
||||
if (pdfTextResult.avgCharsPerPage >= PDF_LOW_TEXT_THRESHOLD) {
|
||||
// Good text extraction - use it
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(pdfTextResult.text, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' }));
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: true, actualConverterId: 'pdf-text', explain: `${pdfTextResult.avgCharsPerPage.toFixed(0)} chars/page` },
|
||||
});
|
||||
} else {
|
||||
// Low text density - try OCR
|
||||
// console.log(`[PDF Auto] Low text density (${pdfTextResult.avgCharsPerPage.toFixed(0)} chars/page), trying OCR...`);
|
||||
|
||||
// [pdf-images] Phase 2: Render pages to images (20-40% progress)
|
||||
const pdfArrayBufferForImages = await input.data.arrayBuffer();
|
||||
const imageDataURLs = await pdfToImageDataURLs(pdfArrayBufferForImages, PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: 0.2 + progress * 0.2 });
|
||||
});
|
||||
|
||||
// Limit pages for OCR (performance)
|
||||
const pagesToProcess = Math.min(imageDataURLs.length, PDF_FALLBACK_MAX_IMAGES);
|
||||
const imagesToOcr = imageDataURLs.slice(0, pagesToProcess);
|
||||
|
||||
// Phase 3: Try OCR on rendered pages (40-90% progress)
|
||||
try {
|
||||
// [pdf-images-ocr] OCR the images
|
||||
const ocrResult = await ocrPdfPagesWithProgress(imagesToOcr, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: 0.4 + progress * 0.5 });
|
||||
});
|
||||
|
||||
if (ocrResult.avgCharsPerPage >= PDF_LOW_TEXT_THRESHOLD) {
|
||||
// OCR yielded good text - use it
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(ocrResult.text, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' }));
|
||||
const truncNote = pdfTextResult.pageCount > pagesToProcess ? ` (${pagesToProcess}/${pdfTextResult.pageCount} pages)` : '';
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: true, actualConverterId: 'pdf-images-ocr', explain: /*OCR extracted */`${ocrResult.avgCharsPerPage.toFixed(0)} chars/page${truncNote}` },
|
||||
});
|
||||
} else {
|
||||
// OCR also yielded poor results - fall back to images
|
||||
// console.log(`[PDF Auto] OCR also sparse (${ocrResult.avgCharsPerPage.toFixed(0)} chars/page), falling back to images`);
|
||||
for (let i = 0; i < pagesToProcess; i++) {
|
||||
const pdfPageImage = imageDataURLs[i];
|
||||
const pdfPageImageF = await imageDataToImageAttachmentFragmentViaDBlob(pdfPageImage.mimeType, pdfPageImage.base64Data, source, `${title} (pg. ${i + 1})`, caption, false, false);
|
||||
if (pdfPageImageF)
|
||||
newFragments.push(pdfPageImageF);
|
||||
}
|
||||
const truncNote = pdfTextResult.pageCount > pagesToProcess ? ` (${pagesToProcess}/${pdfTextResult.pageCount} pages)` : '';
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: true, actualConverterId: 'pdf-images', explain: `not a text page${truncNote}` },
|
||||
});
|
||||
}
|
||||
} catch (ocrError) {
|
||||
// OCR failed - fall back to images
|
||||
console.warn('[PDF Auto] OCR failed, falling back to images:', ocrError);
|
||||
for (let i = 0; i < pagesToProcess; i++) {
|
||||
const pdfPageImage = imageDataURLs[i];
|
||||
const pdfPageImageF = await imageDataToImageAttachmentFragmentViaDBlob(pdfPageImage.mimeType, pdfPageImage.base64Data, source, `${title} (pg. ${i + 1})`, caption, false, false);
|
||||
if (pdfPageImageF)
|
||||
newFragments.push(pdfPageImageF);
|
||||
}
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: true, actualConverterId: 'pdf-images', explain: 'OCR failed, attached as images' },
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error in PDF auto conversion:', error);
|
||||
}
|
||||
break;
|
||||
|
||||
// pdf to images
|
||||
// pdf-text: strict text extraction, no fallback (honors user choice)
|
||||
case 'pdf-text':
|
||||
if (!_expectBlob(input.data, 'PDF text converter')) break;
|
||||
try {
|
||||
const pdfTextResult = await pdfToText(await input.data.arrayBuffer(), progress => edit(attachment.id, { outputsConversionProgress: progress }));
|
||||
// Always output text, even if sparse (user explicitly chose this)
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(pdfTextResult.text, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' }));
|
||||
edit(attachment.id, {
|
||||
// warn if very little text was extracted (likely a scanned PDF)
|
||||
outputWarnings: pdfTextResult.avgCharsPerPage >= 20 ? undefined : ['Very little text extracted - this PDF may be scanned. Try "Auto" or "OCR (for scans)" mode.'],
|
||||
outputsHeuristic: { isAuto: false, actualConverterId: 'pdf-text', explain: `${pdfTextResult.avgCharsPerPage.toFixed(0)} chars/page` },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error in PDF text extraction:', error);
|
||||
}
|
||||
break;
|
||||
|
||||
// pdf-images: render all pages as images (honors user choice)
|
||||
case 'pdf-images':
|
||||
if (!_expectBlob(input.data, 'PDF images converter')) break;
|
||||
// Convert Blob to ArrayBuffer for PDF.js
|
||||
try {
|
||||
const imageDataURLs = await pdfToImageDataURLs(await input.data.arrayBuffer(), DEFAULT_ADRAFT_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
const imageDataURLs = await pdfToImageDataURLs(await input.data.arrayBuffer(), PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: progress });
|
||||
});
|
||||
for (const pdfPageImage of imageDataURLs) {
|
||||
@@ -618,11 +843,39 @@ export async function attachmentPerformConversion(
|
||||
if (pdfPageImageF)
|
||||
newFragments.push(pdfPageImageF);
|
||||
}
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: false, actualConverterId: 'pdf-images', explain: `${imageDataURLs.length} pages` },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error converting PDF to images:', error);
|
||||
}
|
||||
break;
|
||||
|
||||
// pdf-images-ocr: force OCR on all pages (for scanned documents)
|
||||
case 'pdf-images-ocr':
|
||||
if (!_expectBlob(input.data, 'PDF OCR converter')) break;
|
||||
try {
|
||||
// Render pages to images (0-40% progress)
|
||||
const imageDataURLs = await pdfToImageDataURLs(await input.data.arrayBuffer(), PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: progress * 0.4 });
|
||||
});
|
||||
|
||||
// OCR all pages (40-100% progress)
|
||||
const ocrResult = await ocrPdfPagesWithProgress(imageDataURLs, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: 0.4 + progress * 0.6 });
|
||||
});
|
||||
|
||||
newFragments.push(createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(ocrResult.text, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' }));
|
||||
edit(attachment.id, {
|
||||
// warn if very little text was extracted (likely a scanned PDF)
|
||||
outputWarnings: ocrResult.avgCharsPerPage >= 20 ? undefined : ['Very little text extracted via OCR - this PDF may contain mostly images/diagrams.'],
|
||||
outputsHeuristic: { isAuto: false, actualConverterId: 'pdf-images-ocr', explain: `${ocrResult.avgCharsPerPage.toFixed(0)} chars/page from ${ocrResult.pageCount} pages` },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error in PDF OCR:', error);
|
||||
}
|
||||
break;
|
||||
|
||||
// pdf to text and images
|
||||
case 'pdf-text-and-images':
|
||||
if (!_expectBlob(input.data, 'PDF text and images converter')) break;
|
||||
@@ -634,7 +887,7 @@ export async function attachmentPerformConversion(
|
||||
|
||||
// duplicated from 'pdf-images' (different progress update)
|
||||
const imageFragments: DMessageAttachmentFragment[] = [];
|
||||
const imageDataURLs = await pdfToImageDataURLs(pdfArrayBufferForImages, DEFAULT_ADRAFT_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
const imageDataURLs = await pdfToImageDataURLs(pdfArrayBufferForImages, PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
|
||||
edit(attachment.id, { outputsConversionProgress: progress / 2 }); // Update progress (0% to 50%)
|
||||
});
|
||||
for (const pdfPageImage of imageDataURLs) {
|
||||
@@ -644,18 +897,21 @@ export async function attachmentPerformConversion(
|
||||
}
|
||||
|
||||
// duplicated from 'pdf-text'
|
||||
const pdfText = await pdfToText(pdfArrayBufferForText, (progress: number) => {
|
||||
const pdfTextResult = await pdfToText(pdfArrayBufferForText, (progress: number) => {
|
||||
edit(attachment.id, { outputsConversionProgress: 0.5 + progress / 2 }); // Update progress (50% to 100%)
|
||||
});
|
||||
if (pdfText.trim().length < 2) {
|
||||
// Do not warn the user, as hopefully the images are useful
|
||||
} else {
|
||||
const textFragment = createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(pdfText, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' });
|
||||
if (pdfTextResult.text.trim().length >= 2) {
|
||||
// Add text fragment if there's meaningful text
|
||||
const textFragment = createDocAttachmentFragment(title, caption, DVMimeType.TextPlain, createDMessageDataInlineText(pdfTextResult.text, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'pdf' });
|
||||
newFragments.push(textFragment);
|
||||
}
|
||||
// Note: if text is sparse, images are still attached (user explicitly chose text+images)
|
||||
|
||||
// Add the text fragment first, then the image fragments
|
||||
newFragments.push(...imageFragments);
|
||||
edit(attachment.id, {
|
||||
outputsHeuristic: { isAuto: false, actualConverterId: 'pdf-text-and-images', explain: `${pdfTextResult.avgCharsPerPage.toFixed(0)} chars/page + ${imageFragments.length} images` },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error converting PDF to text and images:', error);
|
||||
}
|
||||
@@ -772,6 +1028,12 @@ export async function attachmentPerformConversion(
|
||||
case 'unhandled':
|
||||
// force the user to explicitly select 'as text' if they want to proceed
|
||||
break;
|
||||
|
||||
|
||||
default:
|
||||
const _exhaustiveCheck: never = converter.id;
|
||||
console.warn('[DEV] Unhandled converter type:', _exhaustiveCheck);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -814,6 +1076,19 @@ async function _inputDataToString(data: AttachmentDraftInput['data'], debugLocat
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple Client-side cleaning of possible HTML
|
||||
*/
|
||||
function _cleanPossibleHtmlText(inputStr: string): string {
|
||||
return inputStr
|
||||
// remove class and style attributes
|
||||
.replace(/<[^>]+>/g, (tag) =>
|
||||
tag.replace(/ class="[^"]*"/g, '').replace(/ style="[^"]*"/g, ''),
|
||||
)
|
||||
// remove svg elements
|
||||
.replace(/<svg[^>]*>.*?<\/svg>/g, '');
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Special function to convert a list of files to Attachment Fragments, without passing through the attachments system
|
||||
|
||||
@@ -24,6 +24,16 @@ export type AttachmentDraft = {
|
||||
outputsConversionProgress: number | null;
|
||||
outputFragments: DMessageAttachmentFragment[];
|
||||
|
||||
// Warnings for poor conversions (e.g. scanned PDF with text extraction rather than OCR)
|
||||
outputWarnings?: string[];
|
||||
|
||||
// Tracks what method was actually used (especially for Auto mode)
|
||||
outputsHeuristic?: {
|
||||
isAuto: boolean;
|
||||
actualConverterId: AttachmentDraftConverterType;
|
||||
explain?: string; // e.g., "42 chars/page detected"
|
||||
};
|
||||
|
||||
// metadata: {
|
||||
// creationDate?: Date; // Creation date of the file
|
||||
// modifiedDate?: Date; // Last modified date of the file
|
||||
@@ -33,6 +43,13 @@ export type AttachmentDraft = {
|
||||
|
||||
export type AttachmentDraftId = string;
|
||||
|
||||
export type AttachmentCreationOptions = {
|
||||
/** Also attach an image representation of the attachment. Requires Release.Features.ENABLE_TEXT_AND_IMAGES as well. */
|
||||
hintAddImages?: boolean;
|
||||
}
|
||||
|
||||
export type AttachmentCloudProviderId = 'gdrive' | 'onedrive' | 'dropbox';
|
||||
|
||||
|
||||
// 0. draft source (filled at the onset)
|
||||
|
||||
@@ -51,6 +68,23 @@ export type AttachmentDraftSource = {
|
||||
method: 'clipboard-read' | AttachmentDraftSourceOriginDTO;
|
||||
textPlain?: string;
|
||||
textHtml?: string;
|
||||
} | {
|
||||
media: 'cloud';
|
||||
origin: AttachmentDraftSourceOriginCloud;
|
||||
|
||||
// auth for fetching
|
||||
accessToken: string;
|
||||
// tokenExpiresAt?: number; // optional for staleness detection, unix ts
|
||||
|
||||
// recipe for fetching
|
||||
provider: AttachmentCloudProviderId;
|
||||
fileId: string;
|
||||
mimeType: string; // cloud-native MIME (e.g., 'application/vnd.google-apps.document')
|
||||
|
||||
// decorative
|
||||
fileName: string;
|
||||
fileSize?: number;
|
||||
webViewLink?: string; // link to view in cloud provider's UI
|
||||
} | {
|
||||
// special type for attachments thar are references to self (ego, application) objects
|
||||
media: 'ego';
|
||||
@@ -65,10 +99,7 @@ export type AttachmentDraftSourceOriginDTO = 'drop' | 'paste';
|
||||
|
||||
export type AttachmentDraftSourceOriginUrl = 'input-link' | 'clipboard-read' | AttachmentDraftSourceOriginDTO;
|
||||
|
||||
export type AttachmentCreationOptions = {
|
||||
/** Also attach an image representation of the attachment. Requires Release.Features.ENABLE_TEXT_AND_IMAGES as well. */
|
||||
hintAddImages?: boolean;
|
||||
}
|
||||
export type AttachmentDraftSourceOriginCloud = `picker-${AttachmentCloudProviderId}`;
|
||||
|
||||
|
||||
// 1. draft input (loaded from the source)
|
||||
@@ -135,9 +166,10 @@ export type AttachmentDraftConverter = {
|
||||
}
|
||||
|
||||
export type AttachmentDraftConverterType =
|
||||
| 'text' | 'rich-text' | 'rich-text-cleaner' | 'rich-text-table'
|
||||
| 'image-original' | 'image-resized-high' | 'image-resized-low' | 'image-ocr' | 'image-to-default'
|
||||
| 'pdf-text' | 'pdf-images' | 'pdf-text-and-images'
|
||||
| 'text' | 'text-cleaner' | 'text-markdown'
|
||||
| 'rich-text' | 'rich-text-cleaner' | 'rich-text-markdown' | 'rich-text-table'
|
||||
| 'image-original' | 'image-resized-high' | 'image-resized-low' | 'image-ocr' | 'image-caption' | 'image-to-default'
|
||||
| 'pdf-auto' | 'pdf-text' | 'pdf-images' | 'pdf-images-ocr' | 'pdf-text-and-images'
|
||||
| 'docx-to-html'
|
||||
| 'url-page-text' | 'url-page-markdown' | 'url-page-html' | 'url-page-null' | 'url-page-image'
|
||||
| 'youtube-transcript' | 'youtube-transcript-simple'
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user