mirror of
https://github.com/enricoros/big-AGI.git
synced 2026-05-10 21:50:14 -07:00
Compare commits
1772 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b35901d94c | |||
| c0df1a23f4 | |||
| 495619af2c | |||
| 72dfadf106 | |||
| 5825909e45 | |||
| d3f6d87ee0 | |||
| c4f4c5ddad | |||
| 2921d7ca27 | |||
| 2021cbc988 | |||
| e9e29861b2 | |||
| 8e6da36059 | |||
| 5e1469e12e | |||
| bd7465f8b1 | |||
| 570397a616 | |||
| b3b5f1daef | |||
| 25ec3ae47c | |||
| 5ba5e3da58 | |||
| 9296c14ca0 | |||
| 310b5d3422 | |||
| 1c5967112e | |||
| 49a3d8ee71 | |||
| cf8b61e8d9 | |||
| 967ae5723e | |||
| 03421acf2f | |||
| d43896cc5a | |||
| b283124a2f | |||
| 8c39be01f8 | |||
| fb2bd4ccd8 | |||
| 5b826ffc45 | |||
| 0b2ab365d3 | |||
| 93fc54992c | |||
| 60b7326deb | |||
| d6e6139244 | |||
| 0892911ddc | |||
| 30267ac50c | |||
| ffef0ef31d | |||
| fc047087ce | |||
| 81d4966535 | |||
| 004d63fda1 | |||
| 23e2dbb354 | |||
| 28e9899b97 | |||
| 7441d41550 | |||
| 99e2d5597a | |||
| 74321a44ca | |||
| 7b664affb7 | |||
| c411835f3b | |||
| 7b62c946a5 | |||
| 252e2fcd29 | |||
| aa2731bccc | |||
| 282c439963 | |||
| e99459aba0 | |||
| 4c35cbbe34 | |||
| cab3537ae2 | |||
| c3f211389b | |||
| a4de84a842 | |||
| 2bf1eaaa0f | |||
| 7f5ddd1629 | |||
| ed798fec65 | |||
| 90386f5794 | |||
| 8ada8811bf | |||
| b24badabef | |||
| 4e20cb12cd | |||
| 245da9e6cc | |||
| a800b34aa7 | |||
| 50c3941f42 | |||
| 6e5d5ee36c | |||
| 2c8b713ff3 | |||
| 8162a6706d | |||
| 952f6883fa | |||
| 373f3e3698 | |||
| 17791f631f | |||
| 6987c67cc7 | |||
| 65a59e5d2d | |||
| 05b9a6d412 | |||
| 6608f4f164 | |||
| 93378ad6b0 | |||
| bd4a60203e | |||
| c9e6a62641 | |||
| 68d797fa99 | |||
| 08011d8cf2 | |||
| 2f91bf7f52 | |||
| d5182c05c1 | |||
| 8e0947a833 | |||
| 1d88fc37b0 | |||
| 46bd8e6f4d | |||
| b95b427331 | |||
| 9b574c60eb | |||
| a8b39cc0a4 | |||
| cdbc7dd9b8 | |||
| 08dfec4fcf | |||
| 7f4553225b | |||
| f37e65a91e | |||
| c022f8a68c | |||
| daa7a506a5 | |||
| f3dcf39c15 | |||
| 06cbef16d4 | |||
| ab31bcd3e3 | |||
| 563a99864f | |||
| 39b8abc2c6 | |||
| f3dd837076 | |||
| d6b3a5259d | |||
| 9fea1d5c64 | |||
| 0adb5355c7 | |||
| 01d807b61e | |||
| 285bb812d0 | |||
| d897155d6e | |||
| 7154426279 | |||
| 4526084e4d | |||
| 0c5c786ae3 | |||
| 8a2c4aa356 | |||
| 4cba819edd | |||
| 4db42a2b29 | |||
| fc0ee5b698 | |||
| 2c0c3f1c70 | |||
| 3f3976b73c | |||
| 82d5dcced5 | |||
| f4eaed694a | |||
| 05d9869326 | |||
| 2675934ff8 | |||
| fb6e19d3ea | |||
| f1151d54e1 | |||
| 6a0fa4f9fa | |||
| 20d96fffc8 | |||
| ad6c06308a | |||
| 84ee4171a4 | |||
| 6bc4f8a1e4 | |||
| 8876aa0866 | |||
| 691d2e7228 | |||
| 7a12755de9 | |||
| 8573f56d03 | |||
| 8f3e683321 | |||
| 64867b0b67 | |||
| e42d060e57 | |||
| 2ca9ab8a0c | |||
| fdc0c6b371 | |||
| 8f8779c3cd | |||
| 851877ad8b | |||
| 8df74529ad | |||
| 353f51ebf0 | |||
| 6c5cb08118 | |||
| 54fee92b15 | |||
| 776431c801 | |||
| 9f893ce999 | |||
| 820447670c | |||
| b43c49cd64 | |||
| f9c3558975 | |||
| 1b75250824 | |||
| 3fa3bb5d03 | |||
| ef0ff55f1f | |||
| 66aa8ed177 | |||
| 519286bc69 | |||
| 9882f45fd2 | |||
| 634f6216a0 | |||
| 69574a7d1c | |||
| eddd4b9be8 | |||
| 9a9c31ff53 | |||
| 41ee7a1c85 | |||
| 2f9bbf373c | |||
| d662e10ebb | |||
| cd31092333 | |||
| 1eae7ab6f3 | |||
| ba378f852f | |||
| 5cfd1e557d | |||
| df31d79eaf | |||
| 12d7304325 | |||
| 41424cbdfd | |||
| 05dda519a2 | |||
| 120d39282e | |||
| 8e7d0fd13b | |||
| 3d979fdfbb | |||
| 6ab47ae3cb | |||
| a4977b4924 | |||
| bac9c692b8 | |||
| 6ab15356e1 | |||
| 73cc7121c3 | |||
| 1aeef06f49 | |||
| 3b16bcf01d | |||
| f6351fda41 | |||
| 007e91480d | |||
| 163ef9296e | |||
| fa042f7d68 | |||
| 8a11040dde | |||
| a88971d557 | |||
| 5867e5fcc5 | |||
| 20e587d6d3 | |||
| 6bfa8471cd | |||
| 5c10bce2f4 | |||
| f1663f6668 | |||
| 90c27e0e74 | |||
| b5eac0d907 | |||
| 4eabe2cb3a | |||
| a1c0d30a06 | |||
| 63c9f65040 | |||
| f58a066bff | |||
| 952ea6357a | |||
| 6695973035 | |||
| 3dc28635f4 | |||
| 0bde01a85f | |||
| b9840c2074 | |||
| 8228a76875 | |||
| 46b370a2e3 | |||
| 820e9513ba | |||
| bd71d64db3 | |||
| 9d4baf827c | |||
| d6843d7fcf | |||
| babb1dd962 | |||
| aa32e396a7 | |||
| 1068efcb49 | |||
| 576c7f1458 | |||
| 37c857b055 | |||
| 794dfb44d1 | |||
| 929bb6dc66 | |||
| 28337e31eb | |||
| 09a38c0e4b | |||
| 645b8fb9cd | |||
| 541588948c | |||
| bdd6fcfbbc | |||
| 9e50286c66 | |||
| 418e4649dc | |||
| 4a70f20f4a | |||
| d6eabfcb6d | |||
| d88889d760 | |||
| 85146d8af0 | |||
| 9612572f07 | |||
| 4bb1dddf4d | |||
| b066a86962 | |||
| 6086455782 | |||
| 9020b3cbad | |||
| 5822dea270 | |||
| c445f59664 | |||
| 737e4cb4f9 | |||
| dba7368d01 | |||
| 314c4cd8cc | |||
| 3e46f99e14 | |||
| e0cc552b8d | |||
| 6b5be403af | |||
| 269d5989bc | |||
| edfe3d9b65 | |||
| ffb2c42a26 | |||
| b7de19b020 | |||
| 77cd659b39 | |||
| fbba9d8357 | |||
| f464a9efdf | |||
| 7ec4290582 | |||
| 3f887a1d3a | |||
| ffd76dc587 | |||
| d7f3594a73 | |||
| 32fa5f206b | |||
| 70d2c09e81 | |||
| 17f03806d0 | |||
| b6aba0efa4 | |||
| 65a5e06935 | |||
| f459cb9805 | |||
| f5470aca5d | |||
| c26af97fe7 | |||
| 766ec458a2 | |||
| 48ff78580c | |||
| 396f7524d7 | |||
| da19ef42f5 | |||
| 91abe5aa43 | |||
| 682435321b | |||
| 76f0d60224 | |||
| 628b88ef9f | |||
| 6a792814ce | |||
| 05ce15d677 | |||
| 4a9d0d4f8e | |||
| 16f0552682 | |||
| 9e3819b9c7 | |||
| 233a0d4b35 | |||
| bd95b808ae | |||
| 96132c4585 | |||
| 3edacef572 | |||
| 36889c1695 | |||
| cd2c6c1d8f | |||
| d8c78b1a00 | |||
| 74a22c26cf | |||
| f742eba4c1 | |||
| 36c2812157 | |||
| d353fc4c63 | |||
| 98bd3d6da0 | |||
| cd5ec8d295 | |||
| f91c6456bd | |||
| 67af87968e | |||
| 58ea3e1b35 | |||
| a9435c10e8 | |||
| a86860fe76 | |||
| a3d707f78a | |||
| c502426249 | |||
| 2fb5ffcecf | |||
| 6d995c1253 | |||
| a860c1c490 | |||
| 481d9cc745 | |||
| 7e53a7bc2b | |||
| 4df10e3782 | |||
| 396da65178 | |||
| 87e8faf383 | |||
| 9eb3e6d398 | |||
| 332c4fdf82 | |||
| 4d247344d5 | |||
| 4e4738d4f6 | |||
| dbfa7b0932 | |||
| e90231d58d | |||
| 9bc7d40425 | |||
| d2d5c0621b | |||
| e41d57c914 | |||
| 7c5336cba3 | |||
| d041e4e2bf | |||
| 7fba6255ff | |||
| dc226d9ac0 | |||
| c01a937d7d | |||
| ee6646a66f | |||
| b73aa16001 | |||
| 92c875459a | |||
| 011fbbe834 | |||
| a921ea6fe5 | |||
| 82bcc6d5d5 | |||
| f6d52da034 | |||
| cd3159cacf | |||
| 1af4e18cb3 | |||
| 7b6eb94bf7 | |||
| 8cc6d65dd4 | |||
| 54e5f9a1bc | |||
| fa28305141 | |||
| 1e56b36eae | |||
| e2253cde7f | |||
| 6a4bfc1cf2 | |||
| dfc0d5088d | |||
| 8f154305e9 | |||
| 09b96a01bf | |||
| 1ce0c631b4 | |||
| 61a5b6d5eb | |||
| ca62bad217 | |||
| 13f352a901 | |||
| 775af756fd | |||
| 5c4545877d | |||
| 9c820dcaf1 | |||
| 49f0bf4802 | |||
| fbb2f106f0 | |||
| cb46d3d536 | |||
| 84289c4ade | |||
| b35ffd9983 | |||
| 8197fed036 | |||
| f6c40cdce6 | |||
| b8cca72cf1 | |||
| d20cafa22b | |||
| 421a5ae681 | |||
| 49157b9efa | |||
| c11684a9cf | |||
| 12aa812b37 | |||
| 3667425c61 | |||
| fd0ab93744 | |||
| a0b549855f | |||
| c70c89c2e8 | |||
| 32c5c00d55 | |||
| 013d0e0217 | |||
| f0bf866654 | |||
| 2c14cb1113 | |||
| 15abecfbb6 | |||
| 827d64d49a | |||
| 01c45b2286 | |||
| d3e5c196f9 | |||
| 71978b94f2 | |||
| 79da87d823 | |||
| 1c19f36783 | |||
| a4d4e351e5 | |||
| 45ef2afccb | |||
| 9ef5b61722 | |||
| ff008d1034 | |||
| 3cd38f471e | |||
| 1581d46be7 | |||
| 32571e15eb | |||
| d69adaa6af | |||
| 246968098a | |||
| 861c4ef370 | |||
| bfe94e98f2 | |||
| 9152318ef6 | |||
| 302694bdad | |||
| 14602a1411 | |||
| 044baa5fc2 | |||
| 3fa09194a7 | |||
| d3aa10f9d1 | |||
| e2b2d5974f | |||
| d99668aa40 | |||
| 5f8d5678fa | |||
| 14f245df2b | |||
| f104fb64fd | |||
| 3c2d7a636a | |||
| 31b215e58b | |||
| 53ae177396 | |||
| 3e1bb3bb3d | |||
| eac150f590 | |||
| 5466b8a265 | |||
| c3d10c355f | |||
| d96a8c14b9 | |||
| be94f31a85 | |||
| f7ce349125 | |||
| a4516b5fa6 | |||
| 7c1f30c3c7 | |||
| df67be4b03 | |||
| 578bb93d8b | |||
| b4c5a24864 | |||
| c4a38a6cf6 | |||
| e58f6cc48e | |||
| 8a0c4747c7 | |||
| 8bef4b9aae | |||
| 66382ed980 | |||
| 8984b65a51 | |||
| efea6dafbd | |||
| 6d4d05e8f7 | |||
| 560a07b4fe | |||
| fbaff3bde3 | |||
| 2a01f929f1 | |||
| d1d0c32a92 | |||
| 3a513e2a4d | |||
| 9b32c4b8c5 | |||
| 64542af5af | |||
| 1db35feeca | |||
| 7392063e25 | |||
| e6745b16f6 | |||
| be09b452f0 | |||
| 42588444a5 | |||
| dc48bd1222 | |||
| b59eb6cbfb | |||
| a75a31ff04 | |||
| a0f97e9cd8 | |||
| fe6e7245de | |||
| a46a9bf76c | |||
| 925e500dc2 | |||
| 22f0a70272 | |||
| 220cc60f7d | |||
| 3964fca4b2 | |||
| 8fdbb21300 | |||
| c42c9545d2 | |||
| 0de37e337b | |||
| 3ecf7f6016 | |||
| da7a62945c | |||
| c876390e27 | |||
| 9bbc2a2e00 | |||
| 2b18cbc3b9 | |||
| 388391ddae | |||
| 3e4e6b2f4b | |||
| e6a65bdf8e | |||
| 0e09cf3d84 | |||
| 5634aa0cac | |||
| 07916be684 | |||
| 8d20b4675b | |||
| d906669ea4 | |||
| 5d7b00f0dc | |||
| 740d76c15c | |||
| ca4d21d4b8 | |||
| e4defc1baf | |||
| 9ea859081d | |||
| 87d8320b31 | |||
| 84aea90860 | |||
| 95f35cb5cf | |||
| c79ba097c0 | |||
| 8ea1f02c86 | |||
| 674c9c8c25 | |||
| 98a3e7e185 | |||
| ee00c53ada | |||
| 0553f64fe8 | |||
| ff06f6f04c | |||
| 3f45617e06 | |||
| 9d93c8c55a | |||
| 73eaf740db | |||
| 48426d5022 | |||
| c79237b419 | |||
| b0abaf4d9e | |||
| ec92a8d31a | |||
| a4600a4d1d | |||
| ad6a465ce7 | |||
| 0820bb5af6 | |||
| 73f8488d22 | |||
| 2b3c1c38f3 | |||
| 59f379f46b | |||
| 2bc6ecbe4c | |||
| 8274a34841 | |||
| 6e7197caa3 | |||
| 7c78d48b6c | |||
| b149eb7fa2 | |||
| ba79a3c42c | |||
| 4445ac295f | |||
| 09c2a8b072 | |||
| 92e371837d | |||
| 7fad41dc8a | |||
| 0be8ac7e09 | |||
| de6e8a047c | |||
| 92955f92bf | |||
| 5327866836 | |||
| 54b8836faa | |||
| eb39db9974 | |||
| 087e6e2eaf | |||
| 295d91b310 | |||
| f75bcb78d7 | |||
| ffb32d8720 | |||
| 879458d692 | |||
| 96eece3a3e | |||
| dc75136131 | |||
| 57c43b3c4e | |||
| 4c5b7677e6 | |||
| 43890150e5 | |||
| bc86214c5e | |||
| ef1f412019 | |||
| 1249efb53b | |||
| 8bc81e45ce | |||
| 810f316185 | |||
| 5b49e801d1 | |||
| 3269e10da9 | |||
| 53a57fd7ff | |||
| dbbf25c3af | |||
| a2ff00f53b | |||
| 4904383838 | |||
| 8221444308 | |||
| 7cd94b3163 | |||
| 52cdf7da4e | |||
| 6ff010ae0e | |||
| 6d81150975 | |||
| 0fdcc4c64d | |||
| f272c9cb12 | |||
| 5354f83736 | |||
| f4b2f36ac0 | |||
| 5fca834c20 | |||
| fff48335ae | |||
| f39a1825cf | |||
| c1b10405a5 | |||
| 37ba583cf2 | |||
| 4beb7de83f | |||
| cb8202e327 | |||
| 90c90f78b6 | |||
| e700c27256 | |||
| 7372287b5c | |||
| d059948f62 | |||
| 1cb6491d17 | |||
| 3a6e8a5f27 | |||
| c0cd820880 | |||
| 7b5655dd6d | |||
| 0f4c108614 | |||
| 86f4cc66d1 | |||
| ca38e7f160 | |||
| 99bd54ca79 | |||
| 9a3ef83078 | |||
| c1d3c5d350 | |||
| a36e202c80 | |||
| b713b65a35 | |||
| 925445c729 | |||
| ce8140ce22 | |||
| d2f60e51c7 | |||
| c66885d25c | |||
| 8d4ca7b547 | |||
| 280b32b3a9 | |||
| 522bd890c1 | |||
| 88e1f51099 | |||
| 8774b222d9 | |||
| b9ef1d608c | |||
| a0d25a1d48 | |||
| 92cd9e5930 | |||
| 3099b0d0ec | |||
| 4a5ce94d29 | |||
| b47a1fd562 | |||
| 10bef4f75c | |||
| 41c571caf5 | |||
| a21b049437 | |||
| f06fbec8df | |||
| 24b6b4e1a9 | |||
| df8f9b3e3a | |||
| 85a55bcc4c | |||
| facb2e3f2b | |||
| f6e79510c9 | |||
| 528055929a | |||
| 7a1774a2ba | |||
| 66749ded0a | |||
| 6f74dc6c72 | |||
| b8d27346e0 | |||
| e1e73cd260 | |||
| a1bf15c316 | |||
| e69bf34ed6 | |||
| fa1a977870 | |||
| 7ed4ccb66c | |||
| 76a90ede24 | |||
| 89e8c24f46 | |||
| 430c7602d4 | |||
| 51b9fbac0f | |||
| 63eba761c5 | |||
| e80fb7aa73 | |||
| 8b2b98fc10 | |||
| c9712c72a0 | |||
| d0ad4095c0 | |||
| 1c00286a70 | |||
| 8687c6b08b | |||
| 7bdf467833 | |||
| 39736fbd27 | |||
| f5e34e8096 | |||
| b2246ed922 | |||
| a499e8463c | |||
| 708ae291cc | |||
| 0d4db0322b | |||
| 39ae2e47f9 | |||
| 25159669df | |||
| 4e24281e18 | |||
| d9bdeeb6b3 | |||
| b2847e7026 | |||
| 3f6bd90f64 | |||
| 6b5984deac | |||
| 2dfaec9216 | |||
| ddbc5e65e8 | |||
| 5dae51d2a1 | |||
| 75215955be | |||
| 79ee764a9f | |||
| dce27e89a1 | |||
| 448df4baf8 | |||
| dafd09084a | |||
| cae7d06256 | |||
| a27eae46f6 | |||
| 9f067c07f0 | |||
| 1f0be73695 | |||
| ce6d42dcdd | |||
| 439740adba | |||
| cff36c0c31 | |||
| 7c9edaf186 | |||
| bbc736d72a | |||
| 47439b9907 | |||
| c3274d66c9 | |||
| d4836914dd | |||
| 4a44393878 | |||
| 123ebc0f26 | |||
| 0a133a764b | |||
| c1d807a516 | |||
| aad715f7e1 | |||
| f1ec94111a | |||
| 07fcecc5b5 | |||
| c56328009e | |||
| f8cbb6faa2 | |||
| c07eb4014f | |||
| 94c1b35cee | |||
| 2277fd0880 | |||
| a2313186e4 | |||
| 3351d61ca7 | |||
| 905d438075 | |||
| ba3290f4e1 | |||
| a828ea45aa | |||
| 7c484ea5d8 | |||
| 5b68608d5b | |||
| 08ef5396f3 | |||
| 662ef5ae4f | |||
| 23a1e9b335 | |||
| b79f8a1508 | |||
| a793fa041e | |||
| c5ef92f1f7 | |||
| 7ccf22c2f4 | |||
| 67df71ab45 | |||
| 0636ca76ea | |||
| 2f2e4e36be | |||
| 913c821eae | |||
| 43f2bacf58 | |||
| ae0cf1a89e | |||
| 0b2d037385 | |||
| cd5cef1c25 | |||
| 8c5f70a339 | |||
| f5ecfd1d74 | |||
| e0de908741 | |||
| cd2ccff0d7 | |||
| 3df6c62dce | |||
| 463af67d17 | |||
| 80f345b179 | |||
| bf212ca83f | |||
| 0185712cbf | |||
| a5199a23d9 | |||
| 011c382360 | |||
| 5c9ce84249 | |||
| 9e89ba9b10 | |||
| cb8cefb0ea | |||
| 7607b8fec5 | |||
| 05a96c5aca | |||
| 762b0c11ff | |||
| c903f9bc5c | |||
| c190ae89ce | |||
| 1b6b491eee | |||
| 4e9c0ba489 | |||
| 13fcb932d1 | |||
| f9f2c3d2b2 | |||
| bdab75c336 | |||
| 5996934f60 | |||
| 2f8659fc38 | |||
| 1e1206ab7e | |||
| 4682afc985 | |||
| 8722e1be6c | |||
| fbd6fd3e7c | |||
| cfba3ce834 | |||
| a4ad1e8295 | |||
| aa441b0656 | |||
| 39a7e30880 | |||
| 74b69f9ea4 | |||
| 3094540b93 | |||
| 513500b16e | |||
| 51c41473a5 | |||
| e79df4a347 | |||
| 53a4a66e9e | |||
| aaf2de278f | |||
| d2e8bad75f | |||
| 98bcbba7ca | |||
| 61258163e2 | |||
| 80b393ca14 | |||
| b57c292581 | |||
| 044e2f9b57 | |||
| b14e9c91c6 | |||
| 58fe41edc3 | |||
| 73a089e177 | |||
| ada9e07c2f | |||
| 3b9e42948e | |||
| 2e822b1eeb | |||
| 8f67c3e398 | |||
| 82289c0564 | |||
| 16e5e08d21 | |||
| 62671ae04f | |||
| 266a5c6408 | |||
| e9264c782f | |||
| 37eb046c10 | |||
| 6e75f7dbee | |||
| e420fa9661 | |||
| 505649e360 | |||
| 3d93c856ba | |||
| 9fe5697fd4 | |||
| 7fde3473ea | |||
| 56a2d68c71 | |||
| 3d140604f8 | |||
| 0a2167fa6a | |||
| 79e95379ec | |||
| 3f740f3800 | |||
| a4a0ecc0e5 | |||
| 686ad2ed7b | |||
| d8db79b4e5 | |||
| d33449f4af | |||
| 7e6a12bddf | |||
| ec80413be0 | |||
| c2af65facd | |||
| 46266ac825 | |||
| 91dc25e1c6 | |||
| 7f9dafd749 | |||
| 989d0e5741 | |||
| 3277c009fa | |||
| 85f1fe088d | |||
| 3c554c92d6 | |||
| f95d071197 | |||
| da887d58db | |||
| 5273a3c84f | |||
| f51712867f | |||
| ecac1dffec | |||
| 28817bee72 | |||
| 3fd41329ea | |||
| f734f0b5f7 | |||
| 3b34a8b96d | |||
| 74e6ee4b2d | |||
| 61929527a3 | |||
| a3e216c956 | |||
| d4203f728e | |||
| 616376f4ac | |||
| 1a309c9bdf | |||
| 253fc3b213 | |||
| a79fd0a10c | |||
| 04df3dcba8 | |||
| 00fbf77dbd | |||
| 9a34c1e376 | |||
| e248104d4b | |||
| c10558f230 | |||
| 5be41b8199 | |||
| d6b6e30cf5 | |||
| 825ca7ba87 | |||
| 5c2a8a4996 | |||
| 58aef2a97d | |||
| e983f9d8a9 | |||
| 7e95dcc1cb | |||
| 69a21a82ac | |||
| 95d2fee63d | |||
| 6f22a71555 | |||
| a30409fcfc | |||
| 217346f572 | |||
| 4472671470 | |||
| b1a026bdd1 | |||
| 312fae5f6d | |||
| 46235aa28a | |||
| 6fe0e297eb | |||
| 3b3214ef5e | |||
| ebc28ed8a4 | |||
| 54c23a9907 | |||
| 8fce40be80 | |||
| 5c5a213c4c | |||
| 3b730680cb | |||
| d7765ae578 | |||
| ab21d5c308 | |||
| 392319a300 | |||
| bee5f950b9 | |||
| 3fc1e3f643 | |||
| ee10b39866 | |||
| 867265fd31 | |||
| 68109a4a37 | |||
| 874401ef8c | |||
| 303a3f2c7d | |||
| 915f338378 | |||
| fd2e1fe34b | |||
| e2e7453431 | |||
| b07573ec4f | |||
| 66c279e895 | |||
| 06e879b884 | |||
| f205dafe4d | |||
| fec18d7039 | |||
| 5ef09455da | |||
| c799869e3b | |||
| 448f5a85d0 | |||
| 9909a537c2 | |||
| 9772a18bf4 | |||
| 0ac80b26bd | |||
| 1f5e25a57b | |||
| 8e5f7ef977 | |||
| ed21c8affd | |||
| 023228c2c5 | |||
| 68f4118bde | |||
| 0edc839857 | |||
| ee6f560388 | |||
| c100355b7b | |||
| 4f7402c343 | |||
| 5ac73e9599 | |||
| c1e46e00d9 | |||
| 7a05f0f9ab | |||
| afcd511893 | |||
| 8f42900e8e | |||
| bcc12876d7 | |||
| e1c2f85bda | |||
| 6989a807d6 | |||
| d92739c793 | |||
| 2fcb80b932 | |||
| 03b0e88ef7 | |||
| a5a73ddbef | |||
| eb57147ed3 | |||
| 0cf12d2a8f | |||
| 06d332e785 | |||
| a75eaaec69 | |||
| 513ee36027 | |||
| 975f425ae4 | |||
| c310ca9c5c | |||
| 21a6f0aa50 | |||
| c2c3fdf7d4 | |||
| ce0880bf5b | |||
| eed099bfed | |||
| 08b37efb55 | |||
| 8443445ed0 | |||
| d011599060 | |||
| 0dd043cb6a | |||
| 1ebd1d9e15 | |||
| 202aef8916 | |||
| 30acf51410 | |||
| d4b01398c7 | |||
| 4dde3d0fe7 | |||
| 8aa6fd7c8e | |||
| e2e6e6d641 | |||
| 20aa91b9a6 | |||
| 7bfd82ae4f | |||
| c5101ee4cf | |||
| 378f390941 | |||
| 3bc8360959 | |||
| af124e7cd9 | |||
| 71633ff441 | |||
| daf2e58c99 | |||
| 3818af2156 | |||
| dd0fd2edcf | |||
| 07304c6d0e | |||
| 4db1708fae | |||
| 0952926265 | |||
| a695484921 | |||
| 55c3eb4cf0 | |||
| 8e42356956 | |||
| 255ef64b37 | |||
| e3f1307b30 | |||
| 93beda7fff | |||
| 91251985db | |||
| b41cb74f45 | |||
| 303b90d1ee | |||
| 86f80a320d | |||
| d4e158a8b6 | |||
| f58eae623a | |||
| bc5493ed50 | |||
| 4e51f26ef2 | |||
| 04226eb686 | |||
| f9743fd04b | |||
| b9746ef100 | |||
| 92e56c3c84 | |||
| aa134d7f21 | |||
| f2bea1867c | |||
| a55acf5146 | |||
| 869b9b994d | |||
| 93fca32e9a | |||
| 1d7dfd53f4 | |||
| a68f35d909 | |||
| f800639e1a | |||
| ed45a01267 | |||
| b0634e272d | |||
| d90f012140 | |||
| 41363a534f | |||
| 44d53e581b | |||
| 5aeb034945 | |||
| 13a95db7a4 | |||
| 1705461e80 | |||
| 3fa7d61c7e | |||
| 0b8268fea3 | |||
| 22ffc74371 | |||
| 31edb6a881 | |||
| b8245095c9 | |||
| ed26e57352 | |||
| ea8a757b19 | |||
| b5d1e5f6c9 | |||
| 142a4495a6 | |||
| 7a9a21c02e | |||
| a60c84987d | |||
| 3150900e13 | |||
| 85e4946ff5 | |||
| dbf6ad70f5 | |||
| bf7a16559b | |||
| fa4c78c9c2 | |||
| 9d99f46f3c | |||
| 5dc86c5649 | |||
| fa82083670 | |||
| fa3bff3e6d | |||
| 9d68b26868 | |||
| 47a0214105 | |||
| 82ea6fef3d | |||
| eec61adad1 | |||
| ada9fb10e8 | |||
| c2bd9c3310 | |||
| ba93062638 | |||
| 61366b7096 | |||
| e1dd9c0117 | |||
| 407d3d8db4 | |||
| 5a2fa26dad | |||
| fd22faeef8 | |||
| 76c5ef46d0 | |||
| 1e725984cd | |||
| 12c6b6f59b | |||
| 4e1d7f0b82 | |||
| 0635edbfff | |||
| 07e2ab07ab | |||
| 134d82c673 | |||
| 947f9c8355 | |||
| 5e6575a63d | |||
| bef61a8547 | |||
| 7eb8c08e6e | |||
| aed5272b6c | |||
| 13e0779ced | |||
| 702006f6ea | |||
| b4fad03c46 | |||
| 77e43a4a7e | |||
| cfd21e7abb | |||
| db490bf4fb | |||
| bc6f3401f8 | |||
| e5c0079f0e | |||
| a68d80f7aa | |||
| 872c9e9e3b | |||
| 0e51924e5e | |||
| c9460a07ef | |||
| f8d80730fe | |||
| c2e0cd844b | |||
| 5493896392 | |||
| 1ad3cb460e | |||
| 721e23de68 | |||
| 97b9f5a232 | |||
| 1a9f5a4fda | |||
| b2153a14d8 | |||
| 8d6499a91c | |||
| 6d6fbac01f | |||
| d576e2387e | |||
| 4e255a355f | |||
| 94401f95d7 | |||
| 739f613881 | |||
| 5dc24557e6 | |||
| 65842a976e | |||
| c6dfc66a14 | |||
| bc54967720 | |||
| 1112aa292f | |||
| 31bb06293d | |||
| 0139f0421b | |||
| 4f63e98e7f | |||
| c04e147ca7 | |||
| b88feeac2c | |||
| 0902c35e13 | |||
| e02ee99d26 | |||
| 313313db1f | |||
| b7bdae00f8 | |||
| b699a665a1 | |||
| b28a282aba | |||
| a30d2ca025 | |||
| f7f3929342 | |||
| 35abb6e69d | |||
| b759be62ea | |||
| 9a2db4a6e9 | |||
| 5bff478d06 | |||
| 3a7402b03d | |||
| d076e73de6 | |||
| 1d98a994d0 | |||
| 3957fae782 | |||
| 72c07faedf | |||
| be3b6ee394 | |||
| 61910827e6 | |||
| 6582beaf2a | |||
| 840223af6f | |||
| a084b71682 | |||
| 1dbe30af3d | |||
| e57fbb88bf | |||
| a5002b4c12 | |||
| c139884671 | |||
| 2b97b0e0cf | |||
| 2e4176d41c | |||
| 40d62b6f2d | |||
| 43d7e19dfb | |||
| ef06071ab1 | |||
| 18578a63ec | |||
| aab0beba93 | |||
| 7d32de50a6 | |||
| 57d91e330e | |||
| a81da26452 | |||
| 803f6bbdea | |||
| 10a3669551 | |||
| d910fbcae1 | |||
| e2a6ee94b0 | |||
| 055a2134e0 | |||
| 30310a51ff | |||
| be648017f5 | |||
| e737272a39 | |||
| d7a5c50ce3 | |||
| a51d5c315f | |||
| 8c1af95b0e | |||
| c4d61fdd21 | |||
| 6301f1f6b5 | |||
| edbe2e55bc | |||
| 604cf43627 | |||
| e124669545 | |||
| 9ee7c6dddd | |||
| 5136261c8e | |||
| c9ebb44442 | |||
| 95d9976a2c | |||
| 1d177c960f | |||
| 81a34ca96c | |||
| 9749b44dbb | |||
| 6dfe2a92a1 | |||
| 44646001c1 | |||
| 088e67c235 | |||
| 0d41c92c01 | |||
| e966674d39 | |||
| ff74a8ed9c | |||
| 64fd32de9a | |||
| 6584bb4cd1 | |||
| a9065d1a1e | |||
| a22832f741 | |||
| 663a33a895 | |||
| 5f7508633b | |||
| 6a99f65979 | |||
| a983f25fb9 | |||
| 7119d92321 | |||
| 5f1a52d620 | |||
| 42d58ed202 | |||
| 20f0dd5b80 | |||
| d95e8b70b9 | |||
| 69d7f3f195 | |||
| 61b2bedf5e | |||
| ab217596d8 | |||
| c6d3bbd7b9 | |||
| ce7699c06b | |||
| ca3df18d99 | |||
| 0f96c9f825 | |||
| d6e41c1026 | |||
| bc1d0ef6e9 | |||
| 352d1425ca | |||
| f92941f4a2 | |||
| 4b6f6728fa | |||
| d12771d408 | |||
| 7a679dd7d8 | |||
| 72ae27e419 | |||
| b5722ac9f5 | |||
| 60b7a20b71 | |||
| 33ea55ec9d | |||
| 294b1c1ea3 | |||
| 75e19914cc | |||
| e24bd418b5 | |||
| 66c1af8333 | |||
| a0917b4533 | |||
| 74731d512f | |||
| e0e8a94031 | |||
| 67306ec0f7 | |||
| a42cfe26e7 | |||
| 9c63614367 | |||
| ccfc129e44 | |||
| ad3b500781 | |||
| 2894c07049 | |||
| e189d3e174 | |||
| b9ead56ec4 | |||
| 48c4ac18ab | |||
| 48d1bc7635 | |||
| 9112cef5f3 | |||
| ff0183b7e6 | |||
| 14ef63b4d2 | |||
| eac6228dde | |||
| 0d28934f37 | |||
| 57b694a93d | |||
| c2a1fcc942 | |||
| 3fdd2fb04d | |||
| 8d9c8f681e | |||
| bdfd8fb526 | |||
| 0d88217a78 | |||
| b15e27e1d3 | |||
| 7db5d84e4d | |||
| c0a37d618a | |||
| b20db5ff50 | |||
| 43e8d5639c | |||
| 92ce0af012 | |||
| fe76cfdd8b | |||
| 738ff07e6a | |||
| e11d3d7407 | |||
| 70dd92f54d | |||
| 75381a2798 | |||
| 29bddbc6ed | |||
| 2ca9baf6ba | |||
| a796a98cd4 | |||
| 02749c290c | |||
| ec13a9664c | |||
| a6d6f69d4e | |||
| 6d4fb2b444 | |||
| a9e3da8b21 | |||
| 56adb0aa88 | |||
| aa9dc1a06f | |||
| e503335026 | |||
| 350aac79b1 | |||
| 56a36987c6 | |||
| 5ef00eb42a | |||
| 6aa52cf5e6 | |||
| 6118d0f940 | |||
| bab7afdfba | |||
| 865cf0652b | |||
| 7126a952b9 | |||
| 66eb325779 | |||
| a55411c150 | |||
| 77eb6fa97c | |||
| 440b13fa48 | |||
| 02f30524a3 | |||
| 26ad736aa1 | |||
| 040588d708 | |||
| 5a635bb532 | |||
| e8014fccb3 | |||
| 85586fdf58 | |||
| d819e03c79 | |||
| 0f2def82c1 | |||
| 634ae0e213 | |||
| d0d2051edf | |||
| a2e0ddcf81 | |||
| 82be58b54a | |||
| ba18891696 | |||
| f2df042c0a | |||
| 3547119577 | |||
| 66519ac33e | |||
| a8ae3aa124 | |||
| 28a00bfb29 | |||
| 8c46abbac3 | |||
| ae7376a708 | |||
| b2d0844959 | |||
| a88ca25708 | |||
| 61acf9e56b | |||
| 8b6ffca2cb | |||
| 56e3aa3835 | |||
| 11bbe22d80 | |||
| 777a6bb29d | |||
| 16b91ba63a | |||
| 0e0ed3d657 | |||
| 910cbb542e | |||
| 17cd63d445 | |||
| 89a4283868 | |||
| 195e167414 | |||
| d5a4fadebd | |||
| 21178f4974 | |||
| 80b22e6c2d | |||
| 9e02e0aabd | |||
| 3572b94e8f | |||
| c53fbe8c73 | |||
| 16450a347e | |||
| 9a12164082 | |||
| 3ba3ab41d2 | |||
| 1ed31199ae | |||
| fc9caa79f8 | |||
| 0c19d011cb | |||
| b4eddbbc30 | |||
| f522f5bbc6 | |||
| fde08e6793 | |||
| 4e8e7fa6cf | |||
| a79806e86c | |||
| 8c0868418c | |||
| b90919a4df | |||
| 7f2842f9ba | |||
| a7f0771ca9 | |||
| 47315ed4a5 | |||
| d4df2f989b | |||
| 3c369e11ae | |||
| 16ba957f3a | |||
| 88a8b10b95 | |||
| 3063c9950c | |||
| fdc5845d90 | |||
| 6f66e2a2bb | |||
| a57a41e676 | |||
| 5209d2c416 | |||
| 64138cdcd2 | |||
| 80a5db3e91 | |||
| fb7dfdf341 | |||
| b0f0e35170 | |||
| cb0cc8b370 | |||
| 9e8a8cb7db | |||
| c4959776dc | |||
| d50d489de7 | |||
| 9472de0246 | |||
| 5d2fff8e53 | |||
| d68ca9df1e | |||
| 04a437e9a6 | |||
| f7fb8c780b | |||
| a7ebf8a014 | |||
| e950ced1a1 | |||
| 15d5a9cb58 | |||
| 5c9747d8eb | |||
| d308739643 | |||
| 6c5db40bd0 | |||
| f3212291dd | |||
| 140a829291 | |||
| e30d938425 | |||
| 521b6a414f | |||
| a20d0f970e | |||
| 66c1307112 | |||
| 241a25599f | |||
| 3f610bf122 | |||
| 858cc41a89 | |||
| 244917faf9 | |||
| 08964188ea | |||
| 07c96661e7 | |||
| 048940d383 | |||
| 85ad0e1e86 | |||
| f2f06f5d44 | |||
| b8e0ef5340 | |||
| 07608b3fe3 | |||
| e808509331 | |||
| 2d9a4fccfa | |||
| 49cf263408 | |||
| 69e7dc7481 | |||
| a76fe34a64 | |||
| 7a3882fe28 | |||
| 6954b79178 | |||
| 2032d045ca | |||
| e4ee0c3ab6 | |||
| 3218d00850 | |||
| 7f0b4f79ff | |||
| 945ba0a34c | |||
| 676797f0ac | |||
| 8e89d5dbfc | |||
| 150eb4e9e2 | |||
| 14766629a2 | |||
| f475c8ae6c | |||
| 11badbf22c | |||
| aedc051523 | |||
| b5336eb63c | |||
| 0c85ddd82d | |||
| f0386a21c6 | |||
| a7518937f0 | |||
| 87b012f0be | |||
| 6a7a34c0b0 | |||
| 9ce29138d2 | |||
| 95a6e09158 | |||
| 0962b79149 | |||
| 51ba5304a6 | |||
| d7137d1311 | |||
| d8babc91d5 | |||
| 3649a79f07 | |||
| 3992acd9d4 | |||
| b6f130e00b | |||
| 63c475e24f | |||
| d8d4f4e8f3 | |||
| e4a2bf8b71 | |||
| 19a1110bcf | |||
| 1997599b33 | |||
| 467f24022b | |||
| 3147f9b087 | |||
| 79e5931a45 | |||
| 1c9cefb61b | |||
| 318bf80ad6 | |||
| bc3a757764 | |||
| 31459c0121 | |||
| 87bd9ff08b | |||
| 972284ec20 | |||
| 8aaec8e13b | |||
| f668fb85b2 | |||
| a5e4ab8f9e | |||
| a58db6c2bf | |||
| cc8c5a4b7c | |||
| 7027474942 | |||
| 41dd1e4b81 | |||
| dd24b33cce | |||
| a703edab58 | |||
| 57346617a5 | |||
| 3a8bfb0bb1 | |||
| cd4e6f0f5e | |||
| 1a3037b756 | |||
| ef32834e10 | |||
| a684a0fd3b | |||
| 44505d0e44 | |||
| d1589cf665 | |||
| 4a7b4fbabf | |||
| ac1b3d7938 | |||
| 1686e662b4 | |||
| 67c97e7bd2 | |||
| 805c925e0d | |||
| 8ffba9cdb5 | |||
| 5d5290f69d | |||
| 563403a7f8 | |||
| 5cbf013a8e | |||
| 8bee761bb4 | |||
| 8bc482abe9 | |||
| 51fd83cd7f | |||
| a0811c6d25 | |||
| 77e8497100 | |||
| b46aaa388b | |||
| eeeba2febe | |||
| 75921d08d1 | |||
| 7764d18a8b | |||
| 797293ad8d | |||
| 7c7f1bcd5f | |||
| 50a430b353 | |||
| 5b562c6671 | |||
| cb0bf2d2e7 | |||
| 0b042bb2b5 | |||
| b91fbeb978 | |||
| d0b84e7ca3 | |||
| 0edeeb54b4 | |||
| e1b2a28f7d | |||
| 347c7be899 | |||
| c71d88d3bf | |||
| 0d4cbe462f | |||
| a05110cd93 | |||
| 8f6ebe8301 | |||
| 818775a12b | |||
| 80b60cdaa8 | |||
| 69118df912 | |||
| ff65382e06 | |||
| 420b8c49c6 | |||
| 0f9c02e249 | |||
| 4890a90641 | |||
| 653f0991e0 | |||
| a40efb4780 | |||
| feea74268d | |||
| 631582ccbb | |||
| 4f048a9907 | |||
| a8752ccde0 | |||
| feafad0d77 | |||
| 6faa468ed3 | |||
| ab55804039 | |||
| 05d9bb3bab | |||
| 39ae8cd250 | |||
| 5d34e3eb88 | |||
| ee20441307 | |||
| b12920ae67 | |||
| f9ab682559 | |||
| d042f7b396 | |||
| d8e4c8a78c | |||
| 1e2dcce664 | |||
| ab4af50daf | |||
| 26c83764d9 | |||
| 85ac64dea1 | |||
| 7305c9d354 | |||
| b99f8e6b14 | |||
| eb7e2ab92a | |||
| f7edbfb5af | |||
| 7c918e4735 | |||
| 7d4d1e13a0 | |||
| dbe58e30c4 | |||
| d2aa97b889 | |||
| 0eac3e3aca | |||
| 75d61d0604 | |||
| 2f7b053f96 | |||
| 5ab5a85b73 | |||
| 1d7da8fa8c | |||
| 727b2edf74 | |||
| 6caff0ca59 | |||
| b41f930d08 | |||
| 5a70d926cb | |||
| dbfe7b734c | |||
| 8acf5df3aa | |||
| f3b882ca2f | |||
| 94adf3cda6 | |||
| bfacaa6cf8 | |||
| 0033debb90 | |||
| 20f2bda6ed | |||
| bcc278c9cf | |||
| 75ccac221d | |||
| d90dd90a4a | |||
| d9156ce66c | |||
| 61457681e1 | |||
| bf5019108e | |||
| 622edec2fb | |||
| dac02f81c0 | |||
| d8037ebd8d | |||
| fba1bac8d2 | |||
| 510fbd293b | |||
| ab8c974e6f | |||
| 870f5afcfb | |||
| 6192bda94f | |||
| 3f701fcee3 | |||
| 524d049d74 | |||
| 983e964e36 | |||
| 84f989d6da | |||
| 49356fa769 | |||
| 2a6a03da64 | |||
| fd17860dd8 | |||
| 46fea48b6e | |||
| 54ef248df5 | |||
| 2dfb8990d2 | |||
| a50ac8167b | |||
| 86baab6858 | |||
| 67c18bb0af | |||
| c4584c27ef | |||
| 0022439bba | |||
| 5a81ef573c | |||
| 6f7ea5c7df | |||
| 926452bd55 | |||
| b5eeb6945c | |||
| 241ba623cc | |||
| cbd3099fa5 | |||
| 49e12e2a0b | |||
| 4b405af0e4 | |||
| 578ef40106 | |||
| f6e76b0fb9 | |||
| 17549bfe29 | |||
| 7915aed388 | |||
| e26c23e238 | |||
| fb5da15245 | |||
| 0021e4f354 | |||
| afa850231c | |||
| 935dc7ddaf | |||
| ac08eec0e4 | |||
| 5deb062e5f | |||
| 8e33fdbae5 | |||
| 403e6fbe37 | |||
| 071c43997e | |||
| 04f9512c2a | |||
| b9bc4421a3 | |||
| b2efd5af0a | |||
| 264a2f9449 | |||
| 561959e960 | |||
| 41a5f9a775 | |||
| 9a61e04293 | |||
| 3f1e01c6f9 | |||
| 12eabf86cf | |||
| 82d39d3256 | |||
| a1921e6fa4 | |||
| a5463fabe5 | |||
| 26f71ddedd | |||
| bdc2f7e8e1 | |||
| 2083be39da | |||
| 521419a5aa | |||
| 5bf9270d5d | |||
| 2b55921830 | |||
| 707ffa162e | |||
| 19848da7c3 | |||
| 334df849b3 | |||
| 801d34692b | |||
| 0aa70f2b80 | |||
| 5ad11a8b75 | |||
| 3f1bed3b6e | |||
| ca3668dd60 | |||
| b3ae2b1cbc | |||
| f6abca0663 | |||
| 084ff69239 | |||
| 8d31be462a | |||
| 6d010c0ef1 | |||
| dfc37fb2d4 | |||
| 56cd7b0b4f | |||
| 0060739bd2 | |||
| e98f86d878 | |||
| 1683790315 | |||
| 3c32c906de | |||
| d8c9c50743 | |||
| 2fc6febfaf | |||
| f49c679005 | |||
| 67206a3c4d | |||
| ed23f1d243 | |||
| 3b8c6c8c06 | |||
| e0c956e3e7 | |||
| 6efff8b285 | |||
| 4422c6c803 | |||
| 511b9241f5 | |||
| 89549ebeef | |||
| bdb24f6da1 | |||
| d7bc03f0a9 | |||
| 64c18e3f68 | |||
| 7bba7e0c32 | |||
| e48b3f0f8e | |||
| 31da502123 | |||
| 9c64bbdd60 | |||
| f4c1b0c1da | |||
| c761e9fe38 | |||
| e66aaaf98a | |||
| 58b5811d9e | |||
| 3b3429d77a | |||
| 98eb1a6694 | |||
| 91929a3217 | |||
| 5eecbc43be | |||
| 609502c545 | |||
| d0b420f9a1 | |||
| 1222c53a1a | |||
| 7b2d51e6c9 | |||
| 46cb286839 | |||
| 2e6f0c06fb | |||
| 31c138dacb | |||
| e428683ec7 | |||
| b6462225a7 | |||
| dfc110ca05 | |||
| f55bd26f2e | |||
| 603b6b90df | |||
| 2c132ae2cf | |||
| c7f4ad5a31 | |||
| b9d5593895 | |||
| 6a833fc141 | |||
| 4e1ad84831 | |||
| e90bcdf1a3 | |||
| dfbb346180 | |||
| 2d5b97f68f | |||
| 32826f1e4d | |||
| b1ed1d624a | |||
| 06c4040334 | |||
| b71c389f5c | |||
| 5557de6dc3 | |||
| ccdcd24d22 | |||
| c410a655ea | |||
| 2fd84ae57c | |||
| b760b717ef | |||
| acf9bd8663 | |||
| 7327f1440e | |||
| 87d8c10905 | |||
| ee45f3cae9 | |||
| 195255ce9a | |||
| 0e4fda0c5a | |||
| f1babdee60 | |||
| a703d85688 | |||
| 0cd677cb39 | |||
| 9fe11fb6e2 | |||
| 58451b17dc | |||
| cba924a31a | |||
| 74e50d1cb2 | |||
| bd1c01b4e1 | |||
| 541fa4aa28 | |||
| 4dd03c7bd6 | |||
| 3a2de83920 | |||
| 2ef5d339c6 | |||
| 6355098703 | |||
| a10a953097 | |||
| 99293d9841 | |||
| 6d409e4df5 | |||
| 2fceef4f0c | |||
| 7577e64085 | |||
| 4a9750865f | |||
| fba0685266 | |||
| e3fa1c740d | |||
| de190f6d41 | |||
| 7a5bc39376 | |||
| c0b67653de | |||
| c6b1bd2f3a | |||
| ae5c30af6b | |||
| a513378d73 | |||
| 5b63c12958 | |||
| f3fec33085 | |||
| 3a071af42d | |||
| a06a863745 | |||
| 93f2cf4bce | |||
| 0b70728f04 | |||
| b12f422db6 | |||
| 13681deaa1 | |||
| d2d43af0df | |||
| 500f053afd | |||
| 8cf9b06d7b | |||
| 88002fd78b | |||
| c4684d2dab | |||
| e46a244fea | |||
| c940de6cd7 | |||
| c391ecc7a9 | |||
| d65ad7324d | |||
| a68ffd5339 | |||
| 59736d19af | |||
| 9967f09566 | |||
| 3d7e4ebb71 | |||
| c9457f7610 | |||
| 13aef1fd89 | |||
| a9548747cd | |||
| 0da4cd6eb1 | |||
| 083246bea1 | |||
| 9f372ebd72 | |||
| cdf4c96ed6 | |||
| c757b57e07 | |||
| 6629585b32 | |||
| ad96d6ce66 | |||
| 5877dc1e24 | |||
| 908a6b808b | |||
| fbd41fae7f | |||
| f9ff37c820 | |||
| eed91491aa | |||
| 6faf9db2ba | |||
| 713fd7fc22 | |||
| d86ce3ac2f | |||
| 076163ccfd | |||
| 8f74c26f77 | |||
| 1b37ed61e3 | |||
| c6a421e61b | |||
| 550a60f4af | |||
| 01a6901bfe | |||
| e655aa5bbd | |||
| f02409c5a9 | |||
| 8524473488 | |||
| 0b039c6453 | |||
| 62250abe8b | |||
| 5b0fc66cb1 | |||
| ffa15c274b | |||
| 09596000d7 | |||
| 8e7a5e7d60 | |||
| fc6d485fa3 | |||
| 0ed2e7e175 | |||
| cb0a54fe2b | |||
| d9cf91d2f0 | |||
| 3ec820f212 | |||
| 474f743d28 | |||
| 3f1b508752 | |||
| 2c49a1d8b9 | |||
| ab441659b2 | |||
| 84d843b356 | |||
| 9b3af38326 | |||
| 8226a638d9 | |||
| 4cd2c5878c | |||
| 8242198068 | |||
| 59be5dc807 | |||
| de6b6012ba | |||
| 5928c84cf4 | |||
| b393469584 | |||
| 6f5cef3a6c | |||
| 5234d78719 | |||
| aebe64ef3d | |||
| 224a40dcb7 | |||
| 5ddb6bf718 | |||
| 11cb61874d | |||
| 00ed22ad28 | |||
| e263922b43 | |||
| a4172a74d1 | |||
| b1fb2aeeb3 | |||
| 4f3c2b7b8c | |||
| ec493ee91b | |||
| 2200bb9ee8 | |||
| 588129436d | |||
| fed51d9959 | |||
| e6af5e77f8 | |||
| 2eb230d366 | |||
| a66ecd7660 | |||
| 46a9459b7d | |||
| 0a34dae6c0 | |||
| 2209a76f25 | |||
| ba2e27dc7e | |||
| 5f5cedb428 | |||
| a4da127078 | |||
| 109d0ffab6 | |||
| 3af2eb1b59 | |||
| 51d3f37058 | |||
| 3b76018db9 | |||
| 271d42c09f | |||
| ddfb7f0e88 | |||
| 3cb8ce1b3b | |||
| 42b00f4942 | |||
| 749c7ce796 | |||
| 27ff214d04 | |||
| 46ff3c293a | |||
| c034e9f2ee | |||
| b2c5cebc08 | |||
| 0017a6b0f9 | |||
| a2c9df06de | |||
| 4152510452 | |||
| d253f7279a | |||
| b186caa1d0 | |||
| f99ac2f471 | |||
| 409af6e23e | |||
| 36d81e027b | |||
| 2a0cb6125a | |||
| b65ef1289a | |||
| e67f1fb974 | |||
| 292d7c9e05 | |||
| 617cb79299 | |||
| dbad11ad9a | |||
| 04cb6d2538 | |||
| b6ff3852a0 | |||
| 70a68bb676 | |||
| e04fc80b62 | |||
| 35d63e7894 | |||
| 9e71358ae2 | |||
| 0891b103e0 | |||
| 2480904929 | |||
| da903d1879 | |||
| eafc009ff0 | |||
| 3023bcaf95 | |||
| 2d29953318 | |||
| 6b9ec4bc05 | |||
| 540176059a | |||
| 9051354c58 | |||
| 26985aeacb | |||
| c2a84c7f93 | |||
| 51975f6748 | |||
| 6fdc16c33f | |||
| ed4f347563 | |||
| a1cdb3b273 | |||
| 8b8088b74a | |||
| 94e9f2678d | |||
| 05965e749a | |||
| 1a9cea263f | |||
| 966c402ecc | |||
| d5e0a3e4f6 | |||
| 2fafca7dfd | |||
| bfbd1bcfed | |||
| c1d476a991 | |||
| f7b78ca855 | |||
| 0e1429b604 | |||
| 57f2ca6460 | |||
| e1d8dabd3d | |||
| d498287f76 | |||
| 8a3026e43e | |||
| 133f26c691 | |||
| 9b169d1f43 | |||
| 2c331f9a65 | |||
| b9e8559002 | |||
| a8f843fea5 | |||
| a0da3b564f | |||
| bdc5e09ecc | |||
| d88e16dccf | |||
| 77680fcdc9 | |||
| 6afcc42c38 | |||
| 0bf7b86217 | |||
| fa306338aa | |||
| 5921a099d9 | |||
| e6dd1f0c48 | |||
| ae8602a769 | |||
| 8d86636a95 | |||
| 87a9191013 | |||
| e847933c3c | |||
| ad7280c065 | |||
| b124bac190 | |||
| 6f926f4849 | |||
| 48df9d4af6 | |||
| a5d0c183a7 | |||
| 37354484c2 | |||
| eeae13d4ba | |||
| c84b474632 | |||
| a207030899 | |||
| b97e28ad3b | |||
| b307adda99 | |||
| 069421f47a | |||
| 8f1a11757f | |||
| 3fa5f07f51 | |||
| 8b8a200b83 | |||
| 2c87d3e714 | |||
| ddf3b54917 | |||
| 846da8e17d | |||
| 0d0d414fc8 | |||
| 0c01bce460 | |||
| 37c83ce039 | |||
| 9e504d577e | |||
| ab70692c49 | |||
| d48f594147 | |||
| 3e4e634c97 | |||
| 0e17a0bcd0 | |||
| 32e0d32dea | |||
| 1ecf355346 | |||
| 2ff15b54af | |||
| 30ac3f8c0a |
@@ -0,0 +1,20 @@
|
||||
---
|
||||
description: Increment the AIX monotonic version number
|
||||
allowed-tools: Bash(git add:*),Bash(git status:*),Bash(git commit:*),Edit,Write
|
||||
model: haiku
|
||||
disable-model-invocation: true
|
||||
---
|
||||
|
||||
Increment `Monotonics.Aix` in `src/common/app.release.ts` and commit it.
|
||||
|
||||
**Pre-flight checks (MUST pass or abort):**
|
||||
1. Run `git branch --show-current` - MUST be on `main` branch
|
||||
2. Run `git status src/common/app.release.ts` - file MUST be unmodified (no changes on this specific file)
|
||||
|
||||
**Execute:**
|
||||
1. Read current `Monotonics.Aix` value from `src/common/app.release.ts`
|
||||
2. Increment by 1
|
||||
3. Update ONLY that line
|
||||
4. Run: `git add src/common/app.release.ts && git commit -m "Roll AIX"`
|
||||
|
||||
Confirm new version number.
|
||||
@@ -0,0 +1,31 @@
|
||||
---
|
||||
description: Sync Anthropic API implementation with latest upstream documentation
|
||||
argument-hint: specific feature to check
|
||||
---
|
||||
|
||||
Please take a look at my API code for Anthropic: message wire types `src/modules/aix/server/dispatch/wiretypes/anthropic.wiretypes.ts`, assembly of the request messages (adapters) `src/modules/aix/server/dispatch/chatGenerate/adapters/anthropic.messageCreate.ts`, and parsing of the response in streaming or not `src/modules/aix/server/dispatch/chatGenerate/parsers/anthropic.parser.ts`.
|
||||
|
||||
IMPORTANT: we only support the Messages API (message create). We do NOT support other APIs such as the older Completions API.
|
||||
We support Anthropic caching natively, and want to make sure tools and state (crafting the history) are also done well.
|
||||
|
||||
Then take a look at the newest API information available. Try these sources, and be creative if some are blocked:
|
||||
|
||||
**Primary Sources:**
|
||||
- Docs API: https://docs.claude.com/en/api/messages
|
||||
- Release notes: https://docs.claude.com/en/release-notes/api
|
||||
- Tools use: https://docs.claude.com/en/docs/agents-and-tools/tool-use/overview
|
||||
- Handling stop reasons: https://docs.claude.com/en/api/handling-stop-reasons
|
||||
|
||||
**Alternative Sources if primary blocked:**
|
||||
- Anthropic TypeScript SDK: https://github.com/anthropics/anthropic-sdk-typescript
|
||||
- Anthropic Python SDK: https://github.com/anthropics/anthropic-sdk-python
|
||||
- Recent news and announcements: Web Search for "anthropic api changelog" or "new claude api" or "new claude api pricing"
|
||||
|
||||
**If all blocked:** Explain what you attempted and ask user to provide documentation manually.
|
||||
|
||||
$ARGUMENTS
|
||||
Check carefully and look if there are any discrepancies in the protocols, the available API surface, the structure of the messages, functionality, logic, etc.
|
||||
Make sure you look deep in the fields of the requests and responses, especially required fields, streaming event types, and any new response shapes.
|
||||
|
||||
Please point out all of the differences in the API whether it's in the final parsing and reassembly of the streaming message, or the protocol changed, etc.
|
||||
Prioritize breaking changes and new capabilities that would improve the user experience.
|
||||
@@ -0,0 +1,30 @@
|
||||
---
|
||||
description: Sync Google Gemini API implementation with latest upstream documentation
|
||||
argument-hint: specific feature to check
|
||||
---
|
||||
|
||||
Please take a look at my API code for Google Gemini: message wire types `src/modules/aix/server/dispatch/wiretypes/gemini.wiretypes.ts`, assembly of the request messages (adapters) `src/modules/aix/server/dispatch/chatGenerate/adapters/gemini.generateContent.ts`, and parsing of the response in streaming or not `src/modules/aix/server/dispatch/chatGenerate/parsers/gemini.parser.ts`.
|
||||
|
||||
IMPORTANT: we only support the generateContent API, not other Gemini APIs such as embeddings, etc.
|
||||
Caching is only supported when implicit, we do not explicitly manage Gemini Caches. Same for file uploads and other systems.
|
||||
Image generation happens through models, i.e. 'Gemini 2.5 Flash - Nano Banana' generates images using AIX from generateContent (chat input).
|
||||
|
||||
Then take a look at the newest API information available. Try these sources, and be creative if some are blocked:
|
||||
|
||||
**Primary Sources:**
|
||||
- Docs API 1/2: https://ai.google.dev/api/generate-content
|
||||
- Docs API 2/2: https://ai.google.dev/api/caching#Content
|
||||
- Release notes: https://ai.google.dev/gemini-api/docs/changelog
|
||||
|
||||
**Alternative Sources if primary blocked:**
|
||||
- Google AI JavaScript SDK: https://github.com/googleapis/js-genai (check latest commits, README, type definitions)
|
||||
- Recent news and announcements: Web Search for "gemini api changelog" or "new gemini api updates" or "new gemini api pricing"
|
||||
|
||||
**If all blocked:** Explain what you attempted and ask user to provide documentation manually.
|
||||
|
||||
$ARGUMENTS
|
||||
Check carefully and look if there are any discrepancies in the protocols, the available API surface, the structure of the messages, functionality, logic, etc.
|
||||
Make sure you look deep in the fields of the requests and responses, especially required fields, streaming event types, and any new response shapes.
|
||||
|
||||
Please point out all of the differences in the API whether it's in the final parsing and reassembly of the streaming message, or the protocol changed, etc.
|
||||
Prioritize breaking changes and new capabilities that would improve the user experience.
|
||||
@@ -0,0 +1,34 @@
|
||||
---
|
||||
description: Sync OpenAI API implementation with latest upstream documentation
|
||||
argument-hint: specific feature to check
|
||||
---
|
||||
|
||||
Please take a look at my API code for OpenAI: message wire types `src/modules/aix/server/dispatch/wiretypes/openai.wiretypes.ts`, assembly of the request messages (adapters) `src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts`, and parsing of the response in streaming or not `src/modules/aix/server/dispatch/chatGenerate/parsers/openai.parser.ts`.
|
||||
|
||||
IMPORTANT: we prioritize the new Responses API, while Chat Completions is still supported but legacy.
|
||||
We do NOT support other APIs such as Realtime (incl. websockets), etc.
|
||||
We also do not support Agentic APIs (Agent SDK, AgentKit, ChatKit, Assistants API etc), as we perform similar functionality in AIX (server or client side).
|
||||
|
||||
Then take a look at the newest API information available. Try these sources, and be creative if some are blocked:
|
||||
|
||||
**Primary Sources:**
|
||||
- Responses API (AIX prioritizes it): https://platform.openai.com/docs/api-reference/responses/create
|
||||
- Chat Completions API: https://platform.openai.com/docs/api-reference/chat/create
|
||||
- Changelog: https://platform.openai.com/docs/changelog
|
||||
- Models: https://platform.openai.com/docs/models
|
||||
- Pricing (use Copy Page button to download markdown): https://platform.openai.com/docs/pricing
|
||||
|
||||
**Alternative Sources if primary blocked:**
|
||||
- OpenAI Node.js SDK: https://github.com/openai/openai-node
|
||||
- OpenAI Python SDK: https://github.com/openai/openai-python
|
||||
- OpenAI OpenAPI spec: https://github.com/openai/openai-openapi
|
||||
- Recent news and announcements: Web Search for "openai api changelog" or "openai new models" or "openai new prices"
|
||||
|
||||
**If all blocked:** Explain what you attempted and ask user to provide documentation manually.
|
||||
|
||||
$ARGUMENTS
|
||||
Check carefully and look if there are any discrepancies in the protocols, the available API surface, the structure of the messages, functionality, logic, etc.
|
||||
Make sure you look deep in the fields of the requests and responses, especially required fields, streaming event types, and any new response shapes.
|
||||
|
||||
Please point out all of the differences in the API whether it's in the final parsing and reassembly of the streaming message, or the protocol changed, etc.
|
||||
Prioritize breaking changes and new capabilities that would improve the user experience.
|
||||
@@ -0,0 +1,49 @@
|
||||
---
|
||||
description: Sync OpenRouter API implementation with latest upstream documentation
|
||||
argument-hint: specific feature to check
|
||||
---
|
||||
|
||||
Review the OpenRouter implementation:
|
||||
- Models list: `src/modules/llms/server/openai/openrouter.wiretypes.ts` (list API response schema)
|
||||
- Chat wire types: `src/modules/aix/server/dispatch/wiretypes/openai.wiretypes.ts` (OpenAI-compatible)
|
||||
- Request adapter: `src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts` ('openrouter' dialect)
|
||||
- Response parser: `src/modules/aix/server/dispatch/chatGenerate/parsers/openai.parser.ts` (shared OpenAI parser)
|
||||
- Vendor config: `src/modules/llms/vendors/openrouter/openrouter.vendor.ts`
|
||||
|
||||
GOAL: Ensure complete support for OpenRouter's API including advanced features like reasoning/thinking tokens, tool use, search integration, and multi-modal capabilities. OpenRouter is OpenAI-compatible but has important extensions and differences.
|
||||
|
||||
Use Task tool with subagent_type=Explore and thoroughness="very thorough" to discover:
|
||||
1. Map API structure - all endpoints, parameters, capabilities from https://openrouter.ai/docs
|
||||
2. **Advanced features** - How to use: reasoning/thinking tokens (o1, DeepSeek R1), tool use/function calling, search integration, multi-modal (vision/audio)
|
||||
3. Changelog location - How does OpenRouter communicate API updates and breaking changes?
|
||||
4. Model metadata - What capabilities are exposed in the models list API? How to detect feature support?
|
||||
5. OpenAI deviations - Extensions, special headers (HTTP-Referer, X-Title), response fields, streaming differences
|
||||
|
||||
Then check the latest API information. Try these sources (be creative if blocked):
|
||||
|
||||
**Primary Sources:**
|
||||
- API Reference: https://openrouter.ai/docs/api-reference
|
||||
- Chat Completions: https://openrouter.ai/docs/api-reference#chat-completions
|
||||
- Models List: https://openrouter.ai/docs/api-reference#models-list
|
||||
- Parameters Guide: https://openrouter.ai/docs/parameters
|
||||
- Announcements: https://openrouter.ai/announcements (feature launches, API updates, new models)
|
||||
- Models Directory: https://openrouter.ai/models (check metadata for capabilities)
|
||||
|
||||
**Alternative Sources:**
|
||||
- GitHub: https://github.com/OpenRouterTeam (SDKs, examples, issues for recent changes)
|
||||
- Web Search: "openrouter api changelog" or "openrouter reasoning tokens" or "openrouter tool use"
|
||||
|
||||
**If blocked:** Ask user to provide documentation.
|
||||
|
||||
$ARGUMENTS
|
||||
Focus on discrepancies and gaps:
|
||||
- **Request/Response structure**: New fields, changed requirements, streaming event types
|
||||
- **Feature support**: Thinking tokens format, tool calling protocol, search parameters
|
||||
- **Model capabilities**: How to detect and enable advanced features per model
|
||||
- **OpenRouter extensions**: Headers, routing, fallbacks, rate limiting (free vs paid)
|
||||
- **Breaking changes**: Protocol updates, deprecated fields, new required parameters
|
||||
|
||||
Report differences in wire types, adapter logic, parser handling, or dialect-specific quirks.
|
||||
Prioritize new capabilities that improve user experience (reasoning visibility, better tool use, etc.).
|
||||
|
||||
When making changes, add comments with date: `// [OpenRouter, 2025-MM-DD]: explanation`
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
description: Update Alibaba model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/alibaba.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models & Pricing: https://www.alibabacloud.com/help/en/model-studio/models
|
||||
- Billing Guide: https://www.alibabacloud.com/help/en/model-studio/billing-for-model-studio
|
||||
|
||||
**Fallbacks if blocked:**
|
||||
- Search "alibaba model studio latest pricing", "alibaba latest models", "qwen models pricing", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
description: Update Anthropic model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/anthropic/anthropic.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.claude.com/en/docs/about-claude/models/overview
|
||||
- Pricing: https://claude.com/pricing#api
|
||||
- Deprecations: https://docs.claude.com/en/docs/about-claude/model-deprecations
|
||||
|
||||
**Fallbacks if blocked:** Check Anthropic TypeScript SDK at https://github.com/anthropics/anthropic-sdk-typescript, search "anthropic models latest pricing", "anthropic latest models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
description: Update DeepSeek model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/deepseek.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Pricing: https://api-docs.deepseek.com/quick_start/pricing
|
||||
- Model List: https://api-docs.deepseek.com/api/list-models
|
||||
- Release Notes: https://api-docs.deepseek.com/updates (check for version updates like V3.2-Exp)
|
||||
|
||||
**Note:** DeepSeek frequently releases new versions with significant pricing changes. Always check release notes first.
|
||||
|
||||
**Fallbacks if blocked:** Search "deepseek api latest pricing", "deepseek latest models", "deepseek models list" or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,21 @@
|
||||
---
|
||||
description: Update Gemini model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/gemini/gemini.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.types.ts`, `src/modules/llms/server/llm.server.types.ts`, and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://ai.google.dev/gemini-api/docs/models
|
||||
- Pricing: https://ai.google.dev/gemini-api/docs/pricing
|
||||
- Changelog: https://ai.google.dev/gemini-api/docs/changelog
|
||||
|
||||
**Fallbacks if blocked:** Check Google AI JS SDK at https://github.com/googleapis/js-genai, search "gemini models latest pricing", "gemini latest models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Ignore context windows (auto-determined at runtime) and training cutoffs (not supported)
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review, do NOT remove comments
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,19 @@
|
||||
---
|
||||
description: Update Groq model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/groq.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://console.groq.com/docs/models
|
||||
- Pricing: https://groq.com/pricing/
|
||||
|
||||
**Fallbacks if blocked:** Search "groq models latest pricing", "groq latest models", "groq api models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,19 @@
|
||||
---
|
||||
description: Update Kimi model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/moonshot.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Pricing: https://platform.moonshot.ai/docs/pricing/chat
|
||||
- API Reference: https://platform.moonshot.ai/docs/api/chat
|
||||
|
||||
**Fallbacks if blocked:** Search "moonshot kimi models latest pricing", "kimi k2 models", "moonshot api models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,24 @@
|
||||
---
|
||||
description: Update Mistral model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/mistral.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.mistral.ai/getting-started/models/models_overview/
|
||||
- Pricing: https://mistral.ai/pricing#api-pricing
|
||||
- Changelog: https://docs.mistral.ai/getting-started/changelog/
|
||||
|
||||
**Fallbacks if blocked:**
|
||||
- Search "mistral [model-name] latest pricing", "mistral api latest pricing", "mistral latest models", or search GitHub for latest model prices and context windows
|
||||
- Cross-reference: pricepertoken.com, helicone.ai, artificialanalysis.ai
|
||||
- Check Mistral API list models response
|
||||
- As last resort: Use Chrome DevTools MCP to render pricing table
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,41 @@
|
||||
---
|
||||
description: Update Ollama model definitions with latest featured models
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/ollama/ollama.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Automated Workflow:**
|
||||
```bash
|
||||
# 1. Fetch the HTML
|
||||
curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html
|
||||
|
||||
# 2. Parse it with the script
|
||||
node .claude/scripts/parse-ollama-models.js > /tmp/ollama-parsed.txt 2>&1
|
||||
|
||||
# 3. Review the parsed output
|
||||
cat /tmp/ollama-parsed.txt
|
||||
```
|
||||
|
||||
The parser outputs: `modelName|pulls|capabilities|sizes`
|
||||
- Example: `deepseek-r1|66200000|tools,thinking|1.5b,7b,8b,14b,32b,70b,671b`
|
||||
|
||||
**Primary Sources:**
|
||||
- Model Library: https://ollama.com/library?sort=featured
|
||||
- Parser script: `.claude/scripts/parse-ollama-models.js`
|
||||
|
||||
**Fallbacks if blocked:** Check https://github.com/ollama/ollama, search "ollama featured models", "ollama latest models", or search GitHub for latest model info
|
||||
|
||||
**Important:**
|
||||
- Skip models below 50,000 pulls (parser does this automatically)
|
||||
- Skip embedding models (parser does not do this automatically)
|
||||
- Sort them in the EXACT same order as the source (featured models)
|
||||
- Extract tags: 'tools' → hasTools, 'vision' → hasVision, 'embedding' → isEmbeddings (note the 's'), 'thinking' → tags only
|
||||
- Extract 'b' tags (1.5b, 7b, 32b) to tags field
|
||||
- Set today's date (YYYYMMDD format) for newly added models only
|
||||
- Update OLLAMA_LAST_UPDATE constant to today's date
|
||||
- Do NOT change dates of existing models
|
||||
- Review the full model list for additions, removals, and changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments and newlines to make diffs easy to review
|
||||
@@ -0,0 +1,26 @@
|
||||
---
|
||||
description: Update OpenAI model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/openai.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Manual hint:** For pricing page, expand all tables before copying content.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://platform.openai.com/docs/models (use Copy Page button)
|
||||
- Pricing: https://platform.openai.com/docs/pricing (expand tables first)
|
||||
|
||||
**Known Issue:** OpenAI docs block automated access (403 Forbidden). Manual browser access required.
|
||||
|
||||
**Fallbacks if blocked:**
|
||||
- Search "openai models latest pricing", "openai latest models" for third-party aggregators, or search GitHub for latest model prices and context windows
|
||||
- OpenAI Node SDK (https://github.com/openai/openai-node) has limited model metadata only
|
||||
- As last resort: Use Chrome DevTools MCP to navigate and extract from official docs
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,19 @@
|
||||
---
|
||||
description: Update OpenPipe model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/openpipe.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Base Models: https://docs.openpipe.ai/base-models
|
||||
- Pricing: https://docs.openpipe.ai/pricing/pricing
|
||||
|
||||
**Fallbacks if blocked:** Search "openpipe models latest pricing", "openpipe latest models", "openpipe base models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
description: Update Perplexity model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/perplexity.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models: https://docs.perplexity.ai/getting-started/models
|
||||
- Pricing: https://docs.perplexity.ai/getting-started/pricing
|
||||
- Changelog: https://docs.perplexity.ai/changelog/changelog
|
||||
|
||||
**Fallbacks if blocked:** Search "perplexity api latest pricing", "perplexity latest models", or search GitHub for latest model prices and context windows
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
@@ -0,0 +1,23 @@
|
||||
---
|
||||
description: Update xAI model definitions with latest pricing and capabilities
|
||||
---
|
||||
|
||||
Update `src/modules/llms/server/openai/models/xai.models.ts` with latest model definitions.
|
||||
|
||||
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
|
||||
|
||||
**Primary Sources:**
|
||||
- Models & Pricing: https://docs.x.ai/docs/models?cluster=us-east-1#detailed-pricing-for-all-grok-models
|
||||
|
||||
**Known Issue:** docs.x.ai blocks automated access (403 Forbidden). Use fallbacks below.
|
||||
|
||||
**Fallbacks if blocked:**
|
||||
- Search "xai grok latest pricing", "xai latest models", "xai api models", or search GitHub for latest model prices and context windows
|
||||
- Random sites? https://the-rogue-marketing.github.io/grok-api-latest-llms-pricing-october-2025/ (find a newer version), https://langdb.ai/app/providers/xai/ (browse by model, limited coverage)
|
||||
- As last resort: Use Chrome DevTools MCP to access docs.x.ai
|
||||
|
||||
**Important:**
|
||||
- Review the full model list for additions, removals, and price changes
|
||||
- Minimize whitespace/comment changes, focus on content
|
||||
- Preserve comments to make diffs easy to review
|
||||
- Flag broken links or unexpected content
|
||||
Executable
+81
@@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env node
/**
 * Parse Ollama featured models from HTML.
 *
 * Usage:
 *   1. Fetch HTML: curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html
 *   2. Parse: node .claude/scripts/parse-ollama-models.js [htmlPath]
 *
 * Outputs: pipe-delimited format: modelName|pulls|capabilities|sizes
 * Example: deepseek-r1|66200000|tools,thinking|1.5b,7b,8b,14b,32b,70b,671b
 *
 * Models below MIN_PULLS downloads are skipped; the model count is
 * reported on stderr so stdout stays machine-parseable.
 */

const fs = require('fs');

// Minimum pull count for a model to appear in the output.
const MIN_PULLS = 50000;

const htmlPath = process.argv[2] || '/tmp/ollama-featured.html';

if (!fs.existsSync(htmlPath)) {
  console.error(`Error: HTML file not found at ${htmlPath}`);
  console.error('Please fetch it first with:');
  console.error('  curl -s "https://ollama.com/library?sort=featured" -o /tmp/ollama-featured.html');
  process.exit(1);
}

const html = fs.readFileSync(htmlPath, 'utf8');

// Split into model sections - each starts with <a href="/library/
const modelSections = html.split(/<a href="\/library\//);
const models = [];

for (let i = 1; i < modelSections.length; i++) {
  const section = modelSections[i].substring(0, 5000); // Large enough window to capture all data

  // Extract model name (first quoted string, i.e. the rest of the href attribute)
  const nameMatch = section.match(/^([^"]+)"/);
  if (!nameMatch) continue;
  const name = nameMatch[1];

  // Extract pulls using x-test-pull-count; values look like "66.2M", "850K", or "12,345"
  const pullsMatch = section.match(/x-test-pull-count>([^<]+)</);
  let pulls = 0;
  if (pullsMatch) {
    const pullStr = pullsMatch[1].replace(/,/g, '');
    if (pullStr.includes('M')) {
      pulls = Math.floor(parseFloat(pullStr) * 1000000);
    } else if (pullStr.includes('K')) {
      pulls = Math.floor(parseFloat(pullStr) * 1000);
    } else {
      // Always pass the radix: bare parseInt can misinterpret some inputs
      pulls = Number.parseInt(pullStr, 10);
    }
  }

  // Extract capabilities (tools, vision, embedding, thinking, cloud)
  const capabilities = [];
  const capabilityRegex = /x-test-capability[^>]*>([^<]+)</g;
  let capMatch;
  while ((capMatch = capabilityRegex.exec(section)) !== null) {
    capabilities.push(capMatch[1].trim());
  }

  // Extract sizes (1.5b, 7b, etc.)
  const sizes = [];
  const sizeRegex = /x-test-size[^>]*>([^<]+)</g;
  let sizeMatch;
  while ((sizeMatch = sizeRegex.exec(section)) !== null) {
    sizes.push(sizeMatch[1].trim());
  }

  // Only include sufficiently popular models (NaN pulls also fail this check)
  if (pulls >= MIN_PULLS) {
    models.push({ name, pulls, capabilities, sizes });
  }
}

// Output in pipe-delimited format (in the order they appear on the page)
models.forEach((m) => {
  const caps = m.capabilities.join(',');
  const tags = m.sizes.join(',');
  console.log(`${m.name}|${m.pulls}|${caps}|${tags}`);
});

// Summary on stderr so stdout remains clean pipe-delimited data
console.error(`\nTotal models with 50K+ pulls: ${models.length}`);
|
||||
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(cat:*)",
|
||||
"Bash(cp:*)",
|
||||
"Bash(curl:*)",
|
||||
"Bash(find:*)",
|
||||
"Bash(git branch:*)",
|
||||
"Bash(git describe:*)",
|
||||
"Bash(git grep:*)",
|
||||
"Bash(git log:*)",
|
||||
"Bash(git show:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(ls:*)",
|
||||
"Bash(mkdir:*)",
|
||||
"Bash(node:*)",
|
||||
"Bash(npm install)",
|
||||
"Bash(npm install:*)",
|
||||
"Bash(npm run:*)",
|
||||
"Bash(npx eslint:*)",
|
||||
"Bash(npx tsc:*)",
|
||||
"Bash(rg:*)",
|
||||
"Bash(rm:*)",
|
||||
"Bash(sed:*)",
|
||||
"Bash(tree:*)",
|
||||
"Read(//tmp/**)",
|
||||
"WebFetch",
|
||||
"WebFetch(domain:big-agi.com)",
|
||||
"WebSearch",
|
||||
"mcp__chrome-devtools",
|
||||
"mcp__github",
|
||||
"mcp__ide__getDiagnostics"
|
||||
],
|
||||
"deny": [
|
||||
"Read(node_modules)",
|
||||
"Read(node_modules/**)"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{
|
||||
"extends": "next/core-web-vitals"
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
name: 🔥 Make AI Fix This
|
||||
description: Bug, question, or feedback - AI analyzes and changes Big-AGI appropriately
|
||||
labels: [ 'claude-triage' ]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for opening an issue! Our AI will analyze it and change Big-AGI appropriately.
|
||||
|
||||
**What happens next:**
|
||||
- AI searches the codebase and documentation
|
||||
- You get a response, typically within 30 minutes
|
||||
- Ticket gets follow-up and community votes
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What's happening?
|
||||
description: Describe the bug, feature request, or question. Be as detailed as you can.
|
||||
placeholder: |
|
||||
Bug example: "In Beam, Anthropic models seem to have search off..."
|
||||
Model request: "Add Claude Opus 4.5 out today, see https://..."
|
||||
Feature example: "Add the option to to save frequent prompt templates for reuse..."
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Where does this happen?
|
||||
description: If this is a bug or issue, where are you experiencing it?
|
||||
options:
|
||||
- Big-AGI Pro (big-agi.com)
|
||||
- Self-deployed from GitHub
|
||||
- Docker deployment
|
||||
- Local development
|
||||
- Not applicable (question/feedback)
|
||||
- Other
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Impact on your workflow
|
||||
description: How does this affect your use of Big-AGI?
|
||||
options:
|
||||
- Blocking - Can't use Big-AGI
|
||||
- High - Major feature broken
|
||||
- Medium - Workaround exists
|
||||
- Low - Minor inconvenience
|
||||
- None - Just a question/suggestion
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Environment (if applicable)
|
||||
description: Device, OS, browser - only if reporting a bug
|
||||
placeholder: |
|
||||
Device: Macbook Pro M3
|
||||
OS: macOS 15.2
|
||||
Browser: Chrome 131
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Screenshots, error messages, or anything else that helps
|
||||
placeholder: Paste screenshots or error messages here
|
||||
validations:
|
||||
required: false
|
||||
@@ -5,14 +5,29 @@ labels: [ 'type: bug' ]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: Thank you for reporting a bug.
|
||||
value: Thank you for reporting a bug. Please help us by providing accurate environment information.
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Environment
|
||||
description: (required) Where are you experiencing this issue?
|
||||
options:
|
||||
- Big-AGI Pro (big-agi.com)
|
||||
- Self-deployed from GitHub
|
||||
- Docker container (specify in description)
|
||||
- Local development
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Description
|
||||
description: (required) Please provide a clear description. Please also provide the steps to reproduce.
|
||||
description: (required) Please provide a clear description and **steps to reproduce**.
|
||||
placeholder: 'Concise description + steps to reproduce.'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Device and browser
|
||||
@@ -20,10 +35,12 @@ body:
|
||||
placeholder: 'Device: (e.g., iPhone 16, Pixel 9, PC, Macbook...), OS: (e.g., iOS 17, Windows 12), Browser: (e.g., Chrome 119, Safari 18, Firefox..)'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Screenshots and more
|
||||
placeholder: 'Attach screenshots, or add any additional context here.'
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Willingness to Contribute
|
||||
|
||||
@@ -32,7 +32,6 @@ assignees: enricoros
|
||||
- [ ] verify deployment on Vercel
|
||||
- [ ] verify container on GitHub Packages
|
||||
- [ ] update the GitHub release
|
||||
- [ ] push as stable `git push opensource main:main-stable`
|
||||
- Announce:
|
||||
- [ ] Discord announcement
|
||||
- [ ] Twitter announcement
|
||||
@@ -51,7 +50,7 @@ To familiarize yourself with the application, the following are the Website and
|
||||
```
|
||||
|
||||
- paste the URL: https://big-agi.com
|
||||
- drag & drop: [README.md](https://raw.githubusercontent.com/enricoros/big-AGI/v2-dev/README.md)
|
||||
- drag & drop: [README.md](https://raw.githubusercontent.com/enricoros/big-AGI/main/README.md)
|
||||
|
||||
```markdown
|
||||
I am announcing a new version, 1.2.3.
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
name: Claude Code DM
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
|
||||
jobs:
|
||||
claude-dm:
|
||||
if: |
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) ||
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude'))
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code DM Response
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# Security: Only users with write access can trigger (DMs allow code execution)
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Add claude_args to customize behavior and configuration
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
# claude_args: '--allowed-tools Bash(gh pr:*)'
|
||||
# disabling opus for now claude-opus-4-1-20250805
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--max-turns 100
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
@@ -0,0 +1,77 @@
|
||||
name: Claude Code Auto-Triage Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [ opened, assigned ]
|
||||
|
||||
jobs:
|
||||
claude-issue-triage:
|
||||
# Optional: Skip for bot users and direct mentions in the body (handled by claude-dm.yml)
|
||||
if: |
|
||||
github.event.issue.user.type != 'Bot' &&
|
||||
!contains(github.event.issue.body, '@claude')
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
id-token: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Analyze issue and provide help
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
# Security: Allow any user to trigger triage (automated issue help is safe)
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
allowed_non_write_users: '*'
|
||||
# track_progress: true # Enables tracking comments
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
prompt: |
|
||||
REPO: ${{ github.repository }}
|
||||
ISSUE NUMBER: #${{ github.event.issue.number }}
|
||||
|
||||
A user has reported an issue. Please help them by:
|
||||
|
||||
1. Deep think about the issue:
|
||||
**Understand the problem**: Analyze the issue description and any error messages
|
||||
**Search for context**:
|
||||
- Use the repository's CLAUDE.md for high level guidance and especially kb/ documentation
|
||||
- Look in relevant code files, including kb/ documentation
|
||||
**Use web search**: When potentially outside Big-AGI (e.g. user configuration), search the web for similar errors or related issues
|
||||
**Provide a solution**:
|
||||
- Provide multiple solutions if uncertain, and say so
|
||||
- If you can fix it in code, propose the fix
|
||||
- If possible also suggest fixes or workarounds for immediate relief
|
||||
- Reference specific files and line numbers
|
||||
- Test selectively and even npm install and run build if needed to verify the solution
|
||||
2. Always add the 'claude-triage' issue label to indicate this issue was triaged by Claude
|
||||
3. Comment with:
|
||||
- Very brief thank you note, if applicable
|
||||
- Initial assessment
|
||||
- Next steps or clarification needed
|
||||
- Link duplicates if found
|
||||
|
||||
If you're uncertain, say so and suggest next steps.
|
||||
If you write any code make sure that it compiles and that you push it.
|
||||
Be welcoming, helpful, professional, solution-focused and no-BS.
|
||||
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--max-turns 75
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
|
||||
@@ -0,0 +1,77 @@
|
||||
name: Claude Code PR Review
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [ opened, synchronize, ready_for_review ]
|
||||
|
||||
# Limit branches
|
||||
branches: [ main, dev, v1 ]
|
||||
|
||||
# Optional: Only run on specific file changes
|
||||
# paths:
|
||||
# - "src/**/*.ts"
|
||||
# - "src/**/*.tsx"
|
||||
|
||||
jobs:
|
||||
claude-pr-review:
|
||||
# Skip draft PRs
|
||||
# Optional: filter authors: github.event.pull_request.user.login != 'enricoros'
|
||||
if: |
|
||||
github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run PR Review
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
# Security: Allow any user to trigger reviews (read-only PR analysis is safe)
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
allowed_non_write_users: '*'
|
||||
# track_progress: true # Enables tracking comments
|
||||
|
||||
# This setting allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
prompt: |
|
||||
REPO: ${{ github.repository }}
|
||||
PR NUMBER: ${{ github.event.pull_request.number }}
|
||||
|
||||
Please review this pull request and provide feedback on:
|
||||
- Potential bugs or issues
|
||||
- Adherence to Big-AGI architecture and design patterns
|
||||
- Code quality and best practices, including TypeScript types, error handling, and edge cases
|
||||
- Performance considerations: bundle size, React patterns, streaming efficiency
|
||||
- Security concerns if applicable
|
||||
|
||||
Use the repository's CLAUDE.md for guidance on style and conventions.
|
||||
|
||||
Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR.
|
||||
Use `gh pr review comment` for inline suggestions on specific lines.
|
||||
|
||||
IMPORTANT: After completing your review, always add the 'claude-review' label to the PR to indicate it was reviewed by Claude:
|
||||
gh pr edit ${{ github.event.pull_request.number }} --add-label "claude-review"
|
||||
|
||||
Be constructive, helpful, no-BS, and specific with file:line references.
|
||||
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
claude_args: |
|
||||
--model claude-sonnet-4-5-20250929
|
||||
--max-turns 100
|
||||
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools"
|
||||
@@ -12,11 +12,9 @@ name: Create and publish Docker images
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- v2-dev
|
||||
#- v1-dev # Disabled because this is not needed anymore
|
||||
#- v1-stable # Disabled as the v* tag is used for stable releases
|
||||
- main # Primary branch (Big-AGI Open)
|
||||
tags:
|
||||
- 'v*' # Trigger on version tags (e.g., v1.7.0)
|
||||
- 'v2.*' # Stable releases (v2.0.0, v2.1.0, etc.)
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
@@ -25,6 +23,7 @@ env:
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60 # Max 1 hour (expected: ~25min)
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
@@ -55,14 +54,21 @@ jobs:
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=raw,value=development,enable=${{ github.ref == 'refs/heads/v2-dev' }} # For v2-dev branch
|
||||
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/v1-stable' }}
|
||||
type=ref,event=tag # Use the tag name as a tag for tag builds
|
||||
type=semver,pattern={{version}} # Generate semantic versioning tags for tag builds
|
||||
type=sha,format=short,prefix=sha- # Just in case none of the above applies
|
||||
# Development: main branch
|
||||
type=raw,value=development,enable=${{ github.ref == 'refs/heads/main' }}
|
||||
|
||||
# Latest: v2.x releases (safe default)
|
||||
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/v2.') }}
|
||||
|
||||
# Stable: v2.x releases (alias)
|
||||
type=raw,value=stable,enable=${{ startsWith(github.ref, 'refs/tags/v2.') }}
|
||||
|
||||
# Version tags (v2.0.0, 2.0.0)
|
||||
type=ref,event=tag
|
||||
type=semver,pattern={{version}}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Big-AGI
|
||||
org.opencontainers.image.description=Generative AI suite powered by state-of-the-art models
|
||||
org.opencontainers.image.title=Big-AGI Open
|
||||
org.opencontainers.image.description=Big-AGI Open - Multi-model AI workspace for experts who need to think broader, decide smarter, and build with confidence.
|
||||
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
|
||||
org.opencontainers.image.documentation=https://big-agi.com
|
||||
|
||||
@@ -77,6 +83,8 @@ jobs:
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
NEXT_PUBLIC_BUILD_HASH=${{ github.sha }}
|
||||
NEXT_PUBLIC_BUILD_REF_NAME=${{ github.ref_name }}
|
||||
# Enable build cache (future)
|
||||
#cache-from: type=gha
|
||||
#cache-to: type=gha,mode=max
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
overrides=@mui/material@^5.0.0:
|
||||
dependencies:
|
||||
@mui/material: replaced-by=@mui/joy
|
||||
@@ -0,0 +1,242 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Development Commands
|
||||
|
||||
```bash
|
||||
# Targeted Code Quality (safe while dev server runs)
|
||||
npx tsc --noEmit # Type check without building
|
||||
npx eslint src/path/to/file.ts # Lint specific file
|
||||
npm run lint # Lint entire project
|
||||
```
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Big-AGI is a Next.js 15 application with a modular architecture built for advanced AI interactions. The codebase follows a three-layer structure with distinct separation of concerns.
|
||||
|
||||
### Core Directory Structure
|
||||
|
||||
```
|
||||
/app/api/ # Next.js App Router (API routes only, mostly -> /src/server/)
|
||||
/pages/ # Next.js Pages Router (file-based, mostly -> /src/apps/)
|
||||
/src/
|
||||
├── apps/ # Feature applications (self-contained modules)
|
||||
├── modules/ # Reusable business logic and integrations
|
||||
├── common/ # Shared infrastructure and utilities
|
||||
└── server/ # Backend API layer with tRPC
|
||||
/kb/ # Knowledge base for modules, architectures
|
||||
```
|
||||
|
||||
### Key Technologies
|
||||
|
||||
- **Frontend**: Next.js 15, React 18, Material-UI Joy, Emotion (CSS-in-JS)
|
||||
- **State Management**: Zustand with localStorge/IndexedDB (single cell) persistence
|
||||
- **API Layer**: tRPC with React Query for type-safe communication
|
||||
- **Runtime**: Edge Runtime for AI operations, Node.js for data processing
|
||||
|
||||
### Apps Architecture Pattern
|
||||
|
||||
Each app in `/src/apps/` is a self-contained feature module:
|
||||
- Main component (`App*.tsx`)
|
||||
- Local state store (`store-app-*.ts`)
|
||||
- Feature-specific components and layouts
|
||||
- Runtime configurations
|
||||
|
||||
Example apps: `chat/`, `call/`, `beam/`, `draw/`, `personas/`, `settings-modal/`
|
||||
|
||||
### Modules Architecture Pattern
|
||||
|
||||
Modules in `/src/modules/` provide reusable business logic:
|
||||
- **`aix/`** - AI communication framework for real-time streaming
|
||||
- **`beam/`** - Multi-model AI reasoning system (scatter/gather pattern)
|
||||
- **`blocks/`** - Content rendering (markdown, code, images, etc.)
|
||||
- **`llms/`** - Language model abstraction supporting 16 vendors
|
||||
|
||||
### Key Subsystems & Their Patterns
|
||||
|
||||
#### 1. AIX - Real-time AI Communication
|
||||
**Location**: `/src/modules/aix/`
|
||||
**Pattern**: Client-server streaming architecture with provider abstraction
|
||||
|
||||
- **Client** → tRPC → **Server** → **AI Providers**
|
||||
- Handles streaming/non-streaming responses with batching and error recovery
|
||||
- Particle-based streaming: `AixWire_Particles` → `ContentReassembler` → `DMessage`
|
||||
- Provider-agnostic through adapter pattern (OpenAI, Anthropic, Gemini protocols)
|
||||
|
||||
#### 3. Beam - Multi-Model Reasoning
|
||||
**Location**: `/src/modules/beam/`
|
||||
**Pattern**: Scatter/Gather for parallel AI processing
|
||||
|
||||
- **Scatter**: Multiple models (rays) process input in parallel
|
||||
- **Gather**: Fusion algorithms combine outputs
|
||||
- Real-time UI updates via vanilla Zustand stores
|
||||
- BeamStore per conversation via ConversationHandler
|
||||
|
||||
#### 4. Conversation Management
|
||||
**Location**: `/src/common/stores/chat/` and `/src/common/chat-overlay/`
|
||||
**Pattern**: Overlay architecture with handler per conversation
|
||||
|
||||
- `ConversationHandler` orchestrates chat, beam, ephemerals
|
||||
- Per-chat stores: `PerChatOverlayStore` + `BeamStore`
|
||||
- Message structure: `DMessage` → `DMessageFragment[]`
|
||||
- Supports multi-pane with independent conversation states
|
||||
|
||||
### Storage System
|
||||
|
||||
Big-AGI uses a local-first architecture with Zustand + IndexedDB:
|
||||
- **Zustand** stores for in-memory state management
|
||||
- **localStorage** for persistent settings/all storage (via Zustand persist middleware)
|
||||
- **IndexedDB** for persistent chat-only storage (via Zustand persist middleware) on a single key-val cell
|
||||
- **Local-first** architecture with offline capability
|
||||
- **Migration system** for upgrading data structures across versions
|
||||
|
||||
Key storage patterns:
|
||||
- Stores use `createIDBPersistStorage()` for IndexedDB persistence
|
||||
- Version-based migrations handle data structure changes
|
||||
- Partialize/merge functions control what gets persisted
|
||||
- Rehydration logic repairs and upgrades data on load
|
||||
|
||||
Located in `/src/common/stores/` with stores like:
|
||||
- `chat/store-chats.ts`: Conversations and messages
|
||||
- `llms/store-llms.ts`: Model configurations
|
||||
|
||||
### Layout System ("Optima")
|
||||
|
||||
The Optima layout system provides:
|
||||
- **Responsive design** adapting desktop/mobile
|
||||
- **Drawer/Panel/Toolbar** composition
|
||||
- **Split-pane support** for multi-conversation views
|
||||
- **Portal-based rendering** for flexible component placement
|
||||
|
||||
Located in `/src/common/layout/optima/`
|
||||
|
||||
### State Management Patterns
|
||||
|
||||
1. **Global Stores** (Zustand with IndexedDB persistence)
|
||||
- `store-chats`: Conversations and messages
|
||||
- `store-llms`: Model configurations
|
||||
- `store-ux-labs`: UI preferences and labs features
|
||||
- **Zustand pattern**: Always wrap multi-property selectors with `useShallow` from `zustand/react/shallow` to prevent re-renders on reference changes
|
||||
|
||||
2. **Per-Instance Stores** (Vanilla Zustand)
|
||||
- `store-beam_vanilla`: Beam scatter/gather state
|
||||
- `store-perchat_vanilla`: Chat overlay state
|
||||
- High-performance, no React integration
|
||||
|
||||
3. **Module Stores**
|
||||
- Feature-specific configuration and state
|
||||
- Example: `store-module-beam`, `store-module-t2i`
|
||||
|
||||
### User Flows & Interdependencies
|
||||
|
||||
#### Chat Message Flow
|
||||
1. User input → `Composer` → `DMessage` creation
|
||||
2. `ConversationHandler.messageAppend()` → Store update
|
||||
3. `_handleExecute()` / `ConversationHandler.executeChatMessages()` → AIX client request
|
||||
4. AIX streaming → `ContentReassembler` → UI updates
|
||||
5. Zustand auto-persistence → IndexedDB
|
||||
|
||||
#### Beam Multi-Model Flow
|
||||
1. User triggers Beam → `BeamStore.open()` state update
|
||||
2. Scatter: Parallel `aixChatGenerateContent()` to N models
|
||||
3. Real-time ray updates → UI progress
|
||||
4. Gather: User selects fusion → Combined output
|
||||
5. Result → New message in conversation
|
||||
|
||||
### Development Patterns
|
||||
|
||||
#### Module Integration
|
||||
- Each module exports its functionality through index files
|
||||
- Modules register with central registries (e.g., `vendors.registry.ts`)
|
||||
- Configuration objects define module behavior
|
||||
- Type-safe integration through strict TypeScript interfaces
|
||||
|
||||
#### Component Patterns
|
||||
- **Controlled components** with clear prop interfaces
|
||||
- **Hook-based logic** extraction for reusability
|
||||
- **Portal rendering** for overlays and modals
|
||||
- **Suspense boundaries** for async operations
|
||||
|
||||
#### API Patterns
|
||||
- **tRPC routers** for type-safe API endpoints
|
||||
- **Zod schemas** for runtime validation
|
||||
- **Middleware** for request/response processing
|
||||
- **Edge functions** for performance-critical AI operations
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- API keys stored client-side in localStorage (user-provided)
|
||||
- Server-side API keys in environment variables only
|
||||
- XSS protection through proper content escaping
|
||||
- No credential transmission to third parties
|
||||
|
||||
## Knowledge Base
|
||||
|
||||
Architecture and system documentation is available in the `/kb/` knowledge base:
|
||||
|
||||
@kb/KB.md
|
||||
|
||||
## Common Development Tasks
|
||||
|
||||
### Testing & Quality
|
||||
- Run `npm run lint` before committing
|
||||
- Type-check with `npx tsc --noEmit`
|
||||
- Test critical user flows manually
|
||||
|
||||
### Adding a New LLM Vendor
|
||||
1. Create vendor in `/src/modules/llms/vendors/[vendor]/`
|
||||
2. Implement `IModelVendor` interface
|
||||
3. Register in `vendors.registry.ts`
|
||||
4. Add environment variables to `env.ts` (if server-side keys needed)
|
||||
|
||||
### Debugging Storage Issues
|
||||
- Check IndexedDB: DevTools → Application → IndexedDB → `app-chats`
|
||||
- Monitor Zustand state: Use Zustand DevTools
|
||||
- Check migration logs in console during rehydration
|
||||
|
||||
## Code Examples
|
||||
|
||||
### AIX Streaming Pattern
|
||||
```typescript
|
||||
// Efficient streaming with decimation
|
||||
aixChatGenerateContent_DMessage(
|
||||
llmId,
|
||||
request,
|
||||
{ abortSignal, throttleParallelThreads: 1 },
|
||||
async (update, isDone) => {
|
||||
// Real-time UI updates
|
||||
}
|
||||
);
|
||||
```
|
||||
|
||||
### Model Registry Pattern
|
||||
```typescript
|
||||
// Registry pattern for extensibility
|
||||
const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, IModelVendor> = {
|
||||
openai: ModelVendorOpenAI,
|
||||
anthropic: ModelVendorAnthropic,
|
||||
// ... 14 more vendors
|
||||
};
|
||||
```
|
||||
|
||||
## Server Architecture
|
||||
|
||||
The server uses a split architecture with two tRPC routers:
|
||||
|
||||
### Edge Network (`trpc.router-edge`)
|
||||
Distributed edge runtime for low-latency AI operations:
|
||||
- **AIX** - AI streaming and communication
|
||||
- **LLM Routers** - Direct vendor integrations (OpenAI, Anthropic, Gemini, Ollama)
|
||||
- **External Services** - ElevenLabs (TTS), Google Search, YouTube transcripts
|
||||
|
||||
Located at `/src/server/trpc/trpc.router-edge.ts`
|
||||
|
||||
### Cloud Network (`trpc.router-cloud`)
|
||||
Centralized server for data processing operations:
|
||||
- **Browse** - Web scraping and content extraction
|
||||
- **Trade** - Import/export functionality (ChatGPT, markdown, JSON)
|
||||
|
||||
Located at `/src/server/trpc/trpc.router-cloud.ts`
|
||||
|
||||
**Key Pattern**: Edge runtime for AI (fast, distributed), Cloud runtime for data ops (centralized, Node.js)
|
||||
+21
-5
@@ -1,6 +1,6 @@
|
||||
# Base
|
||||
FROM node:22-alpine AS base
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
|
||||
# Dependencies
|
||||
FROM base AS deps
|
||||
@@ -14,7 +14,7 @@ COPY src/server/prisma ./src/server/prisma
|
||||
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
|
||||
|
||||
# Install dependencies, including dev (release builds should use npm ci)
|
||||
ENV NODE_ENV development
|
||||
ENV NODE_ENV=development
|
||||
RUN npm ci
|
||||
|
||||
|
||||
@@ -22,16 +22,32 @@ RUN npm ci
|
||||
FROM base AS builder
|
||||
WORKDIR /app
|
||||
|
||||
# Deployment type marker
|
||||
ENV NEXT_PUBLIC_DEPLOYMENT_TYPE=docker
|
||||
|
||||
# Optional build version arguments at build time
|
||||
ARG NEXT_PUBLIC_BUILD_HASH
|
||||
ENV NEXT_PUBLIC_BUILD_HASH=${NEXT_PUBLIC_BUILD_HASH}
|
||||
ARG NEXT_PUBLIC_BUILD_REF_NAME
|
||||
ENV NEXT_PUBLIC_BUILD_REF_NAME=${NEXT_PUBLIC_BUILD_REF_NAME}
|
||||
|
||||
# Optional argument to configure GA4 at build time (see: docs/deploy-analytics.md)
|
||||
ARG NEXT_PUBLIC_GA4_MEASUREMENT_ID
|
||||
ENV NEXT_PUBLIC_GA4_MEASUREMENT_ID=${NEXT_PUBLIC_GA4_MEASUREMENT_ID}
|
||||
|
||||
# Optional argument to configure PostHog at build time (see: docs/deploy-analytics.md)
|
||||
ARG NEXT_PUBLIC_POSTHOG_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_KEY=${NEXT_PUBLIC_POSTHOG_KEY}
|
||||
|
||||
# Copy development deps and source
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
|
||||
# link ssl3 for latest Alpine
|
||||
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
|
||||
|
||||
# Build the application
|
||||
ENV NODE_ENV production
|
||||
ENV NODE_ENV=production
|
||||
RUN npm run build
|
||||
|
||||
# Reduce installed packages to production-only
|
||||
@@ -53,8 +69,8 @@ COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/src/server/prisma ./src/server/prisma
|
||||
|
||||
# Minimal ENV for production
|
||||
ENV NODE_ENV production
|
||||
ENV PATH $PATH:/app/node_modules/.bin
|
||||
ENV NODE_ENV=production
|
||||
ENV PATH=$PATH:/app/node_modules/.bin
|
||||
|
||||
# Run as non-root user
|
||||
USER nextjs
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023-2024 Enrico Ros
|
||||
Copyright (c) 2023-2025 Enrico Ros
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
@@ -1,41 +1,185 @@
|
||||
# BIG-AGI 🧠✨
|
||||
<div align="center">
|
||||
|
||||
Welcome to big-AGI, the AI suite for professionals that need function, form,
|
||||
simplicity, and speed. Powered by the latest models from 12 vendors and
|
||||
open-source servers, `big-AGI` offers best-in-class Chats,
|
||||
[Beams](https://github.com/enricoros/big-AGI/issues/470),
|
||||
and [Calls](https://github.com/enricoros/big-AGI/issues/354) with AI personas,
|
||||
visualizations, coding, drawing, side-by-side chatting, and more -- all wrapped in a polished UX.
|
||||
<img width="256" height="256" alt="Big-AGI Logo" src="https://big-agi.com/assets/logo-bright-github.svg" />
|
||||
|
||||
Stay ahead of the curve with big-AGI. 🚀 Pros & Devs love big-AGI. 🤖
|
||||
<h1><a href="https://big-agi.com">Big-AGI</a></h1>
|
||||
|
||||
[](https://big-agi.com)
|
||||
[](https://big-agi.com)
|
||||
[](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
|
||||
[](https://vercel.com/new/clone?repository-url=https://github.com/enricoros/big-agi)
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
<br/>
|
||||
[](https://github.com/enricoros/big-agi/commits)
|
||||
[](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
|
||||
[](https://github.com/enricoros/big-AGI/graphs/contributors)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
<br/>
|
||||
|
||||
> 🚀 Big-AGI 2 is launching Q4 2024. Be the first to experience it before the public release.
|
||||
>
|
||||
> 👉 [Apply for Early Access](https://y2rjg0zillz.typeform.com/to/ZSADpr5u?utm_source=gh-2&utm_medium=readme&utm_campaign=ea2)
|
||||
[](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
|
||||
|
||||
Or fork & run on Vercel
|
||||
[//]: # ([](https://stats.uptimerobot.com/59MXcnmjrM))
|
||||
[//]: # ([](https://github.com/enricoros/big-AGI/releases/latest))
|
||||
[//]: # ()
|
||||
[//]: # ([](#))
|
||||
[//]: # ([](https://x.com/enricoros))
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
</div>
|
||||
|
||||
### New Version
|
||||
<br/>
|
||||
|
||||
This repository contains two main versions:
|
||||
# Big-AGI Open 🧠
|
||||
|
||||
- Big-AGI 2: next-generation, bringing the most advanced AI experience
|
||||
- `v2-dev`: V2 development branch, the exciting one, future default
|
||||
- Big-AGI Stable: as deployed on big-agi.com
|
||||
- `v1-dev`: V1 development branch (this branch)
|
||||
- `v1-stable`: Current stable version
|
||||
This is the open-source foundation of **Big-AGI**, ___the multi-model AI workspace for experts___.
|
||||
|
||||
Note: After the V2 release in Q4, `v2-dev` will become the default branch and `v1-dev` will reach EOL.
|
||||
Big-AGI is the multi-model AI workspace for experts: Engineers architecting systems. Founders making decisions. Researchers validating hypotheses.
|
||||
You need to think broader, decide faster, and build with confidence, then you need Big-AGI.
|
||||
|
||||
### Quick links: 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [installation](docs/installation.md) 👉 [documentation](docs/README.md)
|
||||
It comes packed with **world-class features** like Beam, and is praised for its **best-in-class AI chat UX**.
|
||||
**As an independent, non-VC-funded project, Pro subscriptions at $10.99/mo fund development for everyone, including the free and open-source tiers.**
|
||||
|
||||
### What's New in 1.16.1...1.16.8 · Sep 13, 2024 (patch releases)
|
||||

|
||||
[](https://big-agi.com/beam)
|
||||
[](https://big-agi.com/inspector)
|
||||
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest (o1-preview and o1-mini are supported in Big-AGI 2)
|
||||
### What makes Big-AGI different:
|
||||
**Intelligence**: with [Beam & Merge](https://big-agi.com/beam) for multi-model de-hallucination, native search, and bleeding-edge AI models like Nano Banana, Kimi K2 Thinking or GPT 5.1 -
|
||||
**Control**: with personas, data ownership, requests inspection, unlimited usage with API keys, and *no vendor lock-in* -
|
||||
and **Speed**: with a local-first, over-powered, zero-latency, madly optimized web app.
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center" width="25%">
|
||||
<b>🧠 Intelligence</b><br/>
|
||||
<img src="https://img.shields.io/badge/Multi--Model-Trust-4285F4?style=for-the-badge" alt="Multi-Model"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>✨ Experience</b><br/>
|
||||
<img src="https://img.shields.io/badge/Clean-UX-34A853?style=for-the-badge" alt="Clean UX"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>⚡ Performance</b><br/>
|
||||
<img src="https://img.shields.io/badge/Zero-Latency-EA4335?style=for-the-badge" alt="Zero Latency"/>
|
||||
</td>
|
||||
<td align="center" width="25%">
|
||||
<b>🔒 Control</b><br/>
|
||||
<img src="https://img.shields.io/badge/No-Lock--in-FBBC04?style=for-the-badge" alt="No Lock-in"/>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" valign="top">
|
||||
Beam & Merge<br/>
|
||||
No context junk<br/>
|
||||
Purest AI outputs
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
Flow-state interface<br/>
|
||||
Higly customizable<br/>
|
||||
Best-in-class UX
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
Local-first<br/>
|
||||
Highly parallel<br/>
|
||||
Madly optimized
|
||||
</td>
|
||||
<td align="center" valign="top">
|
||||
No vendor lock-in<br/>
|
||||
Your API keys<br/>
|
||||
AI Inspector
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
### Who uses Big-AGI:
|
||||
Loved by engineers, founders, researchers, self-hosters, and IT departments for its power, reliability, and transparency.
|
||||
|
||||
<img width="830" height="370" alt="image" src="https://github.com/user-attachments/assets/513c4f77-0970-4a56-b23b-1416c8246174" />
|
||||
|
||||
Choose Big-AGI because you don't need another clone or slop - you need an AI tool that scales with you.
|
||||
|
||||
### Show me a screenshot:
|
||||
Sure - here is real-world screeengrab as I'm writing this, while running a Beam to extract SVG from an image with Sonnet 4.5, Opus 4.1, GPT 5.1, Gemini 2.5 Pro, Nano Banana, etc.
|
||||
<img alt="Real-world screen capture as of Nov 15 2025, 2am" src="https://github.com/user-attachments/assets/853f4160-27cb-4ac9-826b-402f1e63d4af" />
|
||||
|
||||
|
||||
## Get Started
|
||||
|
||||
| Tier | Best For | What You Get | Setup |
|
||||
|------------------------------------------------------|-------------------|---------------------------------------------------------------|-------------|
|
||||
| Big-AGI Open (self-host) | **IT** | First to get new models support. Maximum control and privacy. | 5-30 min |
|
||||
| [big-agi.com](https://big-agi.com) Free | **Everyone** | Full core experience, improved Beam, new Personas, best UX. | **2 min**\* |
|
||||
| **[big-agi.com](https://big-agi.com) Pro** $10.99/mo | **Professionals** | Everything + **Sync** across unlimited devices + 1GB storage | **2 min**\* |
|
||||
|
||||
\*: **Configuration requires your API keys**. *Big-AGI does not charge for model usage or limit your access*.
|
||||
**Why Pro?** As an independent project, Pro subscriptions fund all development. Early subscribers shape the roadmap directly.
|
||||
|
||||
[](https://big-agi.com)
|
||||
|
||||
**Self-host and developers** (full control)
|
||||
- Develop locally or self-host with Docker on your own infrastructure – [guide](docs/installation.md)
|
||||
- Or fork & run on Vercel:
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
[//]: # (**For the latest Big-AGI:**)
|
||||
|
||||
[//]: # (- [**Big-AGI Open**](https://github.com/enricoros/big-AGI/tree/main) - Open Source, latest models and features (main branch))
|
||||
|
||||
[//]: # (- [**Big-AGI Pro**](https://big-agi.com) - Hosted with Cloud Sync)
|
||||
|
||||
---
|
||||
|
||||
## Our Philosophy
|
||||
|
||||
We're an independent, non-VC-funded project with a simple belief: **AI should elevate you, not replace you**.
|
||||
|
||||
This is why we built Big-AGI to be **local-first**, madly optimized to 0-latency, launched multi-model first to
|
||||
defeat hallucinations, designed Beam around the **humans in the loop**, re-wrote frameworks and abstractions
|
||||
so you **are not vendor locked-in**, and obsessed over a powerful UI that works, just works.
|
||||
|
||||
NOTE: this is a powerful tool - if you need a toy UI or clone, this ain't it.
|
||||
|
||||
|
||||
## What's New in 2.0 · Oct 31, 2025 · Open
|
||||
|
||||
👉 **[See the full changelog](https://big-agi.com/changes)**
|
||||
|
||||
- **Big-AGI Open** is ready and more productive and faster than ever, with:
|
||||
- **Beam 2**: multi-modal, program-based, follow-ups, save presets
|
||||
- Top-notch AI models support including **agentic models** and **reasoning models**
|
||||
- **Image Generation** and editing with Nano Banana and gpt-image-1
|
||||
- **Web Search** with citations for supported models
|
||||
- **UI** & Mobile UI overhaul with peeking and side panels
|
||||
- And all of the [Big-AGI 2 changes](https://github.com/enricoros/big-AGI/issues/567#issuecomment-2262187617) and more
|
||||
- Built for the future, madly optimized
|
||||
|
||||
<img width="830" height="385" alt="image" src="https://github.com/user-attachments/assets/ad52761d-7e3f-44d8-b41e-947ce8b4faa1" />
|
||||
|
||||
### Open links: 👉 [changelog](https://big-agi.com/changes) 👉 [installation](docs/installation.md) 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
|
||||
|
||||
**For teams and institutions:** Need shared prompts, SSO, or managed deployments? Reach out at enrico@big-agi.com. We're actively collecting requirements from research groups and IT departments.
|
||||
|
||||
<details>
|
||||
<summary>5,000 Commits Milestone</summary>
|
||||
|
||||
Hit 5k commits last week. That's a lot of code.
|
||||
|
||||
Recent work has been intense:
|
||||
- Chain of thought reasoning across multiple LLMs: **OpenAI o3** and o1, **DeepSeek R1**, **Gemini 2.0 Flash Thinking**, and more
|
||||
- Beam is real - ~35% of our users run it daily to compare models
|
||||
- New AIX framework lets us scale features we couldn't before
|
||||
- UI is faster than ever. Like, terminal-fast
|
||||
|
||||
The new architecture is solid and the speed improvements are real.
|
||||
|
||||

|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.16.1...1.16.10 · 2024-2025 (patch releases)</summary>
|
||||
|
||||
- 1.16.10: OpenRouter models support
|
||||
- 1.16.9: Docker Gemini fix, R1 models support
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest, o1 models support
|
||||
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
|
||||
- 1.16.6: Groq support for Llama 3.1 models
|
||||
- 1.16.5: GPT-4o Mini support
|
||||
@@ -48,7 +192,10 @@ Note: After the V2 release in Q4, `v2-dev` will become the default branch and `v
|
||||
- 1.16.2: Updates to Beam
|
||||
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
|
||||
|
||||
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.16.0 · May 9, 2024 · Crystal Clear</summary>
|
||||
|
||||
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
|
||||
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
|
||||
@@ -59,14 +206,20 @@ Note: After the V2 release in Q4, `v2-dev` will become the default branch and `v
|
||||
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
|
||||
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
|
||||
|
||||
#### 3,000 Commits Milestone · April 7, 2024
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>3,000 Commits Milestone · April 7, 2024</summary>
|
||||
|
||||

|
||||
|
||||
- 🥇 Today we <b>celebrate commit 3000</b> in just over one year, and going stronger 🚀
|
||||
- 📢️ Thanks everyone for your support and words of love for Big-AGI, we are committed to creating the best AI experiences for everyone.
|
||||
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.15.0 · April 1, 2024 · Beam</summary>
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
@@ -76,6 +229,8 @@ Note: After the V2 release in Q4, `v2-dev` will become the default branch and `v
|
||||
- 1.15.1: Support for Gemini Pro 1.5 and OpenAI Turbo models
|
||||
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.14.1 · March 7, 2024 · Modelmorphic</summary>
|
||||
|
||||
@@ -146,99 +301,85 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
|
||||
</details>
|
||||
|
||||
For full details and former releases, check out the [changelog](docs/changelog.md).
|
||||
For full details and former releases, check out the [archived versions changelog](docs/changelog.md).
|
||||
|
||||
## 👉 Key Features ✨
|
||||
## 👉 Supported Models & Integrations
|
||||
|
||||
|  |  |  |  |  |
|
||||
Delightful UX with latest models exclusive features like Beam for **multi-model AI validation**.
|
||||
> 
|
||||
> [](https://big-agi.com/beam)
|
||||
|
||||
|  |  |  |  |  |
|
||||
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
|
||||
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
|
||||
|
||||

|
||||
|
||||
You can easily configure 100s of AI models in big-AGI:
|
||||
### AI Models & Vendors
|
||||
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| Language services | [Anthropic](https://anthropic.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) |
|
||||
| Image services | [Prodia](https://prodia.com/) (SDXL) |
|
||||
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
|
||||
Configure 100s of AI models from 18+ providers:
|
||||
|
||||
Add extra functionality with these integrations:
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) · [Ollama](https://ollama.com/) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) (non-open) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Anthropic](https://anthropic.com) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| LLM services | [Alibaba](https://www.alibabacloud.com/en/product/modelstudio) · [DeepSeek](https://deepseek.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [Moonshot](https://www.moonshot.cn/) · [OpenPipe](https://openpipe.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) · [xAI](https://x.ai/) |
|
||||
| Image services | OpenAI · Google Gemini |
|
||||
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
|
||||
|
||||
| **More** | _integrations_ |
|
||||
|:-------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
|
||||
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
|
||||
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
|
||||
| Sharing | [Paste.gg](https://paste.gg/) (Paste chats) |
|
||||
| Tracking | [Helicone](https://www.helicone.ai) (LLM Observability) |
|
||||
### Additional Integrations
|
||||
|
||||
[//]: # (- [x] **Flow-state UX** for uncompromised productivity)
|
||||
|
||||
[//]: # (- [x] **AI Personas**: Tailor your AI interactions with customizable personas)
|
||||
|
||||
[//]: # (- [x] **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface)
|
||||
|
||||
[//]: # (- [x] **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads)
|
||||
|
||||
[//]: # (- [x] **Privacy First**: Self-host and use your own API keys for full control)
|
||||
|
||||
[//]: # (- [x] **Advanced Tools**: Execute code, import PDFs, and summarize documents)
|
||||
|
||||
[//]: # (- [x] **Seamless Integrations**: Enhance functionality with various third-party services)
|
||||
|
||||
[//]: # (- [x] **Open Roadmap**: Contribute to the progress of big-AGI)
|
||||
|
||||
<br/>
|
||||
|
||||
## 🚀 Installation
|
||||
|
||||
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
|
||||
The guide covers various installation options, whether you're spinning it up on
|
||||
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
|
||||
through Docker.
|
||||
|
||||
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
|
||||
to set up big-AGI quickly and easily.
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
|
||||
|
||||
<br/>
|
||||
|
||||
# 🌟 Get Involved!
|
||||
|
||||
[//]: # ([](https://discord.gg/MkH4qj2Jp9))
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
|
||||
- [ ] 📢️ [**Chat with us** on Discord](https://discord.gg/MkH4qj2Jp9)
|
||||
- [ ] ⭐ **Give us a star** on GitHub 👆
|
||||
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
|
||||
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
|
||||
|
||||
<br/>
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/stargazers))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/network))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/pulls))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/LICENSE))
|
||||
|
||||
## 📜 Licensing
|
||||
|
||||
Big-AGI incorporates third-party software components that are subject
|
||||
to separate license terms. For detailed information about these
|
||||
components and their respective licenses, please refer to
|
||||
the [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md).
|
||||
| **More** | _integrations_ |
|
||||
|:--------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
|
||||
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
|
||||
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
|
||||
| Observability | [Helicone](https://www.helicone.ai) |
|
||||
|
||||
---
|
||||
|
||||
2023-2024 · Enrico Ros x [Big-AGI](https://big-agi.com) · Like this project? Leave a star! 💫⭐
|
||||
## 🚀 Installation
|
||||
|
||||
Self-host with Docker, deploy on Vercel, or develop locally. Full setup guide:
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or use the hosted version at [big-agi.com](https://big-agi.com) with your API keys.
|
||||
|
||||
---
|
||||
|
||||
## 👋 Community & Contributing
|
||||
|
||||
### Connect
|
||||
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
|
||||
⭐ [Star the repo](https://github.com/enricoros/big-agi) if Big-AGI is useful to you
|
||||
|
||||
### Contribute
|
||||
|
||||
**🤖 AI-Powered Issue Assistance**
|
||||
|
||||
When you open an issue, our custom AI triage system (powered by [Claude Code](https://github.com/anthropics/claude-code-action) with Big-AGI architecture documentation) analyzes it, searches the codebase, and provides solutions - typically within 30 minutes. We've trained the system on our modules and subsystems so it handles most issues effectively. Your feedback drives development!
|
||||
|
||||
[](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
|
||||
[](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
|
||||
[](https://github.com/users/enricoros/projects/4/views/4)
|
||||
[](docs/customizations.md)
|
||||
[](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
#### Contributors
|
||||
|
||||
<a href="https://github.com/enricoros/big-agi/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=enricoros/big-agi&max=48&columns=12" />
|
||||
</a>
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
MIT License · [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md)
|
||||
|
||||
**2023-2025** · Enrico Ros × [Big-AGI](https://big-agi.com)
|
||||
|
||||
@@ -2,23 +2,38 @@ import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterCloud } from '~/server/trpc/trpc.router-cloud';
|
||||
import { createTRPCFetchContext } from '~/server/trpc/trpc.server';
|
||||
import { posthogServerSendException } from '~/server/posthog/posthog.server';
|
||||
|
||||
const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
|
||||
endpoint: '/api/cloud',
|
||||
router: appRouterCloud,
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-cloud failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
onError: async function({ path, error, type, ctx }) {
|
||||
|
||||
// -> DEV error logging
|
||||
if (process.env.NODE_ENV === 'development')
|
||||
console.error(`❌ tRPC-cloud failed on ${path ?? 'unk-path'}: ${error.message}`);
|
||||
|
||||
// -> Capture node errors
|
||||
await posthogServerSendException(error, undefined, {
|
||||
domain: 'trpc-onerror',
|
||||
runtime: 'nodejs',
|
||||
endpoint: path ?? 'unknown',
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
additionalProperties: {
|
||||
error_code: error.code,
|
||||
error_type: type,
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
// NOTE: the following statement breaks the build on non-pro deployments, and conditionals don't work either
|
||||
// so we resorted to raising the timeout from 10s to 25s in the vercel.json file instead
|
||||
// export const maxDuration = 25;
|
||||
|
||||
// so we resorted to raising the timeout from 10s to 60s in the vercel.json file instead
|
||||
export const maxDuration = 60;
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
|
||||
@@ -10,7 +10,7 @@ const handlerEdgeRoutes = (req: Request) => fetchRequestHandler({
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
? ({ path, error }) => console.error(`\n❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
# Very simple docker-compose file to run the app on http://localhost:3000 (or http://127.0.0.1:3000).
|
||||
#
|
||||
# For more examples, such runnin big-AGI alongside a web browsing service, see the `docs/docker` folder.
|
||||
# For more examples, such running big-AGI alongside a web browsing service, see the `docs/docker` folder.
|
||||
|
||||
version: '3.9'
|
||||
|
||||
|
||||
+31
-20
@@ -2,35 +2,47 @@
|
||||
|
||||
Information you need to get started, configure, and use big-AGI productively.
|
||||
|
||||
👉 **[Changelog](https://big-agi.com/changes)** - See what's new
|
||||
|
||||
## Getting Started
|
||||
|
||||
Guides for basic big-AGI features:
|
||||
Essential guides:
|
||||
|
||||
- **[Enabling Microphone for Speech Recognition](help-feature-microphone.md)**: Instructions to
|
||||
allow speech recognition in browsers and apps.
|
||||
- **[FAQ](help-faq.md)**: Common questions and answers
|
||||
- **[Enabling Microphone](help-feature-microphone.md)**: Configure speech recognition in your browser
|
||||
|
||||
## AI Model Configuration
|
||||
## AI Services
|
||||
|
||||
Detailed guides to configure AI models and advanced features in big-AGI.
|
||||
How to set up AI models and features in big-AGI.
|
||||
|
||||
> 👉 The following applies to users of big-AGI.com, as the public instance is empty and requires user configuration.
|
||||
|
||||
- **Cloud AI Services**:
|
||||
- **[Azure OpenAI](config-azure-openai.md)**
|
||||
- **[OpenRouter](config-openrouter.md)**
|
||||
- Easy API key setup: **Anthropic**, **Deepseek**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **OpenPipe**, **Perplexity**, **TogetherAI**, **xAI**
|
||||
- Easy API key configuration:
|
||||
[Alibaba](https://bailian.console.alibabacloud.com/?apiKey=1#/api-key),
|
||||
[Anthropic](https://console.anthropic.com/settings/keys),
|
||||
[Deepseek](https://platform.deepseek.com/api_keys),
|
||||
[Google Gemini](https://aistudio.google.com/app/apikey),
|
||||
[Groq](https://console.groq.com/keys),
|
||||
[Mistral](https://console.mistral.ai/api-keys/),
|
||||
[OpenAI](https://platform.openai.com/api-keys),
|
||||
[OpenPipe](https://app.openpipe.ai/settings),
|
||||
[Perplexity](https://www.perplexity.ai/settings/api),
|
||||
[TogetherAI](https://api.together.xyz/settings/api-keys),
|
||||
[xAI](http://x.ai/api)
|
||||
- **[Azure OpenAI](config-azure-openai.md)** guide
|
||||
- **FireworksAI** ([API keys](https://fireworks.ai/account/api-keys), via custom OpenAI endpoint: https://api.fireworks.ai/inference)
|
||||
- **[OpenRouter](config-openrouter.md)** guide
|
||||
|
||||
|
||||
- **Local AI Integrations**:
|
||||
- **[LocalAI](config-local-localai.md)**
|
||||
- **[LM Studio](config-local-lmstudio.md)**
|
||||
- **[Ollama](config-local-ollama.md)**
|
||||
- [LocalAI](config-local-localai.md), [LM Studio](config-local-lmstudio.md), [Ollama](config-local-ollama.md)
|
||||
|
||||
|
||||
- **Enhanced AI Features**:
|
||||
- **[Web Browsing](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud (advanced)
|
||||
- **[Web Browsing](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud
|
||||
- **Web Search**: Google Search API (see '[Environment Variables](environment-variables.md)')
|
||||
- **Image Generation**: DALL·E 3 and 2, or Prodia API for Stable Diffusion XL
|
||||
- **Image Generation**: GPT Image (gpt-image-1), DALL·E 3 and 2
|
||||
- **Voice Synthesis**: ElevenLabs API for voice generation
|
||||
|
||||
## Deployment & Customization
|
||||
@@ -39,13 +51,14 @@ Detailed guides to configure AI models and advanced features in big-AGI.
|
||||
|
||||
For deploying a custom big-AGI instance:
|
||||
|
||||
- **[Installation Guide](installation.md)**: Set up your own big-AGI instance
|
||||
- **[Installation Guide](installation.md)**, including:
|
||||
- Set up your own big-AGI instance
|
||||
- Source build or pre-built options
|
||||
- Local, cloud, or on-premises deployment
|
||||
|
||||
|
||||
- **Advanced Setup**:
|
||||
- **[Source Code Customization Guide](customizations.md)**: Modify the source code
|
||||
- **[Source Code Customization](customizations.md)**: Modify the source code
|
||||
- **[Access Control](deploy-authentication.md)**: Optional, add basic user authentication
|
||||
- **[Database Setup](deploy-database.md)**: Optional, enables "Chat Link Sharing"
|
||||
- **[Reverse Proxy](deploy-reverse-proxy.md)**: Optional, enables custom domains and SSL
|
||||
@@ -53,10 +66,8 @@ For deploying a custom big-AGI instance:
|
||||
|
||||
## Community & Support
|
||||
|
||||
Connect with the growing big-AGI community:
|
||||
|
||||
- Check the [changelog](https://big-agi.com/changes) for the latest updates
|
||||
- Visit our [GitHub repository](https://github.com/enricoros/big-AGI) for source code and issue tracking
|
||||
- Check the latest updates and features on [Changelog](changelog.md) or the in-app [News](https://get.big-agi.com/news)
|
||||
- Connect with us and other users on [Discord](https://discord.gg/MkH4qj2Jp9) for discussions, help, and sharing your experiences with big-AGI
|
||||
- Join our [Discord](https://discord.gg/MkH4qj2Jp9) for discussions and help
|
||||
|
||||
Thank you for choosing big-AGI. We're excited to give you the best tools to amplify yourself.
|
||||
Let's build something great.
|
||||
|
||||
+19
-7
@@ -1,18 +1,30 @@
|
||||
## Changelog
|
||||
## Archived Versions - Changelog
|
||||
|
||||
This is a high-level changelog. It calls out some of the high-level features batched
|
||||
by release.
|
||||
|
||||
- For the live changelog, see [big-agi.com/changes](https://big-agi.com/changes)
|
||||
- For the live roadmap, please see [the GitHub project](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
### 1.17.0 - Jun 2024
|
||||
> NOTE: with the release of 2.0.0 we're switching to [big-agi.com/changes](https://big-agi.com/changes) for the
|
||||
> continuously updated changelog.
|
||||
|
||||
- milestone: [1.17.0](https://github.com/enricoros/big-agi/milestone/17)
|
||||
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
|
||||
### What's New in 2 · Oct 31, 2025 · Open
|
||||
|
||||
### What's New in 1.16.1...1.16.8 · Sep 13, 2024 (patch releases)
|
||||
- **Big-AGI Open** is ready and more productive and faster than ever, with:
|
||||
- **Beam 2**: multi-modal, program-based, follow-ups, save presets
|
||||
- Top-notch AI models support including **agentic models** and **reasoning models**
|
||||
- **Image Generation** and editing with Nano Banana and gpt-image-1
|
||||
- **Web Search** with citations for supported models
|
||||
- **UI** & Mobile UI overhaul with peeking and side panels
|
||||
- And all of the [Big-AGI 2 changes](https://github.com/enricoros/big-AGI/issues/567#issuecomment-2262187617) and more
|
||||
- Built for the future, madly optimized
|
||||
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest (o1-preview and o1-mini are supported in Big-AGI 2)
|
||||
### What's New in 1.16.1...1.16.9 · Jan 21, 2025 (patch releases)
|
||||
|
||||
- 1.16.10: OpenRouter models support
|
||||
- 1.16.9: Docker Gemini fix, R1 models support
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest, o1 models support
|
||||
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
|
||||
- 1.16.6: Groq support for Llama 3.1 models
|
||||
- 1.16.5: GPT-4o Mini support
|
||||
@@ -46,7 +58,7 @@ by release.
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
|
||||
- Enhanced the default Persona
|
||||
- Fixes to Gemini models and SVGs, improvements to UI and icons
|
||||
|
||||
+48
-28
@@ -14,7 +14,7 @@ If you have an `API Endpoint` and `API Key`, you can configure big-AGI as follow
|
||||
1. Launch the `big-AGI` application
|
||||
2. Go to the **Models** settings
|
||||
3. Add a Vendor and select **Azure OpenAI**
|
||||
- Enter the Endpoint (e.g., 'https://your-openai-api-1234.openai.azure.com/')
|
||||
- Enter the Endpoint (e.g., 'https://your-resource-name.openai.azure.com')
|
||||
- Enter the API Key (e.g., 'fd5...........................ba')
|
||||
|
||||
The deployed models are now available in the application. If you don't have a configured
|
||||
@@ -23,6 +23,36 @@ Azure OpenAI service instance, continue with the next section.
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
## Server Configuration
|
||||
|
||||
For server deployments, set these environment variables:
|
||||
|
||||
```bash
|
||||
AZURE_OPENAI_API_ENDPOINT=https://your-resource-name.openai.azure.com
|
||||
AZURE_OPENAI_API_KEY=your-api-key
|
||||
```
|
||||
|
||||
This enables Azure OpenAI for all users without requiring individual API keys. For more details, see [environment-variables.md](environment-variables.md).
|
||||
|
||||
## Azure OpenAI API Versions
|
||||
|
||||
Azure OpenAI supports both traditional deployment-based API and the next-generation v1 API:
|
||||
|
||||
### Next-Generation v1 API (Default)
|
||||
- **Enabled by default** for GPT-5-like models (GPT-5, GPT-6, o3, o4, etc.)
|
||||
- Uses direct `/openai/v1/responses` endpoint without deployment IDs
|
||||
- Optimized for advanced reasoning models and new features
|
||||
- Can be disabled by setting `AZURE_OPENAI_DISABLE_V1=true`
|
||||
|
||||
### Traditional Deployment-Based API
|
||||
- Uses `/openai/deployments/{deployment-name}/...` endpoints
|
||||
- Required for older models and when v1 API is disabled
|
||||
- Needs deployment ID for all API calls
|
||||
|
||||
### Known Limitations
|
||||
- **Web Search Tool**: Azure OpenAI does not support the `web_search_preview` tool that's available in OpenAI's API
|
||||
- Models with web search capabilities will have this feature automatically disabled on Azure
|
||||
|
||||
## Setting Up Azure
|
||||
|
||||
### Step 1: Azure Account & Subscription
|
||||
@@ -34,18 +64,7 @@ In addition to using the UI, configuration can also be done using
|
||||
- Fill in the required fields and click on **Create**
|
||||
- Note down the **Subscription ID** (e.g., `12345678-1234-1234-1234-123456789012`)
|
||||
|
||||
### Step 2: Apply for Azure OpenAI Service
|
||||
|
||||
We'll now be creating "OpenAI"-specific resources on Azure. This requires you to 'apply',
|
||||
and acceptance should be quick (even as low as minutes).
|
||||
|
||||
1. Visit [Azure OpenAI Service](https://aka.ms/azure-openai)
|
||||
2. Click on **Apply for access**
|
||||
- Fill in the required fields (including the subscription ID) and click on **Apply**
|
||||
|
||||
Once your application is accepted, you can create OpenAI resources on Azure.
|
||||
|
||||
### Step 3: Create Azure OpenAI Resource
|
||||
### Step 2: Create Azure OpenAI Resource
|
||||
|
||||
For more information, see [Azure: Create and deploy OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal)
|
||||
|
||||
@@ -55,31 +74,32 @@ For more information, see [Azure: Create and deploy OpenAI](https://learn.micros
|
||||

|
||||
- Select the subscription
|
||||
- Select a resource group or create a new one
|
||||
- Select the region. Note that the region determines the available models.
|
||||
> For instance, **Canada East** offers GPT-4-32k models, For the full list, see [GPT-4 models](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models)
|
||||
- Select the region. **Important**: The region determines which models are available.
|
||||
> Popular regions like **East US**, **West Europe**, and **Australia East** typically have the best model availability. For the latest model availability by region, see [Azure OpenAI Model Availability](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models)
|
||||
- Name the service (e.g., `your-openai-api-1234`)
|
||||
- Select a pricing tier (e.g., `S0` for standard)
|
||||
- Select: "All networks, including the internet, can access this resource."
|
||||
- Click on **Review + create** and then **Create**
|
||||
|
||||
After creating the resource, you can access the API Keys and Endpoints. At any point, you can go to
|
||||
the OpenAI Service instance page to get this information.
|
||||
After creating the resource, you can access the API Keys and Endpoints:
|
||||
|
||||
- Click on **Go to resource**
|
||||
- Click on **Develop**
|
||||
- Copy the `Endpoint`, called "Language API", e.g. 'https://your-openai-api-1234.openai.azure.com/'
|
||||
- Copy `KEY 1`
|
||||
1. Click on **Go to resource** (or navigate to your Azure OpenAI resource)
|
||||
2. In the left sidebar, under **Resource Management**, click on **Keys and Endpoint**
|
||||
3. Copy the required information:
|
||||
- **Endpoint**: e.g., 'https://your-resource-name.openai.azure.com/'
|
||||
- **Key**: Copy either KEY 1 or KEY 2 (both work identically)
|
||||
|
||||
### Step 4: Deploy Models
|
||||
### Step 3: Deploy Models
|
||||
|
||||
By default, Azure OpenAI resource instances don't have models available. You need to deploy the models you want to use.
|
||||
|
||||
1. Click on **Model Deployments > Manage Deployments**
|
||||
2. Click on **+Create New Deployment**
|
||||

|
||||
- Select the model you want to deploy
|
||||
- Optionally select a version
|
||||
- name the model, e.g., `gpt4-32k-0613`
|
||||
1. In your Azure OpenAI resource, click on **Model deployments** in the left sidebar
|
||||
2. Click on **Create new deployment**
|
||||
3. Fill in the deployment details:
|
||||
- **Select a model**: Choose from available models
|
||||
- **Model version**: Select the latest version or a specific one
|
||||
- **Deployment name**: Give it a meaningful name
|
||||
4. Click **Deploy**
|
||||
|
||||
Repeat as necessary for each model you want to deploy.
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@ If the running LocalAI instance is configured with a [Model Gallery](https://loc
|
||||
|
||||
At the time of writing, LocalAI does not publish the model `context window size`.
|
||||
Every model is assumed to be capable of chatting, and with a context window of 4096 tokens.
|
||||
Please update the [src/modules/llms/transports/server/openai/models/models.data.ts](../src/modules/llms/server/openai/models/models.data.ts)
|
||||
Please update the [src/modules/llms/server/models.mappings.ts](../src/modules/llms/server/models.mappings.ts)
|
||||
file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
|
||||
|
||||
# 🤝 Support
|
||||
|
||||
+27
-6
@@ -31,17 +31,14 @@ At time of writing, big-AGI has only 2 operations that run on Node.js Functions:
|
||||
browsing (fetching web pages) and sharing. They both can exceed 10 seconds, especially
|
||||
when fetching large pages or waiting for websites to be completed.
|
||||
|
||||
We provide `vercel_PRODUCTION.json` to raise the duration to 25 seconds (from a default of 10), to use it,
|
||||
make sure to rename it to `vercel.json` before build.
|
||||
|
||||
From the Vercel Project > Settings > General > Build & Development Settings,
|
||||
you can for instance set the build command to:
|
||||
|
||||
```bash
|
||||
mv vercel_PRODUCTION.json vercel.json; next build
|
||||
next build
|
||||
```
|
||||
|
||||
### Change the Personas
|
||||
### Change the Personas (v1.x only)
|
||||
|
||||
Edit the `src/data.ts` file to customize personas. This file houses the default personas. You can add, remove, or modify these to meet your project's needs.
|
||||
|
||||
@@ -55,6 +52,21 @@ Adapt the UI to match your project's aesthetic, incorporate new features, or exc
|
||||
- [ ] Modify `src/common/app.config.tsx` to alter the application's name
|
||||
- [ ] Update `src/common/app.nav.tsx` to revise the navigation bar
|
||||
|
||||
### Add a Message of the Day
|
||||
|
||||
You can display a temporary announcement banner at the top of the app using the `NEXT_PUBLIC_MOTD` environment variable.
|
||||
|
||||
- Set this variable in your deployment environment
|
||||
- The message supports template variables:
|
||||
- `{{app_build_hash}}`: Current git commit hash
|
||||
- `{{app_build_pkgver}}`: Package version
|
||||
- `{{app_build_time}}`: Build timestamp as date
|
||||
- `{{app_deployment_type}}`: Deployment type (local, docker, vercel, etc.)
|
||||
- Users can dismiss the message (until next page refresh)
|
||||
- Use it for version announcements, maintenance notices, or feature highlights
|
||||
|
||||
Example: `NEXT_PUBLIC_MOTD=🚀 New features available in {{app_build_pkgver}}! Try the improved Beam.`
|
||||
|
||||
## Testing & Deployment
|
||||
|
||||
Test your application thoroughly using local development (refer to README.md for local build instructions). Deploy using your preferred hosting service. big-AGI supports deployment on platforms like Vercel, Docker, or any Node.js-compatible service, especially those supporting NextJS's "Edge Runtime."
|
||||
@@ -65,7 +77,16 @@ Test your application thoroughly using local development (refer to README.md for
|
||||
|
||||
## Debugging
|
||||
|
||||
We introduced the `/info/debug` page that provides a detailed overview of the application's environment, including the API keys, environment variables, and other configuration settings.
|
||||
The application includes a client-side logging system. You can view recent logs via the UI (Settings > Tools > Logs).
|
||||
|
||||
For deeper debugging during development:
|
||||
|
||||
1. **Debug Page**: Access the `/info/debug` page for an overview of the application's environment, configuration, API status, and environment variables available to the client.
|
||||
2. **Conditional Breakpoints**: To automatically pause execution in your browser's developer tools when critical errors (`error`, `critical`, `DEV` levels) are logged to the console, set the following environment variable in your local `.env.local` file and restart your development server:
|
||||
```bash
|
||||
NEXT_PUBLIC_DEBUG_BREAKS=true
|
||||
```
|
||||
This allows you to inspect the application state at the exact moment an important error occurs. This feature only works in development mode (`npm run dev`) and requires the environment variable to be explicitly set to `true`.
|
||||
|
||||
<br/>
|
||||
|
||||
|
||||
+40
-23
@@ -2,8 +2,9 @@
|
||||
|
||||
The open-source big-AGI project provides support for the following analytics services:
|
||||
|
||||
- **Vercel Analytics**: automatic when deployed to Vercel
|
||||
- **Google Analytics 4**: manual setup required
|
||||
- **PostHog Analytics**: manual setup required
|
||||
- **Vercel Analytics**: automatic when deployed to Vercel
|
||||
|
||||
The following is a quick overview of the Analytics options for the deployers of this open-source project.
|
||||
big-AGI is deployed by many large-scale and enterprise users through various ways (custom builds, Docker, Vercel, Cloudflare, etc.),
|
||||
@@ -11,6 +12,36 @@ and this guide is for its customization.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
### Google Analytics 4
|
||||
|
||||
- Why: user engagement and retention, performance insights, personalization, content optimization
|
||||
- What: https://support.google.com/analytics/answer/11593727
|
||||
|
||||
Google Analytics 4 (GA4) is a powerful tool for understanding user behavior and engagement.
|
||||
This can help optimize big-AGI, understanding which features are needed/used and which aren't.
|
||||
|
||||
To enable Google Analytics 4, you need to set the `NEXT_PUBLIC_GA4_MEASUREMENT_ID` environment variable
|
||||
before starting the local build or the docker build (i.e. at build time), at which point the
|
||||
server/container will be able to report analytics to your Google Analytics 4 property.
|
||||
|
||||
As of Feb 27, 2024, this feature is in development.
|
||||
|
||||
### PostHog Analytics
|
||||
|
||||
- Why: feature usage tracking, user journeys, conversion optimization, product analytics
|
||||
- What: page views, page leave events, user interactions, and deployment context
|
||||
|
||||
PostHog provides comprehensive product analytics with privacy controls. It helps understand how users interact with big-AGI's features, identify opportunities for improvement, and optimize the user experience.
|
||||
|
||||
To enable PostHog, set the `NEXT_PUBLIC_POSTHOG_KEY` environment variable at build time. PostHog is configured with tracking optimization and privacy in mind:
|
||||
|
||||
- Uses a proxy endpoint (`/a/ph`) to avoid ad blockers
|
||||
- Respects user opt-out preferences via local storage
|
||||
- Tracks only essential information without PII
|
||||
- Adds deployment context for better segmentation
|
||||
|
||||
The implementation follows PostHog's best practices for Next.js applications and includes manual page view tracking for proper single-page application support.
|
||||
|
||||
### Vercel Analytics
|
||||
|
||||
- Why: understand coarse traction, and identify deployment issues - all without tracking individual users
|
||||
@@ -31,33 +62,19 @@ const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) => <>
|
||||
</>;
|
||||
```
|
||||
|
||||
When big-AGI is served on Vercel hosts, the ```process.env.NEXT_PUBLIC_VERCEL_URL``` environment variable is trueish, and
|
||||
When big-AGI is served on Vercel hosts, the `process.env.NEXT_PUBLIC_VERCEL_URL` environment variable is trueish, and
|
||||
analytics will be sent by default to the Vercel Analytics service which is deployed by Vercel IF configured from the
|
||||
Vercel project dashboard.
|
||||
|
||||
In summary: to turn it on: activate the `Analytics` service in the Vercel project dashboard.
|
||||
|
||||
### Google Analytics 4
|
||||
|
||||
- Why: user engagement and retention, performance insights, personalization, content optimization
|
||||
- What: https://support.google.com/analytics/answer/11593727
|
||||
|
||||
Google Analytics 4 (GA4) is a powerful tool for understanding user behavior and engagement.
|
||||
This can help optimize big-AGI, understanding which features are needed/used and which aren't.
|
||||
|
||||
To enable Google Analytics 4, you need to set the `NEXT_PUBLIC_GA4_MEASUREMENT_ID` environment variable
|
||||
before starting the local build or the docker build (i.e. at build time), at which point the
|
||||
server/container will be able to report analytics to your Google Analytics 4 property.
|
||||
|
||||
As of Feb 27, 2024, this feature is in development.
|
||||
|
||||
## Configurations
|
||||
|
||||
| Scope | Default | Description / Instructions |
|
||||
|-----------------------------------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------|
|
||||
| Your source builds of big-AGI | None | **Vercel**: enable Vercel Analytics from the dashboard. · **Google Analytics**: set environment variable at build time. |
|
||||
| Your docker builds of big-AGI | None | **Vercel**: n/a. · **Google Analytics**: set environment variable at `docker build` time. |
|
||||
| [big-agi.com](https://big-agi.com) | Vercel + Google | The main website ([privacy policy](https://big-agi.com/privacy)) hosted for free for anyone. |
|
||||
| [official Docker packages](https://github.com/enricoros/big-AGI/pkgs/container/big-agi) | Google Analytics | **Vercel**: n/a · **Google Analytics**: set to the big-agi.com Google Analytics for analytics and improvements. |
|
||||
| Scope | Default | Description / Instructions |
|
||||
|-------------------------------------------------------------------------------------------------------------------------|---------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Your **Source** builds of big-AGI | None | **Google Analytics**: set environment variable at build time · **PostHog**: set environment variable at build time · **Vercel**: enable Vercel Analytics from the dashboard |
|
||||
| Your **Docker** builds of big-AGI | None | (**Vercel**: n/a) · **Google Analytics**: set environment variable at `docker build` time · **PostHog**: set environment variable at `docker build` time. |
|
||||
| [get.big-agi.com](https://get.big-agi.com) (**Big-AGI 1.x Legacy**) | Vercel + Google + PostHog | The main website ([privacy policy](https://big-agi.com/privacy)) hosted for free for anyone. |
|
||||
| [prebuilt Docker packages](https://github.com/enricoros/big-AGI/pkgs/container/big-agi) (**Big-AGI 1.x**, 'latest' tag) | Google Analytics | **Vercel**: n/a · **Google Analytics**: set to the big-agi.com Google Analytics for analytics and improvements · **PostHog**: n/a |
|
||||
|
||||
Note: this information is updated as of Feb 27, 2024 and can change at any time.
|
||||
Note: this information is updated as of March 3, 2025 and can change at any time.
|
||||
@@ -31,6 +31,12 @@ file.
|
||||
|
||||
### Official Images: [ghcr.io/enricoros/big-agi](https://github.com/enricoros/big-agi/pkgs/container/big-agi)
|
||||
|
||||
#### Available Tags
|
||||
|
||||
- **`:latest`** / **`:stable`** - Latest stable release (recommended)
|
||||
- **`:development`** - Main branch (bleeding edge)
|
||||
- **`:v2.0.0`** - Specific versions
|
||||
|
||||
#### Run using *docker* 🚀
|
||||
|
||||
```bash
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
This document provides an explanation of the environment variables used in the big-AGI application.
|
||||
|
||||
**All variables are optional**; and _UI options_ take precedence over _backend environment variables_,
|
||||
which take precedence over _defaults_. This file is kept in sync with [`../src/server/env.mjs`](../src/server/env.mjs).
|
||||
which take precedence over _defaults_. This file is kept in sync with [`../src/server/env.ts`](../src/server/env.ts).
|
||||
|
||||
### Setting Environment Variables
|
||||
|
||||
@@ -23,6 +23,8 @@ MDB_URI=
|
||||
OPENAI_API_KEY=
|
||||
OPENAI_API_HOST=
|
||||
OPENAI_API_ORG_ID=
|
||||
ALIBABA_API_HOST=
|
||||
ALIBABA_API_KEY=
|
||||
AZURE_OPENAI_API_ENDPOINT=
|
||||
AZURE_OPENAI_API_KEY=
|
||||
ANTHROPIC_API_KEY=
|
||||
@@ -33,6 +35,7 @@ GROQ_API_KEY=
|
||||
LOCALAI_API_HOST=
|
||||
LOCALAI_API_KEY=
|
||||
MISTRAL_API_KEY=
|
||||
MOONSHOT_API_KEY=
|
||||
OLLAMA_API_HOST=
|
||||
OPENPIPE_API_KEY=
|
||||
OPENROUTER_API_KEY=
|
||||
@@ -54,16 +57,16 @@ GOOGLE_CSE_ID=
|
||||
ELEVENLABS_API_KEY=
|
||||
ELEVENLABS_API_HOST=
|
||||
ELEVENLABS_VOICE_ID=
|
||||
# Text-To-Image: Prodia
|
||||
PRODIA_API_KEY=
|
||||
|
||||
# Backend HTTP Basic Authentication (see `deploy-authentication.md` for turning on authentication)
|
||||
HTTP_BASIC_AUTH_USERNAME=
|
||||
HTTP_BASIC_AUTH_PASSWORD=
|
||||
|
||||
|
||||
# Frontend variables
|
||||
# Frontend variables
|
||||
NEXT_PUBLIC_MOTD=
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=
|
||||
NEXT_PUBLIC_POSTHOG_KEY=
|
||||
NEXT_PUBLIC_PLANTUML_SERVER_URL=
|
||||
```
|
||||
|
||||
@@ -88,8 +91,13 @@ requiring the user to enter an API key
|
||||
| `OPENAI_API_KEY` | API key for OpenAI | Recommended |
|
||||
| `OPENAI_API_HOST` | Changes the backend host for the OpenAI vendor, to enable platforms such as Helicone and CloudFlare AI Gateway | Optional |
|
||||
| `OPENAI_API_ORG_ID` | Sets the "OpenAI-Organization" header field to support organization users | Optional |
|
||||
| `ALIBABA_API_HOST` | The Alibaba AI OpenAI-compatible endpoint | Optional |
|
||||
| `ALIBABA_API_KEY` | The API key for Alibaba AI | Optional |
|
||||
| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI endpoint - host only, without the path | Optional, but if set `AZURE_OPENAI_API_KEY` must also be set |
|
||||
| `AZURE_OPENAI_API_KEY` | Azure OpenAI API key, see [config-azure-openai.md](config-azure-openai.md) | Optional, but if set `AZURE_OPENAI_API_ENDPOINT` must also be set |
|
||||
| `AZURE_OPENAI_DISABLE_V1` | Disables the next-generation v1 API for GPT-5-like models (set to 'true' to disable) | Optional, defaults to enabled |
|
||||
| `AZURE_OPENAI_API_VERSION` | API version for traditional deployment-based endpoints | Optional, defaults to '2025-04-01-preview' |
|
||||
| `AZURE_DEPLOYMENTS_API_VERSION` | API version for the deployments listing endpoint | Optional, defaults to '2023-03-15-preview' |
|
||||
| `ANTHROPIC_API_KEY` | The API key for Anthropic | Optional |
|
||||
| `ANTHROPIC_API_HOST` | Changes the backend host for the Anthropic vendor, to enable platforms such as AWS Bedrock | Optional |
|
||||
| `DEEPSEEK_API_KEY` | The API key for Deepseek AI | Optional |
|
||||
@@ -98,6 +106,7 @@ requiring the user to enter an API key
|
||||
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
|
||||
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
|
||||
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
|
||||
| `MOONSHOT_API_KEY` | The API key for Moonshot AI | Optional |
|
||||
| `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md) | |
|
||||
| `OPENPIPE_API_KEY` | The API key for OpenPipe | Optional |
|
||||
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
|
||||
@@ -127,8 +136,6 @@ Enable the app to Talk, Draw, and Google things up.
|
||||
| `ELEVENLABS_API_KEY` | ElevenLabs API Key - used for calls, etc. |
|
||||
| `ELEVENLABS_API_HOST` | Custom host for ElevenLabs |
|
||||
| `ELEVENLABS_VOICE_ID` | Default voice ID for ElevenLabs |
|
||||
| **Text-To-Image** | [Prodia](https://prodia.com/) is a reliable image generation service |
|
||||
| `PRODIA_API_KEY` | Prodia API Key - used with '/imagine ...' |
|
||||
| **Google Custom Search** | [Google Programmable Search Engine](https://programmablesearchengine.google.com/about/) produces links to pages |
|
||||
| `GOOGLE_CLOUD_API_KEY` | Google Cloud API Key, used with the '/react' command - [Link to GCP](https://console.cloud.google.com/apis/credentials) |
|
||||
| `GOOGLE_CSE_ID` | Google Custom/Programmable Search Engine ID - [Link to PSE](https://programmablesearchengine.google.com/) |
|
||||
@@ -142,10 +149,13 @@ Enable the app to Talk, Draw, and Google things up.
|
||||
|
||||
The value of these variables are passed to the frontend (Web UI) - make sure they do not contain secrets.
|
||||
|
||||
| Variable | Description |
|
||||
|:----------------------------------|:-----------------------------------------------------------------------------------------|
|
||||
| `NEXT_PUBLIC_GA4_MEASUREMENT_ID` | The measurement ID for Google Analytics 4. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_PLANTUML_SERVER_URL` | The URL of the PlantUML server, used for rendering UML diagrams. (code in RenderCode.tsx) |
|
||||
| Variable | Description |
|
||||
|:----------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `NEXT_PUBLIC_DEBUG_BREAKS` | (optional, development) When set to 'true', enables automatic debugger breaks on DEV/error/critical logs in development builds |
|
||||
| `NEXT_PUBLIC_MOTD` | Message of the Day - displays a dismissible banner at the top of the app (see [customizations](customizations.md) for the template variables). Example: 🔔 Welcome to our deployment! Version {{app_build_pkgver}} built on {{app_build_time}}. |
|
||||
| `NEXT_PUBLIC_GA4_MEASUREMENT_ID` | (optional) The measurement ID for Google Analytics 4. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_POSTHOG_KEY` | (optional) Key for PostHog analytics. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_PLANTUML_SERVER_URL` | The URL of the PlantUML server, used for rendering UML diagrams. Allows using custom local servers. |
|
||||
|
||||
> Important: these variables must be set at build time, which is required by Next.js to pass them to the frontend.
|
||||
> This is in contrast to the backend variables, which can be set when starting the local server/container.
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
# Big-AGI Data Ownership Guide
|
||||
|
||||
Big-AGI is a **client-first** web application, which means it prioritizes speed and data ownership compared to cloud apps.
|
||||
Your *API keys*, *chat history*, and *settings* live in your
|
||||
browser's [local storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage), not
|
||||
on cloud servers.
|
||||
|
||||
You can use Big-AGI in two ways:
|
||||
|
||||
1. Run it yourself (open-source)
|
||||
2. Use big-agi.com (hosted service)
|
||||
|
||||
This guide explains how the open-source version handles your data. You can verify everything in [the source code](https://github.com/enricoros/big-agi).
|
||||
|
||||
## Client-Side Storage
|
||||
|
||||
Within Big-AGI almost all chat/keys data is handled client-side in your browser using two
|
||||
standard browser storage mechanisms:
|
||||
|
||||
- **Local Storage**: API keys, settings, and configurations ([learn more](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage))
|
||||
- **IndexedDB**: Chat history and larger files ([learn more](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API))
|
||||
|
||||
The Big-AGI backend mainly passes requests to AI services (OpenAI, Anthropic, etc.). It doesn't store your data, except for the chat-sharing function if used.
|
||||
|
||||
You can see your data in your browser's local storage and IndexedDB - try it yourself:
|
||||
|
||||
1. In Chrome: Open DevTools (press F12 on Windows, ⌘ + ⌥ + I on Mac)
|
||||
2. Click 'Application' > 'Local Storage'
|
||||
3. See your settings and API keys
|
||||
|
||||

|
||||
|
||||
### What This Means For You
|
||||
|
||||
Storing data in your browser means:
|
||||
|
||||
- Your data stays on **one device/browser only**
|
||||
- Clearing browser data **erases your chats** - make backups
|
||||
- Anyone using your browser can see your chats and keys
|
||||
- Running your own server needs technical skills
|
||||
|
||||
### Local Device Identifier
|
||||
|
||||
Big-AGI generates a _device identifier_ that combines timestamp and random components, stored only on your device. This identifier:
|
||||
|
||||
- Is used only for the **optional sync functionality** between your devices (not yet ready)
|
||||
- Helps maintain data consistency when using Big-AGI across multiple devices
|
||||
- Remains completely local unless you explicitly enable sync
|
||||
- Is not used for tracking, analytics, or telemetry
|
||||
- Can be deleted anytime by clearing local storage
|
||||
- Is fully transparent - see the implementation in `src/common/stores/store-client.ts`
|
||||
|
||||
## How Data Flows
|
||||
|
||||
AI interactions in Big-AGI — such as chats, AI titles, text-to-speech, and browsing — flow through three components:
|
||||
|
||||
1. **Browser** (client/installed App) - Stores your keys & data locally
|
||||
2. **Backend** (routing server) - Passes requests to AI services
|
||||
3. **AI Services** - Where the actual AI processing happens
|
||||
|
||||
### Self-Deployed Version: Your Infrastructure
|
||||
|
||||
You run the server. Your data only leaves when making AI requests.
|
||||
The keys and chats are under your control and pass through your code, and are sent to
|
||||
the upstream AI services on a per-request basis.
|
||||
|
||||

|
||||
|
||||
### Web Version: Using big-agi.com
|
||||
|
||||
Your data passes through the hosted Big-AGI edge network to reach AI services. The keys
|
||||
and chats pass through Big-AGI's edge network to reach the AI services on a per-request basis,
|
||||
and then are sent to the upstream AI services.
|
||||
|
||||

|
||||
|
||||
## Security Best Practices
|
||||
|
||||
**Basic Security**:
|
||||
|
||||
- **Never share API keys**
|
||||
- **Don't use shared computers**
|
||||
- Use private browsing for one-off sessions
|
||||
- Use trusted networks
|
||||
- Back up your data
|
||||
|
||||
**When Running Your Own Server**:
|
||||
|
||||
- Use [environment variables](environment-variables.md) for API keys
|
||||
- Run on trusted infrastructure
|
||||
- Keep your installation updated
|
||||
|
||||
## TL;DR
|
||||
|
||||
Your API keys and chats stay in your browser. The server only passes requests to AI services.
|
||||
|
||||
Use big-agi.com for convenience, or [run it yourself](installation.md) for full control.
|
||||
|
||||
Need help? Join our [Discord](https://discord.gg/MkH4qj2Jp9) or open a [GitHub issue](https://github.com/enricoros/big-agi/issues).
|
||||
@@ -0,0 +1,28 @@
|
||||
# Frequently Asked Questions
|
||||
|
||||
Quick answers to common questions about Big-AGI. For detailed documentation, see our [Website Docs](https://big-agi.com/docs).
|
||||
|
||||
### Versions
|
||||
|
||||
<details open>
|
||||
<summary><b>How do I check my Big-AGI version?</b></summary>
|
||||
|
||||
You can see the version in the _News_ section of the app, as per the image below.
|
||||
|
||||

|
||||
</details>
|
||||
|
||||
<details open>
|
||||
<summary><b>How do I verify my Vercel deployment version?</b></summary>
|
||||
|
||||
You can go to the **Deployments** section of your Vercel project, and at a quick glance see
|
||||
what is the latest deployment status, time, and link to the source code.
|
||||
|
||||

|
||||
|
||||
Each deployment links directly to its source code commit.
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
Missing something? [Open an issue](https://github.com/enricoros/big-agi/issues/new) or [join our Discord](https://discord.gg/MkH4qj2Jp9).
|
||||
@@ -151,6 +151,6 @@ Enjoy all the features of big-AGI without the hassle of infrastructure managemen
|
||||
Join our vibrant community of developers, researchers, and AI enthusiasts. Share your projects, get help, and collaborate with others.
|
||||
|
||||
- [Discord Community](https://discord.gg/MkH4qj2Jp9)
|
||||
- [Twitter](https://twitter.com/yourusername)
|
||||
- [Twitter](https://twitter.com/enricoros)
|
||||
|
||||
For any questions or inquiries, please don't hesitate to [reach out to our team](mailto:hello@big-agi.com).
|
||||
|
||||
@@ -16,6 +16,8 @@ stringData:
|
||||
OPENAI_API_KEY: ""
|
||||
OPENAI_API_HOST: ""
|
||||
OPENAI_API_ORG_ID: ""
|
||||
ALIBABA_API_HOST: ""
|
||||
ALIBABA_API_KEY: ""
|
||||
AZURE_OPENAI_API_ENDPOINT: ""
|
||||
AZURE_OPENAI_API_KEY: ""
|
||||
ANTHROPIC_API_KEY: ""
|
||||
@@ -26,6 +28,7 @@ stringData:
|
||||
LOCALAI_API_HOST: ""
|
||||
LOCALAI_API_KEY: ""
|
||||
MISTRAL_API_KEY: ""
|
||||
MOONSHOT_API_KEY: ""
|
||||
OLLAMA_API_HOST: ""
|
||||
OPENPIPE_API_KEY: ""
|
||||
OPENROUTER_API_KEY: ""
|
||||
@@ -44,6 +47,3 @@ stringData:
|
||||
ELEVENLABS_API_KEY: ""
|
||||
ELEVENLABS_API_HOST: ""
|
||||
ELEVENLABS_VOICE_ID: ""
|
||||
|
||||
# Text-To-Image: Prodia
|
||||
PRODIA_API_KEY: ""
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 55 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 62 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 234 KiB |
@@ -0,0 +1,17 @@
|
||||
// ESLint flat config that bridges the legacy eslintrc-style
// "next/core-web-vitals" shareable config into the flat-config format.
import { defineConfig } from "eslint/config";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import js from "@eslint/js";
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
|
||||
// Recreate the CommonJS __filename/__dirname globals, which do not
// exist in an ES module context (this file is an .mjs module).
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
// FlatCompat translates eslintrc-style configs (extends/plugins by name)
// into flat-config objects; baseDirectory anchors module resolution here.
const compat = new FlatCompat({
|
||||
    baseDirectory: __dirname,
|
||||
    recommendedConfig: js.configs.recommended,
|
||||
    allConfig: js.configs.all
|
||||
});
|
||||
|
||||
// Apply the Next.js "core-web-vitals" rule set through the compat layer.
export default defineConfig([{
|
||||
    extends: compat.extends("next/core-web-vitals"),
|
||||
}]);
|
||||
@@ -0,0 +1,35 @@
|
||||
# Knowledge Base
|
||||
|
||||
Internal documentation for Big-AGI architecture and systems, for use by AI agents and developers.
|
||||
|
||||
**Structure:**
|
||||
- `/kb/modules/` - Core business logic (e.g. AIX)
|
||||
- `/kb/systems/` - Infrastructure (routing, startup)
|
||||
|
||||
## Index
|
||||
|
||||
### Modules Documentation
|
||||
|
||||
#### AIX - AI Communication Framework
|
||||
- **[AIX.md](modules/AIX.md)** - AIX streaming architecture documentation
|
||||
- **[AIX-callers-analysis.md](modules/AIX-callers-analysis.md)** - Analysis of AIX entry points, call chains, common and different rendering, error handling, etc.
|
||||
|
||||
### Systems Documentation
|
||||
|
||||
#### Core Platform Systems
|
||||
- **[app-routing.md](systems/app-routing.md)** - Next.js routing, provider stack, and display state hierarchy
|
||||
- **[LLM-parameters-system.md](systems/LLM-parameters-system.md)** - Language model parameter flow across the system
|
||||
|
||||
## Guidelines
|
||||
|
||||
### Writing Style
|
||||
|
||||
- **Direct and factual** - No marketing language
|
||||
- **Present tense** - "AIX handles streaming" not "AIX will handle"
|
||||
- **Active voice** - "The system processes" not "Processing is done by"
|
||||
- **Concrete examples** - Show actual code/config when helpful, briefly
|
||||
|
||||
### Maintenance
|
||||
|
||||
- Remove outdated information when detected!
|
||||
- Keep cross-references current when files move
|
||||
@@ -0,0 +1,144 @@
|
||||
# AIX Chat Generation Calls Analysis
|
||||
|
||||
This document analyzes all AIX function callers and their patterns for message removal, placeholder handling, and error management.
|
||||
|
||||
## AIX Function Architecture
|
||||
|
||||
### Three-Tier Call Hierarchy
|
||||
|
||||
**Core AIX Functions** (Direct tRPC API callers):
|
||||
- `aixChatGenerateContent_DMessage_FromConversation` - 8 callers (conversation streaming)
|
||||
- `aixChatGenerateContent_DMessage` - 6 callers (direct request/response)
|
||||
- `aixChatGenerateText_Simple` - 12 callers (text-only utilities)
|
||||
|
||||
**Utility Layer** (Hooks & Functions):
|
||||
- Conversation management, persona processing, content generation utilities
|
||||
|
||||
**UI Layer** (React Components):
|
||||
- User-facing interfaces with rich error states and fallback mechanisms
|
||||
|
||||
## Core Function Callers Analysis
|
||||
|
||||
### Conversation-Based Callers (`_FromConversation`)
|
||||
|
||||
| **Caller** | **Context** | **Message Removal** | **Placeholder** | **Error Handling** |
|
||||
|------------|-------------|-------------------|----------------|-------------------|
|
||||
| **Chat Persona** | `'conversation'` | `messageWasInterruptedAtStart()` → `removeMessage()` | None | Error fragments |
|
||||
| **Beam Scatter** | `'beam-scatter'` | `messageWasInterruptedAtStart()` → empty message | `SCATTER_PLACEHOLDER` | Ray status update |
|
||||
| **Beam Gather** | `'beam-gather'` | `messageWasInterruptedAtStart()` → clear fragments | `GATHER_PLACEHOLDER` | Re-throw errors |
|
||||
| **Beam Follow-up** | `'beam-followup'` | `messageWasInterruptedAtStart()` → remove message | `FOLLOWUP_PLACEHOLDER` | Status updates |
|
||||
| **ScratchChat** | `'scratch-chat'` | `aborted && !fragments` → array removal | `SCRATCH_CHAT_PLACEHOLDER` | Error fragments |
|
||||
| **Telephone** | `'call'` | None | None | Basic handling |
|
||||
| **ReAct Agent** | `'chat-react-turn'` | None | None | Append errors |
|
||||
| **Variform** | `'_DEV_'` | None | None | Throw errors |
|
||||
|
||||
### Direct Request Callers (`aixChatGenerateContent_DMessage`)
|
||||
|
||||
| **Caller** | **Context** | **Message Removal** | **Error Handling** |
|
||||
|------------|-------------|-------------------|-------------------|
|
||||
| **Auto Follow-ups** | `'chat-followup-*'` | `fragmentDelete()` on failure | `fragmentReplace()` with error |
|
||||
| **Gen CR Diffs** | `'aifn-gen-cr-diffs'` | None | State-based handling |
|
||||
| **Code Fixup** | `'fixup-code'` | None | Throw errors |
|
||||
| **Attachment Prompts** | `'chat-attachment-prompts'` | None | Throw errors |
|
||||
|
||||
### Text-Only Utilities (`aixChatGenerateText_Simple`)
|
||||
|
||||
| **Utility** | **Purpose** | **Error Strategy** | **Called By** |
|
||||
|-------------|-------------|-------------------|---------------|
|
||||
| **conversationTitle** | Auto-generate chat titles | Try/catch with fallback | UI components |
|
||||
| **conversationSummary** | Generate summaries | Try/catch with fallback | Chat drawer |
|
||||
| **useStreamChatText** | Generic text streaming | Error state management | FlattenerModal |
|
||||
| **useLLMChain** | Multi-step processing | Step-by-step handling | Persona creation |
|
||||
| **imaginePromptFromText** | Text → image prompts | Simple propagation | Image generation |
|
||||
| **aifnBeamGenerateBriefing** | Beam summaries | Null return on error | Beam completion |
|
||||
| **useAifnPersonaGenIdentity** | Extract persona identity | Query error handling | Persona flows |
|
||||
| **DiagramsModal** | Generate diagrams | Component error state | Manual generation |
|
||||
|
||||
## Message Removal Patterns
|
||||
|
||||
### 1. Complete Message Removal
|
||||
- **Chat Persona**: `messageWasInterruptedAtStart()` → `messageEditor.removeMessage()`
|
||||
- **ScratchChat**: `outcome === 'aborted' && !fragments?.length` → array removal
|
||||
- **Trigger**: Message aborted before any content generated
|
||||
|
||||
### 2. Fragment-Level Management
|
||||
- **Beam Gather**: Clear fragments array but keep message structure
|
||||
- **Auto Follow-ups**: Delete specific placeholder fragments on failure
|
||||
- **Purpose**: Maintain message structure while removing failed content
|
||||
|
||||
### 3. Empty Message Replacement
|
||||
- **Beam Scatter**: Replace with `createDMessageEmpty()` but preserve ray structure
|
||||
- **Purpose**: Keep UI structure intact while indicating failure
|
||||
|
||||
### 4. No Removal Strategy
|
||||
- **Text-only functions**: Use fallback values, error states, or null returns
|
||||
- **Simple callers**: Propagate errors upstream for handling
|
||||
|
||||
## Error Handling by Layer
|
||||
|
||||
### UI Layer (Components)
|
||||
- **Pattern**: Rich error states with user-facing messages
|
||||
- **Examples**: DiagramsModal, FlattenerModal
|
||||
- **Features**: Retry mechanisms, fallback UI, loading states
|
||||
|
||||
### Utility Layer (Hooks/Functions)
|
||||
- **Pattern**: Graceful degradation with fallbacks
|
||||
- **Examples**: conversationTitle, conversationSummary
|
||||
- **Features**: Silent failures, default values, try/catch blocks
|
||||
|
||||
### Core Layer (Direct API)
|
||||
- **Pattern**: Minimal handling, error propagation
|
||||
- **Examples**: Code Fixup, Attachment Prompts
|
||||
- **Features**: Assumes upstream error handling
|
||||
|
||||
## Key Implementation Details
|
||||
|
||||
### Message Removal Detection
|
||||
```typescript
|
||||
// Core detection logic
|
||||
function messageWasInterruptedAtStart(message: Pick<DMessage, 'generator' | 'fragments'>): boolean {
|
||||
return message.generator?.tokenStopReason === 'client-abort' && message.fragments.length === 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Placeholder Management
|
||||
- **Initialization**: `createPlaceholderVoidFragment(placeholderText)`
|
||||
- **Replacement**: During streaming updates or on completion
|
||||
- **Cleanup**: Delete on error to avoid stale content
|
||||
|
||||
### Context Patterns
|
||||
- **Production**: `'conversation'`, `'beam-scatter'`, `'scratch-chat'`
|
||||
- **Features**: `'chat-followup-*'`, `'fixup-code'`, `'ai-diagram'`
|
||||
- **Development**: `'_DEV_'`
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Message Removal
|
||||
- Use `messageWasInterruptedAtStart()` for consistent detection
|
||||
- Only remove messages with no content that were client-aborted
|
||||
- Consider UI context when choosing removal vs. clearing strategy
|
||||
|
||||
### Error Handling
|
||||
- **Fragment-level**: Use `messageEditor.fragmentReplace()` with error fragments
|
||||
- **Message-level**: Use `messageEditor.removeMessage()` or array removal
|
||||
- **Status-level**: Update component state for UI feedback
|
||||
|
||||
### Placeholder Management
|
||||
- Initialize with descriptive placeholders using `createPlaceholderVoidFragment()`
|
||||
- Replace during streaming updates
|
||||
- Clean up on error to prevent stale content
|
||||
|
||||
## Architectural Insights
|
||||
|
||||
1. **Layered Error Handling**: Sophistication increases closer to UI
|
||||
2. **Context Specialization**: Different contexts for different use cases
|
||||
3. **Streaming vs Non-Streaming**: Conversation functions stream, utilities typically don't
|
||||
4. **Message vs Fragment Management**: Different strategies for different UI needs
|
||||
|
||||
The most sophisticated handling is in **Beam modules** and **Chat Persona** with comprehensive removal logic, while simpler callers rely on upstream error handling.
|
||||
|
||||
## Code References
|
||||
|
||||
- **Core function**: `src/modules/aix/client/aix.client.ts:aixChatGenerateContent_DMessage_FromConversation`
|
||||
- **Removal check**: `src/common/stores/chat/chat.message.ts:388:messageWasInterruptedAtStart()`
|
||||
- **Placeholder creation**: `src/common/stores/chat/chat.fragments.ts:createPlaceholderVoidFragment()`
|
||||
@@ -0,0 +1,189 @@
|
||||
# AIX
|
||||
|
||||
AIX is a client/server library for integrating advanced AI capabilities into web applications.
|
||||
|
||||
## Overview
|
||||
|
||||
AIX provides real-time, type-safe communication between a TypeScript application and AI providers.
|
||||
|
||||
Built with tRPC, it manages the lifecycle of AI-generated content from request to rendering, supporting both streaming and non-streaming AI providers.
|
||||
|
||||
## Features
|
||||
|
||||
- Content Generation
|
||||
- Multi-Modal streaming/non-streaming
|
||||
- Throttled batching and error handling
|
||||
- Server-side timeout/retry
|
||||
- Function Calling and Code Execution
|
||||
- Complex AI Workflows (future)
|
||||
- Embeddings / Information Retrieval / Image Manipulation (future)
|
||||
|
||||
## AIX Providers support
|
||||
|
||||
| Service | Chat | Function Calling | Multi-Modal Input | Cont. (1) | Streaming | Idiosyncratic |
|
||||
|------------|------------|------------------|-------------------|-----------|-----------|---------------|
|
||||
| Alibaba | ✅ | ✅ | | ✅ | Yes + 📦 | |
|
||||
| Anthropic | ✅ | ✅ + Parallel | Img: ✅ | ✅ | Yes + 📦 | |
|
||||
| Azure | ✅ | ✅ | | ✅ | Yes + 📦 | |
|
||||
| Deepseek | ✅ | ❌ (rejected) | | ✅ | Yes + 📦 | |
|
||||
| Gemini | ✅ | ✅ + Parallel | Img: ✅ | ✅ | Yes + 📦 | Code ex.: ✅ |
|
||||
| Groq | ✅ | ✅ + Parallel | | ✅ | Yes + 📦 | |
|
||||
| LM Studio | ✅ | ❌ (not working) | | ❌ | Yes + 📦 | |
|
||||
| Local AI | ✅ | ✅ | | ❌ | Yes + 📦 | |
|
||||
| Mistral | ✅ | ✅ | | ✅ | Yes + 📦 | |
|
||||
| OpenAI | ✅ | ✅ + Parallel | Img: ✅ | ✅ | Yes + 📦 | |
|
||||
| OpenPipe | ✅ | ✅ | Img: ✅ | ✅ | Yes + 📦 | |
|
||||
| OpenRouter | ✅ | ❌ (inconsistent) | | ✅ | Yes + 📦 | |
|
||||
| Perplexity | ✅ | ❌ (rejected) | | ✅ | Yes + 📦 | |
|
||||
| TogetherAI | ✅ | ✅ | | ✅ | Yes + 📦 | |
|
||||
| xAI | | | | | | |
|
||||
| Ollama (2) | ❌ (broken) | ? | | | | |
|
||||
|
||||
Notes:
|
||||
|
||||
- 1: Continuation marks: a. sends reason=max-tokens (streaming/non-streaming), b. TBA
|
||||
- 2: Ollama has not been ported to AIX yet due to the custom APIs.
|
||||
|
||||
## 1. System Architecture
|
||||
|
||||
The subsystem comprises three main components:
|
||||
|
||||
1. **Client (e.g. Next.js Frontend)**
|
||||
|
||||
- Initiates requests
|
||||
- Renders AI-generated content in real-time
|
||||
- Reconstructs streamed data
|
||||
|
||||
2. **Server (e.g. Next.js Backend)**
|
||||
|
||||
- Acts as an intermediary between client and AI providers
|
||||
- Handles request preparation, dispatching, and response processing
|
||||
- Streams responses back to the client
|
||||
|
||||
3. **Upstream AI Providers**
|
||||
|
||||
- Generate AI content based on requests
|
||||
|
||||
### ChatGenerate workflow:
|
||||
|
||||
1. Request Initialization: AIX Client prepares and sends request (systemInstruction, messages=AixWire_Parts[], etc.) to AIX Server
|
||||
2. Dispatch Preparation: AIX Server prepares for upstream communication
|
||||
3. AI Provider Interaction: AIX Server communicates with AI Provider (streaming or non-streaming)
|
||||
4. Data Decoding, Transformation and Transmission: AIX Server sends AixWire_Particles to AIX Client
|
||||
5. Client-side Processing: Client's ContentReassembler processes AixWire_Particles into a list (likely a single) of multi-fragment (DMessageContentFragment[]) messages
|
||||
6. Completion: AIX Server sends 'done' control message, AIX Client finalizes data update
|
||||
7. Error Handling: AIX Server sends specific error messages when necessary
|
||||
|
||||
## 2. Files and Folders
|
||||
|
||||
AIX is organized into the following files and folders:
|
||||
|
||||
1. Client-Side (`/client/`):
|
||||
|
||||
- `aix.client.ts`: Main client-side entry point for AIX operations.
|
||||
- `aix.client.chatGenerateRequest.ts`: Handles conversion of chat messages to AIX-compatible format (AixWire_Content, AixWire_Parts, etc.).
|
||||
|
||||
2. Server-Side (`/server/`):
|
||||
|
||||
- API (`/server/api/`) - Client to Server communication:
|
||||
- `aix.router.ts`: Defines the tRPC router for AIX operations.
|
||||
- `aix.wiretypes.ts`: Contains Zod schemas for types and calls incoming from the client (AixWire_Parts, AixWire_Content, AixWire_Tooling, AixWire_API, ...), and outgoing (AixWire_Particles)
|
||||
|
||||
- Dispatch (`/server/dispatch/`) - Server to AI Provider communication:
|
||||
- `/server/dispatch/chatGenerate/`: Content Generation with chat-style inputs:
|
||||
- `./adapters/`: Adapters for creating API requests for different AI protocols (Anthropic, Gemini, OpenAI).
|
||||
- `./parsers/`: Parsers for parsing streaming/non-streaming responses from different AI protocols (same 3).
|
||||
- `chatGenerate.dispatch.ts`: Creates a pipeline to execute Chat Generation to a specific provider.
|
||||
- `ChatGenerateTransmitter.ts`: Used to serialize and transmit AixWire_Particles to the client.
|
||||
- `/server/dispatch/wiretypes/`: AI provider Wire Types:
|
||||
- Type definitions for different AI providers/protocols (Anthropic, Gemini, OpenAI).
|
||||
- `stream.demuxers.ts`: Handles demuxing of different stream formats.
|
||||
|
||||
## 3. Architecture Diagram
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant AIX Client
|
||||
participant AIX Server
|
||||
participant PartTransmitter
|
||||
participant AI Provider
|
||||
AIX Client ->> AIX Client: Initialize ContentReassembler
|
||||
AIX Client ->> AIX Client: Convert DMessage*Part to AixWire_Parts
|
||||
AIX Client ->> AIX Server: Send messages (arrays of AixWire_Parts)
|
||||
AIX Server ->> AIX Server: Prepare Dispatch (Upstream request, demux, parsing)
|
||||
|
||||
alt Dispatch Preparation Error
|
||||
AIX Server ->> AIX Client: Send `dispatch-prepare` error message
|
||||
else Dispatch Fetch
|
||||
AIX Server ->> AI Provider: Send AI-provider specific stream/non-stream request
|
||||
AIX Server ->> AIX Client: Send 'start' control message
|
||||
AIX Server ->> PartTransmitter: Initialize part particle serialization
|
||||
|
||||
alt Streaming AI Provider
|
||||
loop Until stream end or error
|
||||
AI Provider ->> AIX Server: Stream response chunk
|
||||
AIX Server ->> AIX Server: Demux chunk into DispatchEvents
|
||||
loop For each AI-provider specific DispatchEvent
|
||||
AIX Server ->> AIX Server: Parse DispatchEvent
|
||||
AIX Server ->> PartTransmitter: (Parser) Calls serialization functions
|
||||
PartTransmitter ->> PartTransmitter: Generate and throttle AixWire_PartParticles
|
||||
PartTransmitter -->> AIX Server: Yield AixWire_PartParticle
|
||||
end
|
||||
AIX Server ->> AIX Client: Send accumulated AixWire_PartParticles
|
||||
end
|
||||
AIX Server ->> PartTransmitter: Request any remaining particles
|
||||
PartTransmitter -->> AIX Server: Yield any final AixWire_PartParticles
|
||||
AIX Server ->> AIX Client: Send final AixWire_PartParticles (if any)
|
||||
else Non-Streaming AI Provider
|
||||
AI Provider ->> AIX Server: Send AI-provider specific complete response
|
||||
alt AI-provider specific full-response parser
|
||||
AIX Server ->> AIX Server: Parse full response
|
||||
AIX Server ->> PartTransmitter: Call particle serialization functions
|
||||
PartTransmitter ->> PartTransmitter: Generate AixWire_PartParticle
|
||||
PartTransmitter -->> AIX Server: Yield ALL AixWire_PartParticle
|
||||
end
|
||||
AIX Server ->> AIX Client: Send all AixWire_PartParticles
|
||||
end
|
||||
AIX Server ->> AIX Client: Send 'done' control message
|
||||
loop For each received batch of particles
|
||||
AIX Client ->> AIX Client: ContentReassembler processes particles into DMessage*Part
|
||||
alt DMessageTextPart
|
||||
AIX Client ->> AIX Client: Update UI with text content
|
||||
else DMessageImageRefPart
|
||||
AIX Client ->> AIX Client: Load and display image
|
||||
else DMessageToolInvocationPart
|
||||
AIX Client ->> AIX Client: Process tool invocation (dev only)
|
||||
else DMessageToolResponsePart
|
||||
AIX Client ->> AIX Client: Process tool response (dev only)
|
||||
else DMessageErrorPart
|
||||
AIX Client ->> AIX Client: Display error message
|
||||
else DMessageDocPart
|
||||
AIX Client ->> AIX Client: Process and display document
|
||||
else DMetaPlaceholderPart
|
||||
AIX Client ->> AIX Client: Handle placeholder (non-submitted)
|
||||
end
|
||||
end
|
||||
AIX Client ->> AIX Client: Finalize data update
|
||||
end
|
||||
|
||||
alt Error Handling
|
||||
AIX Server ->> AIX Client: Send 'error' specific control messages
|
||||
end
|
||||
|
||||
note over AIX Server, AI Provider: Server-side Timeout/Retry mechanism
|
||||
loop Retry on timeout (server-side)
|
||||
AIX Server ->> AI Provider: Retry request
|
||||
end
|
||||
|
||||
note over AIX Client: Client-side Timeout mechanism
|
||||
AIX Client ->> AIX Client: Timeout if no response received within set time
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 2025-03-14 Update
|
||||
AIX is used in production in Big-AGI and is stable and performant.
|
||||
The code is tightly coupled with the tRPC framework and the rest of our codebase,
|
||||
so it is not recommended to use it outside of our ecosystem.
|
||||
|
||||
For a great TypeScript alternative we recommend the Vercel AI SDK.
|
||||
@@ -0,0 +1,131 @@
|
||||
# LLM Parameters System
|
||||
|
||||
This document describes how parameters flow through Big-AGI's LLM parameters system, from definition to API invocation.
|
||||
|
||||
## System Overview
|
||||
|
||||
The LLM parameters system operates across five layers that transform parameters from global definitions to vendor-specific API calls. Each layer serves a specific purpose in the parameter resolution pipeline.
|
||||
|
||||
## Parameter Flow Architecture
|
||||
|
||||
### Layer 1: Parameter Registry
|
||||
**File**: `src/common/stores/llms/llms.parameters.ts`
|
||||
|
||||
The `DModelParameterRegistry` defines all available parameters with their constraints and metadata. Each parameter includes type information, validation rules, and default behavior.
|
||||
|
||||
**Example**: `llmVndOaiReasoningEffort4` defines a 4-value enum with 'medium' as the required fallback.
|
||||
|
||||
**Default Value System**: The registry supports multiple default mechanisms:
|
||||
- `initialValue` - Parameter's base default (e.g., `llmVndOaiRestoreMarkdown: true`)
|
||||
- `requiredFallback` - Fallback for required parameters (e.g., `llmTemperature: 0.5`)
|
||||
- `nullable` - Parameters that can be explicitly null to skip API transmission
|
||||
|
||||
### Layer 2: Model Specifications
|
||||
**File**: `src/modules/llms/server/llm.server.types.ts`
|
||||
|
||||
Models declare which parameters they support through `parameterSpecs` arrays. Each spec can override registry defaults:
|
||||
|
||||
```typescript
|
||||
parameterSpecs: [
|
||||
{ paramId: 'llmVndOaiReasoningEffort4' },
|
||||
{ paramId: 'llmVndAntThinkingBudget', initialValue: 1024 }, // Override default
|
||||
{ paramId: 'llmVndGeminiThinkingBudget', rangeOverride: [0, 8192] }, // Custom range
|
||||
]
|
||||
```
|
||||
|
||||
**Parameter Visibility**: The `hidden` flag removes parameters from the UI while keeping them functional. Models can also mark parameters as `required`.
|
||||
|
||||
### Layer 3: Client Configuration
|
||||
|
||||
The system provides two UI configurators with different scopes:
|
||||
|
||||
#### Full Model Configuration Dialog
|
||||
**File**: `src/modules/llms/models-modal/LLMParametersEditor.tsx`
|
||||
Shows all non-hidden parameters from model's `parameterSpecs`. Used in the models modal for complete configuration.
|
||||
|
||||
#### ChatPanel Quick Controls
|
||||
**File**: `src/apps/chat/components/layout-panel/ChatPanelModelParameters.tsx`
|
||||
Shows only parameters that are:
|
||||
- In model's `parameterSpecs`
|
||||
- Listed in `_interestingParameters` array
|
||||
- Not marked as `hidden`
|
||||
|
||||
**Value Resolution**: Both UIs use `getAllModelParameterValues()` to merge:
|
||||
1. **Fallback values** - Required parameters get their `requiredFallback` values
|
||||
2. **Initial values** - Model's `initialParameters` (populated during model creation)
|
||||
3. **User values** - User's `userParameters` (highest priority)
|
||||
|
||||
### Layer 4: AIX Translation
|
||||
**File**: `src/modules/aix/client/aix.client.ts`
|
||||
|
||||
The AIX client transforms DLLM parameters to wire protocol format. This layer handles parameter precedence rules and name transformations:
|
||||
|
||||
```
|
||||
// Parameter precedence: newer 4-value version takes priority over 3-value
|
||||
...((llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort) ?
|
||||
{ vndOaiReasoningEffort: llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort } : {})
|
||||
```
|
||||
|
||||
**Client Options**: The system supports parameter overrides through `llmOptionsOverride` and complete replacement via `llmUserParametersReplacement`.
|
||||
|
||||
### Layer 5: Vendor Adaptation
|
||||
**Files**: `src/modules/aix/server/dispatch/chatGenerate/adapters/*.ts`
|
||||
|
||||
Server-side adapters translate AIX parameters to vendor APIs. Each vendor may interpret parameters differently:
|
||||
|
||||
- **OpenAI**: `vndOaiReasoningEffort` → `reasoning_effort`
|
||||
- **Perplexity**: Reuses OpenAI parameter format
|
||||
- **OpenAI Responses API**: Maps to structured reasoning config with additional logic
|
||||
|
||||
## Parameter Initialization Process
|
||||
|
||||
When a model is loaded:
|
||||
|
||||
1. **Model Creation**: `modelDescriptionToDLLM()` creates the DLLM with empty `initialParameters`
|
||||
2. **Initial Value Application**: `applyModelParameterInitialValues()` populates initial values from:
|
||||
- Model spec `initialValue` (highest priority)
|
||||
- Registry `initialValue` (fallback)
|
||||
3. **Runtime Resolution**: `getAllModelParameterValues()` creates final parameter set:
|
||||
- Required fallbacks (for missing required parameters)
|
||||
- Initial parameters (model defaults)
|
||||
- User parameters (user overrides)
|
||||
|
||||
## Special Parameter Behaviors
|
||||
|
||||
**Hidden Parameters**: Parameters like `llmRef` are marked `hidden: true` in the registry and never appear in the UI, but remain functional for system use.
|
||||
|
||||
**Nullable Parameters**: Parameters with `nullable` configuration can be explicitly set to `null` to prevent transmission to the API, distinct from being undefined.
|
||||
|
||||
**Range Overrides**: Models can override parameter ranges (e.g., different Gemini models support different thinking budget ranges).
|
||||
|
||||
**Parameter Interactions**: The UI implements business logic like disabling web search when reasoning effort is 'minimal'.
|
||||
|
||||
## Type Safety Mechanisms
|
||||
|
||||
The system maintains type safety through:
|
||||
- `DModelParameterId` union from registry keys
|
||||
- `DModelParameterValue<T>` conditional types for values
|
||||
- `DModelParameterSpec<T>` interfaces for specifications
|
||||
- Runtime validation via Zod schemas at API boundaries
|
||||
|
||||
## Model Variant Pattern
|
||||
|
||||
Some vendors use model variants to enable features, for instance:
|
||||
- **Anthropic**: Creates separate `idVariant: 'thinking'` entries forcing value of hidden parameters
|
||||
- **Google/OpenAI**: Parameters directly on base models
|
||||
|
||||
## Migration and Compatibility
|
||||
|
||||
The architecture supports parameter evolution:
|
||||
- **Version Coexistence**: Both `llmVndOaiReasoningEffort` and `llmVndOaiReasoningEffort4` exist simultaneously
|
||||
- **Precedence Rules**: Newer parameters take priority during AIX translation
|
||||
- **Graceful Degradation**: Unknown parameters log warnings but don't break functionality
|
||||
|
||||
## Key Implementation Files
|
||||
|
||||
- **Registry**: `src/common/stores/llms/llms.parameters.ts`
|
||||
- **Specifications**: `src/modules/llms/server/llm.server.types.ts`
|
||||
- **UI Controls**: `src/modules/llms/models-modal/LLMParametersEditor.tsx`
|
||||
- **AIX Translation**: `src/modules/aix/client/aix.client.ts`
|
||||
- **Wire Types**: `src/modules/aix/server/api/aix.wiretypes.ts`
|
||||
- **Vendor Adapters**: `src/modules/aix/server/dispatch/chatGenerate/adapters/*.ts`
|
||||
@@ -0,0 +1,151 @@
|
||||
# Big-AGI Routing & Display States
|
||||
|
||||
This document describes the routing architecture and display state hierarchy in Big-AGI, from top-level providers down to component-level states.
|
||||
|
||||
## Overview
|
||||
|
||||
Big-AGI uses Next.js Pages Router with a provider stack that determines what users see based on application state and configuration.
|
||||
|
||||
## Quick Reference: Route Configurations
|
||||
|
||||
| Route | Purpose | Key Features |
|
||||
|-------|---------|--------------|
|
||||
| `/` | Main chat app | Default application |
|
||||
| `/call` | Voice interface | Voice-to-voice AI conversations |
|
||||
| `/personas` | Persona management | Create and manage AI personas |
|
||||
| ... | | |
|
||||
|
||||
## Decision Flow Diagram
|
||||
|
||||
The routing decisions follow a hierarchy from system-level provider configuration down to component-level states.
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
Start([Navigate to Route]) --> Root[_app.tsx]
|
||||
|
||||
Root --> Theme[ProviderTheming]
|
||||
Theme --> Error[ErrorBoundary]
|
||||
Error --> Bootstrap[ProviderBootstrapLogic]
|
||||
|
||||
Bootstrap --> BootCheck{Bootstrap Checks}
|
||||
BootCheck -->|News| News[↗️ /news]
|
||||
BootCheck -->|Continue| Router{Router}
|
||||
|
||||
Router -->|/| Chat[Chat App]
|
||||
Router -->|/personas,/call,/beam...| OtherApps[Other Apps]
|
||||
Router -->|/news| NewsApp[News App]
|
||||
|
||||
Chat --> ChatStates{Chat States}
|
||||
|
||||
ChatStates -->|No Models| ZeroModels[🟡 Setup Models]
|
||||
ChatStates -->|No Conv| ZeroConv[🟡 Select Chat]
|
||||
ChatStates -->|No Msgs| PersonaGrid[Choose Persona]
|
||||
ChatStates -->|Ready| Active[🟢 Active Chat]
|
||||
|
||||
Active --> Features[Features:<br/>• Chat Bar<br/>• Beam Mode<br/>• Attachments]
|
||||
|
||||
style ZeroModels fill:#fff4cc
|
||||
style ZeroConv fill:#fff4cc
|
||||
style Active fill:#ccffcc
|
||||
style Chat fill:#f0f8ff
|
||||
style OtherApps fill:#f0f8ff
|
||||
style NewsApp fill:#f0f8ff
|
||||
```
|
||||
|
||||
## Display State Hierarchy
|
||||
|
||||
```
|
||||
_app.tsx (Root)
|
||||
├── ProviderTheming ← Always Applied
|
||||
├── ErrorBoundary ← Always Applied
|
||||
├── ProviderBootstrapLogic ← Always Applied
|
||||
│ ├── Tiktoken preload & Model auto-config
|
||||
│ ├── Storage maintenance & cleanup
|
||||
│ └── News Redirect (if conditions met)
|
||||
│
|
||||
└── Page Component
|
||||
├── AppChat (/) → Default app
|
||||
│ ├── CMLZeroModels → If no models configured
|
||||
│ ├── CMLZeroConversation → If no conversation selected
|
||||
│ └── PersonaGrid → If conversation empty
|
||||
│
|
||||
└── Other Apps → Personas, Call, Draw, News, Beam
|
||||
```
|
||||
|
||||
## Provider Stack
|
||||
|
||||
| Provider | Purpose | Key Functions |
|
||||
|----------|---------|---------------|
|
||||
| **ProviderTheming** | UI theme management | Theme switching, CSS variables |
|
||||
| **ErrorBoundary** | Error handling | Catches and displays errors gracefully |
|
||||
| **ProviderBootstrapLogic** | App initialization | • Tiktoken preload<br>• Model auto-config<br>• Storage cleanup<br>• News redirect logic |
|
||||
|
||||
For detailed initialization sequence and provider functions, see [app-startup-sequence.md](app-startup-sequence.md), if present.
|
||||
|
||||
## Application Routes
|
||||
|
||||
### Primary Apps
|
||||
- `/` → AppChat (default)
|
||||
- `/call` → Voice call interface
|
||||
- `/beam` → Multi-model reasoning
|
||||
- `/draw` → Image generation
|
||||
- `/personas` → Personas app
|
||||
- `/news` → News/updates
|
||||
|
||||
### Zero States
|
||||
|
||||
#### Chat App Zero States
|
||||
|
||||
**CMLZeroModels**
|
||||
- **Location**: `/src/apps/chat/components/messages-list/CMLZeroModels.tsx`
|
||||
- **Triggered**: No LLM sources configured
|
||||
- **Shows**: Welcome screen with "Setup Models" button
|
||||
|
||||
**CMLZeroConversation**
|
||||
- **Location**: `/src/apps/chat/components/messages-list/CMLZeroConversation.tsx`
|
||||
- **Triggered**: No conversation selected
|
||||
- **Shows**: "Select/create conversation" prompt
|
||||
|
||||
**PersonaGrid**
|
||||
- **App**: Chat (when conversation is empty)
|
||||
- **Triggered**: Conversation exists but has no messages
|
||||
- **Shows**: Persona selector interface
|
||||
|
||||
#### Feature-Specific Zero States
|
||||
|
||||
**Beam Tutorial**
|
||||
- **Feature**: Beam (multi-model reasoning)
|
||||
- **Component**: `ExplainerCarousel`
|
||||
- **Triggered**: First-time Beam usage
|
||||
- **Shows**: Interactive feature walkthrough
|
||||
|
||||
## Common Scenarios
|
||||
|
||||
### New User First Visit
|
||||
1. Navigates to `/` → Provider stack loads
|
||||
2. Bootstrap runs → No news redirect (first visit)
|
||||
3. Chat loads → **CMLZeroModels** (no models configured)
|
||||
4. User clicks "Setup Models" → Configuration flow
|
||||
|
||||
### Returning User with Saved State
|
||||
1. Navigates to `/` → Provider stack loads
|
||||
2. IndexedDB restores state → Previous conversation loaded
|
||||
3. Chat loads → **Active chat interface** (bypasses all zero states)
|
||||
4. All messages and context preserved from last session
|
||||
|
||||
### Shared Chat Viewer
|
||||
1. Navigates to `/link/chat/[id]` → Full provider stack
|
||||
2. Views read-only chat → May see "Import" option
|
||||
3. If importing → Checks for duplicates, creates new local conversation
|
||||
|
||||
## Storage System
|
||||
|
||||
Big-AGI uses a local-first architecture:
|
||||
- **Zustand** for reactive state management
|
||||
- **IndexedDB** for persistent storage via Zustand persist middleware
|
||||
- **Version-based migrations** for data structure upgrades
|
||||
|
||||
Key stores:
|
||||
- `app-chats`: Conversations and messages (IndexedDB)
|
||||
- `app-llms`: Model configurations (IndexedDB)
|
||||
- `app-ui`: UI preferences (localStorage)
|
||||
@@ -1,85 +0,0 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
// Build information
|
||||
process.env.NEXT_PUBLIC_BUILD_HASH = 'big-agi-2-dev';
|
||||
process.env.NEXT_PUBLIC_BUILD_PKGVER = JSON.parse('' + await readFile(new URL('./package.json', import.meta.url))).version;
|
||||
process.env.NEXT_PUBLIC_BUILD_TIMESTAMP = new Date().toISOString();
|
||||
console.log(` 🧠 \x1b[1mbig-AGI\x1b[0m v${process.env.NEXT_PUBLIC_BUILD_PKGVER} (@${process.env.NEXT_PUBLIC_BUILD_HASH})`);
|
||||
|
||||
// Non-default build types
|
||||
const buildType =
|
||||
process.env.BIG_AGI_BUILD === 'standalone' ? 'standalone'
|
||||
: process.env.BIG_AGI_BUILD === 'static' ? 'export'
|
||||
: undefined;
|
||||
|
||||
buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
let nextConfig = {
|
||||
reactStrictMode: true,
|
||||
|
||||
// [exports] https://nextjs.org/docs/advanced-features/static-html-export
|
||||
...buildType && {
|
||||
output: buildType,
|
||||
distDir: 'dist',
|
||||
|
||||
// disable image optimization for exports
|
||||
images: { unoptimized: true },
|
||||
|
||||
// Optional: Change links `/me` -> `/me/` and emit `/me.html` -> `/me/index.html`
|
||||
// trailingSlash: true,
|
||||
},
|
||||
|
||||
// [puppeteer] https://github.com/puppeteer/puppeteer/issues/11052
|
||||
// NOTE: we may not be needing this anymore, as we use '@cloudflare/puppeteer'
|
||||
serverExternalPackages: ['puppeteer-core'],
|
||||
|
||||
webpack: (config, { isServer }) => {
|
||||
// @mui/joy: anything material gets redirected to Joy
|
||||
config.resolve.alias['@mui/material'] = '@mui/joy';
|
||||
|
||||
// @dqbd/tiktoken: enable asynchronous WebAssembly
|
||||
config.experiments = {
|
||||
asyncWebAssembly: true,
|
||||
layers: true,
|
||||
};
|
||||
|
||||
// fix warnings for async functions in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
if (!isServer) {
|
||||
config.output.environment = { ...config.output.environment, asyncFunction: true };
|
||||
}
|
||||
|
||||
// prevent too many small chunks (40kb min) on 'client' packs (not 'server' or 'edge-server')
|
||||
// noinspection JSUnresolvedReference
|
||||
if (typeof config.optimization.splitChunks === 'object' && config.optimization.splitChunks.minSize) {
|
||||
// noinspection JSUnresolvedReference
|
||||
config.optimization.splitChunks.minSize = 40 * 1024;
|
||||
}
|
||||
|
||||
return config;
|
||||
},
|
||||
|
||||
// Note: disabled to check whether the project becomes slower with this
|
||||
// modularizeImports: {
|
||||
// '@mui/icons-material': {
|
||||
// transform: '@mui/icons-material/{{member}}',
|
||||
// },
|
||||
// },
|
||||
|
||||
// Uncomment the following leave console messages in production
|
||||
// compiler: {
|
||||
// removeConsole: false,
|
||||
// },
|
||||
};
|
||||
|
||||
// Validate environment variables, if set at build time. Will be actually read and used at runtime.
|
||||
// This is the reason both this file and the servr/env.mjs files have this extension.
|
||||
await import('./src/server/env.mjs');
|
||||
|
||||
// conditionally enable the nextjs bundle analyzer
|
||||
if (process.env.ANALYZE_BUNDLE) {
|
||||
const { default: withBundleAnalyzer } = await import('@next/bundle-analyzer');
|
||||
nextConfig = withBundleAnalyzer({ openAnalyzer: true })(nextConfig);
|
||||
}
|
||||
|
||||
export default nextConfig;
|
||||
+160
@@ -0,0 +1,160 @@
|
||||
import type { NextConfig } from 'next';
|
||||
import type { WebpackConfigContext } from 'next/dist/server/config-shared';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
|
||||
// Build information: from CI, or git commit hash
|
||||
let buildHash = process.env.NEXT_PUBLIC_BUILD_HASH || process.env.GITHUB_SHA || process.env.VERCEL_GIT_COMMIT_SHA; // Docker or custom, GitHub Actions, Vercel
|
||||
try {
|
||||
// fallback to local git commit hash
|
||||
if (!buildHash)
|
||||
buildHash = execSync('git rev-parse --short HEAD').toString().trim();
|
||||
} catch {
|
||||
// final fallback
|
||||
buildHash = '2-dev';
|
||||
}
|
||||
// The following are used by/available to Release.buildInfo(...)
|
||||
process.env.NEXT_PUBLIC_BUILD_HASH = (buildHash || '').slice(0, 10);
|
||||
process.env.NEXT_PUBLIC_BUILD_PKGVER = JSON.parse('' + readFileSync(new URL('./package.json', import.meta.url))).version;
|
||||
process.env.NEXT_PUBLIC_BUILD_TIMESTAMP = new Date().toISOString();
|
||||
process.env.NEXT_PUBLIC_DEPLOYMENT_TYPE = process.env.NEXT_PUBLIC_DEPLOYMENT_TYPE || (process.env.VERCEL_ENV ? `vercel-${process.env.VERCEL_ENV}` : 'local'); // Docker or custom, Vercel
|
||||
console.log(` 🧠 \x1b[1mbig-AGI\x1b[0m v${process.env.NEXT_PUBLIC_BUILD_PKGVER} (@${process.env.NEXT_PUBLIC_BUILD_HASH})`);
|
||||
|
||||
// Non-default build types
|
||||
const buildType =
|
||||
process.env.BIG_AGI_BUILD === 'standalone' ? 'standalone' as const
|
||||
: process.env.BIG_AGI_BUILD === 'static' ? 'export' as const
|
||||
: undefined;
|
||||
|
||||
buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
let nextConfig: NextConfig = {
|
||||
reactStrictMode: !process.env.NO_STRICT_MODE, // default: enabled
|
||||
|
||||
// [exports] https://nextjs.org/docs/advanced-features/static-html-export
|
||||
...(buildType && {
|
||||
output: buildType,
|
||||
distDir: 'dist',
|
||||
|
||||
// disable image optimization for exports
|
||||
images: { unoptimized: true },
|
||||
|
||||
// Optional: Change links `/me` -> `/me/` and emit `/me.html` -> `/me/index.html`
|
||||
// trailingSlash: true,
|
||||
}),
|
||||
|
||||
// [puppeteer] https://github.com/puppeteer/puppeteer/issues/11052
|
||||
// NOTE: we may not be needing this anymore, as we use '@cloudflare/puppeteer'
|
||||
serverExternalPackages: ['puppeteer-core'],
|
||||
|
||||
webpack: (config: any, { isServer, webpack /*, dev, nextRuntime*/ }: WebpackConfigContext) => {
|
||||
// @mui/joy: anything material gets redirected to Joy
|
||||
config.resolve.alias['@mui/material'] = '@mui/joy';
|
||||
|
||||
// @dqbd/tiktoken: enable asynchronous WebAssembly
|
||||
config.experiments = {
|
||||
asyncWebAssembly: true,
|
||||
layers: true,
|
||||
};
|
||||
|
||||
// client-side bundling
|
||||
if (!isServer) {
|
||||
/**
|
||||
* AIX client-side
|
||||
* We replace certain server-only modules with client-side mocks, to reuse the exact same imports
|
||||
* while avoiding importing server-only code which would break the build or break at runtime.
|
||||
*/
|
||||
const serverToClientMocks: ReadonlyArray<[RegExp, string]> = [
|
||||
[/\/posthog\.server/, '/posthog.client-mock'],
|
||||
[/\/env\.server/, '/env.client-mock'],
|
||||
];
|
||||
config.plugins = [
|
||||
...config.plugins,
|
||||
...serverToClientMocks.map(([pattern, replacement]) =>
|
||||
new webpack.NormalModuleReplacementPlugin(pattern, (resource: any) => {
|
||||
// console.log(' 🧠 [WEBPACK REPLACEMENT]:', resource.request, '->', resource.request.replace(pattern, replacement));
|
||||
resource.request = resource.request.replace(pattern, replacement);
|
||||
}),
|
||||
),
|
||||
];
|
||||
|
||||
// cosmetic: fix warnings for (absent!) top-level awaits in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
config.output.environment = { ...config.output.environment, asyncFunction: true };
|
||||
}
|
||||
|
||||
// prevent too many small chunks (40kb min) on 'client' packs (not 'server' or 'edge-server')
|
||||
// noinspection JSUnresolvedReference
|
||||
if (typeof config.optimization.splitChunks === 'object' && config.optimization.splitChunks.minSize) {
|
||||
// noinspection JSUnresolvedReference
|
||||
config.optimization.splitChunks.minSize = 40 * 1024;
|
||||
}
|
||||
|
||||
return config;
|
||||
},
|
||||
|
||||
// Optional Analytics > PostHog
|
||||
skipTrailingSlashRedirect: true, // required to support PostHog trailing slash API requests
|
||||
async rewrites() {
|
||||
return [
|
||||
{
|
||||
source: '/a/ph/static/:path*',
|
||||
destination: 'https://us-assets.i.posthog.com/static/:path*',
|
||||
},
|
||||
{
|
||||
source: '/a/ph/:path*',
|
||||
destination: 'https://us.i.posthog.com/:path*',
|
||||
},
|
||||
{
|
||||
source: '/a/ph/decide',
|
||||
destination: 'https://us.i.posthog.com/decide',
|
||||
},
|
||||
{
|
||||
source: '/a/ph/flags',
|
||||
destination: 'https://us.i.posthog.com/flags',
|
||||
},
|
||||
];
|
||||
},
|
||||
|
||||
// Note: disabled to check whether the project becomes slower with this
|
||||
// modularizeImports: {
|
||||
// '@mui/icons-material': {
|
||||
// transform: '@mui/icons-material/{{member}}',
|
||||
// },
|
||||
// },
|
||||
|
||||
// Uncomment the following leave console messages in production
|
||||
// compiler: {
|
||||
// removeConsole: false,
|
||||
// },
|
||||
};
|
||||
|
||||
// Validate environment variables at build time, if required. Server env vars will be actually read and used at runtime (cloud/edge).
|
||||
import { env as validateEnv } from '~/server/env.server';
|
||||
void validateEnv; // Triggers env validation - throws if required vars are missing
|
||||
|
||||
// PostHog error reporting with source maps for production builds
|
||||
import { withPostHogConfig } from '@posthog/nextjs-config';
|
||||
if (process.env.POSTHOG_API_KEY && process.env.POSTHOG_ENV_ID) {
|
||||
console.log(' 🧠 \x1b[1mbig-AGI\x1b[0m: building with PostHog issue reporting and source maps...');
|
||||
nextConfig = withPostHogConfig(nextConfig, {
|
||||
personalApiKey: process.env.POSTHOG_API_KEY,
|
||||
envId: process.env.POSTHOG_ENV_ID,
|
||||
host: 'https://us.i.posthog.com', // backtrace upload host
|
||||
logLevel: 'info',
|
||||
sourcemaps: {
|
||||
enabled: process.env.NODE_ENV === 'production',
|
||||
project: 'big-agi',
|
||||
version: process.env.NEXT_PUBLIC_BUILD_HASH,
|
||||
deleteAfterUpload: false, // false: leave them in the tree, which would also help debugging of open-source installs
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// conditionally enable the nextjs bundle analyzer
|
||||
import withBundleAnalyzer from '@next/bundle-analyzer';
|
||||
if (process.env.ANALYZE_BUNDLE) {
|
||||
nextConfig = withBundleAnalyzer({ openAnalyzer: true })(nextConfig) as NextConfig;
|
||||
}
|
||||
|
||||
export default nextConfig;
|
||||
Generated
+3743
-1496
File diff suppressed because it is too large
Load Diff
+55
-67
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "big-agi",
|
||||
"version": "1.91.0",
|
||||
"version": "2.0.1",
|
||||
"private": true,
|
||||
"author": "Enrico Ros <enrico.ros@gmail.com>",
|
||||
"repository": "https://github.com/enricoros/big-agi",
|
||||
@@ -14,7 +14,8 @@
|
||||
"postinstall": "prisma generate --no-hints",
|
||||
"db:push": "prisma db push",
|
||||
"db:studio": "prisma studio",
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local"
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local",
|
||||
"sharp:win32_x64": "npm install --os=win32 --cpu=x64 sharp"
|
||||
},
|
||||
"prisma": {
|
||||
"schema": "src/server/prisma/schema.prisma"
|
||||
@@ -27,85 +28,72 @@
|
||||
"@emotion/cache": "^11.14.0",
|
||||
"@emotion/react": "^11.14.0",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.14.0",
|
||||
"@mui/icons-material": "^5.16.14",
|
||||
"@mui/joy": "^5.0.0-beta.51",
|
||||
"@mui/material": "^5.16.14",
|
||||
"@next/bundle-analyzer": "^15.1.4",
|
||||
"@next/third-parties": "^15.1.4",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
"@mui/icons-material": "^5.18.0",
|
||||
"@mui/joy": "^5.0.0-beta.52",
|
||||
"@next/bundle-analyzer": "~15.1.8",
|
||||
"@prisma/client": "~5.22.0",
|
||||
"@t3-oss/env-nextjs": "^0.11.1",
|
||||
"@tanstack/react-query": "^5.63.0",
|
||||
"@tanstack/react-virtual": "^3.11.2",
|
||||
"@trpc/client": "11.0.0-rc.688",
|
||||
"@trpc/next": "11.0.0-rc.688",
|
||||
"@trpc/react-query": "11.0.0-rc.688",
|
||||
"@trpc/server": "11.0.0-rc.688",
|
||||
"@vercel/analytics": "^1.4.1",
|
||||
"@vercel/speed-insights": "^1.1.0",
|
||||
"browser-fs-access": "^0.35.0",
|
||||
"cheerio": "^1.0.0",
|
||||
"dexie": "^4.0.10",
|
||||
"dexie-react-hooks": "^1.1.7",
|
||||
"diff": "^7.0.0",
|
||||
"eventsource-parser": "^3.0.0",
|
||||
"idb-keyval": "^6.2.1",
|
||||
"mammoth": "^1.9.0",
|
||||
"nanoid": "^5.0.9",
|
||||
"next": "^15.1.4",
|
||||
"@tanstack/react-query": "5.90.10",
|
||||
"@tanstack/react-virtual": "^3.13.12",
|
||||
"@trpc/client": "11.5.1",
|
||||
"@trpc/next": "11.5.1",
|
||||
"@trpc/react-query": "11.5.1",
|
||||
"@trpc/server": "11.5.1",
|
||||
"@vercel/analytics": "^1.5.0",
|
||||
"@vercel/speed-insights": "^1.2.0",
|
||||
"browser-fs-access": "^0.38.0",
|
||||
"cheerio": "^1.1.2",
|
||||
"csv-stringify": "^6.6.0",
|
||||
"dexie": "~4.0.11",
|
||||
"dexie-react-hooks": "~1.1.7",
|
||||
"diff": "^8.0.2",
|
||||
"eventemitter3": "^5.0.1",
|
||||
"idb-keyval": "^6.2.2",
|
||||
"mammoth": "^1.11.0",
|
||||
"nanoid": "^5.1.6",
|
||||
"next": "~15.1.8",
|
||||
"nprogress": "^0.2.0",
|
||||
"pdfjs-dist": "4.10.38",
|
||||
"plantuml-encoder": "^1.4.0",
|
||||
"prismjs": "^1.29.0",
|
||||
"pdfjs-dist": "5.4.54",
|
||||
"posthog-js": "^1.298.0",
|
||||
"posthog-node": "^5.14.0",
|
||||
"prismjs": "^1.30.0",
|
||||
"puppeteer-core": "^24.31.0",
|
||||
"react": "^18.3.1",
|
||||
"react-csv": "^2.2.2",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-hook-form": "^7.54.2",
|
||||
"react-katex": "^3.0.1",
|
||||
"react-markdown": "^9.0.3",
|
||||
"react-player": "^2.16.0",
|
||||
"react-resizable-panels": "^2.1.7",
|
||||
"react-timeago": "^7.2.0",
|
||||
"react-hook-form": "^7.66.1",
|
||||
"react-markdown": "^10.1.0",
|
||||
"react-player": "^3.4.0",
|
||||
"react-resizable-panels": "^3.0.6",
|
||||
"react-timeago": "^8.3.0",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-gfm": "^4.0.1",
|
||||
"remark-mark-highlight": "^0.1.1",
|
||||
"remark-math": "^6.0.0",
|
||||
"sharp": "^0.33.5",
|
||||
"superjson": "^2.2.2",
|
||||
"tesseract.js": "^6.0.0",
|
||||
"tiktoken": "^1.0.18",
|
||||
"turndown": "^7.2.0",
|
||||
"zod": "^3.24.1",
|
||||
"zod-to-json-schema": "^3.24.1",
|
||||
"zustand": "^5.0.3"
|
||||
"sharp": "^0.34.5",
|
||||
"superjson": "^2.2.5",
|
||||
"tesseract.js": "^6.0.1",
|
||||
"tiktoken": "^1.0.22",
|
||||
"turndown": "^7.2.2",
|
||||
"zod": "^4.1.13",
|
||||
"zustand": "5.0.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/diff": "^7.0.0",
|
||||
"@types/node": "^22.10.5",
|
||||
"@posthog/nextjs-config": "^1.6.0",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/nprogress": "^0.2.3",
|
||||
"@types/plantuml-encoder": "^1.4.2",
|
||||
"@types/prismjs": "^1.26.5",
|
||||
"@types/react": "^18.3.18",
|
||||
"@types/react-beautiful-dnd": "^13.1.8",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-csv": "^1.1.10",
|
||||
"@types/react-dom": "^18.3.5",
|
||||
"@types/react-katex": "^3.0.4",
|
||||
"@types/react-timeago": "^4.1.7",
|
||||
"@types/turndown": "^5.0.5",
|
||||
"cross-env": "^7.0.3",
|
||||
"eslint": "^9.17.0",
|
||||
"eslint-config-next": "^15.1.4",
|
||||
"prettier": "^3.4.2",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@types/turndown": "^5.0.6",
|
||||
"cross-env": "^10.1.0",
|
||||
"eslint": "^9.39.1",
|
||||
"eslint-config-next": "~15.1.8",
|
||||
"prettier": "^3.6.2",
|
||||
"prisma": "~5.22.0",
|
||||
"puppeteer-core": "^23.11.1",
|
||||
"typescript": "^5.7.3"
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^22.0.0 || ^20.0.0"
|
||||
},
|
||||
"overrides": {
|
||||
"@types/react": "^18.3.18",
|
||||
"@types/react-dom": "^18.3.5",
|
||||
"uri-js": "npm:uri-js-replace"
|
||||
"node": "^26.0.0 || ^24.0.0 || ^22.0.0 || ^20.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
+19
-9
@@ -1,12 +1,17 @@
|
||||
import * as React from 'react';
|
||||
import Head from 'next/head';
|
||||
import dynamic from 'next/dynamic';
|
||||
import { MyAppProps } from 'next/app';
|
||||
import { Analytics as VercelAnalytics } from '@vercel/analytics/next';
|
||||
import { SpeedInsights as VercelSpeedInsights } from '@vercel/speed-insights/next';
|
||||
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { apiQuery } from '~/common/util/trpc.client';
|
||||
|
||||
|
||||
// [server-client-safe] dynamic imports to avoid webpack bundling issues with next/navigation
|
||||
const VercelAnalytics = dynamic(() => import('@vercel/analytics/next').then(mod => mod.Analytics), { ssr: false });
|
||||
const VercelSpeedInsights = dynamic(() => import('@vercel/speed-insights/next').then(mod => mod.SpeedInsights), { ssr: false });
|
||||
|
||||
|
||||
import 'katex/dist/katex.min.css';
|
||||
import '~/common/styles/CodePrism.css';
|
||||
import '~/common/styles/GithubMarkdown.css';
|
||||
@@ -14,6 +19,7 @@ import '~/common/styles/NProgress.css';
|
||||
import '~/common/styles/agi.effects.css';
|
||||
import '~/common/styles/app.styles.css';
|
||||
|
||||
import { ErrorBoundary } from '~/common/components/ErrorBoundary';
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
import { OverlaysInsert } from '~/common/layout/overlays/OverlaysInsert';
|
||||
import { ProviderBackendCapabilities } from '~/common/providers/ProviderBackendCapabilities';
|
||||
@@ -21,7 +27,8 @@ import { ProviderBootstrapLogic } from '~/common/providers/ProviderBootstrapLogi
|
||||
import { ProviderSingleTab } from '~/common/providers/ProviderSingleTab';
|
||||
import { ProviderTheming } from '~/common/providers/ProviderTheming';
|
||||
import { SnackbarInsert } from '~/common/components/snackbar/SnackbarInsert';
|
||||
import { hasGoogleAnalytics, OptionalGoogleAnalytics } from '~/common/components/GoogleAnalytics';
|
||||
import { hasGoogleAnalytics, OptionalGoogleAnalytics } from '~/common/components/3rdparty/GoogleAnalytics';
|
||||
import { hasPostHogAnalytics, OptionalPostHogAnalytics } from '~/common/components/3rdparty/PostHogAnalytics';
|
||||
|
||||
|
||||
const Big_AGI_App = ({ Component, emotionCache, pageProps }: MyAppProps) => {
|
||||
@@ -42,18 +49,21 @@ const Big_AGI_App = ({ Component, emotionCache, pageProps }: MyAppProps) => {
|
||||
<ProviderSingleTab>
|
||||
<ProviderBackendCapabilities>
|
||||
{/* ^ Backend capabilities & SSR boundary */}
|
||||
<ProviderBootstrapLogic>
|
||||
<SnackbarInsert />
|
||||
{getLayout(<Component {...pageProps} />)}
|
||||
<OverlaysInsert />
|
||||
</ProviderBootstrapLogic>
|
||||
<ErrorBoundary outer>
|
||||
<ProviderBootstrapLogic>
|
||||
<SnackbarInsert />
|
||||
{getLayout(<Component {...pageProps} />)}
|
||||
<OverlaysInsert />
|
||||
</ProviderBootstrapLogic>
|
||||
</ErrorBoundary>
|
||||
</ProviderBackendCapabilities>
|
||||
</ProviderSingleTab>
|
||||
</ProviderTheming>
|
||||
|
||||
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
|
||||
{hasPostHogAnalytics && <OptionalPostHogAnalytics />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
|
||||
|
||||
</>;
|
||||
};
|
||||
|
||||
+4
-1
@@ -100,6 +100,10 @@ MyDocument.getInitialProps = async (ctx: DocumentContext) => {
|
||||
});
|
||||
|
||||
const initialProps = await Document.getInitialProps(ctx);
|
||||
|
||||
// Inject the comment before the HTML tag
|
||||
initialProps.html = `<!-- ❤ Built with Big-AGI -->\n${initialProps.html}`;
|
||||
|
||||
// This is important. It prevents Emotion to render invalid HTML.
|
||||
// See https://github.com/mui/material-ui/issues/26561#issuecomment-855286153
|
||||
const emotionStyles = extractCriticalToChunks(initialProps.html);
|
||||
@@ -107,7 +111,6 @@ MyDocument.getInitialProps = async (ctx: DocumentContext) => {
|
||||
<style
|
||||
data-emotion={`${style.key} ${style.ids.join(' ')}`}
|
||||
key={style.key}
|
||||
// eslint-disable-next-line react/no-danger
|
||||
dangerouslySetInnerHTML={{ __html: style.css }}
|
||||
/>
|
||||
));
|
||||
|
||||
@@ -25,11 +25,11 @@ import { getLLMsDebugInfo } from '~/common/stores/llms/store-llms';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useFolderStore } from '~/common/stores/folders/store-chat-folders';
|
||||
import { useLogicSherpaStore } from '~/common/logic/store-logic-sherpa';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
import { useUXLabsStore } from '~/common/stores/store-ux-labs';
|
||||
|
||||
// utils access
|
||||
import { BrowserLang, clientHostName, Is, isPwa } from '~/common/util/pwaUtils';
|
||||
import { getGA4MeasurementId } from '~/common/components/GoogleAnalytics';
|
||||
import { getGA4MeasurementId } from '~/common/components/3rdparty/GoogleAnalytics';
|
||||
import { prettyTimestampForFilenames } from '~/common/util/timeUtils';
|
||||
import { supportsClipboardRead } from '~/common/util/clipboardUtils';
|
||||
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
@@ -109,7 +109,6 @@ function AppDebug() {
|
||||
reloads: usageCount,
|
||||
},
|
||||
release: {
|
||||
app: Release.App,
|
||||
build: frontendBuild,
|
||||
},
|
||||
};
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 2.3 MiB |
File diff suppressed because one or more lines are too long
@@ -10,7 +10,6 @@ import { createBeamVanillaStore } from '~/modules/beam/store-beam_vanilla';
|
||||
import { OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { createDConversation, DConversation } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageTextContent, DMessage } from '~/common/stores/chat/chat.message';
|
||||
import { getChatLLMId } from '~/common/stores/llms/store-llms';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
|
||||
|
||||
@@ -21,8 +20,8 @@ function initTestConversation(): DConversation {
|
||||
return conversation;
|
||||
}
|
||||
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi = createBeamVanillaStore()): BeamStoreApi {
|
||||
beamStore.getState().open(messages, getChatLLMId(), false, (content) => alert(content));
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi): BeamStoreApi {
|
||||
beamStore.getState().open(messages, null, false, (content) => alert(content));
|
||||
return beamStore;
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ import { navigateBack } from '~/common/app.routes';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useUICounter } from '~/common/state/store-ui';
|
||||
import { useUICounter } from '~/common/stores/store-ui';
|
||||
|
||||
|
||||
function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: string, button?: React.JSX.Element }) {
|
||||
|
||||
+34
-27
@@ -5,11 +5,11 @@ import { Avatar, Box, Card, CardContent, Chip, IconButton, Link as MuiLink, List
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
|
||||
import { GitHubProjectIssueCard } from '~/common/components/GitHubProjectIssueCard';
|
||||
import { OptimaPanelGroup } from '~/common/layout/optima/panel/OptimaPanelGroup';
|
||||
import { OptimaPanelGroupedList } from '~/common/layout/optima/panel/OptimaPanelGroupedList';
|
||||
import { OptimaPanelIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { animationShadowRingLimey } from '~/common/util/animUtils';
|
||||
import { conversationTitle, DConversation, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useSetOptimaAppMenu } from '~/common/layout/optima/useOptima';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
import { MockPersona, useMockPersonas } from './state/useMockPersonas';
|
||||
@@ -210,7 +210,7 @@ function useConversationsByPersona() {
|
||||
}
|
||||
|
||||
|
||||
export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void }) {
|
||||
function ContactsMenuItems() {
|
||||
|
||||
// external state
|
||||
const {
|
||||
@@ -218,36 +218,43 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
showConversations, toggleShowConversations,
|
||||
showSupport, toggleShowSupport,
|
||||
} = useAppCallStore();
|
||||
|
||||
return (
|
||||
<OptimaPanelGroupedList title='Contacts Settings'>
|
||||
|
||||
<MenuItem onClick={toggleGrayUI}>
|
||||
Grayed UI
|
||||
<Switch checked={grayUI} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowConversations}>
|
||||
Conversations
|
||||
<Switch checked={showConversations} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowSupport}>
|
||||
Show Support
|
||||
<Switch checked={showSupport} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
</OptimaPanelGroupedList>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void }) {
|
||||
|
||||
// external state
|
||||
const { personas } = useMockPersonas();
|
||||
const { grayUI, showConversations, showSupport } = useAppCallStore();
|
||||
const conversationsByPersona = useConversationsByPersona();
|
||||
|
||||
|
||||
// pluggable UI
|
||||
|
||||
const menuItems = React.useMemo(() => <OptimaPanelGroup title='Contacts Settings'>
|
||||
|
||||
<MenuItem onClick={toggleGrayUI}>
|
||||
Grayed UI
|
||||
<Switch checked={grayUI} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowConversations}>
|
||||
Conversations
|
||||
<Switch checked={showConversations} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowSupport}>
|
||||
Show Support
|
||||
<Switch checked={showSupport} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
</OptimaPanelGroup>, [grayUI, showConversations, showSupport, toggleGrayUI, toggleShowConversations, toggleShowSupport]);
|
||||
|
||||
useSetOptimaAppMenu(menuItems, 'CallUI-Contacts');
|
||||
|
||||
|
||||
return <>
|
||||
|
||||
{/* -> Panel */}
|
||||
<OptimaPanelIn><ContactsMenuItems /></OptimaPanelIn>
|
||||
|
||||
{/* Header "Call AGI" */}
|
||||
<Box sx={{
|
||||
my: 6,
|
||||
|
||||
+30
-29
@@ -15,23 +15,22 @@ import { useChatLLMDropdown } from '../chat/components/layout-bar/useLLMDropdown
|
||||
|
||||
import { SystemPurposeId, SystemPurposes } from '../../data';
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { AixChatGenerateContent_DMessageGuts, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
|
||||
|
||||
import type { OptimaBarControlMethods } from '~/common/layout/optima/bar/OptimaBarDropdown';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { Link } from '~/common/components/Link';
|
||||
import { OptimaPanelGroup } from '~/common/layout/optima/panel/OptimaPanelGroup';
|
||||
import { OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { OptimaPanelGroupedList } from '~/common/layout/optima/panel/OptimaPanelGroupedList';
|
||||
import { OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/speechrecognition/useSpeechRecognition';
|
||||
import { conversationTitle, remapMessagesSysToUsr } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, messageFragmentsReduceText } from '~/common/stores/chat/chat.message';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, messageFragmentsReduceText, messageWasInterruptedAtStart } from '~/common/stores/chat/chat.message';
|
||||
import { createErrorContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
import { launchAppChat, navigateToIndex } from '~/common/app.routes';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { usePlayUrl } from '~/common/util/audio/usePlayUrl';
|
||||
import { useSetOptimaAppMenu } from '~/common/layout/optima/useOptima';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
import { CallAvatar } from './components/CallAvatar';
|
||||
@@ -41,7 +40,7 @@ import { CallStatus } from './components/CallStatus';
|
||||
import { useAppCallStore } from './state/store-app-call';
|
||||
|
||||
|
||||
function CallMenuItems(props: {
|
||||
function CallMenu(props: {
|
||||
pushToTalk: boolean,
|
||||
setPushToTalk: (pushToTalk: boolean) => void,
|
||||
override: boolean,
|
||||
@@ -56,7 +55,7 @@ function CallMenuItems(props: {
|
||||
|
||||
const handleChangeVoiceToggle = () => props.setOverride(!props.override);
|
||||
|
||||
return <OptimaPanelGroup title='Call'>
|
||||
return <OptimaPanelGroupedList title='Call'>
|
||||
|
||||
<MenuItem onClick={handlePushToTalkToggle}>
|
||||
<ListItemDecorator>{props.pushToTalk ? <MicNoneIcon /> : <MicIcon />}</ListItemDecorator>
|
||||
@@ -86,7 +85,7 @@ function CallMenuItems(props: {
|
||||
Voice Calls Feedback
|
||||
</MenuItem>
|
||||
|
||||
</OptimaPanelGroup>;
|
||||
</OptimaPanelGroupedList>;
|
||||
}
|
||||
|
||||
|
||||
@@ -107,7 +106,7 @@ export function Telephone(props: {
|
||||
const responseAbortController = React.useRef<AbortController | null>(null);
|
||||
|
||||
// external state
|
||||
const { chatLLMId, chatLLMDropdown } = useChatLLMDropdown(llmDropdownRef);
|
||||
const { chatLLMId: modelId, chatLLMDropdown: modelDropdown } = useChatLLMDropdown(llmDropdownRef);
|
||||
const { chatTitle, reMessages } = useChatStore(useShallow(state => {
|
||||
const conversation = props.callIntent.conversationId
|
||||
? state.conversations.find(conversation => conversation.id === props.callIntent.conversationId) ?? null
|
||||
@@ -226,7 +225,7 @@ export function Telephone(props: {
|
||||
}
|
||||
|
||||
// bail if no llm selected
|
||||
if (!chatLLMId) return;
|
||||
if (!modelId) return;
|
||||
|
||||
|
||||
// Call Message Generation Prompt
|
||||
@@ -249,19 +248,23 @@ export function Telephone(props: {
|
||||
setPersonaTextInterim('💭...');
|
||||
|
||||
aixChatGenerateContent_DMessage_FromConversation(
|
||||
chatLLMId,
|
||||
modelId,
|
||||
callSystemInstruction,
|
||||
callGenerationInputHistory,
|
||||
'call',
|
||||
callMessages[0].id,
|
||||
{ abortSignal: responseAbortController.current.signal },
|
||||
(update: AixChatGenerateContent_DMessage, _isDone: boolean) => {
|
||||
(update: AixChatGenerateContent_DMessageGuts, _isDone: boolean) => {
|
||||
const updatedText = messageFragmentsReduceText(update.fragments).trim();
|
||||
if (updatedText)
|
||||
setPersonaTextInterim(finalText = updatedText);
|
||||
},
|
||||
).then((status) => {
|
||||
|
||||
// don't add the message to conversation if it was interrupted with no content
|
||||
if (messageWasInterruptedAtStart(status.lastDMessage))
|
||||
return;
|
||||
|
||||
// whether status.outcome === 'success' or not, we get a valid DMessage, eventually with Error Fragments inside
|
||||
const fullMessage = createDMessageFromFragments('assistant', status.lastDMessage.fragments);
|
||||
fullMessage.generator = status.lastDMessage.generator;
|
||||
@@ -274,8 +277,8 @@ export function Telephone(props: {
|
||||
}).catch((err: DOMException) => {
|
||||
if (err?.name !== 'AbortError') {
|
||||
// create an error message to explain the exception
|
||||
const errorMesage = createDMessageFromFragments('assistant', [createErrorContentFragment(err.message || err.toString())]);
|
||||
setCallMessages(messages => [...messages, errorMesage]); // [state] append assistant:call_response-ERROR
|
||||
const errorMessage = createDMessageFromFragments('assistant', [createErrorContentFragment(err.message || err.toString())]);
|
||||
setCallMessages(messages => [...messages, errorMessage]); // [state] append assistant:call_response-ERROR
|
||||
}
|
||||
}).finally(() => {
|
||||
setPersonaTextInterim(null);
|
||||
@@ -285,7 +288,7 @@ export function Telephone(props: {
|
||||
responseAbortController.current?.abort();
|
||||
responseAbortController.current = null;
|
||||
};
|
||||
}, [isConnected, callMessages, chatLLMId, personaVoiceId, personaSystemMessage, reMessages]);
|
||||
}, [isConnected, callMessages, modelId, personaVoiceId, personaSystemMessage, reMessages]);
|
||||
|
||||
// [E] Message interrupter
|
||||
const abortTrigger = isConnected && recognitionState.hasSpeech;
|
||||
@@ -311,22 +314,20 @@ export function Telephone(props: {
|
||||
const isMicEnabled = recognitionState.isAvailable;
|
||||
const isTTSEnabled = true;
|
||||
const isEnabled = isMicEnabled && isTTSEnabled;
|
||||
|
||||
|
||||
// pluggable UI
|
||||
|
||||
const menuItems = React.useMemo(() =>
|
||||
<CallMenuItems
|
||||
pushToTalk={pushToTalk} setPushToTalk={setPushToTalk}
|
||||
override={overridePersonaVoice} setOverride={setOverridePersonaVoice} />
|
||||
, [overridePersonaVoice, pushToTalk],
|
||||
);
|
||||
|
||||
useSetOptimaAppMenu(menuItems, 'CallUI-Call');
|
||||
const micErrorMessage = recognitionState.errorMessage;
|
||||
|
||||
|
||||
return <>
|
||||
<OptimaToolbarIn>{chatLLMDropdown}</OptimaToolbarIn>
|
||||
|
||||
{/* -> Toolbar */}
|
||||
<OptimaToolbarIn>{modelDropdown}</OptimaToolbarIn>
|
||||
{/* -> Panel */}
|
||||
<OptimaPanelIn>
|
||||
<CallMenu
|
||||
pushToTalk={pushToTalk} setPushToTalk={setPushToTalk}
|
||||
override={overridePersonaVoice} setOverride={setOverridePersonaVoice}
|
||||
/>
|
||||
</OptimaPanelIn>
|
||||
|
||||
<Typography
|
||||
level='h1'
|
||||
@@ -350,7 +351,7 @@ export function Telephone(props: {
|
||||
callerName={isConnected ? undefined : personaName}
|
||||
statusText={isRinging ? '' /*'is calling you'*/ : isDeclined ? 'call declined' : isEnded ? 'call ended' : callElapsedTime}
|
||||
regardingText={chatTitle}
|
||||
micError={!isMicEnabled} speakError={!isTTSEnabled}
|
||||
micError={!isMicEnabled} micErrorMessage={micErrorMessage} speakError={!isTTSEnabled}
|
||||
/>
|
||||
|
||||
{/* Live Transcript, w/ streaming messages, audio indication, etc. */}
|
||||
|
||||
@@ -16,7 +16,7 @@ export function CallStatus(props: {
|
||||
callerName?: string,
|
||||
statusText: string,
|
||||
regardingText: string | null,
|
||||
micError: boolean, speakError: boolean,
|
||||
micError: boolean, micErrorMessage: string | null, speakError: boolean,
|
||||
// llmComponent?: React.JSX.Element,
|
||||
}) {
|
||||
return (
|
||||
@@ -37,7 +37,7 @@ export function CallStatus(props: {
|
||||
</Typography>}
|
||||
|
||||
{props.micError && <InlineError
|
||||
severity='danger' error='Looks like this Browser may not support speech recognition. You can try Chrome on Windows or Android instead.' />}
|
||||
severity='danger' error={props.micErrorMessage || 'Looks like this Browser may not support speech recognition. You can try Chrome on Windows or Android instead.'} />}
|
||||
|
||||
{props.speakError && <InlineError
|
||||
severity='danger' error='Text-to-speech does not appear to be configured. Please set it up in Preferences > Voice.' />}
|
||||
|
||||
+159
-81
@@ -2,12 +2,12 @@ import * as React from 'react';
|
||||
import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { useTheme } from '@mui/joy';
|
||||
import { Box, useTheme } from '@mui/joy';
|
||||
|
||||
import { DEV_MODE_SETTINGS } from '../settings-modal/UxLabsSettings';
|
||||
import { DiagramConfig, DiagramsModal } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
import { FlattenerModal } from '~/modules/aifn/flatten/FlattenerModal';
|
||||
import { TradeConfig, TradeModal } from '~/modules/trade/TradeModal';
|
||||
|
||||
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
import type { TradeConfig } from '~/modules/trade/TradeModal';
|
||||
import { downloadSingleChat, importConversationsFromFilesAtRest, openConversationsAtRestPicker } from '~/modules/trade/trade.client';
|
||||
import { imaginePromptFromTextOrThrow } from '~/modules/aifn/imagine/imaginePromptFromText';
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
@@ -18,9 +18,10 @@ import type { DConversation, DConversationId } from '~/common/stores/chat/chat.c
|
||||
import type { OptimaBarControlMethods } from '~/common/layout/optima/bar/OptimaBarDropdown';
|
||||
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
|
||||
import { ConversationsManager } from '~/common/chat-overlay/ConversationsManager';
|
||||
import { LLM_IF_ANT_PromptCaching, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { OptimaDrawerIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
|
||||
import { ErrorBoundary } from '~/common/components/ErrorBoundary';
|
||||
import { getLLMContextTokens, LLM_IF_ANT_PromptCaching, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { OptimaDrawerIn, OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { PanelResizeInset } from '~/common/components/PanelResizeInset';
|
||||
import { Release } from '~/common/app.release';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
@@ -28,28 +29,31 @@ import { ShortcutKey, useGlobalShortcuts } from '~/common/components/shortcuts/u
|
||||
import { WorkspaceIdProvider } from '~/common/stores/workspace/WorkspaceIdProvider';
|
||||
import { addSnackbar, removeSnackbar } from '~/common/components/snackbar/useSnackbarsStore';
|
||||
import { createDMessageFromFragments, createDMessagePlaceholderIncomplete, DMessageMetadata, duplicateDMessageMetadata } from '~/common/stores/chat/chat.message';
|
||||
import { createErrorContentFragment, createTextContentFragment, DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragmentsNoVoid } from '~/common/stores/chat/chat.fragments';
|
||||
import { createErrorContentFragment, createTextContentFragment, DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragments } from '~/common/stores/chat/chat.fragments';
|
||||
import { gcChatImageAssets } from '~/common/stores/chat/chat.gc';
|
||||
import { getChatLLMId } from '~/common/stores/llms/store-llms';
|
||||
import { getConversation, getConversationSystemPurposeId, useConversation } from '~/common/stores/chat/store-chats';
|
||||
import { optimaActions, optimaOpenModels, optimaOpenPreferences, useSetOptimaAppMenu } from '~/common/layout/optima/useOptima';
|
||||
import { themeBgAppChatComposer } from '~/common/app.theme';
|
||||
import { useChatLLM } from '~/common/stores/llms/llms.hooks';
|
||||
import { optimaActions, optimaOpenModels, optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useFolderStore } from '~/common/stores/folders/store-chat-folders';
|
||||
import { useIsMobile, useIsTallScreen } from '~/common/components/useMatchMedia';
|
||||
import { useLLM } from '~/common/stores/llms/llms.hooks';
|
||||
import { useModelDomain } from '~/common/stores/llms/hooks/useModelDomain';
|
||||
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
import { useUIComplexityIsMinimal } from '~/common/stores/store-ui';
|
||||
import { useUXLabsStore } from '~/common/stores/store-ux-labs';
|
||||
|
||||
import { ChatPane } from './components/layout-pane/ChatPane';
|
||||
import { ChatBarAltBeam } from './components/layout-bar/ChatBarAltBeam';
|
||||
import { ChatBarBeam } from './components/layout-bar/ChatBarBeam';
|
||||
import { ChatBarAltTitle } from './components/layout-bar/ChatBarAltTitle';
|
||||
import { ChatBarDropdowns } from './components/layout-bar/ChatBarDropdowns';
|
||||
import { ChatBarChat } from './components/layout-bar/ChatBarChat';
|
||||
import { ChatBeamWrapper } from './components/ChatBeamWrapper';
|
||||
import { ChatDrawerMemo } from './components/layout-drawer/ChatDrawer';
|
||||
import { ChatMessageList } from './components/ChatMessageList';
|
||||
import { Composer } from './components/composer/Composer';
|
||||
import { usePanesManager } from './components/panes/usePanesManager';
|
||||
import { PaneTitleOverlay } from './components/PaneTitleOverlay';
|
||||
import { useComposerAutoHide } from './components/composer/useComposerAutoHide';
|
||||
import { usePanesManager } from './components/panes/store-panes-manager';
|
||||
|
||||
import type { ChatExecuteMode } from './execute-mode/execute-mode.types';
|
||||
|
||||
@@ -74,24 +78,52 @@ const chatMessageListSx: SxProps = {
|
||||
flexGrow: 1,
|
||||
};
|
||||
|
||||
/*const chatMessageListBrandedSx: SxProps = {
|
||||
flexGrow: 1,
|
||||
backgroundBlendMode: 'soft-light',
|
||||
backgroundColor: themeBgApp,
|
||||
backgroundImage: 'url(https://...)',
|
||||
backgroundPosition: 'center',
|
||||
backgroundRepeat: 'no-repeat',
|
||||
backgroundSize: 'contain',
|
||||
} as const;*/
|
||||
|
||||
const chatBeamWrapperSx: SxProps = {
|
||||
flexGrow: 1,
|
||||
// we added these after removing the minSize={20} (%) from the containing panel.
|
||||
minWidth: '18rem',
|
||||
// minHeight: 'calc(100vh - 69px - var(--AGI-Nav-width))',
|
||||
};
|
||||
|
||||
const composerOpenSx: SxProps = {
|
||||
zIndex: 21, // just to allocate a surface, and potentially have a shadow
|
||||
// NOTE: disabled on 2025-03-05: conflicts with the GlobalDragOverlay's
|
||||
// zIndex: 21, // just to allocate a surface, and potentially have a shadow
|
||||
minWidth: { md: 480 }, // don't get compresses too much on desktop
|
||||
backgroundColor: themeBgAppChatComposer,
|
||||
// backgroundColor: themeBgAppChatComposer, // inlined in the Composer
|
||||
transition: 'background-color 0.5s ease-out',
|
||||
borderTop: `1px solid`,
|
||||
borderTopColor: 'rgba(var(--joy-palette-neutral-mainChannel, 99 107 116) / 0.4)',
|
||||
// hack: eats the bottom of the last message (as it has a 1px divider)
|
||||
mt: '-1px',
|
||||
};
|
||||
// NOTE: commented on 2024-05-13, as other content was stepping on the border due to it and missing zIndex
|
||||
// mt: '-1px',
|
||||
} as const;
|
||||
|
||||
const composerClosedSx: SxProps = {
|
||||
display: 'none',
|
||||
};
|
||||
const composerOpenMobileSx: SxProps = {
|
||||
zIndex: 21, // allocates the surface, possibly enables shadow if we like
|
||||
py: 0.5, // have some breathing room
|
||||
// boxShadow: '0px -1px 8px -2px rgba(0, 0, 0, 0.4)',
|
||||
...composerOpenSx,
|
||||
} as const;
|
||||
|
||||
// const composerClosedSx: SxProps = {
|
||||
// display: 'none',
|
||||
// };
|
||||
|
||||
|
||||
// Lazy-loaded Modals
|
||||
const DiagramsModalLazy = React.lazy(() => import('~/modules/aifn/digrams/DiagramsModal').then(module => ({ default: module.DiagramsModal })));
|
||||
const FlattenerModalLazy = React.lazy(() => import('~/modules/aifn/flatten/FlattenerModal').then(module => ({ default: module.FlattenerModal })));
|
||||
const TradeModalLazy = React.lazy(() => import('~/modules/trade/TradeModal').then(module => ({ default: module.TradeModal })));
|
||||
|
||||
|
||||
export function AppChat() {
|
||||
@@ -111,21 +143,25 @@ export function AppChat() {
|
||||
|
||||
// external state
|
||||
const theme = useTheme();
|
||||
const [composerHasContent, setComposerHasContent] = React.useState(false);
|
||||
|
||||
const isMobile = useIsMobile();
|
||||
const isTallScreen = useIsTallScreen();
|
||||
|
||||
const isZenMode = useUIComplexityIsMinimal();
|
||||
|
||||
const intent = useRouterQuery<Partial<AppChatIntent>>();
|
||||
|
||||
const showAltTitleBar = useUXLabsStore(state => DEV_MODE_SETTINGS && state.labsChatBarAlt === 'title');
|
||||
|
||||
const { chatLLM } = useChatLLM();
|
||||
const { domainModelId: chatLLMId } = useModelDomain('primaryChat');
|
||||
const chatLLM = useLLM(chatLLMId) ?? null;
|
||||
|
||||
const {
|
||||
// state
|
||||
chatPanes,
|
||||
focusedPaneConversationId, // <-- key
|
||||
focusedPaneIndex,
|
||||
focusedPaneConversationId,
|
||||
// actions
|
||||
navigateHistoryInFocusedPane,
|
||||
openConversationInFocusedPane,
|
||||
@@ -147,10 +183,10 @@ export function AppChat() {
|
||||
}, [chatPanes]);
|
||||
|
||||
const beamsOpens = useAreBeamsOpen(paneBeamStores);
|
||||
const beamOpenStoreInFocusedPane = React.useMemo(() => {
|
||||
const open = focusedPaneIndex !== null ? (beamsOpens?.[focusedPaneIndex] ?? false) : false;
|
||||
return open ? paneBeamStores?.[focusedPaneIndex!] ?? null : null;
|
||||
}, [beamsOpens, focusedPaneIndex, paneBeamStores]);
|
||||
const beamOpenStoreInFocusedPane = focusedPaneIndex === null ? null
|
||||
: !beamsOpens?.[focusedPaneIndex] ? null
|
||||
: paneBeamStores?.[focusedPaneIndex] ?? null;
|
||||
const focusedChatBeamOpen = focusedPaneIndex !== null && !!beamsOpens?.[focusedPaneIndex];
|
||||
|
||||
const {
|
||||
// focused
|
||||
@@ -171,7 +207,7 @@ export function AppChat() {
|
||||
// const focusedConversationWorkspaceId = workspaceForConversationIdentity(focusedPaneConversationId);
|
||||
//// const focusedConversationWorkspace = useWorkspaceIdForConversation(focusedPaneConversationId);
|
||||
|
||||
const { mayWork: capabilityHasT2I } = useCapabilityTextToImage();
|
||||
const { mayWork: capabilityHasT2I, mayEdit: capabilityHasT2IEdit } = useCapabilityTextToImage();
|
||||
|
||||
const activeFolderId = useFolderStore(({ enableFolders, folders }) => {
|
||||
const activeFolderId = enableFolders ? _activeFolderId : null;
|
||||
@@ -179,6 +215,9 @@ export function AppChat() {
|
||||
return activeFolder?.id ?? null;
|
||||
});
|
||||
|
||||
// Composer Auto-hiding
|
||||
const forceComposerHide = !!beamOpenStoreInFocusedPane /* || !focusedPaneConversationId */; // auto-hide when no chat (the 'please select a conversation...' state) doesn't feel good
|
||||
const composerAutoHide = useComposerAutoHide(forceComposerHide, composerHasContent);
|
||||
|
||||
// Window actions
|
||||
|
||||
@@ -211,7 +250,7 @@ export function AppChat() {
|
||||
else if (outcome === 'err-t2i-unconfigured')
|
||||
optimaOpenPreferences('draw');
|
||||
else if (outcome === 'err-no-persona')
|
||||
addSnackbar({ key: 'chat-no-persona', message: 'No persona selected.', type: 'issue' });
|
||||
addSnackbar({ key: 'chat-no-persona', message: 'No persona selected.', type: 'issue', overrides: { autoHideDuration: 4000 } });
|
||||
else if (outcome === 'err-no-conversation')
|
||||
addSnackbar({ key: 'chat-no-conversation', message: 'No active conversation.', type: 'issue' });
|
||||
else if (outcome === 'err-no-last-message')
|
||||
@@ -237,7 +276,7 @@ export function AppChat() {
|
||||
// create the user:message
|
||||
// NOTE: this can lead to multiple chat messages with data refs that are referring to the same dblobs,
|
||||
// however, we already got transferred ownership of the dblobs at this point.
|
||||
const userMessage = createDMessageFromFragments('user', duplicateDMessageFragmentsNoVoid(fragments)); // [chat] create user:message to send per-chat
|
||||
const userMessage = createDMessageFromFragments('user', duplicateDMessageFragments(fragments, true)); // [chat] create user:message to send per-chat
|
||||
if (metadata) userMessage.metadata = duplicateDMessageMetadata(metadata);
|
||||
|
||||
ConversationsManager.getHandler(conversation.id).messageAppend(userMessage); // [chat] append user message in each conversation
|
||||
@@ -329,9 +368,10 @@ export function AppChat() {
|
||||
useFolderStore.getState().addConversationToFolder(activeFolderId, conversationId);
|
||||
|
||||
// focus the composer
|
||||
composerTextAreaRef.current?.focus();
|
||||
if (!isMobile)
|
||||
composerTextAreaRef.current?.focus();
|
||||
|
||||
}, [activeFolderId, focusedPaneConversationId, handleOpenConversationInFocusedPane, prependNewConversation, recycleNewConversationId]);
|
||||
}, [activeFolderId, focusedPaneConversationId, handleOpenConversationInFocusedPane, isMobile, prependNewConversation, recycleNewConversationId]);
|
||||
|
||||
const handleConversationImportDialog = React.useCallback(() => setTradeConfig({ dir: 'import' }), []);
|
||||
|
||||
@@ -432,15 +472,15 @@ export function AppChat() {
|
||||
const barAltTitle = showAltTitleBar ? focusedChatTitle ?? 'No Chat' : null;
|
||||
|
||||
const focusedBarContent = React.useMemo(() => beamOpenStoreInFocusedPane
|
||||
? <ChatBarAltBeam beamStore={beamOpenStoreInFocusedPane} isMobile={isMobile} />
|
||||
? <ChatBarBeam conversationTitle={focusedChatTitle ?? 'No Chat'} beamStore={beamOpenStoreInFocusedPane} isMobile={isMobile} />
|
||||
: (barAltTitle === null)
|
||||
? <ChatBarDropdowns conversationId={focusedPaneConversationId} llmDropdownRef={llmDropdownRef} personaDropdownRef={personaDropdownRef} />
|
||||
? <ChatBarChat conversationId={focusedPaneConversationId} llmDropdownRef={llmDropdownRef} personaDropdownRef={personaDropdownRef} />
|
||||
: <ChatBarAltTitle conversationId={focusedPaneConversationId} conversationTitle={barAltTitle} />
|
||||
, [barAltTitle, beamOpenStoreInFocusedPane, focusedPaneConversationId, isMobile],
|
||||
, [barAltTitle, beamOpenStoreInFocusedPane, focusedChatTitle, focusedPaneConversationId, isMobile],
|
||||
);
|
||||
|
||||
|
||||
// Disabled by default, as it lags the opening of the drawer and immediatly vanishes during the closing animation
|
||||
// Disabled by default, as it lags the opening of the drawer and immediately vanishes during the closing animation
|
||||
const isDrawerOpen = true; // useOptimaDrawerOpen();
|
||||
|
||||
const drawerContent = React.useMemo(() => !isDrawerOpen ? null :
|
||||
@@ -450,6 +490,7 @@ export function AppChat() {
|
||||
activeFolderId={activeFolderId}
|
||||
chatPanesConversationIds={paneUniqueConversationIds}
|
||||
disableNewButton={disableNewButton}
|
||||
focusedChatBeamOpen={focusedChatBeamOpen}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationNew={handleConversationNewInFocusedPane}
|
||||
@@ -458,10 +499,10 @@ export function AppChat() {
|
||||
onConversationsImportDialog={handleConversationImportDialog}
|
||||
setActiveFolderId={setActiveFolderId}
|
||||
/>,
|
||||
[activeFolderId, disableNewButton, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
|
||||
[activeFolderId, disableNewButton, focusedChatBeamOpen, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
|
||||
);
|
||||
|
||||
const focusedMenuItems = React.useMemo(() =>
|
||||
const focusedChatPanelContent = React.useMemo(() => !focusedPaneConversationId ? null :
|
||||
<ChatPane
|
||||
conversationId={focusedPaneConversationId}
|
||||
disableItems={!focusedPaneConversationId || isFocusedChatEmpty}
|
||||
@@ -477,8 +518,6 @@ export function AppChat() {
|
||||
[focusedPaneConversationId, handleConversationBranch, handleConversationFlatten, handleConversationReset, hasConversations, isFocusedChatEmpty, isMessageSelectionMode, isMobile, isTallScreen],
|
||||
);
|
||||
|
||||
useSetOptimaAppMenu(focusedMenuItems, 'AppChat');
|
||||
|
||||
|
||||
// Effects
|
||||
|
||||
@@ -486,7 +525,7 @@ export function AppChat() {
|
||||
React.useEffect(() => {
|
||||
// Debug: open a null chat
|
||||
if (Release.IsNodeDevBuild && intent.initialConversationId === 'null')
|
||||
openConversationInFocusedPane(null! /* for debugging purporse */);
|
||||
openConversationInFocusedPane(null! /* for debugging purpose */);
|
||||
// Open the initial conversation if set
|
||||
else if (intent.initialConversationId)
|
||||
openConversationInFocusedPane(intent.initialConversationId);
|
||||
@@ -578,8 +617,11 @@ export function AppChat() {
|
||||
|
||||
|
||||
return <>
|
||||
<OptimaDrawerIn>{drawerContent}</OptimaDrawerIn>
|
||||
|
||||
{/* -> Toolbar, -> Drawer, -> Panel*/}
|
||||
<OptimaToolbarIn>{focusedBarContent}</OptimaToolbarIn>
|
||||
<OptimaDrawerIn>{drawerContent}</OptimaDrawerIn>
|
||||
<OptimaPanelIn>{focusedChatPanelContent}</OptimaPanelIn>
|
||||
|
||||
<PanelGroup
|
||||
direction={(isMobile || isTallScreen) ? 'vertical' : 'horizontal'}
|
||||
@@ -596,20 +638,22 @@ export function AppChat() {
|
||||
const _panesCount = chatPanes.length;
|
||||
const _keyAndId = `chat-pane-${pane.paneId}`;
|
||||
const _sepId = `sep-pane-${idx}`;
|
||||
return <WorkspaceIdProvider conversationId={_paneIsFocused ? _paneConversationId : null} key={_keyAndId}>
|
||||
return <WorkspaceIdProvider conversationId={_paneIsFocused ? _paneConversationId : null} key={_keyAndId}><ErrorBoundary>
|
||||
|
||||
<Panel
|
||||
id={_keyAndId}
|
||||
order={idx}
|
||||
collapsible={chatPanes.length === 2}
|
||||
defaultSize={(_panesCount === 3 && idx === 1) ? 34 : Math.round(100 / _panesCount)}
|
||||
minSize={20}
|
||||
// minSize={20 /* IMPORTANT: this forces a reflow even on a simple on hover */}
|
||||
onClick={(event) => {
|
||||
const setFocus = chatPanes.length < 2 || !event.altKey;
|
||||
setFocusedPaneIndex(setFocus ? idx : -1);
|
||||
// Alt + Click: undocumented feature to clear focus
|
||||
if (event.altKey && chatPanes.length > 1)
|
||||
return setFocusedPaneIndex(-1);
|
||||
setFocusedPaneIndex(idx);
|
||||
}}
|
||||
onCollapse={() => {
|
||||
// NOTE: despite the delay to try to let the draggin settle, there seems to be an issue with the Pane locking the screen
|
||||
// NOTE: despite the delay to try to let the dragging settle, there seems to be an issue with the Pane locking the screen
|
||||
// setTimeout(() => removePane(idx), 50);
|
||||
// more than 2 will result in an assertion from the framework
|
||||
if (chatPanes.length === 2) removePane(idx);
|
||||
@@ -618,28 +662,45 @@ export function AppChat() {
|
||||
// for anchoring the scroll button in place
|
||||
position: 'relative',
|
||||
...(isMultiPane ? {
|
||||
marginBottom: '1px', // compensates for the -1px in `composerOpenSx` for the Composer offset
|
||||
borderRadius: '0.375rem',
|
||||
border: `2px solid ${_paneIsFocused
|
||||
borderStyle: 'solid',
|
||||
borderColor: _paneIsFocused
|
||||
? ((willMulticast || !isMultiConversationId) ? theme.palette.primary.solidBg : theme.palette.primary.solidBg)
|
||||
: ((willMulticast || !isMultiConversationId) ? theme.palette.primary.softActiveBg : theme.palette.background.level1)}`,
|
||||
: ((willMulticast || !isMultiConversationId) ? theme.palette.primary.softActiveBg : theme.palette.divider),
|
||||
borderWidth: '2px',
|
||||
// borderBottomWidth: '3px',
|
||||
// DISABLED on 2024-03-13, it gets in the way quite a lot
|
||||
// filter: (!willMulticast && !_paneIsFocused)
|
||||
// ? (!isMultiConversationId ? 'grayscale(66.67%)' /* clone of the same */ : 'grayscale(66.67%)')
|
||||
// : undefined,
|
||||
// 2025-02-27: didn't try, here's another version
|
||||
// filter: _paneIsFocused ? 'none' : 'brightness(0.94) saturate(0.9)',
|
||||
} : {
|
||||
// NOTE: this is a workaround for the 'stuck-after-collapse-close' issue. We will collapse the 'other' pane, which
|
||||
// will get it removed (onCollapse), and somehow this pane will be stuck with a pointerEvents: 'none' style, which de-facto
|
||||
// disables further interaction with the chat. This is a workaround to re-enable the pointer events.
|
||||
// The root cause seems to be a Dragstate not being reset properly, however the pointerEvents has been set since 0.0.56 while
|
||||
// The root cause seems to be a Drag state not being reset properly, however the pointerEvents has been set since 0.0.56 while
|
||||
// it was optional before: https://github.com/bvaughn/react-resizable-panels/issues/241
|
||||
pointerEvents: 'auto',
|
||||
}),
|
||||
...((_paneIsIncognito && {
|
||||
backgroundColor: theme.palette.background.level3,
|
||||
backgroundImage: 'repeating-linear-gradient(45deg, rgba(0,0,0,0.03), rgba(0,0,0,0.03) 10px, transparent 10px, transparent 20px)',
|
||||
})),
|
||||
}}
|
||||
>
|
||||
|
||||
{isMultiPane && !isZenMode && (
|
||||
<PaneTitleOverlay
|
||||
paneIdx={idx}
|
||||
conversationId={_paneConversationId}
|
||||
isFocused={_paneIsFocused}
|
||||
isIncognito={_paneIsIncognito}
|
||||
onConversationDelete={handleDeleteConversations}
|
||||
/>
|
||||
)}
|
||||
|
||||
<ScrollToBottom
|
||||
bootToBottom
|
||||
stickToBottomInitial
|
||||
@@ -653,7 +714,7 @@ export function AppChat() {
|
||||
conversationHandler={_paneChatHandler}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
chatLLMAntPromptCaching={chatLLM?.interfaces?.includes(LLM_IF_ANT_PromptCaching) ?? false}
|
||||
chatLLMContextTokens={chatLLM?.contextTokens ?? null}
|
||||
chatLLMContextTokens={getLLMContextTokens(chatLLM) ?? null}
|
||||
chatLLMSupportsImages={chatLLM?.interfaces?.includes(LLM_IF_OAI_Vision) ?? false}
|
||||
fitScreen={isMobile || isMultiPane}
|
||||
isMobile={isMobile}
|
||||
@@ -691,50 +752,67 @@ export function AppChat() {
|
||||
</PanelResizeHandle>
|
||||
)}
|
||||
|
||||
</WorkspaceIdProvider>;
|
||||
</ErrorBoundary></WorkspaceIdProvider>;
|
||||
})}
|
||||
|
||||
</PanelGroup>
|
||||
|
||||
<Composer
|
||||
isMobile={isMobile}
|
||||
chatLLM={chatLLM}
|
||||
composerTextAreaRef={composerTextAreaRef}
|
||||
targetConversationId={focusedPaneConversationId}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
isMulticast={!isMultiConversationId ? null : isComposerMulticast}
|
||||
isDeveloperMode={isFocusedChatDeveloper}
|
||||
onAction={handleComposerAction}
|
||||
onConversationsImportFromFiles={handleConversationsImportFromFiles}
|
||||
onTextImagine={handleImagineFromText}
|
||||
setIsMulticast={setIsComposerMulticast}
|
||||
sx={beamOpenStoreInFocusedPane ? composerClosedSx : composerOpenSx}
|
||||
/>
|
||||
{/* Composer with auto-hide */}
|
||||
<Box {...composerAutoHide.compressorProps}>
|
||||
<div style={composerAutoHide.compressibleStyle}>
|
||||
<Composer
|
||||
isMobile={isMobile}
|
||||
chatLLM={chatLLM}
|
||||
composerTextAreaRef={composerTextAreaRef}
|
||||
targetConversationId={focusedPaneConversationId}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
capabilityHasT2IEdit={capabilityHasT2IEdit}
|
||||
isMulticast={!isMultiConversationId ? null : isComposerMulticast}
|
||||
isDeveloperMode={isFocusedChatDeveloper}
|
||||
onAction={handleComposerAction}
|
||||
onConversationBeamEdit={handleMessageBeamLastInFocusedPane}
|
||||
onConversationsImportFromFiles={handleConversationsImportFromFiles}
|
||||
onTextImagine={handleImagineFromText}
|
||||
setIsMulticast={setIsComposerMulticast}
|
||||
onComposerHasContent={setComposerHasContent}
|
||||
sx={isMobile ? composerOpenMobileSx : composerOpenSx}
|
||||
/>
|
||||
</div>
|
||||
</Box>
|
||||
|
||||
{/* Hover zone for auto-hide */}
|
||||
{!forceComposerHide && composerAutoHide.isHidden && <Box {...composerAutoHide.detectorProps} />}
|
||||
|
||||
{/* Diagrams */}
|
||||
{!!diagramConfig && (
|
||||
<DiagramsModal
|
||||
config={diagramConfig}
|
||||
onClose={() => setDiagramConfig(null)}
|
||||
/>
|
||||
<React.Suspense fallback={null}>
|
||||
<DiagramsModalLazy
|
||||
config={diagramConfig}
|
||||
onClose={() => setDiagramConfig(null)}
|
||||
/>
|
||||
</React.Suspense>
|
||||
)}
|
||||
|
||||
{/* Flatten */}
|
||||
{!!flattenConversationId && (
|
||||
<FlattenerModal
|
||||
conversationId={flattenConversationId}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onClose={() => setFlattenConversationId(null)}
|
||||
/>
|
||||
<React.Suspense fallback={null}>
|
||||
<FlattenerModalLazy
|
||||
conversationId={flattenConversationId}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onClose={() => setFlattenConversationId(null)}
|
||||
/>
|
||||
</React.Suspense>
|
||||
)}
|
||||
|
||||
{/* Import / Export */}
|
||||
{!!tradeConfig && (
|
||||
<TradeModal
|
||||
config={tradeConfig}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onClose={() => setTradeConfig(null)}
|
||||
/>
|
||||
<React.Suspense fallback={null}>
|
||||
<TradeModalLazy
|
||||
config={tradeConfig}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onClose={() => setTradeConfig(null)}
|
||||
/>
|
||||
</React.Suspense>
|
||||
)}
|
||||
|
||||
</>;
|
||||
|
||||
@@ -1,19 +1,41 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Modal, ModalClose } from '@mui/joy';
|
||||
import { Box, IconButton, Modal } from '@mui/joy';
|
||||
import CloseFullscreenIcon from '@mui/icons-material/CloseFullscreen';
|
||||
|
||||
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
|
||||
import { BeamView } from '~/modules/beam/BeamView';
|
||||
|
||||
import { GoodTooltip } from '~/common/components/GoodTooltip';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { themeZIndexBeamView } from '~/common/app.theme';
|
||||
|
||||
|
||||
/*const overlaySx: SxProps = {
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
|
||||
}*/
|
||||
const beamWrapperStyles = {
|
||||
|
||||
wrapper: {
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
backgroundColor: 'background.level2', // darker than the expected Level1, for a change
|
||||
} as const,
|
||||
|
||||
closeContainer: {
|
||||
position: 'absolute',
|
||||
top: '0.25rem',
|
||||
// left: '0.25rem',
|
||||
left: { xs: 'calc(50% - 3rem)', md: '50%' }, // center on desktop, a bit left (for the islands) on mobile
|
||||
// transform: 'translate(-50%, 0)',
|
||||
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
|
||||
} as const,
|
||||
|
||||
closeButton: {
|
||||
// color: 'white',
|
||||
// borderRadius: '25%',
|
||||
boxShadow: 'md',
|
||||
} as const,
|
||||
|
||||
} as const;
|
||||
|
||||
|
||||
export function ChatBeamWrapper(props: {
|
||||
@@ -40,15 +62,22 @@ export function ChatBeamWrapper(props: {
|
||||
|
||||
return isMaximized ? (
|
||||
<Modal open onClose={handleUnMaximize}>
|
||||
<Box sx={{
|
||||
backgroundColor: 'background.level1',
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
}}>
|
||||
<Box sx={beamWrapperStyles.wrapper}>
|
||||
|
||||
<ScrollToBottom disableAutoStick>
|
||||
{beamView}
|
||||
</ScrollToBottom>
|
||||
<ModalClose sx={{ color: 'white', backgroundColor: 'background.surface', boxShadow: 'xs', mr: 2 }} />
|
||||
|
||||
{/* Modal-Close-alike */}
|
||||
<Box sx={beamWrapperStyles.closeContainer}>
|
||||
<GoodTooltip title='Exit maximized mode'>
|
||||
<IconButton variant='solid' onClick={handleUnMaximize} sx={beamWrapperStyles.closeButton}>
|
||||
<CloseFullscreenIcon />
|
||||
{/*<CloseRoundedIcon />*/}
|
||||
</IconButton>
|
||||
</GoodTooltip>
|
||||
</Box>
|
||||
|
||||
</Box>
|
||||
</Modal>
|
||||
) : (
|
||||
|
||||
@@ -9,14 +9,14 @@ import type { SystemPurposeExample } from '../../../data';
|
||||
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
|
||||
import type { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
|
||||
import type { DLLMContextTokens } from '~/common/stores/llms/llms.types';
|
||||
import { DConversationId, excludeSystemMessages } from '~/common/stores/chat/chat.conversation';
|
||||
import { ShortcutKey, useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { convertFilesToDAttachmentFragments } from '~/common/attachment-drafts/attachment.pipeline';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP } from '~/common/stores/chat/chat.message';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP, messageHasUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { createTextContentFragment, DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
|
||||
import { openFileForAttaching } from '~/common/components/ButtonAttachFiles';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
|
||||
import { useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useChatOverlayStore } from '~/common/chat-overlay/store-perchat_vanilla';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
@@ -40,7 +40,7 @@ export function ChatMessageList(props: {
|
||||
conversationHandler: ConversationHandler | null,
|
||||
capabilityHasT2I: boolean,
|
||||
chatLLMAntPromptCaching: boolean,
|
||||
chatLLMContextTokens: number | null,
|
||||
chatLLMContextTokens: DLLMContextTokens,
|
||||
chatLLMSupportsImages: boolean,
|
||||
fitScreen: boolean,
|
||||
isMobile: boolean,
|
||||
@@ -64,7 +64,6 @@ export function ChatMessageList(props: {
|
||||
const { notifyBooting } = useScrollToBottom();
|
||||
const danger_experimentalHtmlWebUi = useChatAutoSuggestHTMLUI();
|
||||
const [showSystemMessages] = useChatShowSystemMessages();
|
||||
const optionalTranslationWarning = useBrowserTranslationWarning();
|
||||
const { conversationMessages, historyTokenCount } = useChatStore(useShallow(({ conversations }) => {
|
||||
const conversation = conversations.find(conversation => conversation.id === props.conversationId);
|
||||
return {
|
||||
@@ -118,9 +117,9 @@ export function ChatMessageList(props: {
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory, props.chatLLMSupportsImages]);
|
||||
|
||||
const handleMessageContinue = React.useCallback(async (_messageId: DMessageId /* Ignored for now */) => {
|
||||
const handleMessageContinue = React.useCallback(async (_messageId: DMessageId /* Ignored for now */, continueText: null | string) => {
|
||||
if (conversationId && conversationHandler) {
|
||||
conversationHandler.messageAppend(createDMessageTextContent('user', 'Continue')); // [chat] append user:Continue
|
||||
conversationHandler.messageAppend(createDMessageTextContent('user', continueText || 'Continue')); // [chat] append user:Continue (or custom text, likely from an 'option')
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory]);
|
||||
@@ -137,8 +136,8 @@ export function ChatMessageList(props: {
|
||||
|
||||
const handleMessageBeam = React.useCallback(async (messageId: DMessageId) => {
|
||||
// Message option menu Beam
|
||||
if (!conversationId || !props.conversationHandler || !props.conversationHandler.isValid()) return;
|
||||
const inputHistory = props.conversationHandler.historyViewHeadOrThrow('chat-beam-message');
|
||||
if (!conversationId || !conversationHandler || !conversationHandler.isValid()) return;
|
||||
const inputHistory = conversationHandler.historyViewHeadOrThrow('chat-beam-message');
|
||||
if (!inputHistory.length) return;
|
||||
|
||||
// TODO: replace the Persona and Auto-Cache-hint in the history?
|
||||
@@ -151,52 +150,52 @@ export function ChatMessageList(props: {
|
||||
// assistant: do an in-place beam
|
||||
if (lastTruncatedMessage.role === 'assistant') {
|
||||
if (truncatedHistory.length >= 2)
|
||||
props.conversationHandler.beamInvoke(truncatedHistory.slice(0, -1), [lastTruncatedMessage], lastTruncatedMessage.id);
|
||||
conversationHandler.beamInvoke(truncatedHistory.slice(0, -1), [lastTruncatedMessage], lastTruncatedMessage.id);
|
||||
} else if (lastTruncatedMessage.role === 'user') {
|
||||
// user: truncate and append (but if the next message is an assistant message, import it)
|
||||
const possibleNextMessage = inputHistory[truncatedHistory.length];
|
||||
if (possibleNextMessage?.role === 'assistant')
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [possibleNextMessage], null);
|
||||
conversationHandler.beamInvoke(truncatedHistory, [possibleNextMessage], null);
|
||||
else
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [], null);
|
||||
conversationHandler.beamInvoke(truncatedHistory, [], null);
|
||||
}
|
||||
}, [conversationId, props.conversationHandler]);
|
||||
}, [conversationHandler, conversationId]);
|
||||
|
||||
const handleMessageBranch = React.useCallback((messageId: DMessageId) => {
|
||||
conversationId && onConversationBranch(conversationId, messageId, true);
|
||||
}, [conversationId, onConversationBranch]);
|
||||
|
||||
const handleMessageTruncate = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.historyTruncateTo(messageId, 0);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.historyTruncateTo(messageId, 0);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleMessageDelete = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.messagesDelete([messageId]);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.messagesDelete([messageId]);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleMessageAppendFragment = React.useCallback((messageId: DMessageId, fragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentAppend(messageId, fragment, false, false);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.messageFragmentAppend(messageId, fragment, false, false);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleMessageDeleteFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId) => {
|
||||
props.conversationHandler?.messageFragmentDelete(messageId, fragmentId, false, true);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.messageFragmentDelete(messageId, fragmentId, false, true);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleMessageReplaceFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId, newFragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentReplace(messageId, fragmentId, newFragment, false);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.messageFragmentReplace(messageId, fragmentId, newFragment, true);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleMessageToggleUserFlag = React.useCallback((messageId: DMessageId, userFlag: DMessageUserFlag, _maxPerConversation?: number) => {
|
||||
props.conversationHandler?.messageToggleUserFlag(messageId, userFlag, true /* touch */);
|
||||
conversationHandler?.messageToggleUserFlag(messageId, userFlag, true /* touch */);
|
||||
// Note: we don't support 'maxPerConversation' yet, which is supposed to turn off the flag from the beginning if it's too numerous
|
||||
// if (_maxPerConversation) {
|
||||
// ...
|
||||
// }
|
||||
}, [props.conversationHandler]);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleAddInReferenceTo = React.useCallback((item: DMetaReferenceItem) => {
|
||||
props.conversationHandler?.overlayActions.addInReferenceTo(item);
|
||||
}, [props.conversationHandler]);
|
||||
conversationHandler?.overlayActions.addInReferenceTo(item);
|
||||
}, [conversationHandler]);
|
||||
|
||||
const handleTextDiagram = React.useCallback(async (messageId: DMessageId, text: string) => {
|
||||
conversationId && onTextDiagram({ conversationId: conversationId, messageId, text });
|
||||
@@ -223,6 +222,16 @@ export function ChatMessageList(props: {
|
||||
|
||||
// operate on the local selection set
|
||||
|
||||
const areAllSelectedMessagesHidden = React.useMemo(() => {
|
||||
if (selectedMessages.size === 0) return false;
|
||||
for (const messageId of selectedMessages) {
|
||||
const message = conversationMessages.find(m => m.id === messageId);
|
||||
if (message && !messageHasUserFlag(message, MESSAGE_FLAG_AIX_SKIP))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}, [selectedMessages, conversationMessages]);
|
||||
|
||||
const handleSelectAll = (selected: boolean) => {
|
||||
const newSelected = new Set<string>();
|
||||
if (selected)
|
||||
@@ -238,15 +247,15 @@ export function ChatMessageList(props: {
|
||||
};
|
||||
|
||||
const handleSelectionDelete = React.useCallback(() => {
|
||||
props.conversationHandler?.messagesDelete(Array.from(selectedMessages));
|
||||
conversationHandler?.messagesDelete(Array.from(selectedMessages));
|
||||
setSelectedMessages(new Set());
|
||||
}, [props.conversationHandler, selectedMessages]);
|
||||
}, [conversationHandler, selectedMessages]);
|
||||
|
||||
const handleSelectionHide = React.useCallback(() => {
|
||||
const handleSelectionToggleVisibility = React.useCallback(() => {
|
||||
for (let selectedMessage of Array.from(selectedMessages))
|
||||
props.conversationHandler?.messageSetUserFlag(selectedMessage, MESSAGE_FLAG_AIX_SKIP, true, true);
|
||||
conversationHandler?.messageSetUserFlag(selectedMessage, MESSAGE_FLAG_AIX_SKIP, !areAllSelectedMessagesHidden, true);
|
||||
setSelectedMessages(new Set());
|
||||
}, [props.conversationHandler, selectedMessages]);
|
||||
}, [conversationHandler, selectedMessages, areAllSelectedMessagesHidden]);
|
||||
|
||||
const { isMessageSelectionMode, setIsMessageSelectionMode } = props;
|
||||
|
||||
@@ -282,6 +291,10 @@ export function ChatMessageList(props: {
|
||||
p: 0,
|
||||
...props.sx,
|
||||
|
||||
// we added these after removing the minSize={20} (%) from the containing panel.
|
||||
minWidth: '18rem',
|
||||
// minHeight: '180px', // not need for this, as it's already an overflow scrolling container, so one can reduce it to a pixel
|
||||
|
||||
// fix for the double-border on the last message (one by the composer, one to the bottom of the message)
|
||||
// marginBottom: '-1px',
|
||||
|
||||
@@ -311,8 +324,6 @@ export function ChatMessageList(props: {
|
||||
return (
|
||||
<List role='chat-messages-list' sx={listSx}>
|
||||
|
||||
{optionalTranslationWarning}
|
||||
|
||||
{props.isMessageSelectionMode && (
|
||||
<MessagesSelectionHeader
|
||||
hasSelected={selectedMessages.size > 0}
|
||||
@@ -320,7 +331,8 @@ export function ChatMessageList(props: {
|
||||
onClose={() => props.setIsMessageSelectionMode(false)}
|
||||
onSelectAll={handleSelectAll}
|
||||
onDeleteMessages={handleSelectionDelete}
|
||||
onHideMessages={handleSelectionHide}
|
||||
onToggleVisibility={handleSelectionToggleVisibility}
|
||||
areAllMessagesHidden={areAllSelectedMessagesHidden}
|
||||
/>
|
||||
)}
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRendere
|
||||
import type { DEphemeral } from '~/common/chat-overlay/store-perchat-ephemerals_slice';
|
||||
import { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
|
||||
import { adjustContentScaling, ContentScaling, lineHeightChatTextMd } from '~/common/app.theme';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
import { useUIPreferencesStore } from '~/common/stores/store-ui';
|
||||
|
||||
|
||||
// State Pane
|
||||
|
||||
@@ -0,0 +1,194 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, IconButton, Sheet } from '@mui/joy';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import DeleteForeverIcon from '@mui/icons-material/DeleteForever';
|
||||
import EditRoundedIcon from '@mui/icons-material/EditRounded';
|
||||
import OpenInFullIcon from '@mui/icons-material/OpenInFull';
|
||||
|
||||
import type { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { InlineTextarea } from '~/common/components/InlineTextarea';
|
||||
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
|
||||
import { useConversationTitle } from '~/common/stores/chat/hooks/useConversationTitle';
|
||||
|
||||
import { panesManagerActions } from './panes/store-panes-manager';
|
||||
|
||||
|
||||
// configuration
|
||||
const ENABLE_DELETE = false;
|
||||
|
||||
|
||||
const _styles = {
|
||||
tileBar: {
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
left: '50%',
|
||||
transform: 'translateX(-50%)',
|
||||
zIndex: 10,
|
||||
padding: '0 0.125rem 0.125rem',
|
||||
fontSize: 'sm',
|
||||
fontWeight: 'md',
|
||||
borderBottomLeftRadius: '8px',
|
||||
borderBottomRightRadius: '8px',
|
||||
// boxShadow: 'xs',
|
||||
// border: '1px solid',
|
||||
// borderColor: 'background.popup',
|
||||
borderTop: 'none',
|
||||
maxWidth: '78%',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 1,
|
||||
} as const,
|
||||
titleBarIncognito: {
|
||||
backgroundImage: 'repeating-linear-gradient(45deg, rgba(0,0,0,0.1), rgba(0,0,0,0.1) 10px, transparent 10px, transparent 20px)',
|
||||
backgroundColor: 'neutral.solidBg',
|
||||
} as const,
|
||||
title: {
|
||||
flex: 1,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
whiteSpace: 'nowrap',
|
||||
cursor: 'pointer',
|
||||
minWidth: '2.75rem',
|
||||
textAlign: 'center',
|
||||
} as const,
|
||||
toolButton: {
|
||||
'--IconButton-size': '1.5rem',
|
||||
backgroundColor: 'transparent',
|
||||
opacity: 0.5,
|
||||
transition: 'opacity 0.1s',
|
||||
'&:hover': {
|
||||
opacity: 1,
|
||||
},
|
||||
} as const,
|
||||
toolIcon: {} as const,
|
||||
toolIconLg: {
|
||||
fontSize: 'lg',
|
||||
} as const,
|
||||
} as const;
|
||||
|
||||
|
||||
export function PaneTitleOverlay(props: {
|
||||
paneIdx: number,
|
||||
conversationId: DConversationId | null,
|
||||
isFocused: boolean,
|
||||
isIncognito: boolean,
|
||||
onConversationDelete: (conversationIds: DConversationId[], bypassConfirmation: boolean) => void,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [editingTitle, setEditingTitle] = React.useState(false);
|
||||
|
||||
// external state
|
||||
const { title, setUserTitle } = useConversationTitle(props.conversationId);
|
||||
// if (!title || title?.length < 3)
|
||||
// return null;
|
||||
|
||||
|
||||
// close tabs handlers
|
||||
|
||||
const handleCloseThis = React.useCallback(() => {
|
||||
panesManagerActions().removePane(props.paneIdx);
|
||||
}, [props.paneIdx]);
|
||||
|
||||
const handleCloseOthers = React.useCallback(() => {
|
||||
panesManagerActions().removeOtherPanes(props.paneIdx);
|
||||
}, [props.paneIdx]);
|
||||
|
||||
|
||||
// title handles
|
||||
|
||||
const handleTitleEditBegin = React.useCallback(() => {
|
||||
setEditingTitle(true);
|
||||
}, []);
|
||||
|
||||
const handleTitleEditChange = React.useCallback((newTitle: string) => {
|
||||
setUserTitle(newTitle);
|
||||
setEditingTitle(false);
|
||||
}, [setUserTitle]);
|
||||
|
||||
const handleTitleEditEnd = React.useCallback(() => {
|
||||
setEditingTitle(false);
|
||||
}, []);
|
||||
|
||||
|
||||
// delete handlers
|
||||
|
||||
const { onConversationDelete } = props;
|
||||
|
||||
const handleDeleteClicked = React.useCallback((event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
if (props.conversationId)
|
||||
onConversationDelete([props.conversationId], event.shiftKey);
|
||||
}, [onConversationDelete, props.conversationId]);
|
||||
|
||||
|
||||
// don't render if not focused
|
||||
// if (!props.isFocused)
|
||||
// return null;
|
||||
|
||||
const hasTitle = title && title.length > 0;
|
||||
const color = props.isFocused ? 'primary' : 'neutral';
|
||||
const variantO = props.isFocused ? 'solid' : 'outlined';
|
||||
const variantP = props.isFocused ? 'solid' : 'plain';
|
||||
|
||||
return (
|
||||
<Sheet
|
||||
color={color}
|
||||
variant={variantO}
|
||||
sx={!props.isIncognito ? _styles.tileBar : { ..._styles.tileBar, ..._styles.titleBarIncognito }}
|
||||
>
|
||||
{/* Close Others*/}
|
||||
{/*<TooltipOutlined title='Close Other Tabs'>*/}
|
||||
{!editingTitle && <IconButton title='Close Other Tabs' size='sm' color={color} variant={variantP} onClick={handleCloseOthers} sx={_styles.toolButton}>
|
||||
<OpenInFullIcon sx={_styles.toolIcon} />
|
||||
</IconButton>}
|
||||
{/*</TooltipOutlined>*/}
|
||||
|
||||
{/* Title */}
|
||||
{editingTitle ? (
|
||||
<InlineTextarea
|
||||
initialText={title || ''}
|
||||
placeholder='Chat title...'
|
||||
invertedColors
|
||||
centerText
|
||||
onEdit={handleTitleEditChange}
|
||||
onCancel={handleTitleEditEnd}
|
||||
sx={{
|
||||
// flexGrow: 1,
|
||||
// minWidth: 120,
|
||||
mx: { md: 1 },
|
||||
}}
|
||||
/>
|
||||
) : !!props.conversationId && <>
|
||||
{hasTitle && <Box sx={_styles.title} onClick={handleTitleEditBegin}>
|
||||
{title}
|
||||
</Box>}
|
||||
{!hasTitle && <Box fontStyle='italic' onClick={handleTitleEditBegin}>
|
||||
untitled
|
||||
</Box>}
|
||||
{!hasTitle && <TooltipOutlined title='Edit Chat Title'>
|
||||
<IconButton title='' size='sm' color={color} variant={variantP} onClick={handleTitleEditBegin} sx={_styles.toolButton}>
|
||||
<EditRoundedIcon sx={_styles.toolIcon} />
|
||||
</IconButton>
|
||||
</TooltipOutlined>}
|
||||
</>}
|
||||
|
||||
{/* Delete This */}
|
||||
{ENABLE_DELETE && hasTitle && !!props.conversationId && (
|
||||
<TooltipOutlined title='Delete Chat (Shift+Click to bypass confirmation)'>
|
||||
<IconButton size='sm' variant={variantP} onClick={handleDeleteClicked} sx={_styles.toolButton}>
|
||||
<DeleteForeverIcon />
|
||||
</IconButton>
|
||||
</TooltipOutlined>
|
||||
)}
|
||||
|
||||
{/* Close This */}
|
||||
{/*<TooltipOutlined title='Close'>*/}
|
||||
{!editingTitle && <IconButton title='Close Tab' size='sm' color={color} variant={variantP} onClick={handleCloseThis} sx={_styles.toolButton}>
|
||||
<ClearIcon sx={_styles.toolIconLg} />
|
||||
</IconButton>}
|
||||
{/*</TooltipOutlined>*/}
|
||||
</Sheet>
|
||||
);
|
||||
}
|
||||
@@ -1,19 +1,17 @@
|
||||
import * as React from 'react';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, IconButton, styled, Typography } from '@mui/joy';
|
||||
import { Box, IconButton, Typography } from '@mui/joy';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import MinimizeIcon from '@mui/icons-material/Minimize';
|
||||
|
||||
// import { isMacUser } from '~/common/util/pwaUtils';
|
||||
import type { ShortcutObject } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { ShortcutKey, ShortcutObject } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
|
||||
import { GoodTooltip } from '~/common/components/GoodTooltip';
|
||||
import { useGlobalShortcutsStore } from '~/common/components/shortcuts/store-global-shortcuts';
|
||||
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
import { useUXLabsStore } from '~/common/stores/store-ux-labs';
|
||||
|
||||
|
||||
// configuration
|
||||
@@ -27,12 +25,92 @@ const hideButtonTooltip = (
|
||||
</Box>
|
||||
);
|
||||
|
||||
const hideButtonSx: SxProps = {
|
||||
'--IconButton-size': '28px',
|
||||
'--Icon-fontSize': '16px',
|
||||
'--Icon-color': 'var(--joy-palette-text-tertiary)',
|
||||
mr: -0.5,
|
||||
};
|
||||
const _styles = {
|
||||
|
||||
bar: {
|
||||
borderBottom: '1px solid',
|
||||
// borderBottomColor: 'var(--joy-palette-divider)',
|
||||
borderBottomColor: 'rgba(var(--joy-palette-neutral-mainChannel) / 0.1)',
|
||||
// borderTopColor: 'rgba(var(--joy-palette-neutral-mainChannel, 99 107 116) / 0.4)',
|
||||
// backgroundColor: 'var(--joy-palette-background-surface)',
|
||||
// paddingBlock: '0.25rem',
|
||||
paddingInline: '0.5rem',
|
||||
// layout
|
||||
display: 'flex',
|
||||
flexFlow: 'row nowrap',
|
||||
columnGap: '1.5rem', // space between shortcuts
|
||||
lineHeight: '1em',
|
||||
// animation: `${animateAppear} 0.3s ease-out`,
|
||||
// transition: 'all 0.2s ease',
|
||||
// '&:hover': {
|
||||
// backgroundColor: 'var(--joy-palette-background-level1)',
|
||||
// },
|
||||
} as const,
|
||||
|
||||
hideButton: {
|
||||
'--IconButton-size': '28px',
|
||||
'--Icon-fontSize': '16px',
|
||||
'--Icon-color': 'var(--joy-palette-text-tertiary)',
|
||||
mr: -0.5,
|
||||
} as const,
|
||||
|
||||
shortcut: {
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
whiteSpace: 'nowrap',
|
||||
gap: '2px', // space between modifiers
|
||||
marginBlock: '0.25rem',
|
||||
// transition: 'transform 0.2s ease',
|
||||
// '&:hover': {
|
||||
// transform: 'scale(1.05)',
|
||||
// },
|
||||
'&:hover > div': {
|
||||
backgroundColor: 'background.level1',
|
||||
} as const,
|
||||
cursor: 'pointer',
|
||||
[`&[aria-disabled="true"]`]: {
|
||||
opacity: 0.5,
|
||||
pointerEvents: 'none',
|
||||
} as const,
|
||||
} as const,
|
||||
|
||||
itemKeyGroup: {
|
||||
fontSize: 'xs',
|
||||
fontWeight: 'md',
|
||||
outline: '1px solid',
|
||||
outlineColor: 'neutral.outlinedBorder',
|
||||
borderRadius: 'xs',
|
||||
// backgroundColor: 'var(--joy-palette-neutral-outlinedBorder)',
|
||||
backgroundColor: 'background.popup',
|
||||
// boxShadow: 'inset 2px 0px 4px -2px var(--joy-palette-background-backdrop)',
|
||||
boxShadow: 'xs',
|
||||
// minWidth: '1rem',
|
||||
paddingBlock: '2px',
|
||||
paddingInline: '1px',
|
||||
// pointerEvents: 'none',
|
||||
cursor: 'pointer',
|
||||
transition: 'background-color 1s ease',
|
||||
display: 'flex',
|
||||
textAlign: 'center',
|
||||
// Remove the gap and use dividers instead
|
||||
gap: 0,
|
||||
'& > span': {
|
||||
position: 'relative',
|
||||
paddingInline: '4px',
|
||||
minWidth: '1rem',
|
||||
'&:not(:last-child)': {
|
||||
borderRight: '1px solid',
|
||||
borderRightColor: 'neutral.outlinedBorder',
|
||||
},
|
||||
},
|
||||
} as const,
|
||||
|
||||
itemIcon: {
|
||||
fontSize: 'md',
|
||||
} as const,
|
||||
|
||||
} as const;
|
||||
|
||||
|
||||
// const animateAppear = keyframes`
|
||||
// from {
|
||||
@@ -45,64 +123,6 @@ const hideButtonSx: SxProps = {
|
||||
// }
|
||||
// `;
|
||||
|
||||
const StatusBarContainer = styled(Box)({
|
||||
borderBottom: '1px solid',
|
||||
// borderBottomColor: 'var(--joy-palette-divider)',
|
||||
borderBottomColor: 'rgba(var(--joy-palette-neutral-mainChannel) / 0.1)',
|
||||
// borderTopColor: 'rgba(var(--joy-palette-neutral-mainChannel, 99 107 116) / 0.4)',
|
||||
// backgroundColor: 'var(--joy-palette-background-surface)',
|
||||
// paddingBlock: '0.25rem',
|
||||
paddingInline: '0.5rem',
|
||||
// layout
|
||||
display: 'flex',
|
||||
flexFlow: 'row nowrap',
|
||||
columnGap: '1.5rem', // space between shortcuts
|
||||
lineHeight: '1em',
|
||||
// animation: `${animateAppear} 0.3s ease-out`,
|
||||
// transition: 'all 0.2s ease',
|
||||
// '&:hover': {
|
||||
// backgroundColor: 'var(--joy-palette-background-level1)',
|
||||
// },
|
||||
});
|
||||
|
||||
const ShortcutContainer = styled(Box)({
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
whiteSpace: 'nowrap',
|
||||
gap: '2px', // space between modifiers
|
||||
marginBlock: '0.25rem',
|
||||
// transition: 'transform 0.2s ease',
|
||||
// '&:hover': {
|
||||
// transform: 'scale(1.05)',
|
||||
// },
|
||||
'&:hover > div': {
|
||||
backgroundColor: 'var(--joy-palette-background-level1)',
|
||||
},
|
||||
cursor: 'pointer',
|
||||
[`&[aria-disabled="true"]`]: {
|
||||
opacity: 0.5,
|
||||
pointerEvents: 'none',
|
||||
}
|
||||
});
|
||||
|
||||
const ShortcutKey = styled(Box)({
|
||||
fontSize: 'var(--joy-fontSize-xs)',
|
||||
fontWeight: 'var(--joy-fontWeight-md)',
|
||||
border: '1px solid',
|
||||
borderColor: 'var(--joy-palette-neutral-outlinedBorder)',
|
||||
borderRadius: 'var(--joy-radius-xs)',
|
||||
// backgroundColor: 'var(--joy-palette-neutral-outlinedBorder)',
|
||||
backgroundColor: 'var(--joy-palette-background-popup)',
|
||||
// boxShadow: 'inset 2px 0px 4px -2px var(--joy-palette-background-backdrop)',
|
||||
boxShadow: 'var(--joy-shadow-xs)',
|
||||
// minWidth: '1rem',
|
||||
paddingBlock: '1px',
|
||||
paddingInline: '4px',
|
||||
// pointerEvents: 'none',
|
||||
cursor: 'pointer',
|
||||
transition: 'background-color 1s ease',
|
||||
});
|
||||
|
||||
|
||||
// Display mac-style shortcuts on windows as well
|
||||
const displayMacModifiers = true;
|
||||
@@ -118,6 +138,8 @@ function _platformAwareModifier(symbol: 'Ctrl' | 'Alt' | 'Shift') {
|
||||
}
|
||||
}
|
||||
|
||||
const ShortcutItemMemo = React.memo(ShortcutItem);
|
||||
|
||||
function ShortcutItem(props: { shortcut: ShortcutObject }) {
|
||||
|
||||
const handleClicked = React.useCallback(() => {
|
||||
@@ -126,17 +148,24 @@ function ShortcutItem(props: { shortcut: ShortcutObject }) {
|
||||
}, [props.shortcut]);
|
||||
|
||||
return (
|
||||
<ShortcutContainer onClick={!props.shortcut.disabled ? handleClicked : undefined} aria-disabled={props.shortcut.disabled}>
|
||||
{!!props.shortcut.ctrl && <ShortcutKey>{_platformAwareModifier('Ctrl')}</ShortcutKey>}
|
||||
{!!props.shortcut.shift && <ShortcutKey>{_platformAwareModifier('Shift')}</ShortcutKey>}
|
||||
{/*{!!props.shortcut.altForNonMac && <ShortcutKey onClick={handleClicked}>{_platformAwareModifier('Alt')}</ShortcutKey>}*/}
|
||||
<ShortcutKey>{props.shortcut.key === 'Escape' ? 'Esc' : props.shortcut.key === 'Enter' ? '↵' : props.shortcut.key.toUpperCase()}</ShortcutKey>
|
||||
<Box
|
||||
onClick={!props.shortcut.disabled ? handleClicked : undefined}
|
||||
aria-disabled={props.shortcut.disabled}
|
||||
sx={_styles.shortcut}
|
||||
>
|
||||
<Box sx={_styles.itemKeyGroup}>
|
||||
{!!props.shortcut.ctrl && <span>{_platformAwareModifier('Ctrl')}</span>}
|
||||
{!!props.shortcut.shift && <span>{_platformAwareModifier('Shift')}</span>}
|
||||
{/*{!!props.shortcut.altForNonMac && <span>{_platformAwareModifier('Alt')}</span>}*/}
|
||||
<span>{props.shortcut.key === 'Escape' ? 'Esc' : props.shortcut.key === 'Enter' ? '↵' : props.shortcut.key.toUpperCase()}</span>
|
||||
</Box>
|
||||
<Typography level='body-xs'>{props.shortcut.description}</Typography>
|
||||
{props.shortcut.endDecoratorIcon && <props.shortcut.endDecoratorIcon sx={{ fontSize: 'md' }} />}
|
||||
</ShortcutContainer>
|
||||
{!!props.shortcut.endDecoratorIcon && <props.shortcut.endDecoratorIcon sx={_styles.itemIcon} />}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
export const StatusBarMemo = React.memo(StatusBar);
|
||||
|
||||
export function StatusBar(props: { toggleMinimized?: () => void, isMinimized?: boolean }) {
|
||||
|
||||
@@ -148,18 +177,34 @@ export function StatusBar(props: { toggleMinimized?: () => void, isMinimized?: b
|
||||
// external state
|
||||
const labsShowShortcutBar = useUXLabsStore(state => state.labsShowShortcutBar);
|
||||
const shortcuts = useGlobalShortcutsStore(useShallow(state => {
|
||||
// get visible shortcuts
|
||||
let visibleShortcuts = !labsShowShortcutBar ? [] : state.getAllShortcuts().filter(shortcut => !!shortcut.description);
|
||||
|
||||
// filter by highest level if levels are present
|
||||
const maxLevel = Math.max(...visibleShortcuts.map(s => s.level ?? 0));
|
||||
if (maxLevel > 0)
|
||||
visibleShortcuts = visibleShortcuts.filter(s => s.level === maxLevel);
|
||||
|
||||
visibleShortcuts.sort((a, b) => {
|
||||
// if they don't have a 'shift', they are sorted first
|
||||
if (a.shift !== b.shift)
|
||||
return a.shift ? 1 : -1;
|
||||
// (Hack) If the description is 'Beam', it goes last
|
||||
if (a.description === 'Beam Edit')
|
||||
return 1;
|
||||
// alphabetical for the rest
|
||||
// 1. First by level
|
||||
if ((a.level ?? 0) !== (b.level ?? 0))
|
||||
return (b.level ?? 0) - (a.level ?? 0);
|
||||
|
||||
// 2. Then by modifiers presence (no modifiers first)
|
||||
const aModifiers = (a.ctrl ? 1 : 0) + (a.shift ? 1 : 0);
|
||||
const bModifiers = (b.ctrl ? 1 : 0) + (b.shift ? 1 : 0);
|
||||
if (aModifiers !== bModifiers)
|
||||
return aModifiers - bModifiers;
|
||||
|
||||
// 3a. Special case for ShortcutKey.Esc, at the beginning
|
||||
if (a.key === ShortcutKey.Esc) return -1;
|
||||
if (b.key === ShortcutKey.Esc) return 1;
|
||||
|
||||
// 3. Special case for 'Beam Edit'
|
||||
if (a.description === 'Beam Edit') return 1;
|
||||
if (b.description === 'Beam Edit') return -1;
|
||||
|
||||
// 4. Finally alphabetically by key
|
||||
return a.key.localeCompare(b.key);
|
||||
});
|
||||
return visibleShortcuts;
|
||||
@@ -202,27 +247,30 @@ export function StatusBar(props: { toggleMinimized?: () => void, isMinimized?: b
|
||||
return null;
|
||||
|
||||
return (
|
||||
<StatusBarContainer aria-label='Status bar'>
|
||||
<Box
|
||||
aria-label='Shortcuts and status bar'
|
||||
sx={_styles.bar}
|
||||
>
|
||||
|
||||
{(!props.toggleMinimized || !COMPOSER_ENABLE_MINIMIZE) && !props.isMinimized ? (
|
||||
// Close Button
|
||||
<GoodTooltip variantOutlined arrow placement='top' title={hideButtonTooltip}>
|
||||
<IconButton size='sm' sx={hideButtonSx} onClick={handleHideShortcuts}>
|
||||
<IconButton size='sm' onClick={handleHideShortcuts} sx={_styles.hideButton}>
|
||||
<CloseRoundedIcon />
|
||||
</IconButton>
|
||||
</GoodTooltip>
|
||||
) : (
|
||||
// Minimize / Maximize Button - note the Maximize icon would be more correct, but also less discoverable
|
||||
<IconButton size='sm' sx={hideButtonSx} onClick={props.toggleMinimized}>
|
||||
<IconButton size='sm' onClick={props.toggleMinimized} sx={_styles.hideButton}>
|
||||
{props.isMinimized ? <ExpandLessIcon /> : <MinimizeIcon />}
|
||||
</IconButton>
|
||||
)}
|
||||
|
||||
{/* Show all shortcuts */}
|
||||
{shortcuts.map((shortcut, idx) => (
|
||||
<ShortcutItem key={shortcut.key + idx} shortcut={shortcut} />
|
||||
<ShortcutItemMemo key={shortcut.key + idx} shortcut={shortcut} />
|
||||
))}
|
||||
|
||||
</StatusBarContainer>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -127,7 +127,7 @@ export function CameraCaptureModal(props: {
|
||||
|
||||
const handleVideoDownloadClicked = React.useCallback(async () => {
|
||||
if (!videoRef.current) return;
|
||||
await downloadVideoFrame(videoRef.current, 'camera', 'image/jpeg', 0.98);
|
||||
await downloadVideoFrame(videoRef.current, 'camera', 'image/jpeg', 0.98).catch(alert);
|
||||
}, [videoRef]);
|
||||
|
||||
|
||||
|
||||
@@ -2,12 +2,10 @@ import * as React from 'react';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
import type { FileWithHandle } from 'browser-fs-access';
|
||||
|
||||
import { Box, Button, ButtonGroup, Card, Dropdown, Grid, IconButton, Menu, MenuButton, MenuItem, Textarea, Tooltip, Typography } from '@mui/joy';
|
||||
import { Box, Button, ButtonGroup, Card, Dropdown, Grid, IconButton, Menu, MenuButton, MenuItem, Textarea, Typography } from '@mui/joy';
|
||||
import { ColorPaletteProp, SxProps, VariantProp } from '@mui/joy/styles/types';
|
||||
import AddCircleOutlineIcon from '@mui/icons-material/AddCircleOutline';
|
||||
import AutoAwesomeIcon from '@mui/icons-material/AutoAwesome';
|
||||
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import FormatPaintTwoToneIcon from '@mui/icons-material/FormatPaintTwoTone';
|
||||
import PsychologyIcon from '@mui/icons-material/Psychology';
|
||||
import SendIcon from '@mui/icons-material/Send';
|
||||
import StopOutlinedIcon from '@mui/icons-material/StopOutlined';
|
||||
@@ -19,35 +17,34 @@ import { useChatAutoSuggestAttachmentPrompts, useChatMicTimeoutMsValue } from '.
|
||||
import { useAgiAttachmentPrompts } from '~/modules/aifn/agiattachmentprompts/useAgiAttachmentPrompts';
|
||||
import { useBrowseCapability } from '~/modules/browse/store-module-browsing';
|
||||
|
||||
import { DLLM, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { DLLM, getLLMContextTokens, getLLMPricing, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
|
||||
import { AudioGenerator } from '~/common/util/audio/AudioGenerator';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { ButtonAttachFilesMemo, openFileForAttaching } from '~/common/components/ButtonAttachFiles';
|
||||
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
|
||||
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
|
||||
import { ConversationsManager } from '~/common/chat-overlay/ConversationsManager';
|
||||
import { DMessageMetadata, DMetaReferenceItem, messageFragmentsReduceText } from '~/common/stores/chat/chat.message';
|
||||
import { DMessageId, DMessageMetadata, DMetaReferenceItem, messageFragmentsReduceText } from '~/common/stores/chat/chat.message';
|
||||
import { ShortcutKey, ShortcutObject, useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { addSnackbar } from '~/common/components/snackbar/useSnackbarsStore';
|
||||
import { animationEnterBelow } from '~/common/util/animUtils';
|
||||
import { browserSpeechRecognitionCapability, PLACEHOLDER_INTERIM_TRANSCRIPT, SpeechResult, useSpeechRecognition } from '~/common/components/speechrecognition/useSpeechRecognition';
|
||||
import { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { copyToClipboard, supportsClipboardRead } from '~/common/util/clipboardUtils';
|
||||
import { createTextContentFragment, DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragmentsNoVoid } from '~/common/stores/chat/chat.fragments';
|
||||
import { estimateTextTokens, glueForMessageTokens, marshallWrapDocFragments } from '~/common/stores/chat/chat.tokens';
|
||||
import { createTextContentFragment, DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragments } from '~/common/stores/chat/chat.fragments';
|
||||
import { glueForMessageTokens, marshallWrapDocFragments } from '~/common/stores/chat/chat.tokens';
|
||||
import { isValidConversation, useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { getModelParameterValueOrThrow } from '~/common/stores/llms/llms.parameters';
|
||||
import { launchAppCall, removeQueryParam, useRouterQuery } from '~/common/app.routes';
|
||||
import { lineHeightTextareaMd } from '~/common/app.theme';
|
||||
import { lineHeightTextareaMd, themeBgAppChatComposer } from '~/common/app.theme';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { platformAwareKeystrokes } from '~/common/components/KeyStroke';
|
||||
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
import { useChatComposerOverlayStore } from '~/common/chat-overlay/store-perchat_vanilla';
|
||||
import { useComposerStartupText, useLogicSherpaStore } from '~/common/logic/store-logic-sherpa';
|
||||
import { useDebouncer } from '~/common/components/useDebouncer';
|
||||
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
|
||||
import { useUICounter, useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
import { useUICounter, useUIPreferencesStore } from '~/common/stores/store-ui';
|
||||
import { useUXLabsStore } from '~/common/stores/store-ux-labs';
|
||||
|
||||
import type { ActileItem } from './actile/ActileProvider';
|
||||
import { providerAttachmentLabels } from './actile/providerAttachmentLabels';
|
||||
@@ -57,6 +54,7 @@ import { useActileManager } from './actile/useActileManager';
|
||||
|
||||
import type { AttachmentDraftId } from '~/common/attachment-drafts/attachment.types';
|
||||
import { LLMAttachmentDraftsAction, LLMAttachmentsList } from './llmattachments/LLMAttachmentsList';
|
||||
import { PhPaintBrush } from '~/common/components/icons/phosphor/PhPaintBrush';
|
||||
import { useAttachmentDrafts } from '~/common/attachment-drafts/useAttachmentDrafts';
|
||||
import { useLLMAttachmentDrafts } from './llmattachments/useLLMAttachmentDrafts';
|
||||
|
||||
@@ -69,19 +67,24 @@ import { ButtonAttachScreenCaptureMemo } from './buttons/ButtonAttachScreenCaptu
|
||||
import { ButtonAttachWebMemo } from './buttons/ButtonAttachWeb';
|
||||
import { ButtonBeamMemo } from './buttons/ButtonBeam';
|
||||
import { ButtonCallMemo } from './buttons/ButtonCall';
|
||||
import { ButtonGroupDrawRepeat } from './buttons/ButtonGroupDrawRepeat';
|
||||
import { ButtonMicContinuationMemo } from './buttons/ButtonMicContinuation';
|
||||
import { ButtonMicMemo } from './buttons/ButtonMic';
|
||||
import { ButtonMultiChatMemo } from './buttons/ButtonMultiChat';
|
||||
import { ButtonOptionsDraw } from './buttons/ButtonOptionsDraw';
|
||||
import { ComposerTextAreaActions } from './textarea/ComposerTextAreaActions';
|
||||
import { StatusBar } from '../StatusBar';
|
||||
import { ComposerTextAreaDrawActions } from './textarea/ComposerTextAreaDrawActions';
|
||||
import { StatusBarMemo } from '../StatusBar';
|
||||
import { TokenBadgeMemo } from './tokens/TokenBadge';
|
||||
import { TokenProgressbarMemo } from './tokens/TokenProgressbar';
|
||||
import { useComposerDragDrop } from './useComposerDragDrop';
|
||||
import { useTextTokenCount } from './tokens/useTextTokenCounter';
|
||||
import { useWebInputModal } from './WebInputModal';
|
||||
|
||||
|
||||
// configuration
|
||||
const zIndexComposerOverlayMic = 10;
|
||||
const SHOW_TIPS_AFTER_RELOADS = 25;
|
||||
|
||||
|
||||
const paddingBoxSx: SxProps = {
|
||||
@@ -101,20 +104,24 @@ const minimizedSx: SxProps = {
|
||||
export function Composer(props: {
|
||||
isMobile: boolean;
|
||||
chatLLM: DLLM | null;
|
||||
composerTextAreaRef: React.RefObject<HTMLTextAreaElement>;
|
||||
composerTextAreaRef: React.RefObject<HTMLTextAreaElement | null>;
|
||||
targetConversationId: DConversationId | null;
|
||||
capabilityHasT2I: boolean;
|
||||
capabilityHasT2IEdit: boolean;
|
||||
isMulticast: boolean | null;
|
||||
isDeveloperMode: boolean;
|
||||
onAction: (conversationId: DConversationId, chatExecuteMode: ChatExecuteMode, fragments: (DMessageContentFragment | DMessageAttachmentFragment)[], metadata?: DMessageMetadata) => boolean;
|
||||
onConversationBeamEdit: (conversationId: DConversationId, editMessageId?: DMessageId) => Promise<void>;
|
||||
onConversationsImportFromFiles: (files: File[]) => Promise<void>;
|
||||
onTextImagine: (conversationId: DConversationId, text: string) => void;
|
||||
setIsMulticast: (on: boolean) => void;
|
||||
onComposerHasContent: (hasContent: boolean) => void;
|
||||
sx?: SxProps;
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [composeText, debouncedText, setComposeText] = useDebouncer('', 300, 1200, true);
|
||||
const [composeText, setComposeText] = React.useState('');
|
||||
const [drawRepeat, setDrawRepeat] = React.useState(1);
|
||||
const [micContinuation, setMicContinuation] = React.useState(false);
|
||||
const [speechInterimResult, setSpeechInterimResult] = React.useState<SpeechResult | null>(null);
|
||||
const [sendStarted, setSendStarted] = React.useState(false);
|
||||
@@ -135,12 +142,13 @@ export function Composer(props: {
|
||||
labsShowCost: state.labsShowCost,
|
||||
labsShowShortcutBar: state.labsShowShortcutBar,
|
||||
})));
|
||||
const timeToShowTips = useLogicSherpaStore(state => state.usageCount >= 5);
|
||||
const timeToShowTips = useLogicSherpaStore(state => state.usageCount >= SHOW_TIPS_AFTER_RELOADS);
|
||||
const { novel: explainShiftEnter, touch: touchShiftEnter } = useUICounter('composer-shift-enter');
|
||||
const { novel: explainAltEnter, touch: touchAltEnter } = useUICounter('composer-alt-enter');
|
||||
const { novel: explainCtrlEnter, touch: touchCtrlEnter } = useUICounter('composer-ctrl-enter');
|
||||
const [startupText, setStartupText] = useComposerStartupText();
|
||||
const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline);
|
||||
const composerQuickButton = useUIPreferencesStore(state => state.composerQuickButton);
|
||||
const chatMicTimeoutMs = useChatMicTimeoutMsValue();
|
||||
const { assistantAbortible, systemPurposeId, tokenCount: _historyTokenCount, abortConversationTemp } = useChatStore(useShallow(state => {
|
||||
const conversation = state.conversations.find(_c => _c.id === props.targetConversationId);
|
||||
@@ -170,7 +178,7 @@ export function Composer(props: {
|
||||
const enableLoadURLsInComposer = hasComposerBrowseCapability && !composeText.startsWith('/');
|
||||
|
||||
// user message for attachments
|
||||
const { onConversationsImportFromFiles } = props;
|
||||
const { onConversationBeamEdit, onConversationsImportFromFiles } = props;
|
||||
const handleFilterAGIFile = React.useCallback(async (file: File): Promise<boolean> =>
|
||||
await showPromisedOverlay('composer-open-or-attach', { rejectWithValue: false }, ({ onResolve, onUserReject }) => (
|
||||
<ConfirmationModal
|
||||
@@ -186,11 +194,12 @@ export function Composer(props: {
|
||||
)), [onConversationsImportFromFiles, showPromisedOverlay]);
|
||||
|
||||
// attachments-overlay: comes from the attachments slice of the conversation overlay
|
||||
const showChatAttachments = chatExecuteModeCanAttach(chatExecuteMode, props.capabilityHasT2IEdit);
|
||||
const {
|
||||
/* items */ attachmentDrafts,
|
||||
/* append */ attachAppendClipboardItems, attachAppendDataTransfer, attachAppendEgoFragments, attachAppendFile, attachAppendUrl,
|
||||
/* take */ attachmentsRemoveAll, attachmentsTakeAllFragments, attachmentsTakeFragmentsByType,
|
||||
} = useAttachmentDrafts(conversationOverlayStore, enableLoadURLsInComposer, chatLLMSupportsImages, handleFilterAGIFile);
|
||||
} = useAttachmentDrafts(conversationOverlayStore, enableLoadURLsInComposer, chatLLMSupportsImages, handleFilterAGIFile, showChatAttachments === 'only-images');
|
||||
|
||||
// attachments derived state
|
||||
const llmAttachmentDraftsCollection = useLLMAttachmentDrafts(attachmentDrafts, props.chatLLM, chatLLMSupportsImages);
|
||||
@@ -208,7 +217,8 @@ export function Composer(props: {
|
||||
const isMobile = props.isMobile;
|
||||
const isDesktop = !props.isMobile;
|
||||
const noConversation = !targetConversationId;
|
||||
const showChatAttachments = chatExecuteModeCanAttach(chatExecuteMode);
|
||||
|
||||
const composerTextSuffix = chatExecuteMode === 'generate-image' && isDesktop && drawRepeat > 1 ? ` x${drawRepeat}` : '';
|
||||
|
||||
const micIsRunning = !!speechInterimResult;
|
||||
// more mic way below, as we use complex hooks
|
||||
@@ -216,18 +226,14 @@ export function Composer(props: {
|
||||
|
||||
// tokens derived state
|
||||
|
||||
const tokensComposerTextDebounced = React.useMemo(() => {
|
||||
return (debouncedText && props.chatLLM)
|
||||
? estimateTextTokens(debouncedText, props.chatLLM, 'composer text')
|
||||
: 0;
|
||||
}, [props.chatLLM, debouncedText]);
|
||||
let tokensComposer = tokensComposerTextDebounced + (llmAttachmentDraftsCollection.llmTokenCountApprox || 0);
|
||||
const tokensComposerTextDebounced = useTextTokenCount(composeText, props.chatLLM, 800, 1600);
|
||||
let tokensComposer = (tokensComposerTextDebounced ?? 0) + (llmAttachmentDraftsCollection.llmTokenCountApprox || 0);
|
||||
if (props.chatLLM && tokensComposer > 0)
|
||||
tokensComposer += glueForMessageTokens(props.chatLLM);
|
||||
const tokensHistory = _historyTokenCount;
|
||||
const tokensResponseMax = getModelParameterValueOrThrow('llmResponseTokens', props.chatLLM?.initialParameters, props.chatLLM?.userParameters, 0) ?? 0;
|
||||
const tokenLimit = props.chatLLM?.contextTokens || 0;
|
||||
const tokenChatPricing = props.chatLLM?.pricing?.chat;
|
||||
const tokenLimit = getLLMContextTokens(props.chatLLM) ?? 0;
|
||||
const tokenChatPricing = getLLMPricing(props.chatLLM)?.chat;
|
||||
|
||||
|
||||
// Effect: load initial text if queued up (e.g. by /link/share_targetF)
|
||||
@@ -238,6 +244,13 @@ export function Composer(props: {
|
||||
}
|
||||
}, [setComposeText, setStartupText, startupText]);
|
||||
|
||||
// Effect: notify the parent of presence/absence of content
|
||||
const isContentful = composeText.length > 0 || !!attachmentDrafts.length;
|
||||
const { onComposerHasContent } = props;
|
||||
React.useEffect(() => {
|
||||
onComposerHasContent?.(isContentful);
|
||||
}, [isContentful, onComposerHasContent]);
|
||||
|
||||
|
||||
// Overlay actions
|
||||
|
||||
@@ -298,9 +311,9 @@ export function Composer(props: {
|
||||
// prepare the fragments: content (if any) and attachments (if allowed, and any)
|
||||
const fragments: (DMessageContentFragment | DMessageAttachmentFragment)[] = [];
|
||||
if (composerText)
|
||||
fragments.push(createTextContentFragment(composerText));
|
||||
fragments.push(createTextContentFragment(composerText + composerTextSuffix));
|
||||
|
||||
const canAttach = chatExecuteModeCanAttach(_chatExecuteMode);
|
||||
const canAttach = chatExecuteModeCanAttach(_chatExecuteMode, props.capabilityHasT2IEdit);
|
||||
if (canAttach) {
|
||||
const attachmentFragments = await attachmentsTakeAllFragments('global', 'app-chat');
|
||||
fragments.push(...attachmentFragments);
|
||||
@@ -319,7 +332,7 @@ export function Composer(props: {
|
||||
if (enqueued)
|
||||
_handleClearText();
|
||||
return enqueued;
|
||||
}, [attachmentsTakeAllFragments, confirmProceedIfAttachmentsNotSupported, _handleClearText, inReferenceTo, onAction, targetConversationId]);
|
||||
}, [targetConversationId, confirmProceedIfAttachmentsNotSupported, composerTextSuffix, props.capabilityHasT2IEdit, inReferenceTo, onAction, _handleClearText, attachmentsTakeAllFragments]);
|
||||
|
||||
const handleSendAction = React.useCallback(async (chatExecuteMode: ChatExecuteMode, composerText: string): Promise<boolean> => {
|
||||
setSendStarted(true);
|
||||
@@ -445,8 +458,13 @@ export function Composer(props: {
|
||||
addSnackbar({ key: 'chat-mic-running', message: 'Please wait for the microphone to finish.', type: 'info' });
|
||||
return;
|
||||
}
|
||||
await handleSendAction('beam-content', composeText); // 'beam' button
|
||||
}, [composeText, handleSendAction, micIsRunning]);
|
||||
if (composeText) {
|
||||
await handleSendAction('beam-content', composeText); // 'beam' button
|
||||
} else {
|
||||
if (targetConversationId)
|
||||
void onConversationBeamEdit(targetConversationId); // beam-edit conversation
|
||||
}
|
||||
}, [composeText, handleSendAction, micIsRunning, onConversationBeamEdit, targetConversationId]);
|
||||
|
||||
const handleStopClicked = React.useCallback(() => {
|
||||
targetConversationId && abortConversationTemp(targetConversationId);
|
||||
@@ -493,7 +511,7 @@ export function Composer(props: {
|
||||
const cHandler = ConversationsManager.getHandler(conversationId);
|
||||
const messageToEmbed = cHandler.historyFindMessageOrThrow(messageId);
|
||||
if (messageToEmbed) {
|
||||
const fragmentsCopy = duplicateDMessageFragmentsNoVoid(messageToEmbed.fragments); // [attach] deep copy a message's fragments to attach to ego
|
||||
const fragmentsCopy = duplicateDMessageFragments(messageToEmbed.fragments, true); // [attach] deep copy a message's fragments to attach to ego
|
||||
if (fragmentsCopy.length) {
|
||||
const chatTitle = cHandler.title() ?? '';
|
||||
const messageText = messageFragmentsReduceText(fragmentsCopy);
|
||||
@@ -600,7 +618,7 @@ export function Composer(props: {
|
||||
links.forEach(link => void attachAppendUrl('input-link', link.url));
|
||||
}, [attachAppendUrl]);
|
||||
|
||||
const { openWebInputDialog, webInputDialogComponent } = useWebInputModal(handleAttachWebLinks);
|
||||
const { openWebInputDialog, webInputDialogComponent } = useWebInputModal(handleAttachWebLinks, composeText);
|
||||
|
||||
|
||||
// Attachments Down
|
||||
@@ -630,8 +648,12 @@ export function Composer(props: {
|
||||
const composerShortcuts: ShortcutObject[] = [];
|
||||
if (showChatAttachments) {
|
||||
composerShortcuts.push({ key: 'f', ctrl: true, shift: true, action: () => openFileForAttaching(true, handleAttachFiles), description: 'Attach File' });
|
||||
composerShortcuts.push({ key: 'l', ctrl: true, shift: true, action: openWebInputDialog, description: 'Attach Link' });
|
||||
if (supportsClipboardRead())
|
||||
composerShortcuts.push({ key: 'v', ctrl: true, shift: true, action: attachAppendClipboardItems, description: 'Attach Clipboard' });
|
||||
// Future: keep reactive state here to support Live Screen Capture and more
|
||||
// if (labsAttachScreenCapture && supportsScreenCapture)
|
||||
// composerShortcuts.push({ key: 's', ctrl: true, shift: true, action: openScreenCaptureDialog, description: 'Attach Screen Capture' });
|
||||
}
|
||||
if (recognitionState.isActive) {
|
||||
composerShortcuts.push({ key: 'm', ctrl: true, action: handleFinishMicAndSend, description: 'Mic · Send', disabled: !recognitionState.hasSpeech || sendStarted, endDecoratorIcon: TelegramIcon as any, level: 4 });
|
||||
@@ -650,7 +672,7 @@ export function Composer(props: {
|
||||
}, description: 'Microphone',
|
||||
});
|
||||
return composerShortcuts;
|
||||
}, [attachAppendClipboardItems, handleAttachFiles, handleFinishMicAndSend, recognitionState.hasSpeech, recognitionState.isActive, sendStarted, showChatAttachments, toggleRecognition]));
|
||||
}, [attachAppendClipboardItems, handleAttachFiles, handleFinishMicAndSend, openWebInputDialog, recognitionState.hasSpeech, recognitionState.isActive, sendStarted, showChatAttachments, toggleRecognition]));
|
||||
|
||||
|
||||
// ...
|
||||
@@ -662,7 +684,7 @@ export function Composer(props: {
|
||||
const isDraw = chatExecuteMode === 'generate-image';
|
||||
|
||||
const showChatInReferenceTo = !!inReferenceTo?.length;
|
||||
const showChatExtras = isText && !showChatInReferenceTo;
|
||||
const showChatExtras = isText && !showChatInReferenceTo && !assistantAbortible && composerQuickButton !== 'off';
|
||||
|
||||
const sendButtonVariant: VariantProp = (isAppend || (isMobile && isTextBeam)) ? 'outlined' : 'solid';
|
||||
|
||||
@@ -678,13 +700,15 @@ export function Composer(props: {
|
||||
: isAppend ? <SendIcon sx={{ fontSize: 18 }} />
|
||||
: isReAct ? <PsychologyIcon />
|
||||
: isTextBeam ? <ChatBeamIcon /> /* <GavelIcon /> */
|
||||
: isDraw ? <FormatPaintTwoToneIcon />
|
||||
: isDraw ? <PhPaintBrush />
|
||||
: <TelegramIcon />;
|
||||
|
||||
const beamButtonColor: ColorPaletteProp | undefined =
|
||||
!llmAttachmentDraftsCollection.canAttachAllFragments ? 'warning'
|
||||
: undefined;
|
||||
|
||||
const showTint: ColorPaletteProp | undefined = isDraw ? 'warning' : isReAct ? 'success' : undefined;
|
||||
|
||||
// stable randomization of the /verb, between '/draw', '/react'
|
||||
const placeholderAction = React.useMemo(() => {
|
||||
const actions: string[] = ['/react'];
|
||||
@@ -704,13 +728,13 @@ export function Composer(props: {
|
||||
+ (recognitionState.isAvailable ? ' · ramble' : '')
|
||||
+ '...';
|
||||
|
||||
if (isDesktop && timeToShowTips) {
|
||||
if (isDesktop && timeToShowTips && !isDraw) {
|
||||
if (explainShiftEnter)
|
||||
textPlaceholder += !enterIsNewline ? '\n\n💡 Shift + Enter to add a new line' : '\n\n💡 Shift + Enter to send';
|
||||
else if (explainAltEnter)
|
||||
textPlaceholder += platformAwareKeystrokes('\n\n💡 Tip: Alt + Enter to just append the message');
|
||||
textPlaceholder += !enterIsNewline ? '\n\n⏎ Shift + Enter to add a new line' : '\n\n➤ Shift + Enter to send';
|
||||
// else if (explainAltEnter)
|
||||
// textPlaceholder += platformAwareKeystrokes('\n\n⭳ Tip: Alt + Enter to just append the message');
|
||||
else if (explainCtrlEnter)
|
||||
textPlaceholder += platformAwareKeystrokes('\n\n💡 Tip: Ctrl + Enter to beam');
|
||||
textPlaceholder += platformAwareKeystrokes('\n\n⫷ Tip: Ctrl + Enter to beam');
|
||||
}
|
||||
|
||||
const stableGridSx: SxProps = React.useMemo(() => ({
|
||||
@@ -721,9 +745,14 @@ export function Composer(props: {
|
||||
}), [dragContainerSx]);
|
||||
|
||||
return (
|
||||
<Box aria-label='User Message' component='section' sx={props.sx}>
|
||||
<Box
|
||||
aria-label='New Message'
|
||||
component='section'
|
||||
bgcolor={showTint ? `var(--joy-palette-${showTint}-softBg)` : themeBgAppChatComposer}
|
||||
sx={props.sx}
|
||||
>
|
||||
|
||||
{!isMobile && labsShowShortcutBar && <StatusBar toggleMinimized={handleToggleMinimized} isMinimized={isMinimized} />}
|
||||
{!isMobile && labsShowShortcutBar && <StatusBarMemo toggleMinimized={handleToggleMinimized} isMinimized={isMinimized} />}
|
||||
|
||||
{/* This container is here just to let the potential statusbar fill the whole space, so we moved the padding here and not in the parent */}
|
||||
<Box sx={(!isMinimized || isMobile || !labsShowShortcutBar) ? paddingBoxSx : minimizedSx}>
|
||||
@@ -744,13 +773,16 @@ export function Composer(props: {
|
||||
<Box sx={{ flexGrow: 0, display: 'grid', gap: 1, alignSelf: 'flex-start' }}>
|
||||
|
||||
{/* [mobile] Mic button */}
|
||||
{recognitionState.isAvailable && <ButtonMicMemo variant={micVariant} color={micColor} errorMessage={recognitionState.errorMessage} onClick={handleToggleMic} />}
|
||||
{recognitionState.isAvailable && <ButtonMicMemo variant={micVariant} color={micColor === 'danger' ? 'danger' : showTint || micColor} errorMessage={recognitionState.errorMessage} onClick={handleToggleMic} />}
|
||||
|
||||
{/* Responsive Camera OCR button */}
|
||||
{showChatAttachments && <ButtonAttachCameraMemo isMobile onOpenCamera={openCamera} />}
|
||||
{showChatAttachments && <ButtonAttachCameraMemo color={showTint} isMobile onOpenCamera={openCamera} />}
|
||||
|
||||
{/* [mobile] Attach file button (in draw with image mode) */}
|
||||
{showChatAttachments === 'only-images' && <ButtonAttachFilesMemo color={showTint} isMobile onAttachFiles={handleAttachFiles} fullWidth multiple />}
|
||||
|
||||
{/* [mobile] [+] button */}
|
||||
{showChatAttachments && (
|
||||
{showChatAttachments === true && (
|
||||
<Dropdown>
|
||||
<MenuButton slots={{ root: IconButton }}>
|
||||
<AddCircleOutlineIcon />
|
||||
@@ -791,19 +823,19 @@ export function Composer(props: {
|
||||
{/*</FormHelperText>*/}
|
||||
|
||||
{/* Responsive Open Files button */}
|
||||
<ButtonAttachFilesMemo onAttachFiles={handleAttachFiles} fullWidth multiple />
|
||||
<ButtonAttachFilesMemo color={showTint} onAttachFiles={handleAttachFiles} fullWidth multiple />
|
||||
|
||||
{/* Responsive Web button */}
|
||||
<ButtonAttachWebMemo disabled={!hasComposerBrowseCapability} onOpenWebInput={openWebInputDialog} />
|
||||
{showChatAttachments !== 'only-images' && <ButtonAttachWebMemo color={showTint} disabled={!hasComposerBrowseCapability} onOpenWebInput={openWebInputDialog} />}
|
||||
|
||||
{/* Responsive Paste button */}
|
||||
{supportsClipboardRead() && <ButtonAttachClipboardMemo onAttachClipboard={attachAppendClipboardItems} />}
|
||||
{supportsClipboardRead() && showChatAttachments !== 'only-images' && <ButtonAttachClipboardMemo color={showTint} onAttachClipboard={attachAppendClipboardItems} />}
|
||||
|
||||
{/* Responsive Screen Capture button */}
|
||||
{labsAttachScreenCapture && supportsScreenCapture && <ButtonAttachScreenCaptureMemo onAttachScreenCapture={handleAttachScreenCapture} />}
|
||||
{labsAttachScreenCapture && supportsScreenCapture && <ButtonAttachScreenCaptureMemo color={showTint} onAttachScreenCapture={handleAttachScreenCapture} />}
|
||||
|
||||
{/* Responsive Camera OCR button */}
|
||||
{labsCameraDesktop && <ButtonAttachCameraMemo onOpenCamera={openCamera} />}
|
||||
{labsCameraDesktop && <ButtonAttachCameraMemo color={showTint} onOpenCamera={openCamera} />}
|
||||
|
||||
</Box>)}
|
||||
|
||||
@@ -827,8 +859,8 @@ export function Composer(props: {
|
||||
<Textarea
|
||||
variant='outlined'
|
||||
color={isDraw ? 'warning' : isReAct ? 'success' : undefined}
|
||||
autoFocus
|
||||
minRows={isMobile ? 4 : agiAttachmentPrompts.hasData ? 3 : showChatInReferenceTo ? 4 : 5}
|
||||
autoFocus={isDesktop}
|
||||
minRows={isMobile ? 3.5 : isDraw ? 4 : agiAttachmentPrompts.hasData ? 3 : showChatInReferenceTo ? 4 : 5}
|
||||
maxRows={isMobile ? 8 : 10}
|
||||
placeholder={textPlaceholder}
|
||||
value={composeText}
|
||||
@@ -837,8 +869,12 @@ export function Composer(props: {
|
||||
onPasteCapture={handleAttachCtrlV}
|
||||
// onFocusCapture={handleFocusModeOn}
|
||||
// onBlurCapture={handleFocusModeOff}
|
||||
endDecorator={
|
||||
<ComposerTextAreaActions
|
||||
endDecorator={isDraw
|
||||
? <ComposerTextAreaDrawActions
|
||||
composerText={composeText}
|
||||
onReplaceText={setComposeText}
|
||||
/>
|
||||
: <ComposerTextAreaActions
|
||||
agiAttachmentPrompts={agiAttachmentPrompts}
|
||||
inReferenceTo={inReferenceTo}
|
||||
onAppendAndSend={handleAppendTextAndSend}
|
||||
@@ -847,6 +883,7 @@ export function Composer(props: {
|
||||
}
|
||||
slotProps={{
|
||||
textarea: {
|
||||
tabIndex: !recognitionState.isActive ? undefined : -1,
|
||||
height: '100%',
|
||||
enterKeyHint: enterIsNewline ? 'enter' : 'send',
|
||||
sx: {
|
||||
@@ -858,17 +895,17 @@ export function Composer(props: {
|
||||
}}
|
||||
sx={{
|
||||
height: '100%',
|
||||
backgroundColor: 'background.level1',
|
||||
backgroundColor: showTint ? undefined : 'background.level1',
|
||||
'&:focus-within': { backgroundColor: 'background.popup', '.within-composer-focus': { backgroundColor: 'background.popup' } },
|
||||
lineHeight: lineHeightTextareaMd,
|
||||
}} />
|
||||
|
||||
{!showChatInReferenceTo && tokenLimit > 0 && (tokensComposer > 0 || (tokensHistory + tokensResponseMax) > 0) && (
|
||||
{!showChatInReferenceTo && !isDraw && tokenLimit > 0 && (tokensComposer > 0 || (tokensHistory + tokensResponseMax) > 0) && (
|
||||
<TokenProgressbarMemo chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} />
|
||||
)}
|
||||
|
||||
{!showChatInReferenceTo && tokenLimit > 0 && (
|
||||
<TokenBadgeMemo hideBelowDollars={0.0001} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
|
||||
{!showChatInReferenceTo && !isDraw && tokenLimit > 0 && (
|
||||
<TokenBadgeMemo hideBelowDollars={0.005} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
|
||||
)}
|
||||
|
||||
</Box>
|
||||
@@ -936,7 +973,7 @@ export function Composer(props: {
|
||||
fontStyle: 'italic',
|
||||
},
|
||||
}}>
|
||||
{!!debouncedText && <span className='preceding'>{debouncedText.endsWith(' ') ? debouncedText : debouncedText + ' '}</span>}
|
||||
{!!composeText && <span className='preceding'>{composeText.endsWith(' ') ? composeText : composeText + ' '}</span>}
|
||||
{speechInterimResult.transcript}
|
||||
<span className={speechInterimResult.interimTranscript === PLACEHOLDER_INTERIM_TRANSCRIPT ? 'placeholder' : 'interim'}>{speechInterimResult.interimTranscript}</span>
|
||||
</Typography>
|
||||
@@ -971,7 +1008,9 @@ export function Composer(props: {
|
||||
|
||||
{/* [mobile] bottom-corner secondary button */}
|
||||
{isMobile && (showChatExtras
|
||||
? <ButtonCallMemo isMobile disabled={noConversation || noLLM} onClick={handleCallClicked} />
|
||||
? (composerQuickButton === 'call'
|
||||
? <ButtonCallMemo isMobile disabled={noConversation || noLLM} onClick={handleCallClicked} />
|
||||
: <ButtonBeamMemo isMobile disabled={noConversation /*|| noLLM*/} color={beamButtonColor} hasContent={!!composeText} onClick={handleSendTextBeamClicked} />)
|
||||
: isDraw
|
||||
? <ButtonOptionsDraw isMobile onClick={handleDrawOptionsClicked} sx={{ mr: { xs: 1, md: 2 } }} />
|
||||
: <IconButton disabled sx={{ mr: { xs: 1, md: 2 } }} />
|
||||
@@ -991,7 +1030,7 @@ export function Composer(props: {
|
||||
<Button
|
||||
key='composer-act'
|
||||
fullWidth
|
||||
disabled={noConversation || noLLM}
|
||||
disabled={noConversation /* || noLLM*/}
|
||||
loading={sendStarted}
|
||||
loadingPosition='end'
|
||||
onClick={handleSendClicked}
|
||||
@@ -1022,16 +1061,17 @@ export function Composer(props: {
|
||||
{/*</Tooltip>}*/}
|
||||
|
||||
{/* [Draw] Imagine */}
|
||||
{isDraw && !!composeText && <Tooltip title='Generate an image prompt'>
|
||||
<IconButton variant='outlined' disabled={noConversation || noLLM} onClick={handleTextImagineClicked}>
|
||||
<AutoAwesomeIcon />
|
||||
</IconButton>
|
||||
</Tooltip>}
|
||||
{/* NOTE: disabled: as we have prompt enhancement in the TextArea (Draw Mode) already */}
|
||||
{/*{isDraw && !!composeText && <Tooltip title='Generate an image prompt'>*/}
|
||||
{/* <IconButton variant='outlined' disabled={noConversation || noLLM} onClick={handleTextImagineClicked}>*/}
|
||||
{/* <AutoAwesomeIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/*</Tooltip>}*/}
|
||||
|
||||
{/* Mode expander */}
|
||||
<IconButton
|
||||
variant={assistantAbortible ? 'soft' : isDraw ? undefined : undefined}
|
||||
disabled={noConversation || noLLM || chatExecuteMenuShown}
|
||||
variant={chatExecuteMenuShown ? 'outlined' : assistantAbortible ? 'soft' : isDraw ? undefined : undefined}
|
||||
disabled={noConversation /*|| chatExecuteMenuShown*/}
|
||||
onClick={showChatExecuteMenu}
|
||||
>
|
||||
<ExpandLessIcon />
|
||||
@@ -1042,7 +1082,7 @@ export function Composer(props: {
|
||||
{isDesktop && showChatExtras && !assistantAbortible && (
|
||||
<ButtonBeamMemo
|
||||
color={beamButtonColor}
|
||||
disabled={noConversation || noLLM}
|
||||
disabled={noConversation /*|| noLLM*/}
|
||||
hasContent={!!composeText}
|
||||
onClick={handleSendTextBeamClicked}
|
||||
/>
|
||||
@@ -1050,6 +1090,9 @@ export function Composer(props: {
|
||||
|
||||
</Box>
|
||||
|
||||
{/* [desktop] Draw mode N buttons */}
|
||||
{isDesktop && isDraw && <ButtonGroupDrawRepeat drawRepeat={drawRepeat} setDrawRepeat={setDrawRepeat} />}
|
||||
|
||||
{/* [desktop] Multicast switch (under the Chat button) */}
|
||||
{isDesktop && props.isMulticast !== null && <ButtonMultiChatMemo multiChat={props.isMulticast} onSetMultiChat={props.setIsMulticast} />}
|
||||
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import * as React from 'react';
|
||||
import { Controller, useFieldArray, useForm } from 'react-hook-form';
|
||||
|
||||
import { Box, Button, FormControl, FormHelperText, IconButton, Input, Stack, Typography } from '@mui/joy';
|
||||
import { Box, Button, Chip, FormControl, FormHelperText, IconButton, Input, Stack, Typography } from '@mui/joy';
|
||||
import AddCircleOutlineRoundedIcon from '@mui/icons-material/AddCircleOutlineRounded';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import BrowserUpdatedOutlinedIcon from '@mui/icons-material/BrowserUpdatedOutlined';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import LanguageRoundedIcon from '@mui/icons-material/LanguageRounded';
|
||||
import YouTubeIcon from '@mui/icons-material/YouTube';
|
||||
@@ -11,7 +13,7 @@ import { extractYoutubeVideoIDFromURL } from '~/modules/youtube/youtube.utils';
|
||||
|
||||
import { GoodModal } from '~/common/components/modals/GoodModal';
|
||||
import { addSnackbar } from '~/common/components/snackbar/useSnackbarsStore';
|
||||
import { asValidURL } from '~/common/util/urlUtils';
|
||||
import { asValidURL, extractUrlsFromText } from '~/common/util/urlUtils';
|
||||
|
||||
|
||||
// configuration
|
||||
@@ -26,8 +28,25 @@ type WebInputModalInputs = {
|
||||
links: WebInputData[];
|
||||
}
|
||||
|
||||
const _styles = {
|
||||
|
||||
ytIcon: {
|
||||
color: 'red',
|
||||
} as const,
|
||||
|
||||
chipLink: {
|
||||
ml: 'auto',
|
||||
pr: 1.125,
|
||||
// '--Chip-radius': '4px',
|
||||
// whiteSpace: 'break-spaces',
|
||||
// gap: 1.5,
|
||||
} as const,
|
||||
|
||||
} as const;
|
||||
|
||||
|
||||
function WebInputModal(props: {
|
||||
composerText?: string,
|
||||
onClose: () => void,
|
||||
onWebLinks: (urls: WebInputData[]) => void,
|
||||
}) {
|
||||
@@ -35,13 +54,31 @@ function WebInputModal(props: {
|
||||
// state
|
||||
const { control: formControl, handleSubmit: formHandleSubmit, formState: { isValid: formIsValid, isDirty: formIsDirty } } = useForm<WebInputModalInputs>({
|
||||
values: { links: [{ url: '' }] },
|
||||
// mode: 'onChange', // validate on change
|
||||
mode: 'onChange', // validate on change
|
||||
});
|
||||
const { fields: formFields, append: formFieldsAppend, remove: formFieldsRemove } = useFieldArray({ control: formControl, name: 'links' });
|
||||
const { fields: formFields, append: formFieldsAppend, remove: formFieldsRemove, update: formFieldsUpdate } = useFieldArray({ control: formControl, name: 'links' });
|
||||
const firstInputRef = React.useRef<HTMLInputElement>(null);
|
||||
|
||||
// derived
|
||||
const urlFieldCount = formFields.length;
|
||||
const canAddMoreUrls = urlFieldCount < MAX_URLS;
|
||||
|
||||
// [effect] auto-focus first input
|
||||
React.useEffect(() => {
|
||||
setTimeout(() => {
|
||||
if (firstInputRef.current)
|
||||
firstInputRef.current.focus();
|
||||
}, 0);
|
||||
}, []);
|
||||
|
||||
|
||||
// memos
|
||||
|
||||
const extractedComposerUrls = React.useMemo(() => {
|
||||
return !props.composerText ? null : extractUrlsFromText(props.composerText);
|
||||
}, [props.composerText]);
|
||||
|
||||
const extractedUrlsCount = extractedComposerUrls?.length ?? 0;
|
||||
|
||||
// handlers
|
||||
|
||||
@@ -70,6 +107,46 @@ function WebInputModal(props: {
|
||||
}, [handleClose, onWebLinks]);
|
||||
|
||||
|
||||
// const handleAddUrl = React.useCallback((newUrl: string) => {
|
||||
// // bail if can't add
|
||||
// if (!canAddMoreUrls)
|
||||
// return addSnackbar({ key: 'max-urls', message: `Maximum ${MAX_URLS} URLs allowed`, type: 'precondition-fail' });
|
||||
//
|
||||
// // bail if already in
|
||||
// const exists = formFields.some(({ url }) => url === newUrl);
|
||||
// if (exists)
|
||||
// return addSnackbar({ key: 'duplicate-url', message: 'URL already added', type: 'info' });
|
||||
//
|
||||
// // replace the first empty field, or append
|
||||
// const emptyFieldIndex = formFields.findIndex(field => !field.url.trim());
|
||||
// if (emptyFieldIndex >= 0)
|
||||
// formFieldsUpdate(emptyFieldIndex, { url: newUrl });
|
||||
// else
|
||||
// formFieldsAppend({ url: newUrl });
|
||||
// }, [canAddMoreUrls, formFields, formFieldsAppend, formFieldsUpdate]);
|
||||
|
||||
|
||||
const handleAddAllUrls = React.useCallback(() => {
|
||||
if (!extractedComposerUrls) return;
|
||||
|
||||
// new URLs that are not already in the form
|
||||
const newURLs = extractedComposerUrls.filter(url => !formFields.some(field => field.url.trim() === url));
|
||||
if (!newURLs.length) return;
|
||||
|
||||
// find empty fields first
|
||||
for (let i = 0; i < formFields.length; i++) {
|
||||
const field = formFields[i];
|
||||
if (!field.url.trim()) {
|
||||
formFieldsUpdate(i, { url: newURLs.shift()! });
|
||||
if (!newURLs.length) break;
|
||||
}
|
||||
}
|
||||
|
||||
// append remaining
|
||||
newURLs.forEach(url => formFieldsAppend({ url }));
|
||||
}, [extractedComposerUrls, formFields, formFieldsAppend, formFieldsUpdate]);
|
||||
|
||||
|
||||
return (
|
||||
<GoodModal
|
||||
open
|
||||
@@ -89,6 +166,26 @@ function WebInputModal(props: {
|
||||
{/*You can add up to {MAX_URLS} URLs.*/}
|
||||
</Typography>
|
||||
|
||||
|
||||
{/* Modified URLs section */}
|
||||
{!!extractedUrlsCount && (
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography level='title-sm' startDecorator={<BrowserUpdatedOutlinedIcon />}>
|
||||
{extractedUrlsCount} URL{extractedUrlsCount > 1 ? 's' : ''} in your message
|
||||
{/*{extractedUrlsCount} URL{extractedUrlsCount > 1 ? 's' : ''} found in your message*/}
|
||||
</Typography>
|
||||
<Chip
|
||||
variant='soft'
|
||||
onClick={handleAddAllUrls}
|
||||
startDecorator={<AddCircleOutlineRoundedIcon />}
|
||||
sx={_styles.chipLink}
|
||||
>
|
||||
Add
|
||||
</Chip>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
|
||||
<form onSubmit={formHandleSubmit(handleSubmit)}>
|
||||
<Stack spacing={1}>
|
||||
{formFields.map((field, index) => (
|
||||
@@ -101,12 +198,16 @@ function WebInputModal(props: {
|
||||
<FormControl error={!!error}>
|
||||
<Box sx={{ display: 'flex', gap: 1 }}>
|
||||
<Input
|
||||
autoFocus={index === 0}
|
||||
required={index === 0}
|
||||
placeholder='https://...'
|
||||
endDecorator={extractYoutubeVideoIDFromURL(value) ? <YouTubeIcon sx={{ color: 'red' }} /> : undefined}
|
||||
endDecorator={extractYoutubeVideoIDFromURL(value) ? <YouTubeIcon sx={_styles.ytIcon} /> : undefined}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
slotProps={index !== 0 ? undefined : {
|
||||
input: {
|
||||
ref: firstInputRef,
|
||||
},
|
||||
}}
|
||||
sx={{ flex: 1 }}
|
||||
/>
|
||||
{urlFieldCount > 1 && (
|
||||
@@ -133,7 +234,7 @@ function WebInputModal(props: {
|
||||
{formIsDirty && <Button
|
||||
color='neutral'
|
||||
variant='soft'
|
||||
disabled={urlFieldCount >= MAX_URLS}
|
||||
disabled={!canAddMoreUrls}
|
||||
onClick={() => formFieldsAppend({ url: '' })}
|
||||
startDecorator={<AddIcon />}
|
||||
>
|
||||
@@ -147,7 +248,7 @@ function WebInputModal(props: {
|
||||
disabled={!formIsValid || !formIsDirty}
|
||||
sx={{ minWidth: 160, ml: 'auto' }}
|
||||
>
|
||||
Add {urlFieldCount > 1 ? `(${urlFieldCount})` : ''}
|
||||
Import {urlFieldCount > 1 ? `(${urlFieldCount})` : ''}
|
||||
</Button>
|
||||
|
||||
</Box>
|
||||
@@ -158,15 +259,20 @@ function WebInputModal(props: {
|
||||
}
|
||||
|
||||
|
||||
export function useWebInputModal(onAttachWebLinks: (urls: WebInputData[]) => void) {
|
||||
export function useWebInputModal(onAttachWebLinks: (urls: WebInputData[]) => void, composerText?: string) {
|
||||
|
||||
// state
|
||||
const [open, setOpen] = React.useState(false);
|
||||
const composerTextRef = React.useRef(composerText);
|
||||
|
||||
// copy the text to a ref, constantly - we just care about a recent snapshot, but don't want to invalidate hooks
|
||||
composerTextRef.current = composerText;
|
||||
|
||||
const openWebInputDialog = React.useCallback(() => setOpen(true), []);
|
||||
|
||||
const webInputDialogComponent = React.useMemo(() => open && (
|
||||
<WebInputModal
|
||||
composerText={composerTextRef.current}
|
||||
onClose={() => setOpen(false)}
|
||||
onWebLinks={onAttachWebLinks}
|
||||
/>
|
||||
|
||||
@@ -38,6 +38,7 @@ export function ActilePopup(props: {
|
||||
maxHeightGapPx={320}
|
||||
minWidth={320}
|
||||
noBottomPadding
|
||||
noAutoFocus={true /* we control keyboard navigation */}
|
||||
noTopPadding
|
||||
>
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import type { ActileItem, ActileProvider } from './ActileProvider';
|
||||
import { ActilePopup } from './ActilePopup';
|
||||
|
||||
|
||||
export const useActileManager = (providers: ActileProvider[], anchorRef: React.RefObject<HTMLElement>) => {
|
||||
export const useActileManager = (providers: ActileProvider[], anchorRef: React.RefObject<HTMLElement | null>) => {
|
||||
|
||||
// state
|
||||
const [popupOpen, setPopupOpen] = React.useState(false);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import AddAPhotoIcon from '@mui/icons-material/AddAPhoto';
|
||||
import CameraAltOutlinedIcon from '@mui/icons-material/CameraAltOutlined';
|
||||
|
||||
@@ -12,6 +12,7 @@ import { CameraCaptureModal } from '../CameraCaptureModal';
|
||||
export const ButtonAttachCameraMemo = React.memo(ButtonAttachCamera);
|
||||
|
||||
function ButtonAttachCamera(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
@@ -19,7 +20,7 @@ function ButtonAttachCamera(props: {
|
||||
onOpenCamera: () => void,
|
||||
}) {
|
||||
return props.isMobile ? (
|
||||
<IconButton disabled={props.disabled} onClick={props.onOpenCamera}>
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={props.onOpenCamera}>
|
||||
<AddAPhotoIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
@@ -30,8 +31,8 @@ function ButtonAttachCamera(props: {
|
||||
</Box>
|
||||
)}>
|
||||
<Button
|
||||
variant='plain'
|
||||
color='neutral'
|
||||
variant={props.color ? 'soft' : 'plain'}
|
||||
color={props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
startDecorator={<CameraAltOutlinedIcon />}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import ContentPasteGoIcon from '@mui/icons-material/ContentPasteGo';
|
||||
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
@@ -10,6 +10,7 @@ import { buttonAttachSx } from '~/common/components/ButtonAttachFiles';
|
||||
export const ButtonAttachClipboardMemo = React.memo(ButtonAttachClipboard);
|
||||
|
||||
function ButtonAttachClipboard(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
@@ -17,7 +18,7 @@ function ButtonAttachClipboard(props: {
|
||||
onAttachClipboard: () => void,
|
||||
}) {
|
||||
return props.isMobile ? (
|
||||
<IconButton disabled={props.disabled} onClick={props.onAttachClipboard}>
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={props.onAttachClipboard}>
|
||||
<ContentPasteGoIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
@@ -29,8 +30,8 @@ function ButtonAttachClipboard(props: {
|
||||
</Box>
|
||||
)}>
|
||||
<Button
|
||||
variant='plain'
|
||||
color='neutral'
|
||||
variant={props.color ? 'soft' : 'plain'}
|
||||
color={props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
startDecorator={<ContentPasteGoIcon />}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import AddRoundedIcon from '@mui/icons-material/AddRounded';
|
||||
|
||||
import { buttonAttachSx } from '~/common/components/ButtonAttachFiles';
|
||||
@@ -9,6 +9,7 @@ import { buttonAttachSx } from '~/common/components/ButtonAttachFiles';
|
||||
export const ButtonAttachNewMemo = React.memo(ButtonAttachNew);
|
||||
|
||||
function ButtonAttachNew(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
@@ -16,7 +17,7 @@ function ButtonAttachNew(props: {
|
||||
onAttachNew: () => void,
|
||||
}) {
|
||||
return props.isMobile ? (
|
||||
<IconButton disabled={props.disabled} onClick={props.onAttachNew}>
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={props.onAttachNew}>
|
||||
<AddRoundedIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
@@ -29,15 +30,15 @@ function ButtonAttachNew(props: {
|
||||
</Box>
|
||||
)}>
|
||||
<Button
|
||||
variant='plain'
|
||||
color='neutral'
|
||||
variant={props.color ? 'soft' : 'plain'}
|
||||
color={props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
startDecorator={<AddRoundedIcon />}
|
||||
onClick={props.onAttachNew}
|
||||
sx={buttonAttachSx.desktop}
|
||||
>
|
||||
New
|
||||
Note
|
||||
</Button>
|
||||
</Tooltip>
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import ScreenshotMonitorIcon from '@mui/icons-material/ScreenshotMonitor';
|
||||
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
@@ -11,6 +11,7 @@ import { takeScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
export const ButtonAttachScreenCaptureMemo = React.memo(ButtonAttachScreenCapture);
|
||||
|
||||
function ButtonAttachScreenCapture(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
@@ -41,7 +42,7 @@ function ButtonAttachScreenCapture(props: {
|
||||
|
||||
|
||||
return props.isMobile ? (
|
||||
<IconButton disabled={props.disabled} onClick={handleTakeScreenCapture}>
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={handleTakeScreenCapture}>
|
||||
<ScreenshotMonitorIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
@@ -55,8 +56,8 @@ function ButtonAttachScreenCapture(props: {
|
||||
</Box>
|
||||
)}>
|
||||
<Button
|
||||
variant={capturing ? 'solid' : 'plain'}
|
||||
color={!!error ? 'danger' : 'neutral'}
|
||||
variant={capturing ? 'solid' : props.color ? 'soft' : 'plain'}
|
||||
color={!!error ? 'danger' : props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
loading={capturing}
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, Button, ColorPaletteProp, IconButton, Tooltip } from '@mui/joy';
|
||||
import LanguageRoundedIcon from '@mui/icons-material/LanguageRounded';
|
||||
|
||||
import { buttonAttachSx } from '~/common/components/ButtonAttachFiles';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
|
||||
|
||||
export const ButtonAttachWebMemo = React.memo(ButtonAttachWeb);
|
||||
|
||||
function ButtonAttachWeb(props: {
|
||||
color?: ColorPaletteProp,
|
||||
isMobile?: boolean,
|
||||
disabled?: boolean,
|
||||
fullWidth?: boolean,
|
||||
@@ -17,13 +19,13 @@ function ButtonAttachWeb(props: {
|
||||
}) {
|
||||
|
||||
const button = props.isMobile ? (
|
||||
<IconButton disabled={props.disabled} onClick={props.onOpenWebInput}>
|
||||
<IconButton color={props.color} disabled={props.disabled} onClick={props.onOpenWebInput}>
|
||||
<LanguageRoundedIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
<Button
|
||||
variant='plain'
|
||||
color='neutral'
|
||||
variant={props.color ? 'soft' : 'plain'}
|
||||
color={props.color || 'neutral'}
|
||||
disabled={props.disabled}
|
||||
fullWidth={props.fullWidth}
|
||||
startDecorator={<LanguageRoundedIcon />}
|
||||
@@ -35,12 +37,13 @@ function ButtonAttachWeb(props: {
|
||||
);
|
||||
|
||||
return (props.noToolTip || props.isMobile) ? button : (
|
||||
<Tooltip arrow disableInteractive placement='top-start' title={(
|
||||
<Tooltip arrow disableInteractive placement='top-start' title={
|
||||
<Box sx={buttonAttachSx.tooltip}>
|
||||
<b>Add Web Content 🌐</b><br />
|
||||
Import from websites and YouTube
|
||||
<KeyStroke combo='Ctrl + Shift + L' sx={{ mt: 1, mb: 0.5 }} />
|
||||
</Box>
|
||||
)}>
|
||||
}>
|
||||
{button}
|
||||
</Tooltip>
|
||||
);
|
||||
|
||||
@@ -43,7 +43,7 @@ function ButtonBeam(props: {
|
||||
onClick: () => void,
|
||||
}) {
|
||||
return props.isMobile ? (
|
||||
<IconButton variant='soft' color={props.color ?? 'primary'} disabled={props.disabled} onClick={props.onClick} sx={mobileSx}>
|
||||
<IconButton variant='outlined' color={props.color ?? 'primary'} disabled={props.disabled} onClick={props.onClick} sx={mobileSx}>
|
||||
<ChatBeamIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, FormControl, IconButton } from '@mui/joy';
|
||||
|
||||
|
||||
const _styles = {
|
||||
control: {
|
||||
gap: 1,
|
||||
mt: 1,
|
||||
} as const,
|
||||
|
||||
buttonGroup: {
|
||||
display: 'flex',
|
||||
justifyContent: 'space-evenly',
|
||||
// overflowX: 'hidden',
|
||||
flexWrap: 'wrap',
|
||||
minWidth: '131px',
|
||||
} as const,
|
||||
|
||||
buttonActive: {
|
||||
'--IconButton-size': { xs: '1.75rem', lg: '2rem' },
|
||||
} as const,
|
||||
|
||||
button: {
|
||||
'--IconButton-size': { xs: '1.75rem', lg: '2rem' },
|
||||
border: '1px solid',
|
||||
borderColor: 'warning.outlinedBorder',
|
||||
backgroundColor: 'background.popup',
|
||||
// boxShadow: drawRepeat === n ? '0px 2px 8px 0px rgb(var(--joy-palette-warning-mainChannel) / 40%)' : 'none',
|
||||
// fontWeight: drawRepeat === n ? 'xl' : 400, /* reset, from 600 */
|
||||
transition: 'transform 0.14s, box-shadow 0.14s',
|
||||
'&:hover': {
|
||||
transform: 'translateY(-1px)',
|
||||
// backgroundColor: drawRepeat === n ? 'background.popup' : 'background.surface',
|
||||
// boxShadow: '0 0 8px 1px rgb(var(--joy-palette-warning-mainChannel) / 40%)',
|
||||
} as const,
|
||||
} as const,
|
||||
|
||||
text: {
|
||||
mx: 'auto',
|
||||
fontSize: 'xs',
|
||||
opacity: '0.5',
|
||||
} as const,
|
||||
} as const;
|
||||
|
||||
|
||||
export function ButtonGroupDrawRepeat(props: {
|
||||
drawRepeat: number,
|
||||
setDrawRepeat: (n: number) => void,
|
||||
}) {
|
||||
|
||||
const { drawRepeat, setDrawRepeat } = props;
|
||||
|
||||
return (
|
||||
<FormControl sx={_styles.control}>
|
||||
<Box sx={_styles.buttonGroup}>
|
||||
{[1, 2, 4, 5, 10].map((n) => (
|
||||
<IconButton
|
||||
key={n}
|
||||
size='sm'
|
||||
color='warning'
|
||||
variant={drawRepeat === n ? 'solid' : 'soft'}
|
||||
onClick={() => setDrawRepeat(n)}
|
||||
sx={drawRepeat === n ? _styles.buttonActive : _styles.button}
|
||||
>
|
||||
{n}
|
||||
</IconButton>
|
||||
))}
|
||||
</Box>
|
||||
<Box sx={_styles.text}>
|
||||
{drawRepeat > 1
|
||||
? `Create ${drawRepeat} Images`
|
||||
: 'Number of Images'}
|
||||
</Box>
|
||||
</FormControl>
|
||||
);
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import MicIcon from '@mui/icons-material/Mic';
|
||||
import { ExternalDocsLink } from '~/common/components/ExternalDocsLink';
|
||||
import { GoodTooltip } from '~/common/components/GoodTooltip';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useDontBlurTextarea } from '~/common/components/useDontBlurTextarea';
|
||||
|
||||
|
||||
const micLegend = (errorMessage: string | null) =>
|
||||
@@ -35,12 +36,7 @@ function ButtonMic(props: {
|
||||
}) {
|
||||
|
||||
// Mobile: don't blur the textarea when clicking the mic button
|
||||
const handleDontBlurTextArea = React.useCallback((event: React.MouseEvent) => {
|
||||
const isTextAreaFocused = document.activeElement?.tagName === 'TEXTAREA';
|
||||
// If a textarea is focused, prevent the default blur behavior
|
||||
if (isTextAreaFocused)
|
||||
event.preventDefault();
|
||||
}, []);
|
||||
const handleDontBlurTextArea = useDontBlurTextarea();
|
||||
|
||||
return (
|
||||
<GoodTooltip placement='top' arrow enableInteractive title={micLegend(props.errorMessage)}>
|
||||
|
||||
@@ -16,7 +16,7 @@ export function ButtonMultiChat(props: { isMobile?: boolean, multiChat: boolean,
|
||||
color={multiChat ? 'warning' : undefined}
|
||||
onClick={() => props.onSetMultiChat(!multiChat)}
|
||||
>
|
||||
{multiChat ? <ChatMulticastOnIcon /> : <ChatMulticastOffIcon />}
|
||||
{multiChat ? <ChatMulticastOnIcon /> : <ChatMulticastOnIcon />}
|
||||
</IconButton>
|
||||
) : (
|
||||
<FormControl orientation='horizontal' sx={{ minHeight: '2.25rem', justifyContent: 'space-between' }}>
|
||||
|
||||
@@ -4,6 +4,8 @@ import { Button, IconButton } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
import FormatPaintTwoToneIcon from '@mui/icons-material/FormatPaintTwoTone';
|
||||
|
||||
import { PhSlidersHorizontalIcon } from '~/common/components/icons/phosphor/PhSlidersHorizontalIcon';
|
||||
|
||||
|
||||
export function ButtonOptionsDraw(props: { isMobile?: boolean, onClick: () => void, sx?: SxProps }) {
|
||||
return props.isMobile ? (
|
||||
@@ -11,8 +13,8 @@ export function ButtonOptionsDraw(props: { isMobile?: boolean, onClick: () => vo
|
||||
<FormatPaintTwoToneIcon />
|
||||
</IconButton>
|
||||
) : (
|
||||
<Button variant='soft' color='warning' onClick={props.onClick} sx={props.sx}>
|
||||
Options
|
||||
<Button variant='soft' color='warning' onClick={props.onClick} sx={props.sx} endDecorator={<PhSlidersHorizontalIcon />}>
|
||||
Image Settings
|
||||
</Button>
|
||||
);
|
||||
}
|
||||
@@ -22,7 +22,7 @@ import { RenderImageRefDBlob } from '~/modules/blocks/image/RenderImageRefDBlob'
|
||||
import { RenderImageURL } from '~/modules/blocks/image/RenderImageURL';
|
||||
|
||||
import type { AttachmentDraft, AttachmentDraftConverterType, AttachmentDraftId } from '~/common/attachment-drafts/attachment.types';
|
||||
import { DMessageDataRef, DMessageImageRefPart, isImageRefPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { DMessageDataRef, DMessageImageRefPart, isImageRefPart, isZyncAssetImageReferencePartWithLegacyDBlob } from '~/common/stores/chat/chat.fragments';
|
||||
import { LiveFileIcon } from '~/common/livefile/liveFile.icons';
|
||||
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
|
||||
import { ellipsizeFront, ellipsizeMiddle } from '~/common/util/textUtils';
|
||||
@@ -98,6 +98,7 @@ const converterTypeToIconMap: { [key in AttachmentDraftConverterType]: React.Com
|
||||
'image-resized-high': PhotoSizeSelectLargeOutlinedIcon,
|
||||
'image-resized-low': PhotoSizeSelectSmallOutlinedIcon,
|
||||
'image-to-default': ImageOutlinedIcon,
|
||||
'image-caption': AbcIcon,
|
||||
'image-ocr': AbcIcon,
|
||||
'pdf-text': PictureAsPdfIcon,
|
||||
'pdf-images': PermMediaOutlinedIcon,
|
||||
@@ -115,8 +116,8 @@ const converterTypeToIconMap: { [key in AttachmentDraftConverterType]: React.Com
|
||||
};
|
||||
|
||||
function attachmentIcons(attachmentDraft: AttachmentDraft, noTooltips: boolean, onViewImageRefPart: (imageRefPart: DMessageImageRefPart) => void) {
|
||||
const activeConterters = attachmentDraft.converters.filter(c => c.isActive);
|
||||
if (activeConterters.length === 0)
|
||||
const activeConverters = attachmentDraft.converters.filter(c => c.isActive);
|
||||
if (activeConverters.length === 0)
|
||||
return null;
|
||||
|
||||
// Alternate icon for the Web Page Screenshot
|
||||
@@ -127,15 +128,21 @@ function attachmentIcons(attachmentDraft: AttachmentDraft, noTooltips: boolean,
|
||||
let outputSingleImageRefDBlobs: Extract<DMessageDataRef, { reftype: 'dblob' }>[] = [];
|
||||
if (!urlImageData && attachmentDraft.outputFragments.length === 1) {
|
||||
const fragment = attachmentDraft.outputFragments[0];
|
||||
if (isImageRefPart(fragment.part) && fragment.part.dataRef && fragment.part.dataRef.reftype === 'dblob')
|
||||
if (isZyncAssetImageReferencePartWithLegacyDBlob(fragment.part))
|
||||
outputSingleImageRefDBlobs = [fragment.part._legacyImageRefPart!.dataRef];
|
||||
else if (isImageRefPart(fragment.part) && fragment.part.dataRef && fragment.part.dataRef.reftype === 'dblob')
|
||||
outputSingleImageRefDBlobs = [fragment.part.dataRef];
|
||||
}
|
||||
|
||||
const handleViewFirstImage = (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
if (attachmentDraft.outputFragments[0] && isImageRefPart(attachmentDraft.outputFragments[0].part))
|
||||
onViewImageRefPart(attachmentDraft.outputFragments[0].part);
|
||||
const fragment = attachmentDraft.outputFragments[0];
|
||||
if (!fragment) return;
|
||||
if (isZyncAssetImageReferencePartWithLegacyDBlob(fragment.part))
|
||||
onViewImageRefPart(fragment.part._legacyImageRefPart!);
|
||||
else if (isImageRefPart(fragment.part))
|
||||
onViewImageRefPart(fragment.part);
|
||||
};
|
||||
|
||||
// Whether to render the converters
|
||||
@@ -162,12 +169,13 @@ function attachmentIcons(attachmentDraft: AttachmentDraft, noTooltips: boolean,
|
||||
)}
|
||||
|
||||
{/* Render DBlob referred images in place of converter icons */}
|
||||
{outputSingleImageRefDBlobs.map((dataRef, i) => dataRef && (
|
||||
{outputSingleImageRefDBlobs.map((dataRef, _i) => dataRef && (
|
||||
<TooltipOutlined key={`image-${dataRef.dblobAssetId}`} title={noTooltips ? null : <>View converted image{/* <br/>{dataRef?.bytesSize?.toLocaleString()} bytes */}</>} placement='top-start'>
|
||||
<div>
|
||||
<RenderImageRefDBlob
|
||||
dataRefDBlobAssetId={dataRef.dblobAssetId}
|
||||
dataRefMimeType={dataRef.mimeType}
|
||||
dataRefBytesSize={dataRef.bytesSize}
|
||||
variant='attachment-button'
|
||||
scaledImageSx={attachmentIconSx}
|
||||
onClick={handleViewFirstImage}
|
||||
@@ -176,8 +184,8 @@ function attachmentIcons(attachmentDraft: AttachmentDraft, noTooltips: boolean,
|
||||
</TooltipOutlined>
|
||||
))}
|
||||
|
||||
{/*{activeConterters.some(c => c.id.startsWith('url-page-')) ? <LanguageIcon sx={{ opacity: 0.2, ml: -2.5 }} /> : null}*/}
|
||||
{renderConverterIcons && activeConterters.map((_converter, idx) => {
|
||||
{/*{activeConverters.some(c => c.id.startsWith('url-page-')) ? <LanguageIcon sx={{ opacity: 0.2, ml: -2.5 }} /> : null}*/}
|
||||
{renderConverterIcons && activeConverters.map((_converter, idx) => {
|
||||
const Icon = converterTypeToIconMap[_converter.id] ?? null;
|
||||
return !Icon ? null : (
|
||||
<TooltipOutlined key={`${_converter.id}-${idx}`} title={noTooltips ? null : `Attached as ${_converter.name}`} placement='top-start'>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Checkbox, Chip, CircularProgress, LinearProgress, Link, ListDivider, ListItem, ListItemDecorator, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
import { Box, Checkbox, Chip, CircularProgress, LinearProgress, ListDivider, ListItem, ListItemDecorator, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
import AttachmentIcon from '@mui/icons-material/Attachment';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
|
||||
@@ -10,17 +10,16 @@ import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
|
||||
import KeyboardArrowLeftIcon from '@mui/icons-material/KeyboardArrowLeft';
|
||||
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
|
||||
import LaunchIcon from '@mui/icons-material/Launch';
|
||||
import ReadMoreIcon from '@mui/icons-material/ReadMore';
|
||||
import VerticalAlignBottomIcon from '@mui/icons-material/VerticalAlignBottom';
|
||||
import VisibilityIcon from '@mui/icons-material/Visibility';
|
||||
|
||||
import { CloseablePopup } from '~/common/components/CloseablePopup';
|
||||
import { DMessageAttachmentFragment, DMessageDocPart, DMessageImageRefPart, isDocPart, isImageRefPart } from '~/common/stores/chat/chat.fragments';
|
||||
import { DMessageAttachmentFragment, DMessageDocPart, DMessageImageRefPart, isDocPart, isImageRefPart, isZyncAssetImageReferencePartWithLegacyDBlob } from '~/common/stores/chat/chat.fragments';
|
||||
import { LiveFileIcon } from '~/common/livefile/liveFile.icons';
|
||||
import { copyToClipboard } from '~/common/util/clipboardUtils';
|
||||
import { showImageDataURLInNewTab } from '~/common/util/imageUtils';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
import { themeZIndexOverMobileDrawer } from '~/common/app.theme';
|
||||
import { useUIPreferencesStore } from '~/common/stores/store-ui';
|
||||
|
||||
import type { AttachmentDraftId } from '~/common/attachment-drafts/attachment.types';
|
||||
import type { AttachmentDraftsStoreApi } from '~/common/attachment-drafts/store-attachment-drafts_slice';
|
||||
@@ -158,6 +157,7 @@ export function LLMAttachmentMenu(props: {
|
||||
minWidth={260}
|
||||
noTopPadding
|
||||
placement='top'
|
||||
zIndex={themeZIndexOverMobileDrawer /* was not set, but the Attachment Menu can be used from the Personas Modal */}
|
||||
>
|
||||
|
||||
{/* Move Arrows */}
|
||||
@@ -311,13 +311,23 @@ export function LLMAttachmentMenu(props: {
|
||||
<Typography level='body-sm' sx={indicatorGapSx}>
|
||||
{draftInput.urlImage.mimeType} · {draftInput.urlImage.width} x {draftInput.urlImage.height} · {draftInput.urlImage.imgDataUrl?.length.toLocaleString()}
|
||||
{' · '}
|
||||
<Link onClick={(event) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
showImageDataURLInNewTab(draftInput?.urlImage?.imgDataUrl || '');
|
||||
<Chip component='span' size='sm' color='primary' variant='outlined' startDecorator={<VisibilityIcon />} onClick={(event) => {
|
||||
if (draftInput?.urlImage?.imgDataUrl) {
|
||||
// Invoke the viewer but with a virtual 'temp' part description to see this preview image
|
||||
handleViewImageRefPart(event, {
|
||||
pt: 'image_ref',
|
||||
dataRef: {
|
||||
reftype: 'url',
|
||||
url: draftInput.urlImage.imgDataUrl,
|
||||
},
|
||||
altText: draft.label || 'URL Image Preview',
|
||||
width: draftInput.urlImage.width || undefined,
|
||||
height: draftInput.urlImage.height || undefined,
|
||||
});
|
||||
}
|
||||
}}>
|
||||
open <LaunchIcon sx={{ mx: 0.5, fontSize: 16 }} />
|
||||
</Link>
|
||||
view
|
||||
</Chip>
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
@@ -343,13 +353,17 @@ export function LLMAttachmentMenu(props: {
|
||||
</Chip>
|
||||
</Typography>
|
||||
);
|
||||
} else if (isImageRefPart(part)) {
|
||||
const resolution = part.width && part.height ? `${part.width} x ${part.height}` : 'no resolution';
|
||||
const mime = part.dataRef.reftype === 'dblob' ? part.dataRef.mimeType : 'unknown image';
|
||||
} else if (isZyncAssetImageReferencePartWithLegacyDBlob(part) || isImageRefPart(part)) {
|
||||
// Unified Image Reference handling (both Zync Asset References with legacy fallback and legacy image_ref)
|
||||
const legacyImageRefPart = isZyncAssetImageReferencePartWithLegacyDBlob(part) ? part._legacyImageRefPart! : part;
|
||||
const { dataRef, width, height } = legacyImageRefPart;
|
||||
const resolution = width && height ? `${width} x ${height}` : 'no resolution';
|
||||
const mime = dataRef.reftype === 'dblob' ? dataRef.mimeType : 'unknown image';
|
||||
return (
|
||||
<Typography key={index} level='body-sm' sx={{ color: 'text.primary' }} startDecorator={<ReadMoreIcon sx={indicatorSx} />}>
|
||||
<span>{mime /*.replace('image/', 'img: ')*/} · {resolution} · {part.dataRef.reftype === 'dblob' ? (part.dataRef.bytesSize?.toLocaleString() || 'no size') : '(remote)'} · </span>
|
||||
<Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='primary' variant='outlined' startDecorator={<VisibilityIcon />} onClick={(event) => handleViewImageRefPart(event, part)}>
|
||||
<span>{mime /*.replace('image/', 'img: ')*/} · {resolution} · {dataRef.reftype === 'dblob' ? (dataRef.bytesSize?.toLocaleString() || 'no size') : '(remote)'} · </span>
|
||||
<Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='primary' variant='outlined' startDecorator={<VisibilityIcon />}
|
||||
onClick={(event) => handleViewImageRefPart(event, legacyImageRefPart)}>
|
||||
view
|
||||
</Chip>
|
||||
{isOutputMultiple && <Chip component='span' size={isOutputMultiple ? 'sm' : 'md'} color='danger' variant='outlined' startDecorator={<DeleteForeverIcon />} onClick={(event) => handleDeleteOutputFragment(event, index)}>
|
||||
|
||||
@@ -193,7 +193,7 @@ export function LLMAttachmentsList(props: {
|
||||
</Box>
|
||||
|
||||
{/* Overall Menu button */}
|
||||
{!_style.barWraps && (
|
||||
{!props.buttonsCanWrap && (
|
||||
<IconButton
|
||||
onClick={handleOverallMenuToggle}
|
||||
onContextMenu={handleOverallMenuToggle}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, CircularProgress, IconButton, Tooltip } from '@mui/joy';
|
||||
import { Box, CircularProgress, IconButton } from '@mui/joy';
|
||||
import AutoFixHighIcon from '@mui/icons-material/AutoFixHigh';
|
||||
|
||||
import type { AgiAttachmentPromptsData } from '~/modules/aifn/agiattachmentprompts/useAgiAttachmentPrompts';
|
||||
|
||||
import { AgiSquircleIcon } from '~/common/components/icons/AgiSquircleIcon';
|
||||
import { BigAgiSquircleIcon } from '~/common/components/icons/big-agi/BigAgiSquircleIcon';
|
||||
import { GoodTooltip } from '~/common/components/GoodTooltip';
|
||||
|
||||
import { AGI_SUGGESTIONS_COLOR } from '../textarea/ComposerTextAreaActions';
|
||||
|
||||
@@ -42,7 +43,7 @@ function LLMAttachmentsPromptsButton({ data }: { data: AgiAttachmentPromptsData
|
||||
const tooltipTitle =
|
||||
data.error ? (data.error.message || 'Error guessing actions')
|
||||
: data.isFetching ? null
|
||||
: data.isPending ? <Box sx={{ display: 'flex', gap: 1 }}><AgiSquircleIcon inverted sx={{ color: 'white', borderRadius: '1rem' }} /> What can I do?</Box>
|
||||
: data.isPending ? <Box sx={{ display: 'flex', gap: 1 }}><BigAgiSquircleIcon inverted sx={{ color: 'white', borderRadius: '1rem' }} /> What can I do?</Box>
|
||||
: 'Give me more ideas';
|
||||
|
||||
const button = (
|
||||
@@ -64,8 +65,8 @@ function LLMAttachmentsPromptsButton({ data }: { data: AgiAttachmentPromptsData
|
||||
);
|
||||
|
||||
return !tooltipTitle ? button : (
|
||||
<Tooltip variant='outlined' disableInteractive placement='left' arrow title={tooltipTitle}>
|
||||
<GoodTooltip variantOutlined arrow title={tooltipTitle}>
|
||||
{button}
|
||||
</Tooltip>
|
||||
</GoodTooltip>
|
||||
);
|
||||
}
|
||||
@@ -11,6 +11,7 @@ export interface LLMAttachmentDraftsCollection {
|
||||
canAttachAllFragments: boolean;
|
||||
canInlineSomeFragments: boolean;
|
||||
llmTokenCountApprox: number | null;
|
||||
hasImageFragments: boolean;
|
||||
}
|
||||
|
||||
|
||||
@@ -19,6 +20,7 @@ export interface LLMAttachmentDraft {
|
||||
llmSupportsAllFragments: boolean;
|
||||
llmSupportsTextFragments: boolean;
|
||||
llmTokenCountApprox: number | null;
|
||||
hasImageFragments: boolean;
|
||||
}
|
||||
|
||||
|
||||
@@ -44,7 +46,10 @@ export function useLLMAttachmentDrafts(attachmentDrafts: AttachmentDraft[], chat
|
||||
const equalChatLLM = chatLLM === prevStateRef.current.chatLLM;
|
||||
|
||||
// LLM-dependent multi-modal enablement
|
||||
const supportedTypes: DMessageAttachmentFragment['part']['pt'][] = chatLLMSupportsImages ? ['image_ref', 'doc'] : ['doc'];
|
||||
// TODO: consider also Audio inputs, maybe PDF binary inputs
|
||||
// FIXME: reference fragments could refer to non-image as well
|
||||
const imageTypes: DMessageAttachmentFragment['part']['pt'][] = ['reference', 'image_ref'];
|
||||
const supportedTypes: DMessageAttachmentFragment['part']['pt'][] = chatLLMSupportsImages ? [...imageTypes, 'doc'] : ['doc'];
|
||||
const supportedTextTypes: DMessageAttachmentFragment['part']['pt'][] = supportedTypes.filter(pt => pt === 'doc');
|
||||
|
||||
// Add LLM-specific properties to each attachment draft
|
||||
@@ -66,6 +71,7 @@ export function useLLMAttachmentDrafts(attachmentDrafts: AttachmentDraft[], chat
|
||||
llmTokenCountApprox: chatLLM
|
||||
? estimateTokensForFragments(chatLLM, 'user', a.outputFragments, true, 'useLLMAttachmentDrafts')
|
||||
: null,
|
||||
hasImageFragments: !a.outputFragments ? false : a.outputFragments.some(op => imageTypes.includes(op.part.pt)),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -75,6 +81,7 @@ export function useLLMAttachmentDrafts(attachmentDrafts: AttachmentDraft[], chat
|
||||
const llmTokenCountApprox = chatLLM
|
||||
? llmAttachmentDrafts.reduce((acc, a) => acc + (a.llmTokenCountApprox || 0), 0)
|
||||
: null;
|
||||
const hasImageFragments = llmAttachmentDrafts.some(a => a.hasImageFragments);
|
||||
|
||||
// [Optimization] Update the ref with the new state
|
||||
prevStateRef.current = { llmAttachmentDrafts, chatLLM };
|
||||
@@ -84,6 +91,7 @@ export function useLLMAttachmentDrafts(attachmentDrafts: AttachmentDraft[], chat
|
||||
canAttachAllFragments,
|
||||
canInlineSomeFragments,
|
||||
llmTokenCountApprox,
|
||||
hasImageFragments,
|
||||
};
|
||||
|
||||
}, [attachmentDrafts, chatLLM, chatLLMSupportsImages]); // Dependencies for the outer useMemo
|
||||
|
||||
@@ -15,7 +15,7 @@ export const AGI_SUGGESTIONS_COLOR: ColorPaletteProp = 'success';
|
||||
|
||||
// Styles
|
||||
|
||||
const textAreaSx: SxProps = {
|
||||
export const composerTextAreaSx: SxProps = {
|
||||
flex: 1,
|
||||
|
||||
// layout
|
||||
@@ -29,8 +29,8 @@ const textAreaSx: SxProps = {
|
||||
'--Button-gap': '1.2rem',
|
||||
transition: 'background-color 0.2s, color 0.2s',
|
||||
// minWidth: 160,
|
||||
},
|
||||
};
|
||||
} as const,
|
||||
} as const;
|
||||
|
||||
|
||||
const promptButtonSx: SxProps = {
|
||||
@@ -75,7 +75,7 @@ export function ComposerTextAreaActions(props: {
|
||||
return null;
|
||||
|
||||
return (
|
||||
<Box sx={textAreaSx}>
|
||||
<Box sx={composerTextAreaSx}>
|
||||
|
||||
{/* In-Reference-To bubbles */}
|
||||
{props.inReferenceTo?.map((item, index) => (
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button } from '@mui/joy';
|
||||
import AutoFixHighIcon from '@mui/icons-material/AutoFixHigh';
|
||||
|
||||
import { composerTextAreaSx } from './ComposerTextAreaActions';
|
||||
import { imaginePromptFromTextOrThrow } from '~/modules/aifn/imagine/imaginePromptFromText';
|
||||
|
||||
|
||||
const _style = {
|
||||
enhance: {
|
||||
minWidth: 170,
|
||||
mx: 0.625,
|
||||
pr: 2,
|
||||
border: '1px solid',
|
||||
borderColor: 'warning.outlinedBorder',
|
||||
boxShadow: '0px 4px 4px -4px rgb(var(--joy-palette-warning-darkChannel) / 20%)',
|
||||
transition: 'background-color 0.14s',
|
||||
justifyContent: 'space-between',
|
||||
} as const,
|
||||
gone: {
|
||||
visibility: 'hidden',
|
||||
} as const,
|
||||
} as const;
|
||||
|
||||
export function ComposerTextAreaDrawActions(props: {
|
||||
composerText: string,
|
||||
onReplaceText: (text: string) => void,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [isSimpleEnhancing, setIsSimpleEnhancing] = React.useState(false);
|
||||
|
||||
|
||||
// derived
|
||||
const trimmedPrompt = props.composerText.trim();
|
||||
const userHasText = trimmedPrompt.length >= 3;
|
||||
|
||||
|
||||
const { onReplaceText } = props;
|
||||
|
||||
const handleSimpleEnhance = React.useCallback(async () => {
|
||||
if (!trimmedPrompt || isSimpleEnhancing) return;
|
||||
setIsSimpleEnhancing(true);
|
||||
const improvedPrompt = await imaginePromptFromTextOrThrow(trimmedPrompt, 'DEV')
|
||||
.catch(console.error);
|
||||
if (improvedPrompt)
|
||||
onReplaceText(improvedPrompt);
|
||||
setIsSimpleEnhancing(false);
|
||||
}, [isSimpleEnhancing, onReplaceText, trimmedPrompt]);
|
||||
|
||||
|
||||
return (
|
||||
<Box sx={composerTextAreaSx}>
|
||||
|
||||
{/* Enhance button */}
|
||||
<Box sx={{ ml: 'auto' }}>
|
||||
<Button
|
||||
size='sm'
|
||||
variant={isSimpleEnhancing ? 'soft' : 'soft'}
|
||||
color='warning'
|
||||
disabled={!userHasText}
|
||||
loading={isSimpleEnhancing}
|
||||
loadingPosition='end'
|
||||
// className={promptButtonClass}
|
||||
endDecorator={<AutoFixHighIcon sx={{ fontSize: '20px' }} />}
|
||||
onClick={handleSimpleEnhance}
|
||||
sx={!userHasText ? _style.gone : _style.enhance}
|
||||
>
|
||||
{isSimpleEnhancing ? 'Enhancing...' : 'Enhance Prompt'}
|
||||
</Button>
|
||||
</Box>
|
||||
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -6,7 +6,7 @@ import { Box, ColorPaletteProp, Tooltip } from '@mui/joy';
|
||||
import { DPricingChatGenerate, getLlmCostForTokens } from '~/common/stores/llms/llms.pricing';
|
||||
import { adjustContentScaling, themeScalingMap } from '~/common/app.theme';
|
||||
import { formatModelsCost } from '~/common/util/costUtils';
|
||||
import { useUIContentScaling } from '~/common/state/store-ui';
|
||||
import { useUIContentScaling } from '~/common/stores/store-ui';
|
||||
|
||||
|
||||
export function tokenCountsMathAndMessage(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number, chatPricing?: DPricingChatGenerate): {
|
||||
|
||||
@@ -0,0 +1,109 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { DLLM } from '~/common/stores/llms/llms.types';
|
||||
import { estimateTextTokens } from '~/common/stores/chat/chat.tokens';
|
||||
|
||||
|
||||
/**
|
||||
* Efficient hook that calculates token count for text with debouncing and deadline,
|
||||
* and only updates when the token count changes.
|
||||
*
|
||||
* @param text The text to count tokens for.
|
||||
* @param llm The LLM (includes the config) we perform the token count FOR.
|
||||
* @param debounceMs The minimum time between updates (keeps rolling at every change)
|
||||
* @param deadlineMs The maximum time between updates (fires even if the text is still changing)
|
||||
*/
|
||||
export function useTextTokenCount(
|
||||
text: string,
|
||||
llm: DLLM | null,
|
||||
debounceMs: number = 300,
|
||||
deadlineMs: number = 1200,
|
||||
): number | undefined {
|
||||
|
||||
// state: text ref to just read point value
|
||||
const lastTextRef = React.useRef<string>(undefined);
|
||||
|
||||
// state
|
||||
const [tokenCount, setTokenCount] = React.useState<number | undefined>(undefined);
|
||||
const lastTokenCountRef = React.useRef<number | undefined>(undefined);
|
||||
|
||||
const resetTokenCount = React.useCallback((value: number | undefined = 0) => {
|
||||
if (lastTokenCountRef.current === value) return;
|
||||
lastTokenCountRef.current = value;
|
||||
setTokenCount(value);
|
||||
}, []);
|
||||
|
||||
|
||||
// Timers: Debounced/Deadlined
|
||||
|
||||
const debounceTimerRef = React.useRef<ReturnType<typeof setTimeout>>(undefined);
|
||||
const deadlineTimerRef = React.useRef<ReturnType<typeof setTimeout>>(undefined);
|
||||
|
||||
const clearTimers = React.useCallback((clearDebounce: boolean = true, clearDeadline: boolean = true) => {
|
||||
if (clearDebounce && debounceTimerRef.current) {
|
||||
clearTimeout(debounceTimerRef.current);
|
||||
debounceTimerRef.current = undefined;
|
||||
}
|
||||
if (clearDeadline && deadlineTimerRef.current) {
|
||||
clearTimeout(deadlineTimerRef.current);
|
||||
deadlineTimerRef.current = undefined;
|
||||
}
|
||||
}, []);
|
||||
|
||||
|
||||
// tokens calculation, given the input text and LLM (which includes the LLM configuration)
|
||||
// NOTE: we shall extend this for fragments? (images, etc.)
|
||||
|
||||
const calculateAndUpdateTextTokens = React.useCallback(() => {
|
||||
|
||||
// no llm: can't count
|
||||
const currentText = lastTextRef.current;
|
||||
if (!llm || currentText === undefined) {
|
||||
resetTokenCount(undefined);
|
||||
return;
|
||||
}
|
||||
|
||||
// [HEAVY] compute tokens
|
||||
const newTextTokens = !currentText ? 0
|
||||
: estimateTextTokens(currentText, llm, 'useTextTokenCount');
|
||||
|
||||
// only update state if changed
|
||||
if (newTextTokens !== lastTokenCountRef.current) {
|
||||
lastTokenCountRef.current = newTextTokens;
|
||||
setTokenCount(newTextTokens);
|
||||
}
|
||||
|
||||
// clear both timers since we're current now
|
||||
clearTimers(true, true);
|
||||
|
||||
}, [clearTimers, llm, resetTokenCount]);
|
||||
|
||||
|
||||
// debounce mechanics
|
||||
|
||||
React.useEffect(() => {
|
||||
|
||||
// if there's no LLM, we can't do anything
|
||||
if (!llm || text === undefined) {
|
||||
resetTokenCount(undefined);
|
||||
return;
|
||||
}
|
||||
|
||||
// update text reference for the calculation function
|
||||
lastTextRef.current = text;
|
||||
|
||||
// restart the debounce timer
|
||||
clearTimers(true, false);
|
||||
debounceTimerRef.current = setTimeout(calculateAndUpdateTextTokens, debounceMs);
|
||||
|
||||
// set a deadline timer if one isn't already running
|
||||
if (!deadlineTimerRef.current && deadlineMs > debounceMs)
|
||||
deadlineTimerRef.current = setTimeout(calculateAndUpdateTextTokens, deadlineMs);
|
||||
|
||||
}, [calculateAndUpdateTextTokens, clearTimers, deadlineMs, debounceMs, llm, resetTokenCount, text]);
|
||||
|
||||
// cleanup at unmount
|
||||
React.useEffect(() => () => clearTimers(true, true), [clearTimers]);
|
||||
|
||||
return tokenCount;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user