Compare commits
2475 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6ba1afa540 | |||
| 04e54d898e | |||
| 20a85a5dfb | |||
| 849eb58a8f | |||
| 558d73a858 | |||
| e6948d4186 | |||
| 55e2e09797 | |||
| b2a3b78d44 | |||
| 135c957b7e | |||
| 624b99c8fe | |||
| fef0b27a33 | |||
| 75897cc162 | |||
| 26733d6fea | |||
| 2a29ccdcf5 | |||
| efdcf26c10 | |||
| 89d915a266 | |||
| 31c9b5c516 | |||
| b9563eaac9 | |||
| 396abe01ec | |||
| 6880ba8651 | |||
| 4aa0d4a8ae | |||
| 50ede8baa8 | |||
| 685edd12b9 | |||
| 213241eae9 | |||
| 2b9a5b5746 | |||
| 6ea0347a06 | |||
| 0df68444b3 | |||
| 290fd285c5 | |||
| bd0b60ad26 | |||
| d3c7daac50 | |||
| c7805566ae | |||
| 6be3f274ed | |||
| 40d05771c1 | |||
| dfec14620d | |||
| 27f57d6f22 | |||
| 78db633f25 | |||
| 440f368c36 | |||
| 78186ad442 | |||
| 75182df441 | |||
| 213de18ecc | |||
| 08437f1e8d | |||
| 3ba9200d0c | |||
| 1ad20a1579 | |||
| 1a53484064 | |||
| bdcbf7ebc8 | |||
| 25b0cb47ac | |||
| a1fa8d6480 | |||
| 3240e527e8 | |||
| fa80848dd5 | |||
| 085f97b10c | |||
| d96937a92f | |||
| e2cd16d5d2 | |||
| 9a783ebe6d | |||
| 1db98ea0b0 | |||
| 775808c472 | |||
| 251f57372c | |||
| 32b0ac2fa1 | |||
| dc5b338914 | |||
| 473a73d2c5 | |||
| d303a24300 | |||
| c8cbeffda3 | |||
| b1d94e790f | |||
| 59966546cf | |||
| 31eac468bc | |||
| 672cc4801b | |||
| 9e7b950cda | |||
| 21e0180e88 | |||
| e2801a0fef | |||
| 9ae9170ddb | |||
| 7a092a9140 | |||
| d453adc438 | |||
| e5e9f489d3 | |||
| 792f3f20f7 | |||
| 2581632f4b | |||
| 7c4df2cbf5 | |||
| 5af67e4be8 | |||
| eaa534d590 | |||
| f8630e3213 | |||
| 48c64aed9f | |||
| e7e5d2b901 | |||
| 1051d6461c | |||
| baec2b8a3a | |||
| bfa5c510ef | |||
| 970d024c10 | |||
| bc74c97ca2 | |||
| a5b92ec93a | |||
| f593cb6b04 | |||
| 50717b339f | |||
| 4400f33209 | |||
| b2d21d3426 | |||
| f9b102ffb8 | |||
| 20d7d6b2ee | |||
| f947a650b0 | |||
| da2be7be50 | |||
| f1257556f3 | |||
| df3c4b5971 | |||
| 403f5795f8 | |||
| e971db1c60 | |||
| 9411cd4f9b | |||
| 271e5f3fc9 | |||
| ce58fb5419 | |||
| ceca769a20 | |||
| 473f73c23f | |||
| aab84f6e67 | |||
| 95e988dc9a | |||
| 3cb9183cc0 | |||
| 79119c4770 | |||
| 64933bea7e | |||
| bae15f3bb6 | |||
| 35733e86b9 | |||
| e42dc3a755 | |||
| 215c45c823 | |||
| 6d68f599df | |||
| c5b4dadb83 | |||
| 5d942eec41 | |||
| 613665665d | |||
| b14e28e03b | |||
| 9969ccb530 | |||
| ec9243b8dd | |||
| 8584a56a6d | |||
| af78068f48 | |||
| 0a002a0d78 | |||
| 66572a970f | |||
| ff1d3686b6 | |||
| 7626b48b70 | |||
| 9bfcb50735 | |||
| c18be131a1 | |||
| 692a1ef90e | |||
| 3c3fae6039 | |||
| c090334093 | |||
| a2942a32e6 | |||
| 1bd327e410 | |||
| 557886f93f | |||
| f52e35029c | |||
| b2d66af440 | |||
| 897d7fb7e0 | |||
| 0407d228fb | |||
| a58f703744 | |||
| 5186df3748 | |||
| a5f0527eec | |||
| 6e851713b9 | |||
| b4a586fab2 | |||
| cb2d6dbfd4 | |||
| 4162158413 | |||
| 18234ec199 | |||
| 18dbf916ef | |||
| 26df5bc889 | |||
| 0f33ae1a8d | |||
| e7a463399f | |||
| 632efafcbd | |||
| bfbfe4d122 | |||
| 21f42159c8 | |||
| b750a2bb9b | |||
| 475ff1e88e | |||
| c2a577bf65 | |||
| 12f4fdde93 | |||
| db0aa36ccb | |||
| 69192689b8 | |||
| 078c6400ba | |||
| 6074a5bb16 | |||
| 79c71a1740 | |||
| caa9cb6a16 | |||
| 795126ccde | |||
| 7728efebad | |||
| a896cb827a | |||
| b9c62b45f9 | |||
| 9e2f815e1e | |||
| f88c18ddde | |||
| 2de70fa87d | |||
| eb49717065 | |||
| 268b1d8a19 | |||
| 8768d9e946 | |||
| d83d29054d | |||
| c39e378235 | |||
| 7be72acff3 | |||
| 41a2f1e526 | |||
| 36eda51789 | |||
| 6ffcb731a3 | |||
| ce9fcbd4a1 | |||
| 022cdd0b37 | |||
| ab5b6f1769 | |||
| 1811e31ea5 | |||
| b0430b0a18 | |||
| 122d6c9da5 | |||
| 480aaed108 | |||
| bf312265de | |||
| ea15d8e0e9 | |||
| 0947100b13 | |||
| c0dca032e4 | |||
| f0df743f12 | |||
| b6ee04e140 | |||
| 75333df458 | |||
| f980bffd11 | |||
| 8aeb4dce76 | |||
| 24e8539b3e | |||
| 8bc118c060 | |||
| f0e595b3ff | |||
| 44b9eebe1c | |||
| 68630a0bd5 | |||
| 3bf0c10b42 | |||
| 6fcd87aba7 | |||
| 4183e1e2c5 | |||
| 2506d60058 | |||
| 7221151f2f | |||
| 4c259eb9e5 | |||
| 334b3aba86 | |||
| 83e601ad3a | |||
| 6e8a25bc23 | |||
| 70764e9ca2 | |||
| 148eba8feb | |||
| 0ee2d04d48 | |||
| 311b91788b | |||
| ab30bf213c | |||
| f0cb5c63cf | |||
| b8f4ad674b | |||
| 0a27544db3 | |||
| fa8501b73d | |||
| 8074be9b2e | |||
| 2e2a664c82 | |||
| 88ed387b75 | |||
| 1006d8041e | |||
| 004b26bff4 | |||
| eb41a245e8 | |||
| 22a219f1e4 | |||
| 6435619fba | |||
| 68ca331e02 | |||
| bf1c734101 | |||
| e5870e43ce | |||
| 1cda4d443a | |||
| cdc3fe38ca | |||
| f5cb3150b1 | |||
| 38f700b383 | |||
| dc46497dfe | |||
| cf0ca2a17e | |||
| 0df17ec7bf | |||
| 26d6924ba1 | |||
| aef56df7a3 | |||
| 25a6f1fd5c | |||
| 010de2b1f1 | |||
| c498c31050 | |||
| ea9aaa6524 | |||
| dfcd590fb3 | |||
| 5d8b06c928 | |||
| a10ddbe103 | |||
| b439180d4e | |||
| 629dc3c3b8 | |||
| 99a8f900df | |||
| d713edfcab | |||
| cfead98e01 | |||
| 85242d2417 | |||
| 3682a86b49 | |||
| 7c418966f0 | |||
| f773910846 | |||
| cfbc1160ab | |||
| 2d11873af8 | |||
| ee322b45eb | |||
| c472fbad1c | |||
| b62932ac9c | |||
| bd35be8e13 | |||
| b5eff8b2da | |||
| 6bf0b7c83a | |||
| 3a5dc71fc0 | |||
| bddd1474c4 | |||
| b90b71bb58 | |||
| d75307691f | |||
| acc3bc4403 | |||
| 4c1b1213b1 | |||
| 40850dde0a | |||
| 10d5fca3b3 | |||
| 71085408a5 | |||
| 52cdc95103 | |||
| 638977cc74 | |||
| 5e7e838714 | |||
| 965e3282a8 | |||
| 7a1d9cc352 | |||
| 07dfc8ee07 | |||
| 414250f5d0 | |||
| e6854ab946 | |||
| 026d2fbc40 | |||
| 66a557a7e1 | |||
| 28ae055c28 | |||
| 0f8fda5c43 | |||
| 9bacedb48c | |||
| 9894684784 | |||
| 741b9dae41 | |||
| 45b4deb7c7 | |||
| 38bdbfeee3 | |||
| 4e3386f589 | |||
| 88999fe3f5 | |||
| ce1b5b73b7 | |||
| 864eaa7a02 | |||
| 8848b13f33 | |||
| 147da2bc95 | |||
| f94edc7543 | |||
| 9439007f2b | |||
| fa80b7278f | |||
| a7af365dc2 | |||
| 013d952092 | |||
| 0182f9885d | |||
| 5934a715ee | |||
| 41b21af3c3 | |||
| 4db42c5f91 | |||
| f05472baea | |||
| 72b0e77168 | |||
| aa06ac4756 | |||
| 2ff060ba38 | |||
| 9468f29aae | |||
| c4b894ec65 | |||
| b774073c7f | |||
| 911bf97a38 | |||
| e91e0f273a | |||
| 4279e94499 | |||
| ec34f3a6b1 | |||
| 86fed99288 | |||
| 934f29dcfd | |||
| b3cf624a39 | |||
| f4463e0113 | |||
| 57ca1270e6 | |||
| 03417c5186 | |||
| 9bcbe58f4a | |||
| 8d66e1c804 | |||
| 089ddee9de | |||
| 12965b81c7 | |||
| 62b64ac1bd | |||
| 9fecbe1005 | |||
| 0c90b296f9 | |||
| 1bc38e3760 | |||
| 32497dbb7b | |||
| 0a07f2a447 | |||
| fbc6265543 | |||
| 82c0ed475b | |||
| e9add70f8a | |||
| f3bd5e4d58 | |||
| a525b7437a | |||
| 915bc6cc89 | |||
| 51fdf2705d | |||
| 5d814c6bb5 | |||
| e379526f14 | |||
| c9057f0c25 | |||
| c3c65ea3d3 | |||
| ddee6aecfb | |||
| 1a6f56ef62 | |||
| 680e031be6 | |||
| a7ab95e905 | |||
| 57bb1edcfc | |||
| 216fe20e52 | |||
| a27669fd63 | |||
| 5034ecc4df | |||
| 55700d5218 | |||
| 35c03e5b44 | |||
| cfb3a071c8 | |||
| 677645b81c | |||
| 297f93b8ad | |||
| 765eec0492 | |||
| da32c2c59a | |||
| 091f1b1936 | |||
| e9dc735989 | |||
| 4d01f8620a | |||
| 3edf89d98f | |||
| 7681ff3f3e | |||
| 42d947f13e | |||
| b5772f3765 | |||
| 639f02346e | |||
| cf1cb7b112 | |||
| 1b60f1062f | |||
| d105e28ca8 | |||
| 3ff88fffe5 | |||
| d87d0aa146 | |||
| c7c43780a3 | |||
| bc24afe859 | |||
| e25f46b5c0 | |||
| 3907449ec0 | |||
| 8230eafe25 | |||
| a43e83d9ed | |||
| fce3b6e538 | |||
| f7decf8210 | |||
| 6d6c9afe97 | |||
| 6876390456 | |||
| 1e95ec6154 | |||
| 6756e4e027 | |||
| c0573c56c4 | |||
| 1338014942 | |||
| 011b76f8e2 | |||
| b2c07471d8 | |||
| 950e1c8779 | |||
| 61df9819e6 | |||
| 5afc8ad69b | |||
| a472c618a3 | |||
| 9f00772dd4 | |||
| 478d792565 | |||
| b20f1667f7 | |||
| 899695e32c | |||
| 56e99785ea | |||
| a778165ee7 | |||
| dc55be87d2 | |||
| 8eb7d7f80b | |||
| ee01085f8e | |||
| 1adff7481b | |||
| 054a8d9050 | |||
| ed98829869 | |||
| 4810975148 | |||
| a99895166c | |||
| cf62128095 | |||
| 2a5d418425 | |||
| 3e865d68dc | |||
| e882358af4 | |||
| f86649fe02 | |||
| 3892f0c82a | |||
| da2266fcfe | |||
| 7bf6c9c77a | |||
| 27f031eb57 | |||
| 7ce58fb594 | |||
| bc0f6a4cd2 | |||
| 0b3b2860bb | |||
| a21342384e | |||
| 79bb6f07c9 | |||
| 80f77aee4e | |||
| b0b3bcbf61 | |||
| f83d6ca4ac | |||
| 2c6a9fbea6 | |||
| df945dc36b | |||
| b6eccbf860 | |||
| 4332d21637 | |||
| b0809734aa | |||
| 74ed8c9e5e | |||
| 98b1d5068b | |||
| 07cc34ba2b | |||
| 239867d8c5 | |||
| aa31d92986 | |||
| f012b99b17 | |||
| d5fd66f07c | |||
| f07c19f69c | |||
| 5541a4e531 | |||
| 3055fe9347 | |||
| 369324ad5d | |||
| 4e5b1f1306 | |||
| 81c70b21b9 | |||
| 44a00df49d | |||
| 8754bbada9 | |||
| f37cdcb20c | |||
| 393e19dda9 | |||
| 766cc06206 | |||
| 710f03d037 | |||
| ec956da51c | |||
| da3dddf502 | |||
| 77efabb883 | |||
| 116415b238 | |||
| dd33e3ea28 | |||
| ad703f9483 | |||
| 9058c2d2fa | |||
| 3cd2b29a37 | |||
| 51cd6a7e07 | |||
| 30fe814eb1 | |||
| ff0ea188c1 | |||
| 34c6129d25 | |||
| 0378eb8f1e | |||
| d09d4455aa | |||
| dcce5a5b1d | |||
| 2f856e64bb | |||
| cd8c931df8 | |||
| cefe208abd | |||
| 0e566edf42 | |||
| 8c735e971e | |||
| 48740c7183 | |||
| 1ec2acd250 | |||
| b572c6d962 | |||
| 01197952f4 | |||
| f94240110d | |||
| 7a139e50c5 | |||
| d11a38db83 | |||
| 9f10ed1930 | |||
| 8a9e2151b8 | |||
| 98f8117b50 | |||
| 21d2d7be30 | |||
| 3a644ec4d5 | |||
| 4cd7936bf3 | |||
| 19769c8238 | |||
| 7ecd1aa371 | |||
| 9cd5194629 | |||
| a2cda7e791 | |||
| 1084e3d5e2 | |||
| 7aa02e91c8 | |||
| 35b9bbf0e6 | |||
| 2ae834366c | |||
| cdd08b5df3 | |||
| 3d566aa102 | |||
| b110580123 | |||
| 7df767119b | |||
| 17008c30f6 | |||
| 1efd9bc55a | |||
| 1e7b77928f | |||
| fc8c984cd4 | |||
| 23cf01d4b4 | |||
| 8dde79c607 | |||
| 78f409b056 | |||
| fd4f1ae0c0 | |||
| 93797afa7a | |||
| 5242d09b53 | |||
| d9cb9e6aed | |||
| 913fa45e73 | |||
| b21bd47ea5 | |||
| 340c298572 | |||
| 06e899343b | |||
| 104b95a5a4 | |||
| 1d9b309552 | |||
| 7c3d7a8596 | |||
| 966f57cb0e | |||
| 36260aaf8d | |||
| 3158d5b340 | |||
| 938a896de3 | |||
| 7573f014d0 | |||
| 76b8a1582b | |||
| 96acd7a307 | |||
| a41688f378 | |||
| ff9b1650f9 | |||
| 116bdcdd43 | |||
| bb8e7a283c | |||
| 825c565a24 | |||
| fe35c2ec82 | |||
| 21451af9b3 | |||
| 0852dc5dc9 | |||
| e90b71b2c2 | |||
| 6ea2674f35 | |||
| 62f809a8d3 | |||
| 0eb04a3f6a | |||
| 367894abc9 | |||
| febf758aa7 | |||
| a2407157d1 | |||
| 6eccdf74d1 | |||
| 57ebb8f9fb | |||
| 1d02079b09 | |||
| 6a7e9ea131 | |||
| 8c5a8d2044 | |||
| 6d47b6024a | |||
| e7bd6f6746 | |||
| d3a7e2a310 | |||
| d901cafe82 | |||
| 77e374b92b | |||
| 0989796f87 | |||
| 1e61998b50 | |||
| 4a896be01b | |||
| a44408c87c | |||
| 504d2b7c5a | |||
| 9446f0b23d | |||
| 671ba95975 | |||
| 6fc5acfeb9 | |||
| 1236d7c1ac | |||
| ef2ee9280d | |||
| 3e05f751e3 | |||
| b0f2ad6742 | |||
| 63a82a6da6 | |||
| 57082b23d8 | |||
| 361511fc5d | |||
| f81beb5cae | |||
| 2318cd3329 | |||
| 0fa3ea0ff6 | |||
| 00093e42be | |||
| cfa7ce2019 | |||
| f1397eff4a | |||
| 67a4a79a6c | |||
| 29fba943de | |||
| 69e6fb4dbd | |||
| 96cbd95996 | |||
| e88180a6f5 | |||
| 6b817cde0a | |||
| b7b7dfb4af | |||
| afdfb83fc8 | |||
| f3a1fa624a | |||
| 90c05ba2d9 | |||
| de3aa4a5f7 | |||
| 2258dee8c7 | |||
| feec32b3ac | |||
| e189f30285 | |||
| 58e97d20fd | |||
| 378948e522 | |||
| a19804d450 | |||
| c445c36f84 | |||
| fa9886e2dc | |||
| b7c7609ac4 | |||
| 217a40c19c | |||
| 5cb568023b | |||
| 8ced2593ff | |||
| 4db373e46e | |||
| 79a870a752 | |||
| e6d68a4970 | |||
| 3087e2316f | |||
| c59a7cc0bc | |||
| c948b58eb5 | |||
| fde57ca12d | |||
| e9cebe435a | |||
| 3280a58dd0 | |||
| 597214b955 | |||
| d43f84e31e | |||
| 7ad118d51b | |||
| d980d97227 | |||
| ee7c1cc12b | |||
| 94200cfc6d | |||
| 8c5a2bce0d | |||
| 259505190b | |||
| e407eba674 | |||
| 13462b6b71 | |||
| 15661b3b8c | |||
| b97c4c5a43 | |||
| aeda478d51 | |||
| 9bd6b12789 | |||
| 476a212ee8 | |||
| 0470285338 | |||
| ab28ae2940 | |||
| 169fa86ec4 | |||
| cafd491e35 | |||
| b8ae9648e6 | |||
| 0de8d5dd1c | |||
| 356b15a3ea | |||
| 2002255c07 | |||
| b19a4be485 | |||
| eec845486d | |||
| 29f7224b22 | |||
| e7a4c92e61 | |||
| 4d59ba6797 | |||
| acae4c31ab | |||
| f1449b60e2 | |||
| 32739fa15c | |||
| fff385b5e8 | |||
| 40faa35678 | |||
| 78af46ed4a | |||
| ea94fc2185 | |||
| c8abb75194 | |||
| 6a59ad338d | |||
| 40aa737480 | |||
| 717a6c90ef | |||
| f4b1b292b7 | |||
| 446df24f5a | |||
| 39ab5cabf1 | |||
| 2671ff1228 | |||
| 1932c31c35 | |||
| 9b1b6ebceb | |||
| f683d263ad | |||
| 53c87cc560 | |||
| 63c90dc70d | |||
| 20927d4da6 | |||
| e1664458c5 | |||
| 09da682bcf | |||
| ac70a7d8c2 | |||
| 638155eae7 | |||
| 782c0cf172 | |||
| c3b907f94c | |||
| 78a773dec7 | |||
| b1ffe04ccb | |||
| cf9568aaa0 | |||
| 5e2d609332 | |||
| 1962f119e5 | |||
| 06d2bf2ba5 | |||
| 9b902e0f8b | |||
| 05e74a7cd8 | |||
| 4ea0d3b974 | |||
| 0b1a914015 | |||
| 6c405c3807 | |||
| 49cbda39b8 | |||
| f9d511c4b4 | |||
| 30c7421877 | |||
| 91a5878460 | |||
| 1af77749e9 | |||
| d3fdd5f85a | |||
| f5c819f576 | |||
| 4640b13195 | |||
| 52a89c3d76 | |||
| a92b605c25 | |||
| 2de8a8f2c0 | |||
| 9e5f332dc6 | |||
| fce21ad8eb | |||
| 36b4ee9589 | |||
| ec24b70d81 | |||
| b3c2794a60 | |||
| 194115231a | |||
| 175015eade | |||
| 6ce3cf9e29 | |||
| 8358949c9b | |||
| 78d8b69bc7 | |||
| 80a5cca30e | |||
| 21e7756a74 | |||
| b1b23e7c4e | |||
| bfa73ec6c5 | |||
| 39c5c7c9ba | |||
| e64a5e59ef | |||
| 574c2cf0e3 | |||
| 1d3321b336 | |||
| de25e5822d | |||
| 6a904c9f37 | |||
| 30c3283572 | |||
| 10bba19079 | |||
| 713079f2f2 | |||
| 6e16e989ac | |||
| 4e89e0b1e4 | |||
| 6067c289ab | |||
| 508c4bf80c | |||
| 93b03bf87c | |||
| 32ebfea9cb | |||
| f1f28bca61 | |||
| 1960e0bb3a | |||
| 362eb46a7a | |||
| 9d86c30267 | |||
| 7a34fb4aba | |||
| f3ccb46570 | |||
| 2c60571702 | |||
| 4c073202c0 | |||
| 015534dbeb | |||
| 2ca0f0fc1c | |||
| 7bea2a8191 | |||
| e828aa8580 | |||
| b4285832e5 | |||
| f5a2313e46 | |||
| 1ae14e94ae | |||
| f396ce189a | |||
| e83ae9150c | |||
| a93f99d5a2 | |||
| fb055b6cf6 | |||
| ce2f327b79 | |||
| 15694a59ba | |||
| 54e890b8e2 | |||
| efbbc8ec43 | |||
| cec3065830 | |||
| 9b98c43b92 | |||
| 0603a48e1e | |||
| e001fc5449 | |||
| b0c46f4fe8 | |||
| f2f9365d64 | |||
| c62423d7e3 | |||
| f4e488d658 | |||
| 87b57ff8ac | |||
| e13070e15c | |||
| c4ce659c88 | |||
| 3e4a6432f0 | |||
| 721fc0a33f | |||
| 7354ff4023 | |||
| a70e31e199 | |||
| 2958b2a96a | |||
| a4ccd53ccc | |||
| dd428f433c | |||
| 8f577396d5 | |||
| 4ca7b60162 | |||
| 48b85edf20 | |||
| f9980deac4 | |||
| 9dda4ff241 | |||
| 3ad83f79ff | |||
| 3361fb3921 | |||
| 25b88237a0 | |||
| bb20234a74 | |||
| 88bf939d23 | |||
| 7c1d2cad66 | |||
| 71f5ab96d4 | |||
| 746dd0ad05 | |||
| d4324a8ba9 | |||
| 2550f345fc | |||
| 8d6ae13d65 | |||
| 1df31946c2 | |||
| 79108537d7 | |||
| 5f61ccb6c3 | |||
| 02739a9951 | |||
| af49ee72b6 | |||
| 47b146aa38 | |||
| 9ae8542100 | |||
| 77596ffdfb | |||
| 07665ece59 | |||
| c1fbd72785 | |||
| ae43a1d8af | |||
| 6265868658 | |||
| c6d4f2834e | |||
| 77777da122 | |||
| 93ac4d59a3 | |||
| e483050608 | |||
| 4f1afc3221 | |||
| c098afe818 | |||
| 21045f5e72 | |||
| e827e9810a | |||
| 9bc6fd6ad1 | |||
| 590843f2a5 | |||
| d4bba26a2b | |||
| b591e1ab64 | |||
| 245add3e15 | |||
| 42c8095665 | |||
| 17f7057ec3 | |||
| c4a6f60562 | |||
| 0faf0c4422 | |||
| e6163d227a | |||
| 81bbd9f4f5 | |||
| 380e5fa664 | |||
| ebfc67ed4a | |||
| 1a215390e2 | |||
| 8f9d5cd5d7 | |||
| 128b259288 | |||
| e96c9247c3 | |||
| a7812b7bb9 | |||
| 2f7de4f43b | |||
| cbcdb8a572 | |||
| 6ed57cca24 | |||
| 8fa7181a6d | |||
| 7400a03ee8 | |||
| 1df2b82c56 | |||
| 136dcbcd4c | |||
| 29f9f22eff | |||
| b5308caea1 | |||
| 0bb4fd4517 | |||
| c745aae281 | |||
| 9194b3b5f5 | |||
| 12c5c0a058 | |||
| 04a96e47d7 | |||
| 4ddaa75880 | |||
| 085ff56654 | |||
| 5251232319 | |||
| 72fb677087 | |||
| 23be99aef7 | |||
| 9f5093bf32 | |||
| f386cf9b1b | |||
| 6e1bb66dbf | |||
| 1c0b8120e5 | |||
| d342220b09 | |||
| 1c09786d37 | |||
| 694cd7483e | |||
| aeb6aa52e8 | |||
| 04938c80f2 | |||
| f99c266255 | |||
| 830f972c43 | |||
| b07d7b81a7 | |||
| 859453fc02 | |||
| 7810445c39 | |||
| f7f1b5ad21 | |||
| bee7ee406d | |||
| b035b85b15 | |||
| 908712045c | |||
| 5a4715e608 | |||
| 433354e938 | |||
| 3d1efea655 | |||
| 5b6cb644ae | |||
| b1abff01ea | |||
| 4c221f89cc | |||
| 9ffbd45961 | |||
| 8b88d4afe7 | |||
| 0061fb4c30 | |||
| 2f3977ca8d | |||
| 915c876cd4 | |||
| 42d9d70bdc | |||
| 336819a2dd | |||
| 0ec261f7ca | |||
| dfe6d3cf72 | |||
| 8ab35a5fab | |||
| f6c71d98f5 | |||
| 973e15363f | |||
| 0a7a1b7a5f | |||
| beb415213a | |||
| 7087d45b62 | |||
| 395d977e97 | |||
| 6db608e2c8 | |||
| 2ca025818a | |||
| 5dab98bef9 | |||
| ce29954c6e | |||
| cd62e101c1 | |||
| 187d85db1f | |||
| ad85f743b1 | |||
| 30300a2134 | |||
| c9bf43e444 | |||
| 8a8e42c01d | |||
| 78db1091ac | |||
| b4fdf4c7c4 | |||
| 227e51a97d | |||
| b0ca462998 | |||
| 7187e55afa | |||
| 419c53322d | |||
| c86bd3088a | |||
| b943352569 | |||
| 962619de47 | |||
| 0fb408af8c | |||
| f9262e0f1f | |||
| 2c3f237ccc | |||
| 1680ba4223 | |||
| 14c0e23b75 | |||
| bc6d9d5de1 | |||
| 973ba8669f | |||
| 37a38c3c89 | |||
| 816d97e7d1 | |||
| 89965d09cf | |||
| da10e3214a | |||
| 5a29b17b27 | |||
| 2f00921698 | |||
| b2cd19a7e0 | |||
| d69bd91aca | |||
| 2ac84a0b2f | |||
| e7c38c3785 | |||
| c903c7f7ed | |||
| 9d1c87b3f2 | |||
| 929f419760 | |||
| c31662e528 | |||
| 79151c0528 | |||
| d125b97d97 | |||
| 65d9fc1dc7 | |||
| ef30a45388 | |||
| c3565d99fa | |||
| 1a6fbea8c7 | |||
| ab386079b1 | |||
| 448169563c | |||
| 1647a528a3 | |||
| eb78713cc3 | |||
| ee71013f93 | |||
| d2ffec2d4e | |||
| 0132df0bf2 | |||
| bf269ecbac | |||
| 8af6ac853b | |||
| aff50499a2 | |||
| c30fa2aafb | |||
| 02d6f5c10e | |||
| b99869544f | |||
| e07b5aa988 | |||
| bd873b84c9 | |||
| f28b7ebeb9 | |||
| cfcffa9a65 | |||
| 226e4b0f8b | |||
| 8b9a103fd3 | |||
| 6a0a76df92 | |||
| 3d81f2a814 | |||
| 1aab4a6e51 | |||
| dec280d54d | |||
| 4823e97783 | |||
| 6a5685995f | |||
| 22b32d571d | |||
| a416cafc4e | |||
| 5f5efe6133 | |||
| d066aba00e | |||
| 83e9965254 | |||
| ce4e447032 | |||
| 568b16d2cf | |||
| be2d857f82 | |||
| c4c2fb9d39 | |||
| 79f5d24e2f | |||
| a69944c019 | |||
| e4bb546442 | |||
| 04ff184c23 | |||
| 4718e26f96 | |||
| bd808594cb | |||
| 00c2186106 | |||
| a788b7a41b | |||
| 024489939e | |||
| 33a0bd2d72 | |||
| 029bcc7980 | |||
| acb2f6a1aa | |||
| 67184536a6 | |||
| 85aed347cf | |||
| 910f8c5a2f | |||
| bf47d40fdf | |||
| c36440d576 | |||
| 2e14c5a3d4 | |||
| 7640e027d3 | |||
| dccb493cf7 | |||
| 31a7ee0f4d | |||
| 3ac1102274 | |||
| a1c6cce61d | |||
| 433f9a8162 | |||
| d0880da782 | |||
| e331393987 | |||
| 7047e98e91 | |||
| 6c8a8bce88 | |||
| a78f739935 | |||
| 40c9b5a668 | |||
| 46bb02f946 | |||
| e57d4c724f | |||
| d3c940bebc | |||
| d71f94110f | |||
| 50e97e7523 | |||
| 30ffd1a7ee | |||
| 030db4f769 | |||
| 454f8620f7 | |||
| f007f57b93 | |||
| 573658ec7e | |||
| 1fc61f7c78 | |||
| f0240018d6 | |||
| 4a40cca39e | |||
| 3add2b7a94 | |||
| 4fbcda03a5 | |||
| 0020a49cd0 | |||
| 7f7e484d4c | |||
| e9a162e952 | |||
| 0a0270992d | |||
| 2d40e1b7ef | |||
| 323e69d2eb | |||
| f125b323b4 | |||
| b47b171e30 | |||
| f93a769c85 | |||
| fe2103b8cf | |||
| 94278068c2 | |||
| b18a3f0fd9 | |||
| 1e5bb5aa7e | |||
| 3dd672a526 | |||
| 92f3eb3184 | |||
| a8d1eb71c3 | |||
| 4fb64d2117 | |||
| e8beaa90ad | |||
| 5c3ecb86e1 | |||
| 13456da524 | |||
| da15ab07e8 | |||
| 350c84fbda | |||
| 3efbe65ca6 | |||
| 7dffd797a4 | |||
| a6318e93a9 | |||
| 7a7ab9b1ec | |||
| 6d8bb46985 | |||
| 0b18e6d18a | |||
| 02dfda80a2 | |||
| cb1e6f61f5 | |||
| 2a359dfc4e | |||
| 9f494288a9 | |||
| d90f2181b8 | |||
| 8be4582b5d | |||
| 8c817fba7c | |||
| 543d32543c | |||
| f3dfb3383f | |||
| 8b733ed1f3 | |||
| ffeb35a20b | |||
| 2a1fccc43c | |||
| a1097d2bb5 | |||
| b58465a769 | |||
| 0f6af47eee | |||
| 1573d61a7e | |||
| eb575a2320 | |||
| bbe4e36bd5 | |||
| b7de3669c3 | |||
| 4011e55823 | |||
| 808992ecea | |||
| 42596c6054 | |||
| 2cb3c82bd5 | |||
| 573143c57d | |||
| dfd77a3832 | |||
| c3d54defb7 | |||
| ae842984f2 | |||
| 421c586adb | |||
| d06f9e17e1 | |||
| b26954f326 | |||
| a7ee987e04 | |||
| cbdf48814b | |||
| df0854b897 | |||
| 2ea79cf466 | |||
| 8e2db899c6 | |||
| c24876ba45 | |||
| 71bd05bbd1 | |||
| 7551848a29 | |||
| 05c304a053 | |||
| 4caa61da36 | |||
| 080655b769 | |||
| de4ab78664 | |||
| e72db54027 | |||
| 8984c3d59c | |||
| ce048c0521 | |||
| 6c54220050 | |||
| 179ad9bbed | |||
| b6336a1fb0 | |||
| f2676599c4 | |||
| ba6cac9c2d | |||
| 95633e0a88 | |||
| 515241edca | |||
| 928231ce06 | |||
| a46c6becbc | |||
| 273889eda0 | |||
| d0ce4359df | |||
| f1a0a22c2d | |||
| 64af43de4e | |||
| 8c4f896e8a | |||
| ce33166c5c | |||
| 10696fd5e7 | |||
| d53336baab | |||
| af96060e69 | |||
| faccc1735f | |||
| ab033807c4 | |||
| 62c6353ca0 | |||
| 79c1786bf3 | |||
| 4b6759f2ad | |||
| e166fd6703 | |||
| 91f5f07a7f | |||
| 9678b6943d | |||
| b439308023 | |||
| 8e1167d848 | |||
| 575efb07f4 | |||
| ce93ab8234 | |||
| 73ecc91188 | |||
| f0a0dfc72a | |||
| 95788f5dcd | |||
| af18dc6683 | |||
| 8d6540289d | |||
| 7a5a24f210 | |||
| 1f2f4b61b0 | |||
| 71d6bec954 | |||
| 4da95a389a | |||
| a28936199c | |||
| da5cb20c3b | |||
| f01dc76b7f | |||
| ae7bcb84ab | |||
| 971686af2c | |||
| 4608d5fc2d | |||
| cb6ffc294c | |||
| 52b1df0b4d | |||
| 4b7cb28d3b | |||
| b9ab6d87c7 | |||
| a5055ab67b | |||
| f72f9f32cf | |||
| 1eee2b8710 | |||
| d3aaa8ae75 | |||
| 69e80fd415 | |||
| 836760accf | |||
| cd504285b4 | |||
| 3b4d5691d7 | |||
| 45c09d021a | |||
| 8ef759fe0f | |||
| 977bcbb741 | |||
| 3d11b85842 | |||
| b8d0a5064b | |||
| b50b796f15 | |||
| 04111e8a06 | |||
| 432369abec | |||
| 8743c91275 | |||
| b79dd3dbe0 | |||
| d9df5d02be | |||
| 2413a4d081 | |||
| 4646394fe3 | |||
| 288006ac1e | |||
| af65723a2c | |||
| ac5834f54d | |||
| bdc5143260 | |||
| 067d600fd3 | |||
| ee57c5f78f | |||
| 45722bbf56 | |||
| 8fee5e0fd3 | |||
| 728c98f26d | |||
| daab5ea0bc | |||
| 04e2d6e8eb | |||
| 58038a72d3 | |||
| 38ac0f1287 | |||
| c870047f44 | |||
| c65d472b97 | |||
| 239e332a28 | |||
| c42c43165c | |||
| b1810a2dbe | |||
| bab25bf763 | |||
| 037ed5cbf6 | |||
| 1d0de6172f | |||
| 8a9dfa6143 | |||
| be2de6f90d | |||
| 604797b3f0 | |||
| 7c4e08cad3 | |||
| 075cd45c4c | |||
| 096e2784d2 | |||
| 0bba84d42d | |||
| 4a9feef7d1 | |||
| 818595aeb5 | |||
| c2cd098003 | |||
| 7639eaa942 | |||
| 96e9f9f780 | |||
| d7177dd4b7 | |||
| cd04ae461d | |||
| 5049ff24fa | |||
| 1bc22b15e3 | |||
| d9e46378ba | |||
| 2c30790d4f | |||
| f5ccd060a7 | |||
| be40150515 | |||
| b6c6317c62 | |||
| 5b00ddc43f | |||
| 75be822b1b | |||
| feae7687e8 | |||
| e1ceb02af1 | |||
| 160f440588 | |||
| 3e439e1bef | |||
| 073b81cfb9 | |||
| 85a832007a | |||
| 2760b7d431 | |||
| 2bc429851d | |||
| dbdb94ebcb | |||
| 975672a551 | |||
| 9e095022f4 | |||
| 7fcedf452b | |||
| 8618664578 | |||
| 7405e45db2 | |||
| 8241386e7f | |||
| cf36c4eb8f | |||
| a311531621 | |||
| 2876958ca6 | |||
| 2ad60cddfc | |||
| 12fd0275ee | |||
| 3f66dcb0f6 | |||
| 6d01c8f41d | |||
| 652b902ece | |||
| 1225d4da15 | |||
| f313481727 | |||
| 9c84697094 | |||
| bcd3e3fdb6 | |||
| 05bfbd8028 | |||
| 9c07cada05 | |||
| 3e15ccee59 | |||
| b169b2132c | |||
| 6ed461788e | |||
| c3fea28592 | |||
| fcae1c9902 | |||
| b177ddf3bb | |||
| 2c64e8e4ec | |||
| bba51ca5f3 | |||
| a727350f37 | |||
| bde369df29 | |||
| 1ba227e449 | |||
| 9ad772e39e | |||
| 68b682ecb9 | |||
| c06735fdd2 | |||
| cf4297a1af | |||
| 5d458d68bd | |||
| e24fc94af1 | |||
| a04f91ae64 | |||
| 4120f7e923 | |||
| ad28e86d4c | |||
| 9978c45548 | |||
| a874032fba | |||
| b9ba0ad7c9 | |||
| 1602664130 | |||
| da82810334 | |||
| 73e9357364 | |||
| f00a87fc06 | |||
| 730ed42931 | |||
| b2787f8bf9 | |||
| d254775f24 | |||
| 3a6a95704c | |||
| 6bb753d0ae | |||
| 3aea2b02b7 | |||
| fb9c50f6b3 | |||
| 332440a6d3 | |||
| 3b5a6fabee | |||
| 426764fcce | |||
| 02ca84a467 | |||
| 6fd393949c | |||
| c8d16c7261 | |||
| 2332372bca | |||
| 5bd45e2c2a | |||
| 5a7b50ed2f | |||
| ee469c7759 | |||
| e075803907 | |||
| c3db077ae8 | |||
| 779b265b20 | |||
| 3d57f10a2f | |||
| 2ece0698cf | |||
| 43d424f1f8 | |||
| 8428d2af7f | |||
| ebcb827400 | |||
| e7773f6227 | |||
| c06250ecb1 | |||
| 97786eb396 | |||
| b1a16517bf | |||
| 0bfc9545d3 | |||
| 9caf7763a5 | |||
| 0db62d0af1 | |||
| 0bbaf9bf95 | |||
| 80406f855a | |||
| 300b5cafe2 | |||
| cb36cba9ab | |||
| 7c2026ac37 | |||
| 5703c52fd7 | |||
| b3eba5c841 | |||
| 4a6d272017 | |||
| 26f5f35754 | |||
| 8c36502aeb | |||
| 0918d6be07 | |||
| b48b102b35 | |||
| 46f98d643f | |||
| a9eccf2819 | |||
| 98ff38f8e1 | |||
| d969f55730 | |||
| 082236e7d8 | |||
| d43c81401e | |||
| c9824f1486 | |||
| 1e8f49d3a3 | |||
| 42696b82d5 | |||
| 004596a9c2 | |||
| c994f52a79 | |||
| ccff695f3a | |||
| d1c318ed7e | |||
| 059dbe684c | |||
| ec3c35095b | |||
| ad8f5fec5b | |||
| 376f1ac5e0 | |||
| 3978c50afc | |||
| 0d25226c30 | |||
| f99cc74466 | |||
| 160d09f1d8 | |||
| f591b1711d | |||
| b4ce247c71 | |||
| adc7cd82fa | |||
| 9327be14b7 | |||
| 8fa4fe85c4 | |||
| 2eb61f4777 | |||
| 0bdd3addc8 | |||
| 756738b540 | |||
| 6b55bba634 | |||
| 3f679ffb6a | |||
| e87fe870bb | |||
| 6fc6b23f38 | |||
| 03d633715a | |||
| 99087ccdf0 | |||
| b832025e88 | |||
| 1e5e3a225c | |||
| c6f79b35de | |||
| fa97bcc9f5 | |||
| a1ef070d49 | |||
| 1db71d9ba7 | |||
| de139cada0 | |||
| b994ec8bbc | |||
| 90b326da53 | |||
| fa70e6ac9d | |||
| a2b5a78454 | |||
| 167c944b64 | |||
| 77c7836a93 | |||
| 388194e8bc | |||
| 1f5558a757 | |||
| 04c1bd8a1f | |||
| f431ccf307 | |||
| d06ad58826 | |||
| 4d10698cfd | |||
| a253a5a07a | |||
| 3ee3c312ef | |||
| 83b1e0ffba | |||
| cc7242dfd3 | |||
| 45f6cf29de | |||
| 26dc01e079 | |||
| f2659c52e9 | |||
| 12d690e264 | |||
| 0e0a945686 | |||
| 53cdca277c | |||
| 0d7ca3a67e | |||
| cab726a327 | |||
| 0fbdc465a7 | |||
| fcf95457c2 | |||
| 415c4e2ec3 | |||
| 7afe4ab477 | |||
| 69a58c435b | |||
| eecf220bfe | |||
| 10aefa2da3 | |||
| ecd0734a6b | |||
| 0f6673d6fd | |||
| 65feb3f032 | |||
| 17731931e5 | |||
| 06df834435 | |||
| c18806394b | |||
| 21ec7219c3 | |||
| b58e0f85f9 | |||
| 110ca34e1d | |||
| 1d543169e2 | |||
| 45d464cb93 | |||
| f60158f0fe | |||
| cf903ab6ce | |||
| 6b11291284 | |||
| dd3993ec4f | |||
| faaf31e426 | |||
| 30ad8f107d | |||
| 2922b4c1dc | |||
| 958ac6b3b9 | |||
| 548f7e47c5 | |||
| 1ebb04bae6 | |||
| cb37f05728 | |||
| 0b3b4a6417 | |||
| 7cf30e3a3a | |||
| 0bf2700687 | |||
| c773359c0b | |||
| b80d801acb | |||
| 1bdd1dbcc4 | |||
| 66637feb73 | |||
| 22fb33b8a5 | |||
| 8ea3eb7493 | |||
| 3addc4e2ac | |||
| 7ff7e489ab | |||
| 95aa0da014 | |||
| b12637267b | |||
| 3a44f70db9 | |||
| 92206d9740 | |||
| bddd91df2a | |||
| 144ead8cfe | |||
| 185f8e7f44 | |||
| 1538cd83af | |||
| 027f7deb3a | |||
| 4043a6098b | |||
| 92b913be98 | |||
| 8505ba6b84 | |||
| c6973f6b4e | |||
| 94eddaff3f | |||
| f38be4aff3 | |||
| 3ea78fcf9f | |||
| 78cfcc6206 | |||
| 9c5d4a18ce | |||
| aa48b4d596 | |||
| 265acd9345 | |||
| 34ec1d5671 | |||
| 4a1f4f0a01 | |||
| 850528820f | |||
| 4dc8197c51 | |||
| 42e97eed4c | |||
| 065f30ac38 | |||
| 9e705a12b1 | |||
| b8144f0748 | |||
| e5b5faad3e | |||
| f840c1d424 | |||
| eabd268874 | |||
| 06aadc543a | |||
| 2a410f52b5 | |||
| eb7a32ed16 | |||
| 14118d3056 | |||
| c8b3d8ad9b | |||
| a097b32d5c | |||
| 0a88a9cee6 | |||
| bef1c0c5fc | |||
| 52e6ef436f | |||
| ad0617de90 | |||
| 1753c1a40a | |||
| 13b7004959 | |||
| 3b9a21bbf7 | |||
| 5f0beb9d00 | |||
| 8411a73589 | |||
| 009a3751c0 | |||
| adef88e358 | |||
| f8b9df7bf0 | |||
| c6fa3e1d24 | |||
| ae24dd1e28 | |||
| 1efca7dd48 | |||
| 3178f4e7e9 | |||
| e00f61dcd0 | |||
| 6a5774aae7 | |||
| 5119061861 | |||
| fdfbae334a | |||
| e3fce43e62 | |||
| 9251f8ff0e | |||
| 18ef40f6f4 | |||
| 46887d1d9f | |||
| 632d10e9e3 | |||
| 9fa33eea73 | |||
| 2c4c13bc2c | |||
| 33f8a4eb3a | |||
| aa7959a970 | |||
| 7471bc0bb2 | |||
| b257f75e53 | |||
| 455e279216 | |||
| 7fd359852a | |||
| 82ecfdbd37 | |||
| 478452983f | |||
| 5c1a7d485f | |||
| 39c4ce9240 | |||
| da49585df5 | |||
| 0b9bee02fe | |||
| 00e5d1ae27 | |||
| b290d63926 | |||
| 1b5438cc6c | |||
| 17323facce | |||
| bc9dedeea4 | |||
| 1b3a383b53 | |||
| 4e0a535402 | |||
| 0005db1b33 | |||
| 5cd74031be | |||
| facb85b5da | |||
| 5f97d17837 | |||
| af722e09f8 | |||
| 959edf6010 | |||
| d08f183394 | |||
| da541ae182 | |||
| 4582c4c03d | |||
| 8c7d70d434 | |||
| fcf9f9e562 | |||
| 7bb0fb294a | |||
| 2e7b5ba5f0 | |||
| 6b017f3678 | |||
| a303d00900 | |||
| aaa351dca4 | |||
| ee5fb5361c | |||
| aaffcdbfeb | |||
| a8fefb5a90 | |||
| 8e3b07fa49 | |||
| 36ac618e88 | |||
| ab0eeae1e3 | |||
| f74adffa12 | |||
| 8f23f41e2f | |||
| 7d04844c6a | |||
| c301dcc226 | |||
| 8dd4ece730 | |||
| 75bd68f9fe | |||
| 96af022afa | |||
| c570c68f1b | |||
| 21a226a486 | |||
| 2695cb8e46 | |||
| 2207405ebc | |||
| 3802123147 | |||
| c6c630f5c6 | |||
| 7c76a17c08 | |||
| 5ba7723fa0 | |||
| 87ff07c850 | |||
| 71e1a2eeec | |||
| 88fba0f53a | |||
| 07260a8e06 | |||
| c1d155b569 | |||
| 7e7cfe1db1 | |||
| d27a44ab7f | |||
| 2adcca1cda | |||
| cf854b7262 | |||
| ecb0e07312 | |||
| 7d6d7e619b | |||
| 8b2b88c7cb | |||
| 9af1a6a16b | |||
| 34caa16e39 | |||
| 976426dbd3 | |||
| d1ac9adc7e | |||
| 513edf90f7 | |||
| 60d47510ab | |||
| 5b7b9837f0 | |||
| 333c3327c4 | |||
| 9723c98940 | |||
| 97604f3c5b | |||
| 044f18da46 | |||
| 53946b9523 | |||
| fd8f88c5e4 | |||
| e7d15ce2b0 | |||
| ff1d98a87e | |||
| accc68cd28 | |||
| b2c7bc980f | |||
| 75fbe8d5d8 | |||
| 13ebf3b3aa | |||
| 916d3812db | |||
| 90610c819b | |||
| a5f6f62559 | |||
| bfb3501dec | |||
| c0513c50b1 | |||
| bcf4baf004 | |||
| 53bf948a04 | |||
| 2186d91f89 | |||
| aaf856a503 | |||
| 8af625b7dc | |||
| 4690891757 | |||
| bb3e17c0fa | |||
| 7965df5ff2 | |||
| 5b5f0a5a8d | |||
| fdb087a39b | |||
| 97749378d6 | |||
| 63dc2301ff | |||
| 5659c0bc70 | |||
| 1e288ab0fd | |||
| 4f058a0174 | |||
| 7284114565 | |||
| 0b2592dbd7 | |||
| edfaf6f002 | |||
| da3990b614 | |||
| 25740ae13c | |||
| fb4c05f698 | |||
| a0c4e37c94 | |||
| 278caf6f0c | |||
| 2ce0c61f83 | |||
| afb25324a7 | |||
| ba1b761c08 | |||
| 0e2d4af617 | |||
| 1b0b54a072 | |||
| 9c629d3c5c | |||
| 173af4e459 | |||
| c0f12c0a5d | |||
| 390605fe66 | |||
| e4bd5f865c | |||
| b31c891772 | |||
| 08e4016972 | |||
| aea7eb6ba3 | |||
| 5496750085 | |||
| 4b9709898c | |||
| 705daac737 | |||
| a802b32f47 | |||
| 8b8db5e447 | |||
| 3ee44599c7 | |||
| 2955a41ed5 | |||
| a52802c882 | |||
| b46c70512a | |||
| 18f91e2eeb | |||
| 9296984569 | |||
| 7b835d9855 | |||
| ce23b9169b | |||
| 47a535d309 | |||
| 6342801aa0 | |||
| 50c00f5516 | |||
| 4a49678fb6 | |||
| 0f10b8f677 | |||
| d8433b79cc | |||
| f94f640212 | |||
| 5cf779757f | |||
| d49acf379e | |||
| b9bff4abc0 | |||
| 6fc4dbe9d1 | |||
| cca8132a2c | |||
| 91654ca219 | |||
| 547d7eca59 | |||
| b86bf31baa | |||
| 5b5b4efe42 | |||
| e9fb65edba | |||
| cc1cba9aa8 | |||
| a765c566c8 | |||
| 63e9022b84 | |||
| 368a995e7f | |||
| c844c66b5a | |||
| 73b18313e9 | |||
| bdd68dc6c9 | |||
| 3901b94382 | |||
| 82ac276338 | |||
| 02c9f3ebdb | |||
| 364ad63877 | |||
| 5fc4196d01 | |||
| 3a1e10bd21 | |||
| 73519ec562 | |||
| bf9c9916b1 | |||
| 01d017c6cd | |||
| ca98ab02d8 | |||
| 347804a02e | |||
| 4c80f8dbf4 | |||
| 73ee96040f | |||
| 6180da1333 | |||
| 2756ff6ad0 | |||
| e57491b812 | |||
| 9d8ae538d9 | |||
| dd7defd2c7 | |||
| e79ec45b5b | |||
| 1a138bbc16 | |||
| b067165471 | |||
| 6fbcbb9399 | |||
| aaf77b4e20 | |||
| f5cc2e952b | |||
| eeab362567 | |||
| 834205c426 | |||
| fbad8ca62e | |||
| 1e4c6f13c5 | |||
| b7c2b3d4cb | |||
| 0d5b7d36f1 | |||
| 059886fede | |||
| db7dd0ca43 | |||
| f4c611b47d | |||
| 39c32646c5 | |||
| 1720fffbdc | |||
| b4d8e39d56 | |||
| 6c51cd0d1d | |||
| cb9cdc508a | |||
| 7d037a206f | |||
| ace10ab4be | |||
| bc0a7b6ac3 | |||
| e77e2045e3 | |||
| abbd55c740 | |||
| bf5e80a462 | |||
| 121deaae5f | |||
| 80317232ba | |||
| 22f815dcd1 | |||
| fb96c3ab47 | |||
| 3b15ad51a1 | |||
| 11c41e7381 | |||
| 358d8a54ff | |||
| 3c8fedce68 | |||
| 5066336c75 | |||
| 1744b5b9d0 | |||
| 0807744577 | |||
| 59f871d3ec | |||
| fed351a2fc | |||
| 0c15476dd2 | |||
| 94ef76c67e | |||
| bd5bf6f94f | |||
| 1fbf454c3c | |||
| 07b62fe5c1 | |||
| 7fbf6ee2e8 | |||
| ba66fc30c5 | |||
| 45b7ed3220 | |||
| 20f1c4c0ae | |||
| 97b6fc5e2b | |||
| 44d8c30187 | |||
| e3957bf08b | |||
| acfe0aba21 | |||
| 6247b5411b | |||
| 5cc0b0a011 | |||
| 1fed2fb18c | |||
| 8a0e7a4e3d | |||
| 29a784c6c6 | |||
| 409a3ee194 | |||
| 54caa3e01a | |||
| e1a723a39f | |||
| 463ea35d7c | |||
| f751c91c68 | |||
| ad24c8771a | |||
| 6f82e2c3ed | |||
| f4b39071f0 | |||
| 621c968f3f | |||
| aeb129e422 | |||
| 3050b546ac | |||
| 1429726ba6 | |||
| 4075581acd | |||
| 56774fd974 | |||
| 5e674d2299 | |||
| 06f5b6d6ff | |||
| b25b4e6c8f | |||
| 645e07dba8 | |||
| 46181fcaa2 | |||
| 8d7ae425f9 | |||
| 7d572334a1 | |||
| 5dab6f68e6 | |||
| d1c595d8db | |||
| eaa2635b51 | |||
| dc2d226ddb | |||
| 336a4e1f35 | |||
| 4d3b6b4f43 | |||
| a12601b49c | |||
| 15a895064e | |||
| 8bd1507ace | |||
| 89d7ec5d0b | |||
| 670e57735a | |||
| fa703c25e8 | |||
| f58161b1d1 | |||
| 8db2a37a59 | |||
| bfdb9c2624 | |||
| 240e984737 | |||
| fe128c18b1 | |||
| b208d8c40d | |||
| 556641e1f4 | |||
| 464eb671db | |||
| 12b8f1e3ef | |||
| ab199afe0d | |||
| fe1a498da0 | |||
| 4f9d55eb42 | |||
| 70f450f547 | |||
| 28fc7deefc | |||
| 428babf856 | |||
| b824ddf2e3 | |||
| 2396966740 | |||
| 23ca49128a | |||
| ec6bdede20 | |||
| 4ada2013d2 | |||
| 79afef6bc1 | |||
| e7000df89f | |||
| 59f77a64ea | |||
| 8be152666e | |||
| 10488854ce | |||
| 6586aafed8 | |||
| 4568a60be3 | |||
| 193bc8bb8e | |||
| ce381b7690 | |||
| b238428816 | |||
| 0ac37f50cf | |||
| 54b9389b77 | |||
| a183c26e51 | |||
| 01a03d164c | |||
| cdff1fde2d | |||
| c38b9998a6 | |||
| 77c1a335ad | |||
| 07a0fe6249 | |||
| 204bc46976 | |||
| b910506519 | |||
| 3cef39da17 | |||
| 3aea29bcb5 | |||
| dd0d19168b | |||
| 6727fcd111 | |||
| 9d347f4a5a | |||
| 084e48ddc2 | |||
| 31e89ce9a1 | |||
| baad3ae1c3 | |||
| 7c099cab94 | |||
| 811875dd2e | |||
| 127443d550 | |||
| d2064605bf | |||
| 4c6fb61ca8 | |||
| 608ba8bcb4 | |||
| b53c054dee | |||
| 05aa4b547f | |||
| 6afb61d25d | |||
| a7ce5c1ca6 | |||
| 952bd2bd93 | |||
| f9d33d4888 | |||
| 81d99f19d4 | |||
| 454a4257da | |||
| e513b42786 | |||
| b607e3c034 | |||
| d5c3f5012b | |||
| 21d045be59 | |||
| a9c1c34dc9 | |||
| 44ab0483b6 | |||
| 9eb0cc0b62 | |||
| 2db74867f5 | |||
| fd30baafb8 | |||
| 3623eef47f | |||
| 7b07bb7884 | |||
| 7946cd6614 | |||
| 51b6e30986 | |||
| 002df7b0f9 | |||
| 564cf0fed0 | |||
| dee9492d4c | |||
| 6ae026f7c5 | |||
| 6bcbe286f3 | |||
| 6f35f72607 | |||
| 3a7aa75538 | |||
| e4e7ac260a | |||
| b8aaa4bb42 | |||
| 7793e2694b | |||
| 83f2c72f29 | |||
| 1caeaee7f0 | |||
| f354134234 | |||
| 66219d30e0 | |||
| b9e3942ed8 | |||
| 2354cdc1d1 | |||
| d929438df9 | |||
| 1acaed1de7 | |||
| 16195f8a55 | |||
| d7fc8c178f | |||
| 2894e16706 | |||
| c2340f3432 | |||
| 3b7b3106db | |||
| cff92819f9 | |||
| 2f981d852b | |||
| 8eef74d776 | |||
| 60e46204dc | |||
| 6a5d783435 | |||
| 0223e076c4 | |||
| ce80c78319 | |||
| cc0085ae61 | |||
| f28e243b9d | |||
| 2e4532593f | |||
| 1f10905a03 | |||
| 88762db484 | |||
| 3b5ab0ac70 | |||
| 8903c9296b | |||
| 97858a3c94 | |||
| 0ec3e83518 | |||
| 8c007b5bf7 | |||
| 768236b0e2 | |||
| 495d78b885 | |||
| 34b1e515fe | |||
| 2ac1789312 | |||
| 79edbd3fa5 | |||
| f50d9994e2 | |||
| 1603d3085f | |||
| ccf7036f33 | |||
| a0a1a5e3c1 | |||
| fbf9120859 | |||
| 8a770beec3 | |||
| 6b31669765 | |||
| 26d72fc2d8 | |||
| 5eb56d0994 | |||
| dbc4a922d5 | |||
| 141f423842 | |||
| 667f2433ab | |||
| fd930ef548 | |||
| 7eadfb1a63 | |||
| 67cb07ac92 | |||
| 96d28c43fc | |||
| e57e3f5f0a | |||
| 7b99bd71da | |||
| 861a037321 | |||
| 84cbe6c434 | |||
| 2cbb811523 | |||
| 8ef4faa10f | |||
| f6a1c9bf52 | |||
| 5d9f6fb4f5 | |||
| 66840a8ecd | |||
| a8ee6b255a | |||
| bd73d1c533 | |||
| e33c0ebc42 | |||
| 57e4a35fee | |||
| d490b57410 | |||
| 0416602e5f | |||
| ddc27b2eb9 | |||
| 374deb147b | |||
| d2eabd1ad0 | |||
| efbc625cc3 | |||
| 91ae0b8cb0 | |||
| ddc5741b00 | |||
| 4729aca6b0 | |||
| bb4fc3a70c | |||
| 5d8084b650 | |||
| f316b892f5 | |||
| cbda1d7cd0 | |||
| 2f8e879976 | |||
| cc0ac5ae3c | |||
| 0185d24fb3 | |||
| 97dbdc9c31 | |||
| a07c66c9a3 | |||
| 308bd25bc0 | |||
| 70066a03b6 | |||
| a7f3872af3 | |||
| 22e10e675a | |||
| 89679e946d | |||
| 1d1bb9d3df | |||
| 8faf2b2595 | |||
| e47ad9700e | |||
| 372b19a057 | |||
| cbe156a868 | |||
| 181a3881e2 | |||
| 3eef03b303 | |||
| ad56e3165c | |||
| b1a96b6e75 | |||
| 56419b1b4e | |||
| 372f14a9c5 | |||
| e1ec56a120 | |||
| 5bb11249d6 | |||
| 9fbcca1ff2 | |||
| 323f2b2c3e | |||
| b971d38dd5 | |||
| 278f479a3a | |||
| 03aea5678d | |||
| b62b8ee7e6 | |||
| 63f55551e5 | |||
| b185fbc57d | |||
| ceb9d58e72 | |||
| a0bb515a4f | |||
| 2cfac2f18b | |||
| d412f538b2 | |||
| 94f90ad861 | |||
| 4a402e7937 | |||
| c226d6c391 | |||
| 67410e6c59 | |||
| 419c361147 | |||
| 3769a53ffa | |||
| ec4aaa3bfb | |||
| be52680fcd | |||
| 9d41ab9339 | |||
| f126fc3087 | |||
| 764377037c | |||
| 8e09eaab45 | |||
| 6523da186c | |||
| 6471fd8b6f | |||
| 247a74881a | |||
| 3ef09f0a5f | |||
| b924d331f9 | |||
| 14041b6012 | |||
| 2c6cc5ecec | |||
| ac022b1df0 | |||
| 0a2081de08 | |||
| 64a8e554c7 | |||
| 082d29fd2f | |||
| ba5cf9d002 | |||
| 57a55318df | |||
| e70f4f7a59 | |||
| 1d217fad67 | |||
| e95d46f085 | |||
| f4577878e1 | |||
| 1bd1e5c8e3 | |||
| c975dee965 | |||
| 9d690f4219 | |||
| 29ddb3f58d | |||
| 8626bc0b1c | |||
| c362cf6596 | |||
| 97264fc5ff | |||
| 494c4409c1 | |||
| d46e366c81 | |||
| 6afe33ee9c | |||
| 903c9e1cc3 | |||
| 3ef43fc3f5 | |||
| b1c3be05dd | |||
| efee23b4a7 | |||
| 06b67a7586 | |||
| 889a2dbf9d | |||
| 2f80fcc888 | |||
| f7ee479c1d | |||
| 94fa0981fe | |||
| 4c74afe438 | |||
| f76cea22de | |||
| 3d49110808 | |||
| 88a4579f7a | |||
| 241bde0333 | |||
| 73c7867cd6 | |||
| b35254f7ad | |||
| 213e78c956 | |||
| 7bf552c491 | |||
| 3bf9923f86 | |||
| a6a8a28f59 | |||
| 56a8e452bf | |||
| 6bec0bf70d | |||
| 5dc9c8f90e | |||
| e3290e12b1 | |||
| 9f37ce9e42 | |||
| 8904c0c811 | |||
| b0d021b7f2 | |||
| 0175f3b8a1 | |||
| 0fa9d5bf62 | |||
| 4919e38e3e | |||
| 2e99533f96 | |||
| f095645d89 | |||
| 757c83142e | |||
| 36d274ca9f | |||
| ec11b61f67 | |||
| 7765271d63 | |||
| 7c2464bba7 | |||
| 17e010f93c | |||
| 452d630a2a | |||
| f317a3e38f | |||
| f56195058e | |||
| 2e93dbb10c | |||
| f862456d73 | |||
| d99b0b2137 | |||
| 1d390f9aa7 | |||
| 514beb7940 | |||
| c7bdfce734 | |||
| e5fe4b06ad | |||
| 89b7c265d3 | |||
| 698c31943e | |||
| b70060d46e | |||
| 6ddc5ef53e | |||
| 212023c7e4 | |||
| b687f23c95 | |||
| 7a05d01554 | |||
| 78e3a57857 | |||
| 79d0c96b20 | |||
| 21ed38a20e | |||
| d8b1f99114 | |||
| b0fb1b9890 | |||
| a63932cff2 | |||
| 0b22165d2a | |||
| 41b1951abe | |||
| 353431e54c | |||
| 7b232dd7d8 | |||
| d32adf9dbf | |||
| 940d490217 | |||
| 46e41e38cf | |||
| 276ff8f995 | |||
| 030837fccf | |||
| a7d38aefb1 | |||
| 230a0d7caf | |||
| 6e14e43c78 | |||
| e6389f08be | |||
| a4edeb098e | |||
| 093c536415 | |||
| 7479b50fea | |||
| ebce36d043 | |||
| 77bab1aa74 | |||
| ebcac3405c | |||
| d2781a6f87 | |||
| f5954f5bb3 | |||
| 6baf694d6f | |||
| cb3b586d4d | |||
| f68789ab20 | |||
| 0c6a3f1917 | |||
| 05fccaf982 | |||
| 7340b9ecc2 | |||
| 78eb4ebe0b | |||
| b1453a34ec | |||
| c357e9e2f5 | |||
| 98717bf8a9 | |||
| d7077ada0e | |||
| 64f63ed1d3 | |||
| 2a27f6c30d | |||
| 9fdddeaba8 | |||
| 2cfa5e93e4 | |||
| 778ac14344 | |||
| 85fcf8be61 | |||
| b31eb09015 | |||
| 5154dd1740 | |||
| 274f11ef1d | |||
| aeb1acf458 | |||
| a204f4a58e | |||
| 8e4a57aa01 | |||
| 797ed0a553 | |||
| 663bc0d471 | |||
| 8d7e2d2c46 | |||
| 19d96bb30b | |||
| 47f2f20d9c | |||
| 12c7c634c0 | |||
| 9a322c150a | |||
| 1a3bc4f666 | |||
| d4881b1ce5 | |||
| a2ad2df473 | |||
| 541c5bd1c3 | |||
| b744e9673b | |||
| bb94b7c5c6 | |||
| e9ff57d5e1 | |||
| 179245457c | |||
| 1493f74691 | |||
| 4857503ed3 | |||
| a0e38b4f0c | |||
| 1d62cad9e9 | |||
| 855761020c | |||
| 0950d06dfb | |||
| 1496402325 | |||
| 77e2c4babb | |||
| a465082984 | |||
| 025fdac686 | |||
| 6bde5ec64c | |||
| f099a9ec39 | |||
| 5bfcef92ee | |||
| 79a8fbd881 | |||
| 7f96a14cf6 | |||
| 5fe6d70713 | |||
| dcba4dd4bc | |||
| ccbe77913b | |||
| 2844cb81c2 | |||
| d86e8e5920 | |||
| 9665fa1eb4 | |||
| 2788ef679b | |||
| e1a88e1fd8 | |||
| 32163c5302 | |||
| 2d3d5efe87 | |||
| e1bbba392c | |||
| ed642c856b | |||
| 927e462f7a | |||
| e250499a3b | |||
| 91d96a6639 | |||
| 104ec4c87c | |||
| 0a7e8436c3 | |||
| 9e597e0a28 | |||
| 01fbb5d47c | |||
| 6517d16337 | |||
| 0e636adf28 | |||
| 0bb281237b | |||
| 2b224376c2 | |||
| e510b369d7 | |||
| a0de1f7230 | |||
| 4591132269 | |||
| a03de8d490 | |||
| 27bcfec17e | |||
| f6dbec3e1d | |||
| aebc45f705 | |||
| 310c60b9d9 | |||
| bcba67c209 | |||
| fc013aed52 | |||
| 8ad41c059b | |||
| 8eaf8db850 | |||
| 896883766c | |||
| 258dacf3ed | |||
| 242243f485 | |||
| a18436dce1 | |||
| 5323cbc00e | |||
| ddd3b137ac | |||
| 94550088e5 | |||
| 1375ca6f5c | |||
| e4c4fe0495 | |||
| 2fa5277e56 | |||
| b73ad8fdc1 | |||
| 9cc281e65e | |||
| d62107d39b | |||
| 4a8d20ad72 | |||
| 5acb72c39b | |||
| 67e8236a60 | |||
| 18b8853f82 | |||
| 65c7df7938 | |||
| 15678cdfa2 | |||
| 6cd6c62046 | |||
| dbf92805a2 | |||
| 11fc9a7b85 | |||
| 8bc970ff57 | |||
| a16eefd97b | |||
| ca5e5b820c | |||
| f73ad52441 | |||
| 729ec1d1bf | |||
| 4adb30b861 | |||
| 999f6de45f | |||
| 70686502b4 | |||
| d17a980151 | |||
| 7fa5947030 | |||
| de8f120fd4 | |||
| 9b54603264 | |||
| 698c77d7ba | |||
| 18d83a4d18 | |||
| 8e849d93b2 | |||
| 4ca42f028b | |||
| 3118337879 | |||
| db4490affb | |||
| 51ab79384e | |||
| 3ee30a252d | |||
| b883566ebb | |||
| ac78fb85b8 | |||
| 0d2b11d0c4 | |||
| 5b610c88c1 | |||
| bf444ce043 | |||
| c91c027dab | |||
| 81fd87c510 | |||
| 9da174a962 | |||
| 84f54a7e65 | |||
| baeecf1464 | |||
| f2fdd39c96 | |||
| 53b074d78e | |||
| f4fc1e6775 | |||
| dba791b8db | |||
| 750fa02621 | |||
| 7a67816111 | |||
| 613625644e | |||
| 0e25071ef0 | |||
| ed1932cd26 | |||
| 67b89213d0 | |||
| 814f142c5f | |||
| 16cd3e7d5a | |||
| c5dcb8faef | |||
| 6b46c022f9 | |||
| 88ef05fc72 | |||
| 445ea367fc | |||
| c819554f43 | |||
| bbc8a79ded | |||
| 3d181bc10d | |||
| ba5478f382 | |||
| 136c993c8d | |||
| 6cf18ea4e8 | |||
| fe7f56c82e | |||
| 6c580f1e43 | |||
| f171cd4f03 | |||
| ea109e6c30 | |||
| f514eed226 | |||
| 274ba80149 | |||
| 46b4dfc458 | |||
| 4af8f4ff6a | |||
| df5810d695 | |||
| d9ad96c374 | |||
| 06cc93fd82 | |||
| 41da63765f | |||
| 3975411c78 | |||
| fc2e75ef61 | |||
| ef0f2dd3d0 | |||
| 548c3c5d72 | |||
| d2e3a0cb8e | |||
| 9cdace6f81 | |||
| 12f020570e | |||
| bef2551eec | |||
| 7e20f8c189 | |||
| 56e8390e55 | |||
| 89fff16385 | |||
| 2cf15a24eb | |||
| 512e867034 | |||
| ce8c55c3c7 | |||
| 8e0d904d9a | |||
| 6c846a8ae7 | |||
| 5004469fe9 | |||
| 14d0af74ed | |||
| 5a76cf9486 | |||
| 82901ccd02 | |||
| 1dc9d66673 | |||
| a0cbfaf390 | |||
| 9a01ae61ef | |||
| 91837d5acd | |||
| 1b9ebdda22 | |||
| b6f6177af3 | |||
| d35486196b | |||
| 1603637e3b | |||
| 8f20840169 | |||
| 4fff2394de | |||
| afb74e68ee | |||
| d5fa7844c5 | |||
| b8470cd640 | |||
| 9a23f573a6 | |||
| efe8fa0fda | |||
| 2d16e8bb4f | |||
| bbd95eebff | |||
| ceb00b4e93 | |||
| cc60d26d1c | |||
| ba3ff739f6 | |||
| 6062647705 | |||
| 070c1c2de9 | |||
| d3aaa69409 | |||
| 0ac7753e35 | |||
| eba9d53d2e | |||
| d04d4ec8e7 | |||
| c7c3efcbe7 | |||
| 2b8d53a44c | |||
| ef6b573e08 | |||
| 61eedd41df | |||
| b265bcda20 | |||
| d703d32a1f | |||
| aab9334404 | |||
| c2570f6955 | |||
| 8e936a6334 | |||
| 46bfc22869 | |||
| db1620dd56 | |||
| e59f8a42a3 | |||
| 17d18bd85d | |||
| fb256cf578 | |||
| 1b6b5db76d | |||
| 41647ca83a | |||
| 07d2a17a87 | |||
| 6d744dfb7e | |||
| b9b946c35f | |||
| 17adfe2117 | |||
| 1e5e21102d | |||
| 4af992222f | |||
| a9447c6a11 | |||
| db71323313 | |||
| b9b2748e05 | |||
| 387231f743 | |||
| 2216a89aa3 | |||
| 4faa6326fa | |||
| cb22b3d9a1 | |||
| 152a3873bd | |||
| adc2760a89 | |||
| dde64acb06 | |||
| 008adbd8bc | |||
| 0e4866a5a2 | |||
| 5cb96cae3a | |||
| 8cbb82a67f | |||
| 848ddbe477 | |||
| 083c1cde8b | |||
| b792971062 | |||
| 07dde8f4b1 | |||
| 01f94127dd | |||
| 4d457b4e9e | |||
| 8ac93ff2da | |||
| ef33a4b08e | |||
| fdd3b25a27 | |||
| 4dc979da08 | |||
| 8f426e03c4 | |||
| 40cd085bf8 | |||
| 6aa75fc5d1 | |||
| eae5920f9d | |||
| 2f6bfa37cc | |||
| 9d6fd9b9b8 | |||
| 260cd67c96 | |||
| aff76e2d18 | |||
| 52e4343045 | |||
| 1ffbb135c6 | |||
| c3ec522261 | |||
| 4538839376 | |||
| 834edd3a71 | |||
| 581c3d9593 | |||
| 0c672fbaa5 | |||
| 6d96b9a312 | |||
| 691791ccd0 | |||
| f4299121d5 | |||
| 1adfb7eedd | |||
| 33ad583d15 | |||
| a7e2fe2277 | |||
| 5a479d5863 | |||
| 873ff034d2 | |||
| 61d3537617 | |||
| ae068a3f64 | |||
| f7402cd6f5 | |||
| c53f9c8020 | |||
| 798b4d57f4 | |||
| 98d428fb34 | |||
| 3ac5ace216 | |||
| 444a1a7ab9 | |||
| 43ea4bd4b5 | |||
| 6a9272e40a | |||
| 10589a11aa | |||
| a88f898bc0 | |||
| 7a84038b04 | |||
| 111c40732d | |||
| 69bb78c8be | |||
| ad3b327d69 | |||
| dc27f38534 | |||
| 5b0816cb92 | |||
| 57f6955303 | |||
| 78915f878d | |||
| 6ced6d626b | |||
| ee3cb819b4 | |||
| cc17b1d19d | |||
| 2c83240d47 | |||
| 54f18ff120 | |||
| 5e1fe363c3 | |||
| 3d2ec507e1 | |||
| 1dd7af3c8b | |||
| 06ec1fcebf | |||
| 86cb863fd4 | |||
| d5ef1288d8 | |||
| f3354c498d | |||
| 9557141b38 | |||
| 3144b66e73 | |||
| 6dbefa3d2f | |||
| c8f3b139e8 | |||
| 288663325d | |||
| 49947ee01d | |||
| fa7a45ebc7 | |||
| 9a074c222f | |||
| 4e0d7b6ed9 | |||
| 1f3defb04c | |||
| 6c52c43460 | |||
| deae2879f1 | |||
| 5b255a7d8b | |||
| 6e06c24b7a | |||
| 2fde1efdd3 | |||
| aeb29d983a | |||
| c8a7123da9 | |||
| 5c22061415 | |||
| 9a0fda8c02 | |||
| 2f9a17c44a | |||
| 50559015d8 | |||
| a8d4e143c2 | |||
| 2a6c69538d | |||
| 0ba5d61353 | |||
| d436ec5790 | |||
| 759b822b92 | |||
| 9df45af698 | |||
| 3474e81446 | |||
| e1f07eb957 | |||
| 71ff1b98be | |||
| 9b370dfa88 | |||
| 0be0661750 | |||
| eaa7230af7 | |||
| 11cb000481 | |||
| 8ae3554a58 | |||
| dfd4736386 | |||
| feb793c9fa | |||
| ee962fde08 | |||
| c08dd96de3 | |||
| b52f771133 | |||
| 4631232551 | |||
| df7f5047aa | |||
| 467d14324d | |||
| cbdce08e96 | |||
| d6bf8f8854 | |||
| 4599da3ded | |||
| 6d50952b2e | |||
| 7066947809 | |||
| e2924aacab | |||
| 1e86d2503f | |||
| eb67eee53a | |||
| dfdad45963 | |||
| 4735508d87 | |||
| c43c47eab8 | |||
| fafb2dc6b9 | |||
| 140e99c465 | |||
| 7ba1974390 | |||
| 51b8510f17 | |||
| 5d6949d471 | |||
| 8e9d0c1fd1 | |||
| 3852a3b779 | |||
| 8b4ba96936 | |||
| 0c17e18491 | |||
| 2bdbab3afc | |||
| b97499a95e | |||
| a70ac57872 | |||
| a9cf457024 | |||
| e5c938ac37 | |||
| edad54efa2 | |||
| f88426758f | |||
| 77a28eb810 | |||
| f834b27562 | |||
| 984e257cc5 | |||
| 729e7612bc | |||
| 59fadeae57 | |||
| bfbf7a298a | |||
| aad5d3bd65 | |||
| 504f19c445 | |||
| 19c47eb442 | |||
| ab6043df60 | |||
| 3305549a0f | |||
| c24c3cb571 | |||
| 952999258b | |||
| 0713eaa52c | |||
| 8fee689f60 | |||
| 75ddb17fed | |||
| 0c6a74626c | |||
| 41e3d0eaf9 | |||
| 8b9cfebd42 | |||
| 16badee259 | |||
| 9d5171dd36 | |||
| e0c0e81b7d | |||
| fd4e8985fc | |||
| 1d9b8503c0 | |||
| b3ef7b914d | |||
| 2f59e12e20 | |||
| 30e8652c2a | |||
| 5ee6aceb60 | |||
| 6940b6a6d1 | |||
| 4e33ce9415 | |||
| 944e22bde6 | |||
| 6054fa0a26 | |||
| 4db13cfed4 | |||
| 6a6adda2e0 | |||
| 4afa55c0db | |||
| bc120bfb2b | |||
| 88966699e7 | |||
| 9a5db3dcfb | |||
| 392aa1e654 | |||
| f2b32e47ff | |||
| 58136d0181 | |||
| 02733e55cb | |||
| 60df8456a7 | |||
| 6d0ecc805c | |||
| a0e9dd24a3 | |||
| d1eb89057d | |||
| 161c6dc83a | |||
| 54848b8a7e | |||
| 990563c604 | |||
| 8489ca8c8d | |||
| b57e2c89e3 | |||
| 66bedf78ac | |||
| 592c5cce60 | |||
| 2ccf9a4e92 | |||
| ed333c0513 | |||
| 89b65b7009 | |||
| 0cc2d346af | |||
| 5f81e78bc4 | |||
| 554b5fd4b5 | |||
| a705b16493 |
@@ -21,8 +21,9 @@ assignees: enricoros
|
||||
- [ ] Create a temporary tag `git tag v1.2.3 && git push opensource --tags`
|
||||
- [ ] Create a [New Draft GitHub Release](https://github.com/enricoros/big-agi/releases/new), and generate the automated changelog (for new contributors)
|
||||
- [ ] Update the release version in package.json, and `npm i`
|
||||
- [ ] Update in-app News [src/apps/news/news.data.tsx](/src/apps/news/news.data.tsx)
|
||||
- [ ] Update the in-app News version number
|
||||
- [ ] Update in-app News [src/apps/news/news.data.tsx](/src/apps/news/news.data.tsx)
|
||||
- [ ] Update in-app Cover graphics
|
||||
- [ ] Update the README.md with the new release
|
||||
- [ ] Copy the highlights to the [docs/changelog.md](/docs/changelog.md)
|
||||
- Release:
|
||||
@@ -50,7 +51,7 @@ To familiarize yourself with the application, the following are the Website and
|
||||
```
|
||||
|
||||
- paste the URL: https://big-agi.com
|
||||
- drag & drop: [README.md](https://raw.githubusercontent.com/enricoros/big-AGI/main/README.md)
|
||||
- drag & drop: [README.md](https://raw.githubusercontent.com/enricoros/big-AGI/v2-dev/README.md)
|
||||
|
||||
```markdown
|
||||
I am announcing a new version, 1.2.3.
|
||||
@@ -79,11 +80,32 @@ I need the following from you:
|
||||
|
||||
1. a table summarizing all the new features in 1.2.3 with the following columns: 4 words description (exactly what it is), short description, usefulness (what it does for the user), significance, link to the issue number (not the commit), which will be used for the artifacts later
|
||||
2. then double-check the git log to see if there are any features of significance that are not in the table
|
||||
3. then score each feature in terms of importance for users (1-10), relative impact of the feature (1-10, where 10 applies to the broadest user base), and novelty and uniqueness (1-10, where 10 is truly unique and novel from what exists already)
|
||||
3. then score each feature in terms of importance for users (1-10), relative impact of the feature (1-10, where 10 applies to the broadest user base), and novelty and uniqueness (1-10, where 10 is truly unique and novel from what exists already)
|
||||
4. then improve the table, in decreasing order of importance for features, fixing any detail that's missing, in particular check if there are commits of significance from a user or developer point of view, which are not contained in the table
|
||||
5. then I want you then to update the news.data.tsx for the new release
|
||||
```
|
||||
|
||||
### release name
|
||||
|
||||
```markdown
|
||||
please brainstorm 10 different names for this release. see the former names here: https://big-agi.com/blog
|
||||
```
|
||||
|
||||
You can follow with 'What do you think of Modelmorphic?' or other selected name
|
||||
|
||||
### cover images
|
||||
|
||||
```markdown
|
||||
Great, now I need to generate images for this. Before I used the following prompts (2 releases before).
|
||||
|
||||
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is using a computer with split screen made of origami, split keyboard and is wearing origami sunglasses with very different split reflections. Split halves are very contrasting. Close up photography, bokeh, white background.
|
||||
import coverV113 from '../../../public/images/covers/release-cover-v1.13.0.png';
|
||||
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is calling on a 3D origami old-school pink telephone and the camera is zooming on the telephone. Close up photography, bokeh, white background.
|
||||
import coverV112 from '../../../public/images/covers/release-cover-v1.12.0.png';
|
||||
|
||||
What can I do now as far as images? Give me 4 prompt ideas with the same style and look as the former, but a different scene or action
|
||||
```
|
||||
|
||||
### Readme (and Changelog)
|
||||
|
||||
```markdown
|
||||
|
||||
@@ -12,8 +12,9 @@ name: Create and publish Docker images
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
#- main-stable # Disabled as the v* tag is used for stable releases
|
||||
- v2-dev
|
||||
#- v1-dev # Disabled because this is not needed anymore
|
||||
#- v1-stable # Disabled as the v* tag is used for stable releases
|
||||
tags:
|
||||
- 'v*' # Trigger on version tags (e.g., v1.7.0)
|
||||
|
||||
@@ -27,13 +28,22 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -41,21 +51,35 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=raw,value=development,enable=${{ github.ref == 'refs/heads/main' }}
|
||||
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/main-stable' }}
|
||||
type=raw,value=development,enable=${{ github.ref == 'refs/heads/v2-dev' }} # For v2-dev branch
|
||||
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/v1-stable' }}
|
||||
type=ref,event=tag # Use the tag name as a tag for tag builds
|
||||
type=semver,pattern={{version}} # Generate semantic versioning tags for tag builds
|
||||
type=sha,format=short,prefix=sha- # Just in case none of the above applies
|
||||
labels: |
|
||||
org.opencontainers.image.title=Big-AGI
|
||||
org.opencontainers.image.description=Generative AI suite powered by state-of-the-art models
|
||||
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
|
||||
org.opencontainers.image.documentation=https://big-agi.com
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
# Enable build cache (future)
|
||||
#cache-from: type=gha
|
||||
#cache-to: type=gha,mode=max
|
||||
# Enable provenance and SBOM (future)
|
||||
#provenance: true
|
||||
#sbom: true
|
||||
@@ -3,6 +3,10 @@
|
||||
# Frontend Build: ignore API files disabled for this build
|
||||
/app/**/*.backup
|
||||
|
||||
# Supabase - ignored for now
|
||||
/supabase/
|
||||
/*.sql
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
@@ -41,4 +45,11 @@ yarn-error.log*
|
||||
next-env.d.ts
|
||||
|
||||
# other
|
||||
.idea/
|
||||
.idea/
|
||||
|
||||
# Ignore k8s/env-secret.yaml
|
||||
./k8s/env-secret.yaml
|
||||
/certificates
|
||||
.env*.local
|
||||
/.run/dev (ENV).run.xml
|
||||
/src/modules/3rdparty/aider/scratch*
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
# Base
|
||||
FROM node:18-alpine AS base
|
||||
FROM node:22-alpine AS base
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
|
||||
# Dependencies
|
||||
FROM base AS deps
|
||||
WORKDIR /app
|
||||
@@ -11,6 +10,9 @@ WORKDIR /app
|
||||
COPY package*.json ./
|
||||
COPY src/server/prisma ./src/server/prisma
|
||||
|
||||
# link ssl3 for latest Alpine
|
||||
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
|
||||
|
||||
# Install dependencies, including dev (release builds should use npm ci)
|
||||
ENV NODE_ENV development
|
||||
RUN npm ci
|
||||
@@ -61,4 +63,4 @@ USER nextjs
|
||||
EXPOSE 3000
|
||||
|
||||
# Start the application
|
||||
CMD ["next", "start"]
|
||||
CMD ["next", "start"]
|
||||
|
||||
@@ -1,23 +1,96 @@
|
||||
# BIG-AGI 🧠✨
|
||||
|
||||
Welcome to big-AGI 👋, the GPT application for professionals that need function, form,
|
||||
Welcome to big-AGI, the AI suite for professionals that need function, form,
|
||||
simplicity, and speed. Powered by the latest models from 12 vendors and
|
||||
open-source model servers, `big-AGI` offers best-in-class Voice and Chat with AI Personas,
|
||||
visualizations, coding, drawing, calling, and quite more -- all in a polished UX.
|
||||
open-source servers, `big-AGI` offers best-in-class Chats,
|
||||
[Beams](https://github.com/enricoros/big-AGI/issues/470),
|
||||
and [Calls](https://github.com/enricoros/big-AGI/issues/354) with AI personas,
|
||||
visualizations, coding, drawing, side-by-side chatting, and more -- all wrapped in a polished UX.
|
||||
|
||||
Pros use big-AGI. 🚀 Developers love big-AGI. 🤖
|
||||
Stay ahead of the curve with big-AGI. 🚀 Pros & Devs love big-AGI. 🤖
|
||||
|
||||
[](https://big-agi.com)
|
||||
|
||||
> 🚀 Big-AGI 2 is launching Q4 2024. Be the first to experience it before the public release.
|
||||
>
|
||||
> 👉 [Apply for Early Access](https://y2rjg0zillz.typeform.com/to/ZSADpr5u?utm_source=gh-2&utm_medium=readme&utm_campaign=ea2)
|
||||
|
||||
Or fork & run on Vercel
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
## 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
|
||||
### New Version
|
||||
|
||||
big-AGI is an open book; see the **[ready-to-ship and future ideas](https://github.com/users/enricoros/projects/4/views/2)** in our open roadmap
|
||||
This repository contains two main versions:
|
||||
|
||||
### What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind
|
||||
- Big-AGI 2: next-generation, bringing the most advanced AI experience
|
||||
- `v2-dev`: V2 development branch, the exciting one, future default
|
||||
- Big-AGI Stable: as deployed on big-agi.com
|
||||
- `v1-dev`: V1 development branch (this branch)
|
||||
- `v1-stable`: Current stable version
|
||||
|
||||
Note: After the V2 release in Q4, `v2-dev` will become the default branch and `v1-dev` will reach EOL.
|
||||
|
||||
### Quick links: 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [installation](docs/installation.md) 👉 [documentation](docs/README.md)
|
||||
|
||||
### What's New in 1.16.1...1.16.8 · Sep 13, 2024 (patch releases)
|
||||
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest (o1-preview and o1-mini are supported in Big-AGI 2)
|
||||
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
|
||||
- 1.16.6: Groq support for Llama 3.1 models
|
||||
- 1.16.5: GPT-4o Mini support
|
||||
- 1.16.4: 8192 tokens support for Claude 3.5 Sonnet
|
||||
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
|
||||
- 1.16.2: Improve web downloads, as text, markdown, or HTML
|
||||
- 1.16.2: Proper support for Gemini models
|
||||
- 1.16.2: Added the latest Mistral model
|
||||
- 1.16.2: Tokenizer support for gpt-4o
|
||||
- 1.16.2: Updates to Beam
|
||||
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
|
||||
|
||||
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
|
||||
|
||||
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
|
||||
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
|
||||
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
|
||||
- Major enhancements to the Auto-Diagrams tool
|
||||
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
|
||||
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
|
||||
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
|
||||
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
|
||||
|
||||
#### 3,000 Commits Milestone · April 7, 2024
|
||||
|
||||

|
||||
|
||||
- 🥇 Today we <b>celebrate commit 3000</b> in just over one year, and going stronger 🚀
|
||||
- 📢️ Thanks everyone for your support and words of love for Big-AGI, we are committed to creating the best AI experiences for everyone.
|
||||
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
|
||||
- Enhanced the default Persona
|
||||
- Fixes to Gemini models and SVGs, improvements to UI and icons
|
||||
- 1.15.1: Support for Gemini Pro 1.5 and OpenAI Turbo models
|
||||
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.14.1 · March 7, 2024 · Modelmorphic</summary>
|
||||
|
||||
- **Anthropic** [Claude-3](https://www.anthropic.com/news/claude-3-family) model family support. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- New **[Perplexity](https://www.perplexity.ai/)** and **[Groq](https://groq.com/)** integration (thanks @Penagwin). [#407](https://github.com/enricoros/big-AGI/issues/407), [#427](https://github.com/enricoros/big-AGI/issues/427)
|
||||
- **[LocalAI](https://localai.io/models/)** deep integration, including support for [model galleries](https://github.com/enricoros/big-AGI/issues/411)
|
||||
- **Mistral** Large and Google **Gemini 1.5** support
|
||||
- Performance optimizations: runs [much faster](https://twitter.com/enricoros/status/1756553038293303434?utm_source=localhost:3000&utm_medium=big-agi), saves lots of power, reduces memory usage
|
||||
- Enhanced UX with auto-sizing charts, refined search and folder functionalities, perfected scaling
|
||||
- And with more UI improvements, documentation, bug fixes (20 tickets), and developer enhancements
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind</summary>
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385686b13
|
||||
|
||||
@@ -29,6 +102,8 @@ https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385
|
||||
- Better looking chats with improved spacing, fonts, and menus
|
||||
- More: new video player, [LM Studio tutorial](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-lmstudio.md) (thanks @aj47), [MongoDB support](https://github.com/enricoros/big-AGI/blob/main/docs/deploy-database.md) (thanks @ranfysvalle02), and speedups
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline</summary>
|
||||
|
||||
@@ -73,11 +148,11 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
|
||||
For full details and former releases, check out the [changelog](docs/changelog.md).
|
||||
|
||||
## ✨ Key Features 👊
|
||||
## 👉 Key Features ✨
|
||||
|
||||
|  |  |  |  |  |
|
||||
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
|
||||
| **Chat**<br/>**Call** AGI<br/>**Draw** images<br/>**Agents**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
|
||||
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
|
||||
|
||||

|
||||
|
||||
@@ -85,7 +160,7 @@ You can easily configure 100s of AI models in big-AGI:
|
||||
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.com) (multimodal) · [Ollama](https://ollama.com/) · [Oobabooga](https://github.com/oobabooga/text-generation-webui) |
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| Language services | [Anthropic](https://anthropic.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) |
|
||||
@@ -120,6 +195,22 @@ Add extra functionality with these integrations:
|
||||
|
||||
<br/>
|
||||
|
||||
## 🚀 Installation
|
||||
|
||||
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
|
||||
The guide covers various installation options, whether you're spinning it up on
|
||||
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
|
||||
through Docker.
|
||||
|
||||
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
|
||||
to set up big-AGI quickly and easily.
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
|
||||
|
||||
<br/>
|
||||
|
||||
# 🌟 Get Involved!
|
||||
|
||||
[//]: # ([](https://discord.gg/MkH4qj2Jp9))
|
||||
@@ -129,86 +220,10 @@ Add extra functionality with these integrations:
|
||||
- [ ] ⭐ **Give us a star** on GitHub 👆
|
||||
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
|
||||
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
- [ ] ✨ Deploy your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
|
||||
- [ ] Check out some of the big-AGI [**community projects**](docs/customizations.md)
|
||||
|
||||
| Project | Features | GitHub |
|
||||
|---------|----------------------------------------------------|-------------------------------------------------------------------------------------|
|
||||
| CoolAGI | Code Interpreter, Vision, Mind maps, and much more | [nextgen-user/CoolAGI](https://github.com/nextgen-user/CoolAGI) |
|
||||
| HL-GPT | Fully remodeled UI | [harlanlewis/nextjs-chatgpt-app](https://github.com/harlanlewis/nextjs-chatgpt-app) |
|
||||
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
|
||||
|
||||
<br/>
|
||||
|
||||
# 🧩 Develop
|
||||
|
||||
[//]: # ()
|
||||
|
||||
[//]: # ()
|
||||
|
||||
[//]: # ()
|
||||
|
||||
To download and run this Typescript/React/Next.js project locally, the only prerequisite is Node.js with the `npm` package manager.
|
||||
Clone this repo, install the dependencies (all local), and run the development server (which auto-watches the
|
||||
files for changes):
|
||||
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-agi.git
|
||||
cd big-agi
|
||||
npm install
|
||||
npm run dev
|
||||
|
||||
# You will see something like:
|
||||
#
|
||||
# ▲ Next.js 14.1.0
|
||||
# - Local: http://localhost:3000
|
||||
# ✓ Ready in 2.6s
|
||||
```
|
||||
|
||||
The development app will be running on `http://localhost:3000`. Development builds have the advantage of not requiring
|
||||
a build step, but can be slower than production builds. Also, development builds won't have timeout on edge functions.
|
||||
|
||||
## 🛠️ Deploy from source
|
||||
|
||||
The _production_ build of the application is optimized for performance and is performed by the `npm run build` command,
|
||||
after installing the required dependencies.
|
||||
|
||||
```bash
|
||||
# .. repeat the steps above up to `npm install`, then:
|
||||
npm run build
|
||||
next start --port 3000
|
||||
```
|
||||
|
||||
The app will be running on the specified port, e.g. `http://localhost:3000`.
|
||||
|
||||
Want to deploy with username/password? See the [Authentication](docs/deploy-authentication.md) guide.
|
||||
|
||||
## 🐳 Deploy with Docker
|
||||
|
||||
For more detailed information on deploying with Docker, please refer to the [docker deployment documentation](docs/deploy-docker.md).
|
||||
|
||||
Build and run:
|
||||
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
docker run -d -p 3000:3000 big-agi
|
||||
```
|
||||
|
||||
Or run the official container:
|
||||
|
||||
- manually: `docker run -d -p 3000:3000 ghcr.io/enricoros/big-agi`
|
||||
- or, with docker-compose: `docker-compose up` or see [the documentation](docs/deploy-docker.md) for a composer file with integrated browsing
|
||||
|
||||
## ☁️ Deploy on Cloudflare Pages
|
||||
|
||||
Please refer to the [Cloudflare deployment documentation](docs/deploy-cloudflare.md).
|
||||
|
||||
## 🚀 Deploy on Vercel
|
||||
|
||||
Create your GitHub fork, create a Vercel project over that fork, and deploy it. Or press the button below for convenience.
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/stargazers))
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/network))
|
||||
@@ -217,6 +232,13 @@ Create your GitHub fork, create a Vercel project over that fork, and deploy it.
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/LICENSE))
|
||||
|
||||
## 📜 Licensing
|
||||
|
||||
Big-AGI incorporates third-party software components that are subject
|
||||
to separate license terms. For detailed information about these
|
||||
components and their respective licenses, please refer to
|
||||
the [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md).
|
||||
|
||||
---
|
||||
|
||||
2023-2024 · Enrico Ros x [big-AGI](https://big-agi.com) · License: [MIT](LICENSE) · Made with 💙
|
||||
2023-2024 · Enrico Ros x [Big-AGI](https://big-agi.com) · Like this project? Leave a star! 💫⭐
|
||||
@@ -0,0 +1,24 @@
|
||||
import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterCloud } from '~/server/trpc/trpc.router-cloud';
|
||||
import { createTRPCFetchContext } from '~/server/trpc/trpc.server';
|
||||
|
||||
const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
|
||||
endpoint: '/api/cloud',
|
||||
router: appRouterCloud,
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-cloud failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
|
||||
// NOTE: the following statement breaks the build on non-pro deployments, and conditionals don't work either
|
||||
// so we resorted to raising the timeout from 10s to 25s in the vercel.json file instead
|
||||
// export const maxDuration = 25;
|
||||
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
|
||||
@@ -0,0 +1,18 @@
|
||||
import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterEdge } from '~/server/trpc/trpc.router-edge';
|
||||
import { createTRPCFetchContext } from '~/server/trpc/trpc.server';
|
||||
|
||||
const handlerEdgeRoutes = (req: Request) => fetchRequestHandler({
|
||||
endpoint: '/api/edge',
|
||||
router: appRouterEdge,
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
export const runtime = 'edge';
|
||||
export { handlerEdgeRoutes as GET, handlerEdgeRoutes as POST };
|
||||
@@ -1,2 +0,0 @@
|
||||
export const runtime = 'edge';
|
||||
export { elevenLabsHandler as POST } from '~/modules/elevenlabs/elevenlabs.server';
|
||||
@@ -1,2 +0,0 @@
|
||||
export const runtime = 'edge';
|
||||
export { llmStreamingRelayHandler as POST } from '~/modules/llms/server/llm.server.streaming';
|
||||
@@ -1,19 +0,0 @@
|
||||
import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterEdge } from '~/server/api/trpc.router-edge';
|
||||
import { createTRPCFetchContext } from '~/server/api/trpc.server';
|
||||
|
||||
const handlerEdgeRoutes = (req: Request) =>
|
||||
fetchRequestHandler({
|
||||
router: appRouterEdge,
|
||||
endpoint: '/api/trpc-edge',
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? "<no-path>"}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
export const runtime = 'edge';
|
||||
export { handlerEdgeRoutes as GET, handlerEdgeRoutes as POST };
|
||||
@@ -1,19 +0,0 @@
|
||||
import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
|
||||
|
||||
import { appRouterNode } from '~/server/api/trpc.router-node';
|
||||
import { createTRPCFetchContext } from '~/server/api/trpc.server';
|
||||
|
||||
const handlerNodeRoutes = (req: Request) =>
|
||||
fetchRequestHandler({
|
||||
router: appRouterNode,
|
||||
endpoint: '/api/trpc-node',
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-node failed on ${path ?? '<no-path>'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
export const runtime = 'nodejs';
|
||||
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
|
||||
@@ -0,0 +1,70 @@
|
||||
# AIX dispatch server - API features comparison
|
||||
|
||||
This is updated as of 2024-07-09, and includes the latest features and capabilities of the three major AI APIs: Anthropic, Gemini, and OpenAI.
|
||||
The comparison covers a wide range of features, including function calling, vision, system instructions, etc.
|
||||
|
||||
| Feature Category | Specific Feature | Anthropic | Gemini | OpenAI |
|
||||
|------------------------------------------|-------------------------------|--------------------------------------------------------------------|------------------------------------------------------------------|---------------------------------------------------------------------|
|
||||
| **Message Structure** |
|
||||
| | Role types | user, assistant | user, model | user, assistant, system, tool |
|
||||
| | Named participants | No | No | Yes |
|
||||
| | Content array | Yes | Yes | Yes |
|
||||
| **Content Types and Multimodal Support** |
|
||||
| | Text generation | Yes | Yes | Yes |
|
||||
| | Image understanding | Yes | Yes | Yes |
|
||||
| | Audio processing | No | **Yes** | No |
|
||||
| | Video processing | No | **Yes** | No |
|
||||
| **Image Handling** |
|
||||
| | Supported formats | JPEG, PNG, GIF, WebP | JPEG, PNG, WebP, HEIC, HEIF | PNG, JPEG, WebP, non-animated GIF |
|
||||
| | Max image size | 5MB per image | (20MB per prompt) | 20MB per image |
|
||||
| | Image detail level | N/A | N/A | **Low, high, auto** |
|
||||
| | Image resolution | max: 1568x1568 | min: 768x768, max: 3072x3072 | min: 512x512, max: 2048 x 2048 |
|
||||
| | Token calculation for images | (width * height)/750; max 1,600 | 258 tokens | 85 + 170 * {patches} |
|
||||
| | Image retention | Deleted after processing | Not specified | Deleted after processing |
|
||||
| **Audio and Video Handling** |
|
||||
| | Audio formats | N/A | WAV, MP3, AIFF, AAC, OGG, FLAC | N/A |
|
||||
| | Video formats | N/A | MP4, MPEG, MOV, AVI, MPG, WebM, WMV, 3GPP | N/A |
|
||||
| **System Instructions and Tool Use** |
|
||||
| | System instructions | Yes (array of text blocks) | Yes (parts array) | Yes (as system message) |
|
||||
| **Function/Tool Handling** |
|
||||
| | Parallel tool calls | No | No | **Yes** |
|
||||
| | Tool Declaration | Defined in `tools` array | Defined in `tools` array | Defined in `tools` array |
|
||||
| | FC name restrictions | Yes | Yes (max 63 chars) | Yes (max 64 chars) |
|
||||
| | FC declaration | name, description, input_schema | name, description, parameters | name, description, parameters |
|
||||
| | FC options structure | JSON Schema for input | Object with properties | JSON Schema for parameters |
|
||||
| | FC Force invocation | Via `tool_choice` parameter | Via `toolConfig` parameter | Via `tool_choice` parameter |
|
||||
| | FC Model invocation | Model generates a `tool_use` block with predicted parameters | Generates a `functionCall` part with predicted parameters | Generates a message.`tool_calls` item with predicted arguments |
|
||||
| | FC Execution | Client-side | Client-side | Client-side |
|
||||
| | FC Result injection | Client appends a `user` message with a `tool_result` content block | Client appends a `function` message with `functionResponse` part | Client sends a new `tool` message with `tool_call_id` and `content` |
|
||||
| | Built-in Code execution | No | **Yes** | No |
|
||||
| | Tool use with vision | Yes | Yes | Yes |
|
||||
| **Generation Configuration** |
|
||||
| | temperature | Yes | Yes | Yes |
|
||||
| | max_tokens | Yes | Yes | Yes |
|
||||
| | stop_sequences | Yes | Yes | Yes |
|
||||
| | top_k | Yes | Yes | **No** |
|
||||
| | top_p | Yes | Yes | Yes |
|
||||
| | seed | No | No | **Yes** |
|
||||
| | Multiple candidates | No | No | Yes (with 'n' parameter, breaks streaming?) |
|
||||
| **Streaming and Response Structure** |
|
||||
| | Streaming support | Yes | Yes | Yes |
|
||||
| | Streaming initiation | stream=true | streamGenerateContent path | stream=true |
|
||||
| | Streaming event types | **Multiple specific types** | Not specified | Single delta type |
|
||||
| | Response container | content (array) | candidates (array) | choices (array) |
|
||||
| **Usage Metrics and Error Handling** |
|
||||
| | Token counts | Yes | Yes | Yes |
|
||||
| | Detailed token breakdown | input, output | prompt, cached, candidates, total | prompt, completion, total |
|
||||
| | Usage in stream | No | No | **Optional** |
|
||||
| | Error handling in response | Not specified | Not specified | **Yes (undocumented)** |
|
||||
| | Error handling in stream | Not specified | Not specified | **Yes (undocumented)** |
|
||||
| **Advanced Features** |
|
||||
| | JSON mode | **Partial (via structured prompts)** | **Yes (responseMimeType)** | **Yes** |
|
||||
| | Output consistency techniques | **Yes (multiple methods)** | Not specified | Not specified |
|
||||
| | Logprobs | No | No | **Yes (disabled in schema)** |
|
||||
| | System fingerprint | No | No | **Yes** |
|
||||
| | Semantic caching | No | **Yes** | No |
|
||||
| | Assistant prefill | **Yes** | No | No |
|
||||
| | Preferred formatting | **XML tags, JSON** | Not specified | Markdown |
|
||||
| **Safety and Compliance** |
|
||||
| | Safety settings in request | **Stop sequences** | **Detailed category-based** | **Moderation API** |
|
||||
| | Safety feedback in response | Yes | Yes | Not specified |
|
||||
@@ -1,65 +1,62 @@
|
||||
# big-AGI Documentation
|
||||
# Big-AGI Documentation
|
||||
|
||||
Find all the information you need to get started, configure, and effectively use big-AGI.
|
||||
Information you need to get started, configure, and use big-AGI productively.
|
||||
|
||||
[//]: # (## Quick Start)
|
||||
## Getting Started
|
||||
|
||||
[//]: # (- **[Introduction](big-agi.md)**: Overview of big-AGI's features.)
|
||||
Guides for basic big-AGI features:
|
||||
|
||||
## Configuration Guides
|
||||
- **[Enabling Microphone for Speech Recognition](help-feature-microphone.md)**: Instructions to
|
||||
allow speech recognition in browsers and apps.
|
||||
|
||||
Detailed guides to configure your big-AGI interface and models.
|
||||
## AI Model Configuration
|
||||
|
||||
👉 The following applies to the users of big-AGI.com, as the public instance is empty and to be configured by the user.
|
||||
Detailed guides to configure AI models and advanced features in big-AGI.
|
||||
|
||||
- **Cloud Model Services**:
|
||||
> 👉 The following applies to users of big-AGI.com, as the public instance is empty and requires user configuration.
|
||||
|
||||
- **Cloud AI Services**:
|
||||
- **[Azure OpenAI](config-azure-openai.md)**
|
||||
- **[OpenRouter](config-openrouter.md)**
|
||||
- easy API key: **Anthropic**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **Perplexity**, **TogetherAI**
|
||||
- Easy API key setup: **Anthropic**, **Deepseek**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **OpenPipe**, **Perplexity**, **TogetherAI**, **xAI**
|
||||
|
||||
|
||||
- **Local Model Servers**:
|
||||
- **Local AI Integrations**:
|
||||
- **[LocalAI](config-local-localai.md)**
|
||||
- **[LM Studio](config-local-lmstudio.md)**
|
||||
- **[Ollama](config-local-ollama.md)**
|
||||
- **[Oobabooga](config-local-oobabooga.md)**
|
||||
|
||||
|
||||
- **Advanced Feature Configuration**:
|
||||
- **[Browse](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud (advanced)
|
||||
  - **ElevenLabs API**: Voice and custom voice generation, only requires their API key
|
||||
- **Google Search API**: guide not yet available, see the Google options in 'Environment Variables'
|
||||
- **Prodia API**: Stable Diffusion XL image generation, only requires their API key, alternative to DALL·E
|
||||
- **Enhanced AI Features**:
|
||||
- **[Web Browsing](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud (advanced)
|
||||
- **Web Search**: Google Search API (see '[Environment Variables](environment-variables.md)')
|
||||
- **Image Generation**: DALL·E 3 and 2, or Prodia API for Stable Diffusion XL
|
||||
- **Voice Synthesis**: ElevenLabs API for voice generation
|
||||
|
||||
## Deployment
|
||||
## Deployment & Customization
|
||||
|
||||
System integrators, administrators, whitelabelers: instead of using the public big-AGI instance on get.big-agi.com, you can deploy your own instance.
|
||||
> 👉 The following applies to developers and experts who deploy their own big-AGI instance.
|
||||
|
||||
Step-by-step deployment and system configuration instructions.
|
||||
For deploying a custom big-AGI instance:
|
||||
|
||||
- **Deploy Your Own**
|
||||
- straightforward: **Local development**, **Vercel 1-Click**
|
||||
- **[Cloudflare Deployment](deploy-cloudflare.md)**
|
||||
- **[Docker Deployment](deploy-docker.md)**: Containers for Local or Cloud deployments
|
||||
- **[Installation Guide](installation.md)**: Set up your own big-AGI instance
|
||||
- Source build or pre-built options
|
||||
- Local, cloud, or on-premises deployment
|
||||
|
||||
|
||||
- **Deployment Server Features**
|
||||
- **[Database Setup](deploy-database.md)**: Optional, only required to enable "Chat Link Sharing"
|
||||
- **[Environment Variables](environment-variables.md)**: 📌 Set server-side API keys and special features in your deployments
|
||||
- **[HTTP Basic Authentication](deploy-authentication.md)**: Optional, Secure your big-AGI instance with a username and password
|
||||
- **Advanced Setup**:
|
||||
- **[Source Code Customization Guide](customizations.md)**: Modify the source code
|
||||
- **[Access Control](deploy-authentication.md)**: Optional, add basic user authentication
|
||||
- **[Database Setup](deploy-database.md)**: Optional, enables "Chat Link Sharing"
|
||||
- **[Reverse Proxy](deploy-reverse-proxy.md)**: Optional, enables custom domains and SSL
|
||||
- **[Environment Variables](environment-variables.md)**: Pre-configures models and services
|
||||
|
||||
## Customization & Derivative UIs
|
||||
## Community & Support
|
||||
|
||||
👏 Customize big-AGI to fit your needs.
|
||||
|
||||
- **[Customizing big-AGI](customizations.md)**: how to alter source code and server-side configuration
|
||||
|
||||
## Support and Community
|
||||
|
||||
Join our community or get support:
|
||||
Connect with the growing big-AGI community:
|
||||
|
||||
- Visit our [GitHub repository](https://github.com/enricoros/big-AGI) for source code and issue tracking
|
||||
- Check the latest updates and features on [Changelog](changelog.md) or the in-app [News](https://get.big-agi.com/news)
|
||||
- Connect with us and other users on [Discord](https://discord.gg/MkH4qj2Jp9) for discussions, help, and sharing your experiences with big-AGI
|
||||
|
||||
Thank you for choosing big-AGI. We're excited to see what you'll build.
|
||||
Thank you for choosing big-AGI. We're excited to give you the best tools to amplify yourself.
|
||||
|
||||
@@ -5,12 +5,65 @@ by release.
|
||||
|
||||
- For the live roadmap, please see [the GitHub project](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
### 1.13.0 - Feb 2024
|
||||
### 1.17.0 - Jun 2024
|
||||
|
||||
- milestone: [1.13.0](https://github.com/enricoros/big-agi/milestone/13)
|
||||
- milestone: [1.17.0](https://github.com/enricoros/big-agi/milestone/17)
|
||||
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
|
||||
|
||||
## What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind
|
||||
### What's New in 1.16.1...1.16.8 · Sep 13, 2024 (patch releases)
|
||||
|
||||
- 1.16.8: OpenAI ChatGPT-4o Latest (o1-preview and o1-mini are supported in Big-AGI 2)
|
||||
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
|
||||
- 1.16.6: Groq support for Llama 3.1 models
|
||||
- 1.16.5: GPT-4o Mini support
|
||||
- 1.16.4: 8192 tokens support for Claude 3.5 Sonnet
|
||||
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
|
||||
- 1.16.2: Improve web downloads, as text, markdown, or HTML
|
||||
- 1.16.2: Proper support for Gemini models
|
||||
- 1.16.2: Added the latest Mistral model
|
||||
- 1.16.2: Tokenizer support for gpt-4o
|
||||
- 1.16.2: Updates to Beam
|
||||
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
|
||||
|
||||
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
|
||||
|
||||
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
|
||||
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
|
||||
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
|
||||
- Major enhancements to the Auto-Diagrams tool
|
||||
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
|
||||
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
|
||||
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
|
||||
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
|
||||
- Developers: update the LLMs data structures
|
||||
|
||||
### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
|
||||
|
||||
- Support for the newly released Gemini Pro 1.5 models
|
||||
- Support for the new OpenAI 2024-04-09 Turbo models
|
||||
- Resilience fixes after the large success of 1.15.0
|
||||
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
|
||||
- Enhanced the default Persona
|
||||
- Fixes to Gemini models and SVGs, improvements to UI and icons
|
||||
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
|
||||
|
||||
### What's New in 1.14.1 · March 7, 2024 · Modelmorphic
|
||||
|
||||
- **Anthropic** [Claude-3](https://www.anthropic.com/news/claude-3-family) model family support. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- New **[Perplexity](https://www.perplexity.ai/)** and **[Groq](https://groq.com/)** integration (thanks @Penagwin). [#407](https://github.com/enricoros/big-AGI/issues/407), [#427](https://github.com/enricoros/big-AGI/issues/427)
|
||||
- **[LocalAI](https://localai.io/models/)** deep integration, including support for [model galleries](https://github.com/enricoros/big-AGI/issues/411)
|
||||
- **Mistral** Large and Google **Gemini 1.5** support
|
||||
- Performance optimizations: runs [much faster](https://twitter.com/enricoros/status/1756553038293303434?utm_source=localhost:3000&utm_medium=big-agi), saves lots of power, reduces memory usage
|
||||
- Enhanced UX with auto-sizing charts, refined search and folder functionalities, perfected scaling
|
||||
- And with more UI improvements, documentation, bug fixes (20 tickets), and developer enhancements
|
||||
- [Release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.14.0), and changes [v1.13.1...v1.14.0](https://github.com/enricoros/big-AGI/compare/v1.13.1...v1.14.0) (233 commits, 8,000+ lines changed)
|
||||
|
||||
### What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385686b13
|
||||
|
||||
@@ -22,7 +75,7 @@ https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385
|
||||
- Better looking chats with improved spacing, fonts, and menus
|
||||
- More: new video player, [LM Studio tutorial](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-lmstudio.md) (thanks @aj47), [MongoDB support](https://github.com/enricoros/big-AGI/blob/main/docs/deploy-database.md) (thanks @ranfysvalle02), and speedups
|
||||
|
||||
## What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline
|
||||
### What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/95ceb03c-945d-4fdd-9a9f-3317beb54f3f
|
||||
|
||||
@@ -85,7 +138,7 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
- **Overheat LLMs**: Push the creativity with higher LLM temperatures. [#256](https://github.com/enricoros/big-agi/issues/256)
|
||||
- **Model Options Shortcut**: Quick adjust with `Ctrl+Shift+O`
|
||||
- Optimized Voice Input and Performance
|
||||
- Latest Ollama and Oobabooga models
|
||||
- Latest Ollama models
|
||||
- For developers: **Password Protection**: HTTP Basic Auth. [Learn How](https://github.com/enricoros/big-agi/blob/main/docs/deploy-authentication.md)
|
||||
|
||||
### What's New in 1.6.0 - Nov 28, 2023 · Surf's Up
|
||||
@@ -117,7 +170,7 @@ For Developers:
|
||||
first request to get the configuration. See
|
||||
https://github.com/enricoros/big-agi/blob/main/src/modules/backend/backend.router.ts.
|
||||
- CloudFlare developers: please change the deployment command to
|
||||
`rm app/api/trpc-node/[trpc]/route.ts && npx @cloudflare/next-on-pages@1`,
|
||||
`rm app/api/cloud/[trpc]/route.ts && npx @cloudflare/next-on-pages@1`,
|
||||
as we transitioned to the App router in NextJS 14. The documentation in
|
||||
[docs/deploy-cloudflare.md](../docs/deploy-cloudflare.md) is updated
|
||||
|
||||
@@ -134,7 +187,6 @@ For Developers:
|
||||
- **Camera OCR** - real-world AI - take a picture of a text, and chat with it
|
||||
- **Anthropic models** support, e.g. Claude
|
||||
- **Backup/Restore** - save chats, and restore them later
|
||||
- **[Local model support with Oobabooga server](../docs/config-local-oobabooga)** - run your own LLMs!
|
||||
- **Flatten conversations** - conversations summarizer with 4 modes
|
||||
- **Fork conversations** - create a new chat, to try with different endings
|
||||
- New commands: /s to add a System message, and /a for an Assistant message
|
||||
|
||||
@@ -20,6 +20,9 @@ If you have an `API Endpoint` and `API Key`, you can configure big-AGI as follow
|
||||
The deployed models are now available in the application. If you don't have a configured
|
||||
Azure OpenAI service instance, continue with the next section.
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
## Setting Up Azure
|
||||
|
||||
### Step 1: Azure Account & Subscription
|
||||
|
||||
@@ -68,7 +68,7 @@ The chat agent won't be able to access the web sites if the browserless containe
|
||||
- MAX_CONCURRENT_SESSIONS=10
|
||||
```
|
||||
|
||||
You can then add the proxy lines to your `.env` file.
|
||||
You can then add the proxy lines to your `.env` file.
|
||||
|
||||
```
|
||||
https_proxy=http://PROXY-IP:PROXY-PORT
|
||||
@@ -115,4 +115,4 @@ If you encounter any issues or have questions about configuring the browse funct
|
||||
|
||||
Enjoy the enhanced browsing experience within `big-AGI` and explore the web without ever leaving your chat!
|
||||
|
||||
Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md))
|
||||
Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md))
|
||||
|
||||
@@ -37,6 +37,9 @@ Check the URL and modify if different.
|
||||
2. Enter the API URL: `http://localhost:1234` (modify if different)
|
||||
3. Refresh by clicking on the `Models` button to load models from LM Studio
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- **Missing @mui/material**: Execute `npm install @mui/material` or `yarn add @mui/material`
|
||||
|
||||
@@ -36,6 +36,9 @@ Follow the guide at: https://localai.io/basics/getting_started/
|
||||
- Load the models (click on `Models 🔄`)
|
||||
- Select the model and chat
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
### Integration: Models Gallery
|
||||
|
||||
If the running LocalAI instance is configured with a [Model Gallery](https://localai.io/models/):
|
||||
@@ -51,7 +54,7 @@ If the running LocalAI instance is configured with a [Model Gallery](https://loc
|
||||
|
||||
At the time of writing, LocalAI does not publish the model `context window size`.
|
||||
Every model is assumed to be capable of chatting, and with a context window of 4096 tokens.
|
||||
Please update the [src/modules/llms/transports/server/openai/models.data.ts](../src/modules/llms/server/openai/models.data.ts)
|
||||
Please update the [src/modules/llms/transports/server/openai/models/models.data.ts](../src/modules/llms/server/openai/models/models.data.ts)
|
||||
file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
|
||||
|
||||
# 🤝 Support
|
||||
|
||||
@@ -13,7 +13,7 @@ _Last updated Dec 16, 2023_
|
||||
|
||||
1. **Ensure Ollama API Server is Running**: Follow the official instructions to get Ollama up and running on your machine
|
||||
- For detailed instructions on setting up the Ollama API server, please refer to the
|
||||
[Ollama download page](https://ollama.ai/download) and [instructions for linux](https://github.com/jmorganca/ollama/blob/main/docs/linux.md).
|
||||
[Ollama download page](https://ollama.ai/download) and [instructions for linux](https://github.com/jmorganca/ollama/blob/main/docs/linux.md).
|
||||
2. **Add Ollama as a Model Source**: In `big-AGI`, navigate to the **Models** section, select **Add a model source**, and choose **Ollama**
|
||||
3. **Enter Ollama Host URL**: Provide the Ollama Host URL where the API server is accessible (e.g., `http://localhost:11434`)
|
||||
4. **Refresh Model List**: Once connected, refresh the list of available models to include the Ollama models
|
||||
@@ -22,6 +22,9 @@ _Last updated Dec 16, 2023_
|
||||
you'll have to press the 'Pull' button again, until a green message appears.
|
||||
5. **Chat with Ollama models**: select an Ollama model and begin chatting with AI personas
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
**Visual Configuration Guide**:
|
||||
|
||||
* After adding the `Ollama` model vendor, entering the IP address of an Ollama server, and refreshing models:<br/>
|
||||
@@ -37,7 +40,7 @@ _Last updated Dec 16, 2023_
|
||||
|
||||
### ⚠️ Network Troubleshooting
|
||||
|
||||
If you get errors about the server having trouble connecting with Ollama, please see
|
||||
If you get errors about the server having trouble connecting with Ollama, please see
|
||||
[this message](https://github.com/enricoros/big-AGI/issues/276#issuecomment-1858591483) on Issue #276.
|
||||
|
||||
And in brief, make sure the Ollama endpoint is accessible from the servers where you run big-AGI (which could
|
||||
@@ -69,15 +72,20 @@ Then, edit the nginx configuration file `/etc/nginx/sites-enabled/default` and a
|
||||
|
||||
```nginx
|
||||
location /ollama/ {
|
||||
proxy_pass http://localhost:11434;
|
||||
proxy_pass http://127.0.0.1:11434/;
|
||||
|
||||
# Disable buffering for the streaming responses (SSE)
|
||||
proxy_set_header Connection '';
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
|
||||
# Disable buffering for the streaming responses
|
||||
chunked_transfer_encoding off;
|
||||
proxy_buffering off;
|
||||
proxy_cache off;
|
||||
|
||||
# Longer timeouts (1hr)
|
||||
keepalive_timeout 3600;
|
||||
proxy_read_timeout 3600;
|
||||
proxy_connect_timeout 3600;
|
||||
proxy_send_timeout 3600;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
# Local LLM Integration with `text-web-ui` :llama:
|
||||
|
||||
Integrate local Large Language Models (LLMs) with
|
||||
[oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui),
|
||||
a specialized interface that includes a custom variant of the OpenAI API for a smooth integration process.
|
||||
|
||||
_Last updated on Dec 7, 2023_
|
||||
|
||||
### Components
|
||||
|
||||
The implementation of local LLMs involves the following components:
|
||||
|
||||
* **text-generation-webui**: A Python application with a Gradio web UI for operating Large Language Models.
|
||||
* **Local Large Language Models "LLMs"**: Use large language models on your personal computer with consumer-grade GPUs or CPUs.
|
||||
* **big-AGI**: An LLM UI that offers features such as Personas, OCR, Voice Support, Code Execution, AGI functions, and more.
|
||||
|
||||
## Instructions
|
||||
|
||||
This guide assumes that **big-AGI** is already installed on your system. Note that the text-generation-webui IP address must be accessible from the server running **big-AGI**.
|
||||
|
||||
### Text-web-ui Installation & Configuration:
|
||||
|
||||
1. Install [text-generation-webui](https://github.com/oobabooga/text-generation-webui#Installation):
|
||||
   - Follow the instructions in the official page (basically clone the repo and run a script) [~10 minutes]
|
||||
- Stop the Web UI as we need to modify the startup flags to enable the OpenAI API
|
||||
2. Enable the **openai extension**
|
||||
- Edit `CMD_FLAGS.txt`
|
||||
- Make sure that `--listen --api` is present and uncommented
|
||||
3. Restart text-generation-webui
|
||||
- Double-click on "start"
|
||||
- You should see something like:
|
||||
```
|
||||
2023-12-07 21:51:21 INFO:Loading the extension "openai"...
|
||||
2023-12-07 21:51:21 INFO:OpenAI-compatible API URL:
|
||||
|
||||
http://0.0.0.0:5000
|
||||
...
|
||||
INFO: Uvicorn running on http://0.0.0.0:5000 (Press CTRL+C to quit)
|
||||
Running on local URL: http://0.0.0.0:7860
|
||||
```
|
||||
- This shows that:
|
||||
- The Web UI is running on port 7860: http://127.0.0.1:7860
|
||||
- **The OpenAI API is running on port 5000: http://127.0.0.1:5000**
|
||||
4. Load your first model
|
||||
- Open the text-generation-webui at [127.0.0.1:7860](http://127.0.0.1:7860/)
|
||||
- Switch to the **Model** tab
|
||||
- Download, for instance, `TheBloke/Llama-2-7B-Chat-GPTQ`
|
||||
- Select the model once it's loaded
|
||||
|
||||
### Integrating text-web-ui with big-AGI:
|
||||
1. Integrating Text-Generation-WebUI with big-AGI:
|
||||
- Go to Models > Add a model source of type: **Oobabooga**
|
||||
- Enter the address: `http://127.0.0.1:5000`
|
||||
- If running remotely, replace 127.0.0.1 with the IP of the machine. Make sure to use the **IP:Port** format
|
||||
- Load the models
|
||||
- The active model must be selected and LOADED on the text-generation-webui as it doesn't support model switching or parallel requests.
|
||||
- Select model & Chat
|
||||
|
||||

|
||||
|
||||
Enjoy the privacy and flexibility of local LLMs with `big-AGI` and `text-generation-webui`!
|
||||
@@ -22,6 +22,9 @@ This document details the process of integrating OpenRouter with big-AGI.
|
||||

|
||||
4. OpenAI GPT4-32k and other models will now be accessible and selectable in the application.
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
### Pricing
|
||||
|
||||
OpenRouter independently manages its service and pricing and is not affiliated with big-AGI.
|
||||
|
||||
@@ -22,6 +22,25 @@ Understand the Architecture: big-AGI uses Next.js, React for the front end, and
|
||||
|
||||
This necessitates a code change (file renaming) before build initiation, detailed in [deploy-authentication.md](deploy-authentication.md).
|
||||
|
||||
### Increase Vercel Functions Timeout
|
||||
|
||||
For long-running operations, Vercel allows paid deployments to increase the timeout on Functions.
|
||||
Note that this applies to old-style Vercel Functions (based on Node.js) and not the new Edge Functions.
|
||||
|
||||
At time of writing, big-AGI has only 2 operations that run on Node.js Functions:
|
||||
browsing (fetching web pages) and sharing. They both can exceed 10 seconds, especially
|
||||
when fetching large pages or waiting for websites to be completed.
|
||||
|
||||
We provide `vercel_PRODUCTION.json` to raise the duration to 25 seconds (from a default of 10), to use it,
|
||||
make sure to rename it to `vercel.json` before build.
|
||||
|
||||
From the Vercel Project > Settings > General > Build & Development Settings,
|
||||
you can for instance set the build command to:
|
||||
|
||||
```bash
|
||||
mv vercel_PRODUCTION.json vercel.json; next build
|
||||
```
|
||||
|
||||
### Change the Personas
|
||||
|
||||
Edit the `src/data.ts` file to customize personas. This file houses the default personas. You can add, remove, or modify these to meet your project's needs.
|
||||
@@ -42,21 +61,26 @@ Test your application thoroughly using local development (refer to README.md for
|
||||
|
||||
- [deploy-cloudflare.md](deploy-cloudflare.md): for Cloudflare Workers deployment
|
||||
- [deploy-docker.md](deploy-docker.md): for Docker deployment instructions and examples
|
||||
- [deploy-k8s.md](deploy-k8s.md): for Kubernetes deployment instructions and examples
|
||||
|
||||
<br/>
|
||||
## Debugging
|
||||
|
||||
We introduced the `/info/debug` page that provides a detailed overview of the application's environment, including the API keys, environment variables, and other configuration settings.
|
||||
|
||||
<br/>
|
||||
|
||||
## Community Projects - Share Your Project
|
||||
|
||||
After deployment, share your project with the community. We will link to your project to help others discover and learn from your work.
|
||||
|
||||
| Project | Features | GitHub |
|
||||
|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|
|
||||
| 🚀 CoolAGI: Where AI meets Imagination<br/> | Code Interpreter, Vision, Mind maps, Web Searches, Advanced Data Analytics, Large Data Handling and more! | [nextgen-user/CoolAGI](https://github.com/nextgen-user/CoolAGI) |
|
||||
| HL-GPT | Fully remodeled UI | [harlanlewis/nextjs-chatgpt-app](https://github.com/harlanlewis/nextjs-chatgpt-app) |
|
||||
| Project | Features | GitHub |
|
||||
|----------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|
|
||||
| 🚀 CoolAGI: Where AI meets Imagination<br/> | Code Interpreter, Vision, Mind maps, Web Searches, Advanced Data Analytics, Large Data Handling and more! | [nextgen-user/CoolAGI](https://github.com/nextgen-user/CoolAGI) |
|
||||
| HL-GPT | Fully remodeled UI | [harlanlewis/nextjs-chatgpt-app](https://github.com/harlanlewis/nextjs-chatgpt-app) |
|
||||
|
||||
For public projects, update your README.md with your modifications and submit a pull request to add your project to our list, aiding in its discovery.
|
||||
|
||||
<br/>
|
||||
<br/>
|
||||
|
||||
## Best Practices
|
||||
|
||||
|
||||
@@ -53,7 +53,7 @@ As of Feb 27, 2024, this feature is in development.
|
||||
|
||||
## Configurations
|
||||
|
||||
| Scope | Default | Description / Instructions |
|
||||
| Scope | Default | Description / Instructions |
|
||||
|-----------------------------------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------|
|
||||
| Your source builds of big-AGI | None | **Vercel**: enable Vercel Analytics from the dashboard. · **Google Analytics**: set environment variable at build time. |
|
||||
| Your docker builds of big-AGI | None | **Vercel**: n/a. · **Google Analytics**: set environment variable at `docker build` time. |
|
||||
|
||||
@@ -19,7 +19,7 @@ To enable it in `big-AGI`, you **must manually build the application**:
|
||||
- Build `big-AGI` with HTTP authentication enabled:
|
||||
- Clone the repository
|
||||
- Rename `middleware_BASIC_AUTH.ts` to `middleware.ts`
|
||||
- Build: usual simple build procedure (e.g. [Deploy manually](../README.md#-deploy-manually) or [Deploying with Docker](deploy-docker.md))
|
||||
- Build: usual simple build procedure (e.g. [Deploy manually](installation.md#Local-Production-build) or [Deploying with Docker](deploy-docker.md))
|
||||
|
||||
- Configure the following [environment variables](environment-variables.md) before launching `big-AGI`:
|
||||
```dotenv
|
||||
|
||||
@@ -34,7 +34,7 @@ Fork the repository to your personal GitHub account.
|
||||
2. On this page, set your **Project name**, **Production branch** (e.g., main), and your Build settings
|
||||
3. Choose `Next.js` from the **Framework preset** dropdown menu
|
||||
4. Set a custom **Build Command**:
|
||||
- `rm app/api/trpc-node/[trpc]/route.ts && npx @cloudflare/next-on-pages@1`
|
||||
- `rm app/api/cloud/[trpc]/route.ts && npx @cloudflare/next-on-pages@1`
|
||||
- see the tradeoffs for this deletion on the notice at the top
|
||||
5. Keep the **Build output directory** as default
|
||||
6. Click the **Save and Deploy** button
|
||||
|
||||
@@ -9,7 +9,7 @@ Docker ensures faster development cycles, easier collaboration, and seamless env
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-agi.git
|
||||
cd big-agi
|
||||
```
|
||||
```
|
||||
2. **Build the Docker Image**: Build a local docker image from the provided Dockerfile:
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
@@ -59,6 +59,17 @@ To make local services running on your host machine accessible to a Docker conta
|
||||
|
||||
<br/>
|
||||
|
||||
### Reverse Proxy Configuration
|
||||
|
||||
A reverse proxy is a server that sits in front of big-AGI's container and can forward web
|
||||
requests to it. Often used to run multiple web applications, expose them to the internet,
|
||||
and increase security.
|
||||
|
||||
If you're deploying big-AGI behind a reverse proxy, you may want to see
|
||||
our [Reverse Proxy Deployment Guide](deploy-reverse-proxy.md) for more information.
|
||||
|
||||
<br/>
|
||||
|
||||
### More Information
|
||||
|
||||
The [`Dockerfile`](../Dockerfile) describes how to create a Docker image. It establishes a Node.js environment,
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
# Deploy `big-AGI` with Kubernetes ☸️
|
||||
|
||||
In this tutorial, we will guide you through the process of deploying big-AGI
|
||||
in a Kubernetes environment using the kubectl command-line tool.
|
||||
|
||||
## First Deployment
|
||||
|
||||
### Step 1: Clone the big-AGI repository
|
||||
|
||||
```bash
|
||||
$ git clone https://github.com/enricoros/big-agi
|
||||
$ cd ./big-agi/docs/k8s
|
||||
```
|
||||
|
||||
### Step 2: Create the namespace
|
||||
|
||||
```bash
|
||||
$ kubectl create namespace ns-big-agi
|
||||
```
|
||||
|
||||
### Step 3: Fill in the key information into env-secret.yaml
|
||||
|
||||
All variables are optional. By default, Kubernetes Secret uses Base64 for
|
||||
encode/decode, so please don't do a git commit after filling in the keys
|
||||
to avoid leaking sensitive information.
|
||||
|
||||
We provide an empty `env-secret.yaml` file as a template.
|
||||
You can fill in the necessary information using a text editor.
|
||||
|
||||
```bash
|
||||
$ nano env-secret.yaml
|
||||
```
|
||||
|
||||
### Step 4: Deploying Kubernetes Resources
|
||||
|
||||
```bash
|
||||
$ kubectl apply -f big-agi-deployment.yaml -f env-secret.yaml
|
||||
```
|
||||
|
||||
### Step 5: Verifying the Resource Statuses
|
||||
|
||||
```bash
|
||||
$ kubectl -n ns-big-agi get svc,pod,deployment
|
||||
NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
|
||||
service/svc-big-agi ClusterIP 10.0.198.118 <none> 3000/TCP 63m
|
||||
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
pod/deployment-big-agi-xxxxxxxx-yyyyy 1/1 Running 0 39m
|
||||
|
||||
NAME READY UP-TO-DATE AVAILABLE AGE
|
||||
deployment.apps/deployment-big-agi 1/1 1 1 63m
|
||||
```
|
||||
|
||||
### Step 6: Testing the Service
|
||||
|
||||
You can test the service by port-forwarding the service to your local machine:
|
||||
|
||||
```bash
|
||||
$ kubectl -n ns-big-agi port-forward service/svc-big-agi 3000
|
||||
Forwarding from 127.0.0.1:3000 -> 3000
|
||||
Forwarding from [::1]:3000 -> 3000
|
||||
```
|
||||
|
||||
Now you can access the service at `http://localhost:3000`, and you should see the big-AGI homepage.
|
||||
|
||||
## Updating big-AGI
|
||||
|
||||
To update big-AGI to the latest version:
|
||||
|
||||
1. Pull the latest changes from the repository:
|
||||
```bash
|
||||
$ git pull origin main
|
||||
```
|
||||
|
||||
2. Apply the updated deployment:
|
||||
```bash
|
||||
$ kubectl apply -f big-agi-deployment.yaml
|
||||
```
|
||||
|
||||
This will trigger a rolling update of the deployment with the latest image.
|
||||
|
||||
**Note**: If you're deploying big-AGI behind a reverse proxy, you may need to configure
|
||||
your proxy to support streaming. See our [Reverse Proxy Deployment Guide](deploy-reverse-proxy.md) for more information.
|
||||
|
||||
Note: For production use, consider setting up an Ingress Controller or Load Balancer instead of using port-forward.
|
||||
@@ -0,0 +1,58 @@
|
||||
# Advanced: Deploying big-AGI behind a Reverse Proxy
|
||||
|
||||
Note: if you don't have a reverse proxy set up, you can skip this guide.
|
||||
|
||||
If you're deploying big-AGI behind a reverse proxy, you may want to configure your proxy to support streaming output.
|
||||
This guide provides instructions on how to configure your reverse proxy to support streaming output from big-AGI.
|
||||
|
||||
This is for advanced deployments, and you should have a basic understanding of how reverse proxies work.
|
||||
|
||||
## Nginx Configuration
|
||||
|
||||
If you're using Nginx as your reverse proxy, add the following configuration to your server block:
|
||||
|
||||
```nginx
|
||||
server {
|
||||
listen 80;
|
||||
server_name your-domain.com;
|
||||
|
||||
location / {
|
||||
# ...your specific proxy_pass configuration, example below...
|
||||
proxy_pass http://localhost:3000; # Assuming big-AGI is running on port 3000
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
# ...
|
||||
|
||||
# Important: Disable buffering for the streaming responses (SSE)
|
||||
chunked_transfer_encoding on; # Turn on chunked transfer encoding
|
||||
proxy_buffering off; # Turn off proxy buffering
|
||||
proxy_cache off; # Turn off caching
|
||||
tcp_nodelay on; # Turn on TCP NODELAY option, disable delay ACK algorithm
|
||||
tcp_nopush on; # Turn on TCP NOPUSH option, disable Nagle algorithm
|
||||
|
||||
# Important: Longer timeouts (5 min)
|
||||
keepalive_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
proxy_read_timeout 300;
|
||||
proxy_send_timeout 300;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
This configuration disables caching and buffering, enables chunked transfer encoding, and adjusts TCP settings to optimize for streaming content.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you're experiencing issues with streaming not working, especially when deploying behind a reverse proxy,
|
||||
ensure that your proxy is configured to support streaming output as described above.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- For Docker deployments, see our [Docker Deployment Guide](deploy-docker.md)
|
||||
- For Kubernetes deployments, see our [Kubernetes Deployment Guide](deploy-k8s.md)
|
||||
- For general installation instructions, see our [Installation Guide](installation.md)
|
||||
|
||||
If you continue to experience issues, please reach out to our [community support channels](../README.md#-get-involved).
|
||||
@@ -27,38 +27,41 @@ AZURE_OPENAI_API_ENDPOINT=
|
||||
AZURE_OPENAI_API_KEY=
|
||||
ANTHROPIC_API_KEY=
|
||||
ANTHROPIC_API_HOST=
|
||||
DEEPSEEK_API_KEY=
|
||||
GEMINI_API_KEY=
|
||||
GROQ_API_KEY=
|
||||
LOCALAI_API_HOST=
|
||||
LOCALAI_API_KEY=
|
||||
MISTRAL_API_KEY=
|
||||
OLLAMA_API_HOST=
|
||||
OPENPIPE_API_KEY=
|
||||
OPENROUTER_API_KEY=
|
||||
PERPLEXITY_API_KEY=
|
||||
TOGETHERAI_API_KEY=
|
||||
XAI_API_KEY=
|
||||
|
||||
# Model Observability: Helicone
|
||||
HELICONE_API_KEY=
|
||||
|
||||
# Text-To-Speech
|
||||
ELEVENLABS_API_KEY=
|
||||
ELEVENLABS_API_HOST=
|
||||
ELEVENLABS_VOICE_ID=
|
||||
# Text-To-Image
|
||||
PRODIA_API_KEY=
|
||||
# Google Custom Search
|
||||
GOOGLE_CLOUD_API_KEY=
|
||||
GOOGLE_CSE_ID=
|
||||
# Browse
|
||||
PUPPETEER_WSS_ENDPOINT=
|
||||
|
||||
# Backend Analytics
|
||||
BACKEND_ANALYTICS=
|
||||
# Search
|
||||
GOOGLE_CLOUD_API_KEY=
|
||||
GOOGLE_CSE_ID=
|
||||
|
||||
# Text-To-Speech: ElevenLabs
|
||||
ELEVENLABS_API_KEY=
|
||||
ELEVENLABS_API_HOST=
|
||||
ELEVENLABS_VOICE_ID=
|
||||
# Text-To-Image: Prodia
|
||||
PRODIA_API_KEY=
|
||||
|
||||
# Backend HTTP Basic Authentication (see `deploy-authentication.md` for turning on authentication)
|
||||
HTTP_BASIC_AUTH_USERNAME=
|
||||
HTTP_BASIC_AUTH_PASSWORD=
|
||||
|
||||
|
||||
# Frontend variables
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=
|
||||
NEXT_PUBLIC_PLANTUML_SERVER_URL=
|
||||
@@ -80,24 +83,27 @@ For Database configuration see [deploy-database.md](deploy-database.md).
|
||||
The following variables when set will enable the corresponding LLMs on the server-side, without
|
||||
requiring the user to enter an API key
|
||||
|
||||
| Variable | Description | Required |
|
||||
|-----------------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------|
|
||||
| `OPENAI_API_KEY` | API key for OpenAI | Recommended |
|
||||
| `OPENAI_API_HOST` | Changes the backend host for the OpenAI vendor, to enable platforms such as Helicone and CloudFlare AI Gateway | Optional |
|
||||
| `OPENAI_API_ORG_ID` | Sets the "OpenAI-Organization" header field to support organization users | Optional |
|
||||
| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI endpoint - host only, without the path | Optional, but if set `AZURE_OPENAI_API_KEY` must also be set |
|
||||
| `AZURE_OPENAI_API_KEY` | Azure OpenAI API key, see [config-azure-openai.md](config-azure-openai.md) | Optional, but if set `AZURE_OPENAI_API_ENDPOINT` must also be set |
|
||||
| `ANTHROPIC_API_KEY` | The API key for Anthropic | Optional |
|
||||
| `ANTHROPIC_API_HOST` | Changes the backend host for the Anthropic vendor, to enable platforms such as [config-aws-bedrock.md](config-aws-bedrock.md) | Optional |
|
||||
| `GEMINI_API_KEY` | The API key for Google AI's Gemini | Optional |
|
||||
| `GROQ_API_KEY` | The API key for Groq Cloud | Optional |
|
||||
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
|
||||
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
|
||||
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
|
||||
| `OLLAMA_API_HOST`           | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md)                              |                                                                   |
|
||||
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
|
||||
| `PERPLEXITY_API_KEY` | The API key for Perplexity | Optional |
|
||||
| `TOGETHERAI_API_KEY` | The API key for Together AI | Optional |
|
||||
| Variable | Description | Required |
|
||||
|-----------------------------|----------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------|
|
||||
| `OPENAI_API_KEY` | API key for OpenAI | Recommended |
|
||||
| `OPENAI_API_HOST` | Changes the backend host for the OpenAI vendor, to enable platforms such as Helicone and CloudFlare AI Gateway | Optional |
|
||||
| `OPENAI_API_ORG_ID` | Sets the "OpenAI-Organization" header field to support organization users | Optional |
|
||||
| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI endpoint - host only, without the path | Optional, but if set `AZURE_OPENAI_API_KEY` must also be set |
|
||||
| `AZURE_OPENAI_API_KEY` | Azure OpenAI API key, see [config-azure-openai.md](config-azure-openai.md) | Optional, but if set `AZURE_OPENAI_API_ENDPOINT` must also be set |
|
||||
| `ANTHROPIC_API_KEY` | The API key for Anthropic | Optional |
|
||||
| `ANTHROPIC_API_HOST` | Changes the backend host for the Anthropic vendor, to enable platforms such as AWS Bedrock | Optional |
|
||||
| `DEEPSEEK_API_KEY` | The API key for Deepseek AI | Optional |
|
||||
| `GEMINI_API_KEY` | The API key for Google AI's Gemini | Optional |
|
||||
| `GROQ_API_KEY` | The API key for Groq Cloud | Optional |
|
||||
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
|
||||
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
|
||||
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
|
||||
| `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md) | |
|
||||
| `OPENPIPE_API_KEY` | The API key for OpenPipe | Optional |
|
||||
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
|
||||
| `PERPLEXITY_API_KEY` | The API key for Perplexity | Optional |
|
||||
| `TOGETHERAI_API_KEY` | The API key for Together AI | Optional |
|
||||
| `XAI_API_KEY` | The API key for xAI | Optional |
|
||||
|
||||
### LLM Observability: Helicone
|
||||
|
||||
@@ -128,8 +134,7 @@ Enable the app to Talk, Draw, and Google things up.
|
||||
| `GOOGLE_CSE_ID` | Google Custom/Programmable Search Engine ID - [Link to PSE](https://programmablesearchengine.google.com/) |
|
||||
| **Browse** | |
|
||||
| `PUPPETEER_WSS_ENDPOINT`         | Puppeteer WebSocket endpoint - used for browsing (page downloading), etc.                                           |
|
||||
| **Backend** | |
|
||||
| `BACKEND_ANALYTICS` | Semicolon-separated list of analytics flags (see backend.analytics.ts). Flags: `domain` logs the responding domain. |
|
||||
| **Backend** | |
|
||||
| `HTTP_BASIC_AUTH_USERNAME` | See the [Authentication](deploy-authentication.md) guide. Username for HTTP Basic Authentication. |
|
||||
| `HTTP_BASIC_AUTH_PASSWORD` | Password for HTTP Basic Authentication. |
|
||||
|
||||
@@ -147,5 +152,5 @@ The value of these variables are passed to the frontend (Web UI) - make sure the
|
||||
|
||||
---
|
||||
|
||||
For a higher level overview of backend code and environment customization,
|
||||
For a higher level overview of backend code and environment customization,
|
||||
see the [big-AGI Customization](customizations.md) guide.
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
# Big-AGI Advanced Tips & Tricks
|
||||
|
||||
> 🚨 This file is not meant for publication, and it's just been created as a handbook with tips
|
||||
> and tricks to make Big-AGI more efficient and productive. 🚨
|
||||
|
||||
Welcome to the advanced tips and tricks guide for Big-AGI. This document will help you make the most of the platform's existing features.
|
||||
|
||||
---
|
||||
|
||||
## Hidden Gems
|
||||
|
||||
- **Shift + Double-Click** on a chat message to **edit** it.
|
||||
- **Shift + Trash Icon** to **delete** chats and messages without confirmation.
|
||||
- also applies elsewhere: delete Attachments, etc.
|
||||
- **Shift + Click** on **New Chat** to create an incognito chat.
|
||||
- Drag a big-AGI saved chat into Big-AGI to load (or attach) it.
|
||||
|
||||
## Not-so-obvious Shortcuts
|
||||
|
||||
- When sending a message:
|
||||
- Enter is for newlines
|
||||
- **Shift + Enter** to send the message.
|
||||
- **Ctrl + Enter** to **Beam** the message.
|
||||
- **Alt/Option + Enter** to send the message without an answer.
|
||||
- When editing a message:
|
||||
- **Ctrl + Enter** to **Save** the changes.
|
||||
- **Shift + Ctrl + Enter** to **Save & Regenerate**.
|
||||
- Scroll between messages:
|
||||
- **Ctrl + Up/Down** to scroll between **messages** and/or **Beams**.
|
||||
|
||||
## Worth the Effort:
|
||||
|
||||
- [LiveFile](help-feature-livefile.md) works on **Chrome**: Pair and synchronize your documents and code blocks with files on your local system: refresh, save, update them.
|
||||
|
||||
## Best User Hacks:
|
||||
|
||||
-
|
||||
|
||||
---
|
||||
|
||||
Note: this document is just at the beginning. It's here so we can capture
|
||||
the best tips over time.
|
||||
@@ -0,0 +1,167 @@
|
||||
# LiveFile: Synchronize Your Documents with Local Files
|
||||
|
||||
## Introduction
|
||||
|
||||
**LiveFile** is a powerful feature in big-AGI that allows you to **pair and synchronize
|
||||
your documents and code blocks** with files on your local system.
|
||||
|
||||
This feature enables a **two-way connection between big-AGI and your local files on disk**,
|
||||
saving you time and effort.
|
||||
|
||||
With LiveFile, you can:
|
||||
|
||||
- **Pair** documents and code blocks with local files.
|
||||
- **Monitor** changes in local files and update content in big-AGI.
|
||||
- **Refresh** chat attachments with the latest content.
|
||||
- **Save** edits made in big-AGI back to your local files.
|
||||
- **Store** AI-generated code and content.
|
||||
|
||||
---
|
||||
|
||||
## Requirements
|
||||
|
||||
- **Supported Browsers:**
|
||||
- **Google Chrome** (desktop)
|
||||
- **Microsoft Edge** (desktop)
|
||||
- **Operating Systems:**
|
||||
- **Desktop platforms only**
|
||||
- **Note:** Mobile devices (iOS and Android) are **not supported** due to browser limitations.
|
||||
- **File Types:**
|
||||
- Designed for **text-based files** (e.g., `.txt`, `.md`, `.js`, `.py`).
|
||||
- **Performance:**
|
||||
- Can handle **dozens of files efficiently**.
|
||||
- **Limitations:**
|
||||
- **File Size Limit**:
|
||||
- Supports text files up to **10 MB**.
|
||||
- **Pairing Persistence:**
|
||||
- LiveFile connections **do not persist across sessions**.
|
||||
- After reloading the page, you will need to re-pair your files.
|
||||
- **Saving Overwrites:**
|
||||
- Saving changes in big-AGI will **overwrite the entire file**.
|
||||
- Use external tools for version control or incremental backups.
|
||||
|
||||
---
|
||||
|
||||
## Enabling LiveFile
|
||||
|
||||
LiveFile can be enabled automatically or manually in your Big-AGI workflow.
|
||||
|
||||
### Automatic Pairing
|
||||
|
||||
When you:
|
||||
|
||||
- **Attach**, **drop**, or **paste** a file into a chat message,
|
||||
|
||||
LiveFile is **automatically enabled** for that attachment. This means you can start
|
||||
monitoring and reloading changes without any additional setup.
|
||||
|
||||
### Manual Pairing
|
||||
|
||||
For existing attachments or code blocks that:
|
||||
|
||||
- **Do not have LiveFile enabled** (e.g., created on other devices),
|
||||
- **Are AI-generated code snippets without an associated file**,
|
||||
|
||||
You can manually pair them with a local file.
|
||||
|
||||
#### Pairing Attachments
|
||||
|
||||
1. **Select the Attachment:**
|
||||
- Click on the attachment in the chat to view it in the previewer.
|
||||
|
||||
2. **Initiate Pairing:**
|
||||
- Click on **"Pair File"** (🔗).
|
||||
- If you have open LiveFiles, they will be listed for easy selection.
|
||||
- Alternatively, you can select a new file from your local system.
|
||||
|
||||
3. **Grant Permissions**
|
||||
- When prompted, allow big-AGI to access the file.
|
||||
|
||||
#### Pairing Code Blocks
|
||||
|
||||
1. **Access Code Block Options:**
|
||||
- Click on the code block to reveal the header with options.
|
||||
|
||||
2. **Initiate Pairing:**
|
||||
- Click the **"Pair File"** button (🔗).
|
||||
- Select from your open LiveFiles or choose a new file.
|
||||
|
||||
3. **Confirm Pairing:**
|
||||
- Grant permission when prompted.
|
||||
|
||||
---
|
||||
|
||||
## Using LiveFile
|
||||
|
||||
### Monitoring Changes
|
||||
|
||||
- **Automatic Monitoring:**
|
||||
- LiveFile watches for changes in your paired local files.
|
||||
- If the file is modified outside of big-AGI, you'll be shown the changes in the LiveFile bar.
|
||||
- There is also a **"Replace with File"** option to manually load the latest content and see the changes.
|
||||
|
||||
- **Refreshing Content:**
|
||||
- Click **"Replace with File"** (🔄) to load the latest content from the paired file into big-AGI.
|
||||
|
||||
### Saving Edits Back to Paired Files
|
||||
|
||||
- **Editing Attachments or Code Blocks:**
|
||||
- Modify the content directly within big-AGI.
|
||||
- Attachments: Click on the attachment to open the previewer and click on "Edit" to make changes.
|
||||
- Code Blocks: Select "Edit" on the chat message to update code blocks.
|
||||
|
||||
- **Saving Changes:**
|
||||
- Click **"Save to File"** (💾) to overwrite the local file with your changes.
|
||||
- **Note:** This action overwrites the entire file. Ensure this is what you want before proceeding.
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Monitor External Changes:**
|
||||
- Refresh content in big-AGI if the local file has been modified outside the application.
|
||||
|
||||
- **Use a Version Control System:**
|
||||
- For critical files, consider using Git or other version control systems to track and monitor changes, authorship, and history.
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- **LiveFile Options Not Visible:**
|
||||
- Ensure you are using a **supported desktop browser**.
|
||||
- Check that you have the latest version of big-AGI.
|
||||
|
||||
- **Permission Issues:**
|
||||
- Confirm that you granted big-AGI permission to access your files.
|
||||
- Check your browser's settings to ensure file access is allowed.
|
||||
|
||||
---
|
||||
|
||||
## Technical Details
|
||||
|
||||
LiveFile uses the [File System Access API](https://developer.mozilla.org/en-US/docs/Web/API/File_System_Access_API) to
|
||||
interact with your local files securely. It leverages the [browser-fs-access](https://github.com/GoogleChromeLabs/browser-fs-access) library,
|
||||
an open-source project by Google Chrome Labs, which provides an easy interface to the File System Access API with fallbacks for broader browser support.
|
||||
|
||||
- **Security:**
|
||||
- Access to files requires explicit user permission.
|
||||
|
||||
- **Performance:**
|
||||
- Designed to handle dozens of files efficiently (tested on hundreds).
|
||||
- Works with the Big-AGI attachment system to recursively add directories.
|
||||
|
||||
- **Browser Support:**
|
||||
- Fully supported on **Google Chrome** and **Microsoft Edge** desktop versions.
|
||||
|
||||
---
|
||||
|
||||
## Another Big-AGI First!
|
||||
|
||||
You can significantly boost your productivity and streamline your workflow within big-AGI
|
||||
by understanding how to utilize LiveFile's features fully.
|
||||
|
||||
This feature is in Beta, as there are a few limitations and improvements to be made.
|
||||
Join us in enjoying and enhancing this feature on [big-AGI.com](https://big-agi.com), or
|
||||
[GitHub](https://github.com/enricoros/big-AGI) for support and [Discord](https://discord.gg/MkH4qj2Jp9)
|
||||
to share the love.
|
||||
@@ -0,0 +1,141 @@
|
||||
# Enabling Microphone Access for Speech Recognition
|
||||
|
||||
This guide explains how to enable microphone access for speech recognition in various browsers and mobile devices.
|
||||
Ensuring microphone access is essential for using voice features in applications like big-AGI.
|
||||
|
||||
## Desktop Browsers
|
||||
|
||||
### Google Chrome (All Platforms, recommended)
|
||||
|
||||
1. Open the website (e.g., big-AGI) in Chrome.
|
||||
2. Click the **lock icon** in the address bar.
|
||||
3. In the dropdown, find **"Microphone"**.
|
||||
- Set it to **"Allow"**.
|
||||
4. If "Microphone" isn't listed:
|
||||
- Click on **"Site settings"**.
|
||||
- Find **"Microphone"** in the permissions list.
|
||||
- Change the setting to **"Allow"**.
|
||||
5. **Refresh** the page.
|
||||
|
||||
### Safari (macOS)
|
||||
|
||||
**[Watch the video tutorial: How to enable Speech Recognition in Safari](https://vimeo.com/1010342201)**
|
||||
|
||||
If you're seeing a "Speech Recognition permission denied" error, follow these steps:
|
||||
|
||||
1. Open **System Settings**.
|
||||
- Go to **Privacy & Security** > **Speech Recognition**.
|
||||
- Enable Safari in the list of allowed applications.
|
||||
- Quit & Open Safari.
|
||||
2. Click **Safari** in the top menu bar.
|
||||
- Select **Settings**.
|
||||
- Go to the **Websites** tab.
|
||||
- Select **Microphone** from the sidebar.
|
||||
- Find big-AGI (or localhost for developers) in the list and set it to **Allow**.
|
||||
- Close the Settings window.
|
||||
3. **Refresh** the page.
|
||||
|
||||
This quick and simple fix should get essential voice input working in big-AGI on your Mac.
|
||||
|
||||
### Microsoft Edge (Windows)
|
||||
|
||||
1. Open the website in Edge.
|
||||
2. Click the **lock icon** in the address bar.
|
||||
3. Click **"Permissions for this site"**.
|
||||
4. Find **"Microphone"**.
|
||||
- Set it to **"Allow"**.
|
||||
5. **Refresh** the page.
|
||||
|
||||
### Firefox (All Platforms)
|
||||
|
||||
> **Note:** The Speech Recognition API is **not supported** in Firefox. If you're using Firefox, please switch to a supported browser to use speech recognition
|
||||
> features.
|
||||
|
||||
## Mobile Devices
|
||||
|
||||
### Android (Chrome)
|
||||
|
||||
1. Open the website in Chrome.
|
||||
2. Tap the **lock icon** in the address bar.
|
||||
3. Tap **"Permissions"**.
|
||||
4. Find **"Microphone"**.
|
||||
- Set it to **"Allow"**.
|
||||
5. **Refresh** the page.
|
||||
|
||||
### iOS (Safari)
|
||||
|
||||
1. Open the **Settings** app on your device.
|
||||
2. Scroll down and tap **"Safari"**.
|
||||
3. Tap **"Microphone"**.
|
||||
4. Ensure **"Ask"** or **"Allow"** is selected.
|
||||
5. Return to Safari and open the website.
|
||||
6. If prompted, allow microphone access.
|
||||
7. **Refresh** the page.
|
||||
|
||||
### iOS (Chrome)
|
||||
|
||||
> **Note:** Chrome on iOS uses Safari's engine due to system limitations. Microphone permissions are managed through iOS settings.
|
||||
|
||||
1. Open the **Settings** app.
|
||||
2. Scroll down and tap **"Chrome"**.
|
||||
3. Ensure **"Microphone"** is toggled **on**.
|
||||
4. Open Chrome and navigate to the website.
|
||||
5. If prompted, allow microphone access.
|
||||
6. **Refresh** the page.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you're still experiencing issues after enabling microphone access:
|
||||
|
||||
**Check System Permissions (macOS):**
|
||||
|
||||
- Open **System Settings**.
|
||||
- Go to **"Privacy & Security"**.
|
||||
- Select the **"Privacy"** tab.
|
||||
- Click **"Microphone"** in the sidebar.
|
||||
- Ensure your browser (e.g., Chrome, Safari) is checked.
|
||||
- You may need to unlock the settings by clicking the lock icon at the bottom.
|
||||
|
||||
**Check Microphone Access (Windows):**
|
||||
|
||||
- Open **Settings**.
|
||||
- Go to **"Privacy"** > **"Microphone"**.
|
||||
- Ensure **"Allow apps to access your microphone"** is **on**.
|
||||
- Scroll down and make sure your browser is allowed.
|
||||
|
||||
**Close Other Applications:**
|
||||
|
||||
- Close any applications that might be using the microphone.
|
||||
|
||||
**Restart the Browser:**
|
||||
|
||||
- Close all browser windows and reopen.
|
||||
|
||||
**Update Your Browser:**
|
||||
|
||||
- Ensure you're using the latest version.
|
||||
|
||||
**Check for Browser Extensions:**
|
||||
|
||||
- Disable extensions that might block access to the microphone.
|
||||
|
||||
For persistent issues, consult your browser's official support resources or contact big-AGI support.
|
||||
|
||||
## Technical Details
|
||||
|
||||
Big-AGI uses the [Web Speech API (SpeechRecognition)](https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition)
|
||||
to transcribe spoken words into text. This API provides real-time transcription with live previews and works on most
|
||||
modern mobile and desktop browsers.
|
||||
|
||||
**Note on Browser Support:**
|
||||
|
||||
| Browser | Support Level | Notes |
|
||||
|----------------|-----------------|------------------------------------------------------------------------|
|
||||
| Google Chrome | ✅ Recommended | Fully supported on desktop and Android. Preferred for best experience. |
|
||||
| Safari | ✅ Supported | Requires macOS/iOS 14 or later. |
|
||||
| Microsoft Edge | ✅ Supported | Fully supported on desktop. |
|
||||
| Firefox | ❌ Not Supported | SpeechRecognition API not available. |
|
||||
|
||||
**Recommendation:**
|
||||
For the best experience with speech recognition features, we strongly recommend using Google Chrome.
|
||||
Ensure your browser is up to date to benefit from the latest features and security updates.
|
||||
@@ -0,0 +1,156 @@
|
||||
# Installation Guide
|
||||
|
||||
Welcome to the big-AGI Installation Guide - Whether you're a developer
|
||||
eager to explore, a system integrator, or an enterprise looking for a
|
||||
white-label solution, this comprehensive guide ensures a smooth setup
|
||||
process for your own instance of big-AGI and related products.
|
||||
|
||||
**Try big-AGI** - You don't need to install anything if you want to play with big-AGI
|
||||
and have your API keys to various model services. You can access our free instance on [big-AGI.com](https://big-agi.com).
|
||||
The free instance runs the latest `main-stable` branch from this repository.
|
||||
|
||||
## 🧩 Build-your-own
|
||||
|
||||
If you want to change the code, have a deeper configuration,
|
||||
add your own models, or run your own instance, follow the steps below.
|
||||
|
||||
### Local Development
|
||||
|
||||
**Prerequisites:**
|
||||
|
||||
- Node.js and npm installed on your machine.
|
||||
|
||||
**Steps:**
|
||||
|
||||
1. Clone the big-AGI repository:
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-AGI.git
|
||||
cd big-AGI
|
||||
```
|
||||
2. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
3. Run the development server:
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
Your big-AGI instance is now running at `http://localhost:3000`.
|
||||
|
||||
### Local Production build
|
||||
|
||||
The production build is optimized for performance and follows
|
||||
the same steps 1 and 2 as for [local development](#local-development).
|
||||
|
||||
3. Build the production version:
|
||||
```bash
|
||||
# .. repeat the steps above up to `npm install`, then:
|
||||
npm run build
|
||||
```
|
||||
4. Start the production server (`npx` may be optional):
|
||||
```bash
|
||||
npx next start --port 3000
|
||||
```
|
||||
Your big-AGI production instance is on `http://localhost:3000`.
|
||||
|
||||
### Advanced Customization
|
||||
|
||||
Want to pre-enable models, customize the interface, deploy with username/password protection, or alter the code to your needs?
|
||||
Check out the [Customizations Guide](customizations.md) for detailed instructions.
|
||||
|
||||
## ☁️ Cloud Deployment Options
|
||||
|
||||
To deploy big-AGI on a public server, you have several options. Choose the one that best fits your needs.
|
||||
|
||||
### Deploy on Vercel
|
||||
|
||||
Install big-AGI on Vercel with just a few clicks.
|
||||
|
||||
Create your GitHub fork, create a Vercel project over that fork, and deploy it. Or press the button below for convenience.
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
### Deploy on Cloudflare
|
||||
|
||||
Deploy on Cloudflare's global network by installing big-AGI on
|
||||
Cloudflare Pages. Check out the [Cloudflare Installation Guide](deploy-cloudflare.md)
|
||||
for step-by-step instructions.
|
||||
|
||||
### Docker Deployments
|
||||
|
||||
Containerize your big-AGI installation using Docker for portability and scalability.
|
||||
Our [Docker Deployment Guide](deploy-docker.md) will walk you through the process,
|
||||
or follow the steps below for a quick start.
|
||||
|
||||
1. (optional) Build the Docker image - if you do not want to use the [pre-built Docker images](https://github.com/enricoros/big-AGI/pkgs/container/big-agi):
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
```
|
||||
2. Run the Docker container with either:
|
||||
```bash
|
||||
# 2A. if you built the image yourself:
|
||||
docker run -d -p 3000:3000 big-agi
|
||||
|
||||
# 2B. or use the pre-built image:
|
||||
docker run -d -p 3000:3000 ghcr.io/enricoros/big-agi
|
||||
|
||||
# 2C. or use docker-compose:
|
||||
docker-compose up
|
||||
```
|
||||
Access your big-AGI instance at `http://localhost:3000`.
|
||||
|
||||
If you deploy big-AGI behind a reverse proxy, you may want to check out the [Reverse Proxy Configuration Guide](deploy-reverse-proxy.md).
|
||||
|
||||
### Kubernetes Deployment
|
||||
|
||||
Deploy big-AGI on a Kubernetes cluster for enhanced scalability and management. Follow these steps for a Kubernetes deployment:
|
||||
|
||||
1. Clone the big-AGI repository:
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-AGI.git
|
||||
cd big-AGI
|
||||
```
|
||||
|
||||
2. Configure the environment variables:
|
||||
```bash
|
||||
cp docs/k8s/env-secret.yaml env-secret.yaml
|
||||
vim env-secret.yaml # Edit the file to set your environment variables
|
||||
```
|
||||
|
||||
3. Apply the Kubernetes configurations:
|
||||
```bash
|
||||
kubectl create namespace ns-big-agi
|
||||
kubectl apply -f docs/k8s/big-agi-deployment.yaml -f env-secret.yaml
|
||||
```
|
||||
|
||||
4. Verify the deployment:
|
||||
```bash
|
||||
kubectl -n ns-big-agi get svc,pod,deployment
|
||||
```
|
||||
|
||||
5. Access the big-AGI application:
|
||||
```bash
|
||||
kubectl -n ns-big-agi port-forward service/svc-big-agi 3000:3000
|
||||
```
|
||||
Your big-AGI instance is now accessible at `http://localhost:3000`.
|
||||
|
||||
For more detailed instructions on Kubernetes deployment, including updating and troubleshooting, refer to our [Kubernetes Deployment Guide](deploy-k8s.md).
|
||||
|
||||
### Midori AI Subsystem for Docker Deployment
|
||||
|
||||
Follow the instructions found on [Midori AI Subsystem Site](https://io.midori-ai.xyz/subsystem/manager/)
|
||||
for your host OS. After completing the setup process, install the Big-AGI docker backend to the Midori AI Subsystem.
|
||||
|
||||
## Enterprise-Grade Installation
|
||||
|
||||
For businesses seeking a fully-managed, scalable solution, consider our managed installations.
|
||||
Enjoy all the features of big-AGI without the hassle of infrastructure management. Email [hello@big-agi.com](mailto:hello@big-agi.com) to learn more.
|
||||
|
||||
## Support
|
||||
|
||||
Join our vibrant community of developers, researchers, and AI enthusiasts. Share your projects, get help, and collaborate with others.
|
||||
|
||||
- [Discord Community](https://discord.gg/MkH4qj2Jp9)
|
||||
- [Twitter](https://twitter.com/yourusername)
|
||||
|
||||
For any questions or inquiries, please don't hesitate to [reach out to our team](mailto:hello@big-agi.com).
|
||||
@@ -0,0 +1,52 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: ns-big-agi
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: big-agi
|
||||
name: deployment-big-agi
|
||||
namespace: ns-big-agi
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: big-agi
|
||||
strategy: {}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: big-agi
|
||||
spec:
|
||||
containers:
|
||||
- image: ghcr.io/enricoros/big-agi:latest
|
||||
name: big-agi
|
||||
ports:
|
||||
- containerPort: 3000
|
||||
args:
|
||||
- next
|
||||
- start
|
||||
- -p
|
||||
- "3000"
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: env
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
app: big-agi
|
||||
name: svc-big-agi
|
||||
namespace: ns-big-agi
|
||||
spec:
|
||||
ports:
|
||||
- name: "http"
|
||||
port: 3000
|
||||
targetPort: 3000
|
||||
selector:
|
||||
app: big-agi
|
||||
@@ -0,0 +1,49 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: env
|
||||
namespace: ns-big-agi
|
||||
type: Opaque
|
||||
stringData:
|
||||
# IMPORTANT: This file contains sensitive information. Do not commit changes to version control.
|
||||
# All variables are optional. Fill in only the ones you need.
|
||||
#
|
||||
# For the latest information on all the environment variables, see /docs/environment-variables.md
|
||||
#
|
||||
|
||||
# LLMs
|
||||
OPENAI_API_KEY: ""
|
||||
OPENAI_API_HOST: ""
|
||||
OPENAI_API_ORG_ID: ""
|
||||
AZURE_OPENAI_API_ENDPOINT: ""
|
||||
AZURE_OPENAI_API_KEY: ""
|
||||
ANTHROPIC_API_KEY: ""
|
||||
ANTHROPIC_API_HOST: ""
|
||||
DEEPSEEK_API_KEY: ""
|
||||
GEMINI_API_KEY: ""
|
||||
GROQ_API_KEY: ""
|
||||
LOCALAI_API_HOST: ""
|
||||
LOCALAI_API_KEY: ""
|
||||
MISTRAL_API_KEY: ""
|
||||
OLLAMA_API_HOST: ""
|
||||
OPENPIPE_API_KEY: ""
|
||||
OPENROUTER_API_KEY: ""
|
||||
PERPLEXITY_API_KEY: ""
|
||||
TOGETHERAI_API_KEY: ""
|
||||
XAI_API_KEY: ""
|
||||
|
||||
# Browse
|
||||
PUPPETEER_WSS_ENDPOINT: ""
|
||||
|
||||
# Search
|
||||
GOOGLE_CLOUD_API_KEY: ""
|
||||
GOOGLE_CSE_ID: ""
|
||||
|
||||
# Text-To-Speech: Eleven Labs
|
||||
ELEVENLABS_API_KEY: ""
|
||||
ELEVENLABS_API_HOST: ""
|
||||
ELEVENLABS_VOICE_ID: ""
|
||||
|
||||
# Text-To-Image: Prodia
|
||||
PRODIA_API_KEY: ""
|
||||
@@ -0,0 +1,43 @@
|
||||
# ReAct: question answering with Reasoning and Actions
|
||||
|
||||
## What is ReAct?
|
||||
|
||||
[ReAct](https://arxiv.org/abs/2210.03629) (Reason+Act) is a classic AI question-answering feature,
|
||||
that combines reasoning with actions to provide informed answers.
|
||||
|
||||
Within Big-AGI, users can invoke ReAct to ask complex questions that require multiple steps to answer.
|
||||
|
||||
| Mode | Activation | Information Sources | Reasoning Visibility | When to Use |
|
||||
|-------|-----------------------------------|------------------------------------------------------|------------------------------------|--------------------------------------------------|
|
||||
| Chat | Just type and send | **Pre-trained knowledge only** | Only shows final response | Quick answers, general knowledge queries |
|
||||
| ReAct | Type "/react" before the question | **Web loads, Web searches, Wikipedia, calculations** | Shows step-by-step thought process | Complex, multi-step, or research-based questions |
|
||||
|
||||
Example of ReAct in action, taking a question about current events, googling results, opening a page, and summarizing the information:
|
||||
|
||||
https://github.com/user-attachments/assets/c3480428-9ab8-4257-a869-2541bf44a062
|
||||
|
||||
The following tools are implemented in Big-AGI:
|
||||
|
||||
- **browse**: loads web pages (URLs) and extracts information, using a correctly configured `Tools > Browsing` API
|
||||
- **search**: searches the web to produce page URLs, using a correctly configured `Tools > Google Search` ([Google Programmable Search Engine](https://programmablesearchengine.google.com/about/)) API
|
||||
- **wikipedia**: looks up information on Wikipedia pages
|
||||
- **calculate**: performs mathematical calculations by executing typescript code
|
||||
- warning: (!) unsafe and dangerous, do not use for untrusted code/LLMs
|
||||
|
||||
## How to Use ReAct in Big-AGI
|
||||
|
||||
1. **Invoking ReAct**: Type "/react" followed by your question in the chat.
|
||||
2. **What to Expect**:
|
||||
|
||||
- An ephemeral space will show the AI's thought process and actions, showing all the steps taken.
|
||||
- The final answer will appear in the main chat.
|
||||
|
||||
3. **Available Actions**: Web searches, Wikipedia lookups, calculations, and optionally web browsing.
|
||||
|
||||
## Good to know:
|
||||
|
||||
- **ReAct operates in isolation** from the main chat history.
|
||||
- It **will take longer than standard responses** due to multiple steps.
|
||||
- Web searches and browsing may have privacy implications, and require **tool configuration** in the UI.
|
||||
- Errors or limitations in accessing external resources may affect results.
|
||||
- ReAct does not use the [Tool or Function Calling](https://platform.openai.com/docs/guides/function-calling) feature of AI models; instead, it uses the old-school approach of parsing and executing actions.
|
||||
@@ -1,10 +1,18 @@
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
// Build information
|
||||
process.env.NEXT_PUBLIC_BUILD_HASH = 'big-agi-2-dev';
|
||||
process.env.NEXT_PUBLIC_BUILD_PKGVER = JSON.parse('' + await readFile(new URL('./package.json', import.meta.url))).version;
|
||||
process.env.NEXT_PUBLIC_BUILD_TIMESTAMP = new Date().toISOString();
|
||||
console.log(` 🧠 \x1b[1mbig-AGI\x1b[0m v${process.env.NEXT_PUBLIC_BUILD_PKGVER} (@${process.env.NEXT_PUBLIC_BUILD_HASH})`);
|
||||
|
||||
// Non-default build types
|
||||
const buildType =
|
||||
process.env.BIG_AGI_BUILD === 'standalone' ? 'standalone'
|
||||
: process.env.BIG_AGI_BUILD === 'static' ? 'export'
|
||||
: undefined;
|
||||
|
||||
buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
let nextConfig = {
|
||||
@@ -23,11 +31,10 @@ let nextConfig = {
|
||||
},
|
||||
|
||||
// [puppeteer] https://github.com/puppeteer/puppeteer/issues/11052
|
||||
experimental: {
|
||||
serverComponentsExternalPackages: ['puppeteer-core'],
|
||||
},
|
||||
// NOTE: we may not be needing this anymore, as we use '@cloudflare/puppeteer'
|
||||
serverExternalPackages: ['puppeteer-core'],
|
||||
|
||||
webpack: (config, _options) => {
|
||||
webpack: (config, { isServer }) => {
|
||||
// @mui/joy: anything material gets redirected to Joy
|
||||
config.resolve.alias['@mui/material'] = '@mui/joy';
|
||||
|
||||
@@ -37,9 +44,17 @@ let nextConfig = {
|
||||
layers: true,
|
||||
};
|
||||
|
||||
// fix warnings for async functions in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
if (!isServer) {
|
||||
config.output.environment = { ...config.output.environment, asyncFunction: true };
|
||||
}
|
||||
|
||||
// prevent too many small chunks (40kb min) on 'client' packs (not 'server' or 'edge-server')
|
||||
if (typeof config.optimization.splitChunks === 'object' && config.optimization.splitChunks.minSize)
|
||||
// noinspection JSUnresolvedReference
|
||||
if (typeof config.optimization.splitChunks === 'object' && config.optimization.splitChunks.minSize) {
|
||||
// noinspection JSUnresolvedReference
|
||||
config.optimization.splitChunks.minSize = 40 * 1024;
|
||||
}
|
||||
|
||||
return config;
|
||||
},
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
{
|
||||
"name": "big-agi",
|
||||
"version": "1.13.0",
|
||||
"version": "1.91.0",
|
||||
"private": true,
|
||||
"author": "Enrico Ros <enrico.ros@gmail.com>",
|
||||
"repository": "https://github.com/enricoros/big-agi",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"dev": "next dev --turbopack",
|
||||
"dev-debug": "cross-env NODE_OPTIONS='--inspect' next dev",
|
||||
"dev-https": "next dev --experimental-https",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint",
|
||||
"postinstall": "prisma generate",
|
||||
"postinstall": "prisma generate --no-hints",
|
||||
"db:push": "prisma db push",
|
||||
"db:studio": "prisma studio",
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local"
|
||||
@@ -18,70 +20,92 @@
|
||||
"schema": "src/server/prisma/schema.prisma"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/cache": "^11.11.0",
|
||||
"@emotion/react": "^11.11.3",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@dnd-kit/modifiers": "^9.0.0",
|
||||
"@dnd-kit/sortable": "^10.0.0",
|
||||
"@dnd-kit/utilities": "^3.2.2",
|
||||
"@emotion/cache": "^11.14.0",
|
||||
"@emotion/react": "^11.14.0",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@mui/icons-material": "^5.15.11",
|
||||
"@mui/joy": "^5.0.0-beta.29",
|
||||
"@next/bundle-analyzer": "^14.1.0",
|
||||
"@next/third-parties": "^14.1.0",
|
||||
"@prisma/client": "^5.10.2",
|
||||
"@sanity/diff-match-patch": "^3.1.1",
|
||||
"@t3-oss/env-nextjs": "^0.9.2",
|
||||
"@tanstack/react-query": "~4.36.1",
|
||||
"@trpc/client": "10.44.1",
|
||||
"@trpc/next": "10.44.1",
|
||||
"@trpc/react-query": "10.44.1",
|
||||
"@trpc/server": "10.44.1",
|
||||
"@vercel/analytics": "^1.2.2",
|
||||
"@vercel/speed-insights": "^1.0.10",
|
||||
"@emotion/styled": "^11.14.0",
|
||||
"@mui/icons-material": "^5.16.14",
|
||||
"@mui/joy": "^5.0.0-beta.51",
|
||||
"@mui/material": "^5.16.14",
|
||||
"@next/bundle-analyzer": "^15.1.4",
|
||||
"@next/third-parties": "^15.1.4",
|
||||
"@prisma/client": "~5.22.0",
|
||||
"@t3-oss/env-nextjs": "^0.11.1",
|
||||
"@tanstack/react-query": "^5.63.0",
|
||||
"@tanstack/react-virtual": "^3.11.2",
|
||||
"@trpc/client": "11.0.0-rc.688",
|
||||
"@trpc/next": "11.0.0-rc.688",
|
||||
"@trpc/react-query": "11.0.0-rc.688",
|
||||
"@trpc/server": "11.0.0-rc.688",
|
||||
"@vercel/analytics": "^1.4.1",
|
||||
"@vercel/speed-insights": "^1.1.0",
|
||||
"browser-fs-access": "^0.35.0",
|
||||
"eventsource-parser": "^1.1.2",
|
||||
"cheerio": "^1.0.0",
|
||||
"dexie": "^4.0.10",
|
||||
"dexie-react-hooks": "^1.1.7",
|
||||
"diff": "^7.0.0",
|
||||
"eventsource-parser": "^3.0.0",
|
||||
"idb-keyval": "^6.2.1",
|
||||
"next": "^14.1.0",
|
||||
"mammoth": "^1.9.0",
|
||||
"nanoid": "^5.0.9",
|
||||
"next": "^15.1.4",
|
||||
"nprogress": "^0.2.0",
|
||||
"pdfjs-dist": "4.0.379",
|
||||
"pdfjs-dist": "4.10.38",
|
||||
"plantuml-encoder": "^1.4.0",
|
||||
"prismjs": "^1.29.0",
|
||||
"react": "^18.2.0",
|
||||
"react-beautiful-dnd": "^13.1.1",
|
||||
"react": "^18.3.1",
|
||||
"react-csv": "^2.2.2",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-hook-form": "^7.54.2",
|
||||
"react-katex": "^3.0.1",
|
||||
"react-markdown": "^9.0.1",
|
||||
"react-player": "^2.14.1",
|
||||
"react-resizable-panels": "^2.0.11",
|
||||
"react-markdown": "^9.0.3",
|
||||
"react-player": "^2.16.0",
|
||||
"react-resizable-panels": "^2.1.7",
|
||||
"react-timeago": "^7.2.0",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"sharp": "^0.33.2",
|
||||
"superjson": "^2.2.1",
|
||||
"tesseract.js": "^5.0.5",
|
||||
"tiktoken": "^1.0.13",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4",
|
||||
"zustand": "^4.5.1"
|
||||
"remark-mark-highlight": "^0.1.1",
|
||||
"remark-math": "^6.0.0",
|
||||
"sharp": "^0.33.5",
|
||||
"superjson": "^2.2.2",
|
||||
"tesseract.js": "^6.0.0",
|
||||
"tiktoken": "^1.0.18",
|
||||
"turndown": "^7.2.0",
|
||||
"zod": "^3.24.1",
|
||||
"zod-to-json-schema": "^3.24.1",
|
||||
"zustand": "^5.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cloudflare/puppeteer": "^0.0.5",
|
||||
"@types/node": "^20.11.20",
|
||||
"@types/diff": "^7.0.0",
|
||||
"@types/node": "^22.10.5",
|
||||
"@types/nprogress": "^0.2.3",
|
||||
"@types/plantuml-encoder": "^1.4.2",
|
||||
"@types/prismjs": "^1.26.3",
|
||||
"@types/react": "^18.2.59",
|
||||
"@types/prismjs": "^1.26.5",
|
||||
"@types/react": "^18.3.18",
|
||||
"@types/react-beautiful-dnd": "^13.1.8",
|
||||
"@types/react-csv": "^1.1.10",
|
||||
"@types/react-dom": "^18.2.19",
|
||||
"@types/react-dom": "^18.3.5",
|
||||
"@types/react-katex": "^3.0.4",
|
||||
"@types/react-timeago": "^4.1.7",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-next": "^14.1.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prisma": "^5.10.2",
|
||||
"typescript": "^5.3.3"
|
||||
"@types/turndown": "^5.0.5",
|
||||
"cross-env": "^7.0.3",
|
||||
"eslint": "^9.17.0",
|
||||
"eslint-config-next": "^15.1.4",
|
||||
"prettier": "^3.4.2",
|
||||
"prisma": "~5.22.0",
|
||||
"puppeteer-core": "^23.11.1",
|
||||
"typescript": "^5.7.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^20.0.0 || ^18.0.0"
|
||||
"node": "^22.0.0 || ^20.0.0"
|
||||
},
|
||||
"overrides": {
|
||||
"@types/react": "^18.3.18",
|
||||
"@types/react-dom": "^18.3.5",
|
||||
"uri-js": "npm:uri-js-replace"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,20 +11,27 @@ import 'katex/dist/katex.min.css';
|
||||
import '~/common/styles/CodePrism.css';
|
||||
import '~/common/styles/GithubMarkdown.css';
|
||||
import '~/common/styles/NProgress.css';
|
||||
import '~/common/styles/agi.effects.css';
|
||||
import '~/common/styles/app.styles.css';
|
||||
|
||||
import { ProviderBackendAndNoSSR } from '~/common/providers/ProviderBackendAndNoSSR';
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
import { OverlaysInsert } from '~/common/layout/overlays/OverlaysInsert';
|
||||
import { ProviderBackendCapabilities } from '~/common/providers/ProviderBackendCapabilities';
|
||||
import { ProviderBootstrapLogic } from '~/common/providers/ProviderBootstrapLogic';
|
||||
import { ProviderSingleTab } from '~/common/providers/ProviderSingleTab';
|
||||
import { ProviderSnacks } from '~/common/providers/ProviderSnacks';
|
||||
import { ProviderTRPCQueryClient } from '~/common/providers/ProviderTRPCQueryClient';
|
||||
import { ProviderTheming } from '~/common/providers/ProviderTheming';
|
||||
import { SnackbarInsert } from '~/common/components/snackbar/SnackbarInsert';
|
||||
import { hasGoogleAnalytics, OptionalGoogleAnalytics } from '~/common/components/GoogleAnalytics';
|
||||
import { isVercelFromFrontend } from '~/common/util/pwaUtils';
|
||||
|
||||
|
||||
const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) =>
|
||||
<>
|
||||
const Big_AGI_App = ({ Component, emotionCache, pageProps }: MyAppProps) => {
|
||||
|
||||
// We are using a nextjs per-page layout pattern to bring the (Optima) layout creation to a shared place
|
||||
// This reduces the flicker and the time switching between apps, and seems to not have impact on
|
||||
// the build. This is a good trade-off for now.
|
||||
const getLayout = Component.getLayout ?? ((page: any) => page);
|
||||
|
||||
return <>
|
||||
|
||||
<Head>
|
||||
<title>{Brand.Title.Common}</title>
|
||||
@@ -33,23 +40,23 @@ const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) =>
|
||||
|
||||
<ProviderTheming emotionCache={emotionCache}>
|
||||
<ProviderSingleTab>
|
||||
<ProviderBootstrapLogic>
|
||||
<ProviderTRPCQueryClient>
|
||||
<ProviderSnacks>
|
||||
<ProviderBackendAndNoSSR>
|
||||
<Component {...pageProps} />
|
||||
</ProviderBackendAndNoSSR>
|
||||
</ProviderSnacks>
|
||||
</ProviderTRPCQueryClient>
|
||||
</ProviderBootstrapLogic>
|
||||
<ProviderBackendCapabilities>
|
||||
{/* ^ Backend capabilities & SSR boundary */}
|
||||
<ProviderBootstrapLogic>
|
||||
<SnackbarInsert />
|
||||
{getLayout(<Component {...pageProps} />)}
|
||||
<OverlaysInsert />
|
||||
</ProviderBootstrapLogic>
|
||||
</ProviderBackendCapabilities>
|
||||
</ProviderSingleTab>
|
||||
</ProviderTheming>
|
||||
|
||||
{isVercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{isVercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
|
||||
|
||||
</>;
|
||||
};
|
||||
|
||||
// enables the React Query API invocation
|
||||
export default apiQuery.withTRPC(MyApp);
|
||||
// Initializes React Query and tRPC, and enables the tRPC React Query hooks (apiQuery).
|
||||
export default apiQuery.withTRPC(Big_AGI_App);
|
||||
@@ -2,7 +2,7 @@ import * as React from 'react';
|
||||
import { AppType, MyAppProps } from 'next/app';
|
||||
import { default as Document, DocumentContext, DocumentProps, Head, Html, Main, NextScript } from 'next/document';
|
||||
import createEmotionServer from '@emotion/server/create-instance';
|
||||
import { getInitColorSchemeScript } from '@mui/joy/styles';
|
||||
import InitColorSchemeScript from '@mui/joy/InitColorSchemeScript';
|
||||
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { createEmotionCache } from '~/common/app.theme';
|
||||
@@ -26,7 +26,7 @@ export default function MyDocument({ emotionStyleTags }: MyDocumentProps) {
|
||||
<link rel='icon' type='image/png' sizes='16x16' href='/icons/favicon-16x16.png' />
|
||||
<link rel='apple-touch-icon' sizes='180x180' href='/apple-touch-icon.png' />
|
||||
<link rel='manifest' href='/manifest.json' />
|
||||
<meta name='apple-mobile-web-app-capable' content='yes' />
|
||||
<meta name='mobile-web-app-capable' content='yes' />
|
||||
<meta name='apple-mobile-web-app-status-bar-style' content='black' />
|
||||
|
||||
{/* Opengraph */}
|
||||
@@ -51,7 +51,7 @@ export default function MyDocument({ emotionStyleTags }: MyDocumentProps) {
|
||||
{emotionStyleTags}
|
||||
</Head>
|
||||
<body>
|
||||
{getInitColorSchemeScript()}
|
||||
<InitColorSchemeScript />
|
||||
<Main />
|
||||
<NextScript />
|
||||
</body>
|
||||
|
||||
@@ -2,9 +2,7 @@ import * as React from 'react';
|
||||
|
||||
import { AppCall } from '../src/apps/call/AppCall';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function CallPage() {
|
||||
return withLayout({ type: 'optima' }, <AppCall />);
|
||||
}
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppCall />);
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppBeam } from '../../src/apps/beam/AppBeam';
|
||||
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppBeam />);
|
||||
@@ -0,0 +1,8 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppDiff } from '../src/apps/diff/AppDiff';
|
||||
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppDiff />);
|
||||
@@ -2,9 +2,7 @@ import * as React from 'react';
|
||||
|
||||
import { AppDraw } from '../src/apps/draw/AppDraw';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function DrawPage() {
|
||||
return withLayout({ type: 'optima' }, <AppDraw />);
|
||||
}
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppDraw />);
|
||||
|
||||
@@ -2,13 +2,13 @@ import * as React from 'react';
|
||||
|
||||
import { AppChat } from '../src/apps/chat/AppChat';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function IndexPage() {
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => {
|
||||
|
||||
// TODO: This Index page will point to the Dashboard (or a landing page)
|
||||
// For now it offers the chat experience, but this will change. #299
|
||||
|
||||
return withLayout({ type: 'optima' }, <AppChat />);
|
||||
}
|
||||
return <AppChat />;
|
||||
});
|
||||
|
||||
@@ -6,32 +6,31 @@ import DownloadIcon from '@mui/icons-material/Download';
|
||||
|
||||
import { AppPlaceholder } from '../../src/apps/AppPlaceholder';
|
||||
|
||||
import { backendCaps } from '~/modules/backend/state-backend';
|
||||
import { getPlantUmlServerUrl } from '~/modules/blocks/code/RenderCode';
|
||||
import { getBackendCapabilities } from '~/modules/backend/store-backend-capabilities';
|
||||
import { getPlantUmlServerUrl } from '~/modules/blocks/code/code-renderers/RenderCodePlantUML';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
// app config
|
||||
// basics
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { ROUTE_APP_CHAT, ROUTE_INDEX } from '~/common/app.routes';
|
||||
|
||||
// apps access
|
||||
import { incrementalNewsVersion } from '../../src/apps/news/news.version';
|
||||
import { Release } from '~/common/app.release';
|
||||
|
||||
// capabilities access
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs, useCapabilityTextToImage } from '~/common/components/useCapabilities';
|
||||
|
||||
// stores access
|
||||
import { getLLMsDebugInfo } from '~/modules/llms/store-llms';
|
||||
import { useAppStateStore } from '~/common/state/store-appstate';
|
||||
import { useChatStore } from '~/common/state/store-chats';
|
||||
import { useFolderStore } from '~/common/state/store-folders';
|
||||
import { getLLMsDebugInfo } from '~/common/stores/llms/store-llms';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useFolderStore } from '~/common/stores/folders/store-chat-folders';
|
||||
import { useLogicSherpaStore } from '~/common/logic/store-logic-sherpa';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
// utils access
|
||||
import { clientHostName, isChromeDesktop, isFirefox, isIPhoneUser, isMacUser, isPwa, isVercelFromFrontend } from '~/common/util/pwaUtils';
|
||||
import { BrowserLang, clientHostName, Is, isPwa } from '~/common/util/pwaUtils';
|
||||
import { getGA4MeasurementId } from '~/common/components/GoogleAnalytics';
|
||||
import { prettyTimestampForFilenames } from '~/common/util/timeUtils';
|
||||
import { supportsClipboardRead } from '~/common/util/clipboardUtils';
|
||||
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
|
||||
@@ -70,28 +69,27 @@ function DebugJsonCard(props: { title: string, data: any }) {
|
||||
}
|
||||
|
||||
|
||||
const frontendBuild = Release.buildInfo('frontend');
|
||||
|
||||
function AppDebug() {
|
||||
|
||||
// state
|
||||
const [saved, setSaved] = React.useState(false);
|
||||
|
||||
// external state
|
||||
const backendCapabilities = backendCaps();
|
||||
const backendCaps = getBackendCapabilities();
|
||||
const chatsCount = useChatStore.getState().conversations?.length;
|
||||
const uxLabsExperiments = Object.entries(useUXLabsStore.getState()).filter(([_k, v]) => v === true).map(([k, _]) => k).join(', ');
|
||||
const { folders, enableFolders } = useFolderStore.getState();
|
||||
const { lastSeenNewsVersion, usageCount } = useAppStateStore.getState();
|
||||
|
||||
const { lastSeenNewsVersion, usageCount } = useLogicSherpaStore.getState();
|
||||
|
||||
// derived state
|
||||
const cClient = {
|
||||
// isBrowser,
|
||||
isChromeDesktop,
|
||||
isFirefox,
|
||||
isIPhone: isIPhoneUser,
|
||||
isMac: isMacUser,
|
||||
Is,
|
||||
BrowserLang,
|
||||
isPWA: isPwa(),
|
||||
supportsClipboardPaste: supportsClipboardRead,
|
||||
supportsClipboardPaste: supportsClipboardRead(),
|
||||
supportsScreenCapture,
|
||||
};
|
||||
const cProduct = {
|
||||
@@ -105,18 +103,21 @@ function AppDebug() {
|
||||
chatsCount,
|
||||
foldersCount: folders?.length,
|
||||
foldersEnabled: enableFolders,
|
||||
newsCurrent: incrementalNewsVersion,
|
||||
newsCurrent: Release.Monotonics.NewsVersion,
|
||||
newsSeen: lastSeenNewsVersion,
|
||||
labsActive: uxLabsExperiments,
|
||||
reloads: usageCount,
|
||||
},
|
||||
release: {
|
||||
app: Release.App,
|
||||
build: frontendBuild,
|
||||
},
|
||||
};
|
||||
const cBackend = {
|
||||
configuration: backendCapabilities,
|
||||
configuration: backendCaps,
|
||||
deployment: {
|
||||
home: Brand.URIs.Home,
|
||||
hostName: clientHostName(),
|
||||
isVercelFromFrontend,
|
||||
measurementId: getGA4MeasurementId(),
|
||||
plantUmlServerUrl: getPlantUmlServerUrl(),
|
||||
routeIndex: ROUTE_INDEX,
|
||||
@@ -127,7 +128,7 @@ function AppDebug() {
|
||||
const handleDownload = async () => {
|
||||
fileSave(
|
||||
new Blob([JSON.stringify({ client: cClient, agi: cProduct, backend: cBackend }, null, 2)], { type: 'application/json' }),
|
||||
{ fileName: `big-agi-debug-${new Date().toISOString().replace(/:/g, '-')}.json`, extensions: ['.json'] },
|
||||
{ fileName: `big-agi_debug_${prettyTimestampForFilenames()}.json`, extensions: ['.json'] },
|
||||
)
|
||||
.then(() => setSaved(true))
|
||||
.catch(e => console.error('Error saving debug.json', e));
|
||||
@@ -162,6 +163,4 @@ function AppDebug() {
|
||||
}
|
||||
|
||||
|
||||
export default function DebugPage() {
|
||||
return withLayout({ type: 'plain' }, <AppDebug />);
|
||||
};
|
||||
export default withNextJSPerPageLayout({ type: 'container' }, () => <AppDebug />);
|
||||
|
||||
@@ -2,20 +2,19 @@ import * as React from 'react';
|
||||
|
||||
import { Box, Typography } from '@mui/joy';
|
||||
|
||||
import { useModelsStore } from '~/modules/llms/store-llms';
|
||||
import { llmsStoreActions } from '~/common/stores/llms/store-llms';
|
||||
|
||||
import { InlineError } from '~/common/components/InlineError';
|
||||
import { apiQuery } from '~/common/util/trpc.client';
|
||||
import { navigateToIndex, useRouterQuery } from '~/common/app.routes';
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
|
||||
// external state
|
||||
const { data, isError, error, isLoading } = apiQuery.backend.exchangeOpenRouterKey.useQuery({ code: props.openRouterCode || '' }, {
|
||||
const { data, isError, error, isPending } = apiQuery.backend.exchangeOpenRouterKey.useQuery({ code: props.openRouterCode || '' }, {
|
||||
enabled: !!props.openRouterCode,
|
||||
refetchOnWindowFocus: false,
|
||||
staleTime: Infinity,
|
||||
});
|
||||
|
||||
@@ -31,7 +30,7 @@ function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
return;
|
||||
|
||||
// 1. Save the key as the client key
|
||||
useModelsStore.getState().setOpenRoutersKey(openRouterKey);
|
||||
llmsStoreActions().setOpenRouterKey(openRouterKey);
|
||||
|
||||
// 2. Navigate to the chat app
|
||||
void navigateToIndex(true); //.then(openModelsSetup);
|
||||
@@ -56,7 +55,7 @@ function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
Welcome Back
|
||||
</Typography>
|
||||
|
||||
{isLoading && <Typography level='body-sm'>Loading...</Typography>}
|
||||
{isPending && <Typography level='body-sm'>Loading...</Typography>}
|
||||
|
||||
{isErrorInput && <InlineError error='There was an issue retrieving the code from OpenRouter.' />}
|
||||
|
||||
@@ -81,10 +80,11 @@ function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
* Docs: https://openrouter.ai/docs#oauth
|
||||
* Example URL: https://localhost:3000/link/callback_openrouter?code=SomeCode
|
||||
*/
|
||||
export default function CallbackPage() {
|
||||
export default withNextJSPerPageLayout({ type: 'container' }, () => {
|
||||
|
||||
// external state - get the 'code=...' from the URL
|
||||
const { code } = useRouterQuery<{ code: string | undefined }>();
|
||||
|
||||
return withLayout({ type: 'plain' }, <CallbackOpenRouterPage openRouterCode={code} />);
|
||||
}
|
||||
return <CallbackOpenRouterPage openRouterCode={code} />;
|
||||
|
||||
});
|
||||
|
||||
@@ -3,13 +3,14 @@ import * as React from 'react';
|
||||
import { AppLinkChat } from '../../../src/apps/link-chat/AppLinkChat';
|
||||
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function ChatLinkPage() {
|
||||
export default withNextJSPerPageLayout({ type: 'optima', suspendAutoModelsSetup: true }, () => {
|
||||
|
||||
// external state
|
||||
const { chatLinkId } = useRouterQuery<{ chatLinkId: string | undefined }>();
|
||||
|
||||
return withLayout({ type: 'optima', suspendAutoModelsSetup: true }, <AppLinkChat chatLinkId={chatLinkId || null} />);
|
||||
}
|
||||
return <AppLinkChat chatLinkId={chatLinkId || null} />;
|
||||
|
||||
});
|
||||
@@ -3,14 +3,14 @@ import * as React from 'react';
|
||||
import { Alert, Box, Button, Typography } from '@mui/joy';
|
||||
import ArrowBackIcon from '@mui/icons-material/ArrowBack';
|
||||
|
||||
import { setComposerStartupText } from '../../src/apps/chat/components/composer/store-composer';
|
||||
import { setComposerStartupText } from '~/common/logic/store-logic-sherpa';
|
||||
|
||||
import { callBrowseFetchPage } from '~/modules/browse/browse.client';
|
||||
import { callBrowseFetchPageOrThrow } from '~/modules/browse/browse.client';
|
||||
|
||||
import { LogoProgress } from '~/common/components/LogoProgress';
|
||||
import { asValidURL } from '~/common/util/urlUtils';
|
||||
import { navigateToIndex, useRouterQuery } from '~/common/app.routes';
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
/**
|
||||
@@ -75,11 +75,18 @@ function AppShareTarget() {
|
||||
React.useEffect(() => {
|
||||
if (intentURL) {
|
||||
setIsDownloading(true);
|
||||
callBrowseFetchPage(intentURL)
|
||||
callBrowseFetchPageOrThrow(intentURL)
|
||||
.then(page => {
|
||||
if (page.stopReason !== 'error')
|
||||
queueComposerTextAndLaunchApp('\n\n```' + intentURL + '\n' + page.content + '\n```\n');
|
||||
else
|
||||
if (page.stopReason !== 'error') {
|
||||
if (!page.content) {
|
||||
setErrorMessage(page.file ? 'No web page found, and we do not support files at the moment.' : 'No content found');
|
||||
return;
|
||||
}
|
||||
let pageContent = page.content.markdown || page.content.text || page.content.html || '';
|
||||
if (pageContent)
|
||||
pageContent = '\n\n```' + intentURL + '\n' + pageContent + '\n```\n';
|
||||
queueComposerTextAndLaunchApp(pageContent);
|
||||
} else
|
||||
setErrorMessage('Could not read any data' + page.error ? ': ' + page.error : '');
|
||||
})
|
||||
.catch(error => setErrorMessage(error?.message || error || 'Unknown error'))
|
||||
@@ -132,6 +139,4 @@ function AppShareTarget() {
|
||||
* This page will be invoked on mobile when sharing Text/URLs/Files from other APPs
|
||||
* Example URL: https://localhost:3000/link/share_target?title=This+Title&text=https%3A%2F%2Fexample.com%2Fapp%2Fpath
|
||||
*/
|
||||
export default function ShareTargetPage() {
|
||||
return withLayout({ type: 'plain' }, <AppShareTarget />);
|
||||
}
|
||||
export default withNextJSPerPageLayout({ type: 'container' }, () => <AppShareTarget />);
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppNews } from '../src/apps/news/AppNews';
|
||||
import { markNewsAsSeen } from '../src/apps/news/news.version';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { markNewsAsSeen } from '~/common/logic/store-logic-sherpa';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function NewsPage() {
|
||||
export default withNextJSPerPageLayout({ type: 'optima', suspendAutoModelsSetup: true }, () => {
|
||||
|
||||
// 'touch' the last seen news version
|
||||
React.useEffect(() => markNewsAsSeen(), []);
|
||||
|
||||
return withLayout({ type: 'optima', suspendAutoModelsSetup: true }, <AppNews />);
|
||||
}
|
||||
return <AppNews />;
|
||||
});
|
||||
@@ -2,9 +2,7 @@ import * as React from 'react';
|
||||
|
||||
import { AppPersonas } from '../src/apps/personas/AppPersonas';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function PersonasPage() {
|
||||
return withLayout({ type: 'optima' }, <AppPersonas />);
|
||||
}
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppPersonas />);
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppTokens } from '../src/apps/tokens/AppTokens';
|
||||
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppTokens />);
|
||||
@@ -1,12 +1,8 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box } from '@mui/joy';
|
||||
import { AppPlaceholder } from '../src/apps/AppPlaceholder';
|
||||
|
||||
// import { AppWorkspace } from '../src/apps/personas/AppWorkspace';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
import { withNextJSPerPageLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function PersonasPage() {
|
||||
return withLayout({ type: 'optima' }, <Box />);
|
||||
}
|
||||
export default withNextJSPerPageLayout({ type: 'optima' }, () => <AppPlaceholder />);
|
||||
|
||||
|
After Width: | Height: | Size: 19 KiB |
|
After Width: | Height: | Size: 1.5 KiB |
|
After Width: | Height: | Size: 270 KiB |
|
After Width: | Height: | Size: 348 KiB |
|
After Width: | Height: | Size: 248 KiB |
|
After Width: | Height: | Size: 180 KiB |
|
After Width: | Height: | Size: 191 KiB |
@@ -3,25 +3,45 @@
|
||||
"short_name": "big-AGI",
|
||||
"theme_color": "#32383E",
|
||||
"background_color": "#9FA6AD",
|
||||
"description": "Personal AGI App",
|
||||
"description": "Your Generative AI Suite",
|
||||
"categories": [
|
||||
"productivity",
|
||||
"AI",
|
||||
"tool",
|
||||
"utilities"
|
||||
],
|
||||
"display": "standalone",
|
||||
"start_url": "/",
|
||||
"start_url": "/?source=pwa",
|
||||
"scope": "/",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icons/icon-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"src": "/icons/icon-1024x1024.png",
|
||||
"sizes": "1024x1024",
|
||||
"type": "image/png",
|
||||
"purpose": "maskable"
|
||||
"purpose": "any maskable"
|
||||
},
|
||||
{
|
||||
"src": "/icons/icon-512x512.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
"type": "image/png",
|
||||
"purpose": "any"
|
||||
},
|
||||
{
|
||||
"src": "/icons/icon-1024x1024.png",
|
||||
"sizes": "1024x1024",
|
||||
"type": "image/png"
|
||||
"src": "/icons/icon-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image/png",
|
||||
"purpose": "any"
|
||||
}
|
||||
],
|
||||
"file_handlers": [
|
||||
{
|
||||
"action": "/link/share_target",
|
||||
"accept": {
|
||||
"application/big-agi": [
|
||||
".agi",
|
||||
".agi.json"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"share_target": {
|
||||
@@ -33,5 +53,31 @@
|
||||
"text": "text",
|
||||
"url": "url"
|
||||
}
|
||||
}
|
||||
},
|
||||
"shortcuts": [
|
||||
{
|
||||
"name": "Call",
|
||||
"url": "/call",
|
||||
"description": "Call a Persona",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icons/icon-call-96x96.png",
|
||||
"sizes": "96x96",
|
||||
"type": "image/png"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "New Voice Chat",
|
||||
"url": "/?newChat=voiceInput",
|
||||
"description": "Start a new chat with voice input",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icons/icon-voicechat-96x96.png",
|
||||
"sizes": "96x96",
|
||||
"type": "image/png"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import { useRouterRoute } from '~/common/app.routes';
|
||||
* https://github.com/enricoros/big-AGI/issues/299
|
||||
*/
|
||||
export function AppPlaceholder(props: {
|
||||
title?: string,
|
||||
title?: string | null,
|
||||
text?: React.ReactNode,
|
||||
children?: React.ReactNode,
|
||||
}) {
|
||||
@@ -29,23 +29,25 @@ export function AppPlaceholder(props: {
|
||||
border: '1px solid blue',
|
||||
}}>
|
||||
|
||||
<Box sx={{
|
||||
my: 'auto',
|
||||
display: 'flex', flexDirection: 'column', alignItems: 'center',
|
||||
gap: 4,
|
||||
border: '1px solid red',
|
||||
}}>
|
||||
{(props.title !== null || !!props.text) && (
|
||||
<Box sx={{
|
||||
my: 'auto',
|
||||
display: 'flex', flexDirection: 'column', alignItems: 'center',
|
||||
gap: 4,
|
||||
border: '1px solid red',
|
||||
}}>
|
||||
|
||||
<Typography level='h1'>
|
||||
{placeholderAppName}
|
||||
</Typography>
|
||||
{!!props.text && (
|
||||
<Typography>
|
||||
{props.text}
|
||||
<Typography level='h1'>
|
||||
{placeholderAppName}
|
||||
</Typography>
|
||||
)}
|
||||
{!!props.text && (
|
||||
<Typography>
|
||||
{props.text}
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{props.children}
|
||||
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Container, Typography } from '@mui/joy';
|
||||
|
||||
|
||||
export function AppSmallContainer({ title, description, children }: {
|
||||
title: string;
|
||||
description: React.ReactNode;
|
||||
children: React.ReactNode;
|
||||
}) {
|
||||
return (
|
||||
<Box sx={{ flexGrow: 1, overflowY: 'auto', p: { xs: 3, md: 6 } }}>
|
||||
|
||||
<Container disableGutters maxWidth='md' sx={{ display: 'flex', flexDirection: 'column', gap: 3 }}>
|
||||
|
||||
<Box sx={{ mb: 2 }}>
|
||||
<Typography level='h1' sx={{ mb: 1 }}>{title}</Typography>
|
||||
<Typography>{description}</Typography>
|
||||
</Box>
|
||||
|
||||
{children}
|
||||
|
||||
</Container>
|
||||
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,108 @@
|
||||
import * as React from 'react';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import { Box, Button, Typography } from '@mui/joy';
|
||||
|
||||
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
|
||||
import { BeamView } from '~/modules/beam/BeamView';
|
||||
import { createBeamVanillaStore } from '~/modules/beam/store-beam_vanilla';
|
||||
|
||||
import { OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { createDConversation, DConversation } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageTextContent, DMessage } from '~/common/stores/chat/chat.message';
|
||||
import { getChatLLMId } from '~/common/stores/llms/store-llms';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
|
||||
|
||||
function initTestConversation(): DConversation {
|
||||
const conversation = createDConversation();
|
||||
conversation.messages.push(createDMessageTextContent('system', 'You are a helpful assistant.')); // Beam Test - seed1
|
||||
conversation.messages.push(createDMessageTextContent('user', 'Hello, who are you? (please expand...)')); // Beam Test - seed2
|
||||
return conversation;
|
||||
}
|
||||
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi = createBeamVanillaStore()): BeamStoreApi {
|
||||
beamStore.getState().open(messages, getChatLLMId(), false, (content) => alert(content));
|
||||
return beamStore;
|
||||
}
|
||||
|
||||
|
||||
export function AppBeam() {
|
||||
|
||||
// state
|
||||
const [showDebug, setShowDebug] = React.useState(false);
|
||||
|
||||
const [conversation, setConversation] = React.useState<DConversation>(() => initTestConversation());
|
||||
const [beamStoreApi] = React.useState(() => createBeamVanillaStore());
|
||||
|
||||
|
||||
// reinit the beam store if the conversation changes
|
||||
React.useEffect(() => {
|
||||
initTestBeamStore(conversation.messages, beamStoreApi);
|
||||
}, [beamStoreApi, conversation]);
|
||||
|
||||
|
||||
// external state
|
||||
const isMobile = useIsMobile();
|
||||
const { isOpen, beamState } = useBeamStore(beamStoreApi, useShallow(state => {
|
||||
return {
|
||||
isOpen: state.isOpen,
|
||||
beamState: showDebug ? state : null,
|
||||
};
|
||||
}));
|
||||
|
||||
|
||||
const handleClose = React.useCallback(() => {
|
||||
beamStoreApi.getState().terminateKeepingSettings();
|
||||
}, [beamStoreApi]);
|
||||
|
||||
|
||||
const toolbarItems = React.useMemo(() => <>
|
||||
{/* button to toggle debug info */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={() => setShowDebug(on => !on)}>
|
||||
{showDebug ? 'Hide' : 'Show'} debug
|
||||
</Button>
|
||||
|
||||
{/* 'open' */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={() => setConversation(initTestConversation())}>
|
||||
.open
|
||||
</Button>
|
||||
|
||||
{/* 'close' */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={handleClose}>
|
||||
.close
|
||||
</Button>
|
||||
</>, [handleClose, showDebug]);
|
||||
|
||||
|
||||
return <>
|
||||
<OptimaToolbarIn>{toolbarItems}</OptimaToolbarIn>
|
||||
|
||||
<Box sx={{ flexGrow: 1, overflowY: 'auto', position: 'relative' }}>
|
||||
|
||||
{isOpen && (
|
||||
<BeamView
|
||||
beamStore={beamStoreApi}
|
||||
isMobile={isMobile}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showDebug && (
|
||||
<Typography level='body-xs' sx={{
|
||||
whiteSpace: 'pre',
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
zIndex: 1 /* debug on top of BeamView */,
|
||||
backdropFilter: 'blur(4px)',
|
||||
padding: '1rem',
|
||||
}}>
|
||||
{JSON.stringify(beamState, null, 2)
|
||||
// add an extra newline between first level properties (space, space, double quote) to make it more readable
|
||||
.split('\n').map(line => line.replace(/^\s\s"/g, '\n ')).join('\n')}
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
</Box>
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -2,7 +2,7 @@ import * as React from 'react';
|
||||
|
||||
import { Container, Sheet } from '@mui/joy';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import type { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
|
||||
import { CallWizard } from './CallWizard';
|
||||
|
||||
@@ -1,60 +1,22 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, Card, CardContent, IconButton, ListItemDecorator, Typography } from '@mui/joy';
|
||||
import ArrowForwardIcon from '@mui/icons-material/ArrowForward';
|
||||
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
|
||||
import ChatIcon from '@mui/icons-material/Chat';
|
||||
import CheckIcon from '@mui/icons-material/Check';
|
||||
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
|
||||
|
||||
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { cssRainbowColorKeyframes } from '~/common/app.theme';
|
||||
import { animationColorRainbow } from '~/common/util/animUtils';
|
||||
import { navigateBack } from '~/common/app.routes';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useChatStore } from '~/common/state/store-chats';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useUICounter } from '~/common/state/store-ui';
|
||||
|
||||
|
||||
/*export const cssRainbowBackgroundKeyframes = keyframes`
|
||||
100%, 0% {
|
||||
background-color: rgb(128, 0, 0);
|
||||
}
|
||||
8% {
|
||||
background-color: rgb(102, 51, 0);
|
||||
}
|
||||
16% {
|
||||
background-color: rgb(64, 64, 0);
|
||||
}
|
||||
25% {
|
||||
background-color: rgb(38, 76, 0);
|
||||
}
|
||||
33% {
|
||||
background-color: rgb(0, 89, 0);
|
||||
}
|
||||
41% {
|
||||
background-color: rgb(0, 76, 41);
|
||||
}
|
||||
50% {
|
||||
background-color: rgb(0, 64, 64);
|
||||
}
|
||||
58% {
|
||||
background-color: rgb(0, 51, 102);
|
||||
}
|
||||
66% {
|
||||
background-color: rgb(0, 0, 128);
|
||||
}
|
||||
75% {
|
||||
background-color: rgb(63, 0, 128);
|
||||
}
|
||||
83% {
|
||||
background-color: rgb(76, 0, 76);
|
||||
}
|
||||
91% {
|
||||
background-color: rgb(102, 0, 51);
|
||||
}`;*/
|
||||
|
||||
function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: string, button?: React.JSX.Element }) {
|
||||
return (
|
||||
<Card sx={{ width: '100%' }}>
|
||||
@@ -67,7 +29,7 @@ function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: s
|
||||
{props.button}
|
||||
</Typography>
|
||||
<ListItemDecorator>
|
||||
{props.hasIssue ? <WarningRoundedIcon color='warning' /> : <CheckIcon color='success' />}
|
||||
{props.hasIssue ? <WarningRoundedIcon color='warning' /> : <CheckRoundedIcon color='success' />}
|
||||
</ListItemDecorator>
|
||||
</CardContent>
|
||||
</Card>
|
||||
@@ -82,7 +44,6 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
const [recognitionOverride, setRecognitionOverride] = React.useState(false);
|
||||
|
||||
// external state
|
||||
const { openPreferencesTab } = useOptimaLayout();
|
||||
const recognition = useCapabilityBrowserSpeechRecognition();
|
||||
const synthesis = useCapabilityElevenLabs();
|
||||
const chatIsEmpty = useChatStore(state => {
|
||||
@@ -100,22 +61,22 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
const allGood = overriddenEmptyChat && overriddenRecognition && synthesis.mayWork;
|
||||
const fatalGood = overriddenRecognition && synthesis.mayWork;
|
||||
|
||||
if (!novel && fatalGood)
|
||||
return props.children;
|
||||
|
||||
const handleOverrideChatEmpty = () => setChatEmptyOverride(true);
|
||||
const handleOverrideChatEmpty = React.useCallback(() => setChatEmptyOverride(true), []);
|
||||
|
||||
const handleOverrideRecognition = () => setRecognitionOverride(true);
|
||||
const handleOverrideRecognition = React.useCallback(() => setRecognitionOverride(true), []);
|
||||
|
||||
const handleConfigureElevenLabs = () => {
|
||||
openPreferencesTab(PreferencesTab.Voice);
|
||||
};
|
||||
const handleConfigureElevenLabs = React.useCallback(() => optimaOpenPreferences('voice'), []);
|
||||
|
||||
const handleFinishButton = () => {
|
||||
const handleFinishButton = React.useCallback(() => {
|
||||
if (!allGood)
|
||||
return navigateBack();
|
||||
touch();
|
||||
};
|
||||
}, [allGood, touch]);
|
||||
|
||||
|
||||
if (!novel && fatalGood)
|
||||
return props.children;
|
||||
|
||||
|
||||
return <>
|
||||
@@ -124,7 +85,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
<Typography level='title-lg' sx={{ fontSize: '3rem', fontWeight: 'sm', textAlign: 'center' }}>
|
||||
Welcome to<br />
|
||||
<Box component='span' sx={{ animation: `${cssRainbowColorKeyframes} 15s linear infinite` }}>
|
||||
<Box component='span' sx={{ animation: `${animationColorRainbow} 15s linear infinite` }}>
|
||||
your first call
|
||||
</Box>
|
||||
</Typography>
|
||||
@@ -167,7 +128,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
{/* Text to Speech status */}
|
||||
<StatusCard
|
||||
icon={<RecordVoiceOverIcon />}
|
||||
icon={<RecordVoiceOverTwoToneIcon />}
|
||||
text={
|
||||
(synthesis.mayWork ? 'Voice synthesis should be ready.' : 'There might be an issue with ElevenLabs voice synthesis.')
|
||||
+ (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
|
||||
@@ -208,7 +169,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
// boxShadow: allGood ? 'md' : 'none',
|
||||
}}
|
||||
>
|
||||
{allGood ? <ArrowForwardIcon sx={{ fontSize: '1.5em' }} /> : <CloseRoundedIcon sx={{ fontSize: '1.5em' }} />}
|
||||
{allGood ? <ArrowForwardRoundedIcon sx={{ fontSize: '1.5em' }} /> : <CloseRoundedIcon sx={{ fontSize: '1.5em' }} />}
|
||||
</IconButton>
|
||||
</Box>
|
||||
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { keyframes } from '@emotion/react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Avatar, Box, Card, CardContent, Chip, IconButton, Link as MuiLink, ListDivider, MenuItem, Sheet, Switch, Typography } from '@mui/joy';
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
|
||||
import { GitHubProjectIssueCard } from '~/common/components/GitHubProjectIssueCard';
|
||||
import { conversationTitle, DConversation, DConversationId, useChatStore } from '~/common/state/store-chats';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { OptimaPanelGroup } from '~/common/layout/optima/panel/OptimaPanelGroup';
|
||||
import { animationShadowRingLimey } from '~/common/util/animUtils';
|
||||
import { conversationTitle, DConversation, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useSetOptimaAppMenu } from '~/common/layout/optima/useOptima';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
import { MockPersona, useMockPersonas } from './state/useMockPersonas';
|
||||
@@ -19,27 +20,6 @@ import { useAppCallStore } from './state/store-app-call';
|
||||
const COLLAPSED_COUNT = 2;
|
||||
|
||||
|
||||
export const niceShadowKeyframes = keyframes`
|
||||
100%, 0% {
|
||||
//background-color: rgb(102, 0, 51);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(183, 255, 0);
|
||||
}
|
||||
25% {
|
||||
//background-color: rgb(76, 0, 76);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(255, 251, 0);
|
||||
//scale: 1.2;
|
||||
}
|
||||
50% {
|
||||
//background-color: rgb(63, 0, 128);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgba(0, 255, 81);
|
||||
//scale: 0.8;
|
||||
}
|
||||
75% {
|
||||
//background-color: rgb(0, 0, 128);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(255, 153, 0);
|
||||
}`;
|
||||
|
||||
|
||||
const ContactCardAvatar = (props: { size: string, symbol?: string, imageUrl?: string, onClick?: () => void, sx?: SxProps }) =>
|
||||
<Avatar
|
||||
// variant='outlined'
|
||||
@@ -81,7 +61,7 @@ const ContactCardConversationCall = (props: { conversation: DConversation, onCon
|
||||
function CallContactCard(props: {
|
||||
persona: MockPersona,
|
||||
callGrayUI: boolean,
|
||||
conversations: DConversation[],
|
||||
conversations: Readonly<DConversation[]>,
|
||||
setCallIntent: (intent: AppCallIntent) => void,
|
||||
}) {
|
||||
|
||||
@@ -125,7 +105,6 @@ function CallContactCard(props: {
|
||||
sx={{
|
||||
mx: 'auto',
|
||||
mt: '-2.5rem',
|
||||
zIndex: 1,
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -211,7 +190,7 @@ function CallContactCard(props: {
|
||||
|
||||
|
||||
function useConversationsByPersona() {
|
||||
const conversations = useChatStore(state => state.conversations, shallow);
|
||||
const conversations = useChatStore(state => state.conversations);
|
||||
|
||||
return React.useMemo(() => {
|
||||
// group by personaId
|
||||
@@ -245,7 +224,12 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
|
||||
// pluggable UI
|
||||
|
||||
const menuItems = React.useMemo(() => <>
|
||||
const menuItems = React.useMemo(() => <OptimaPanelGroup title='Contacts Settings'>
|
||||
|
||||
<MenuItem onClick={toggleGrayUI}>
|
||||
Grayed UI
|
||||
<Switch checked={grayUI} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowConversations}>
|
||||
Conversations
|
||||
@@ -253,18 +237,13 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleShowSupport}>
|
||||
Support
|
||||
Show Support
|
||||
<Switch checked={showSupport} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={toggleGrayUI}>
|
||||
Grayed UI
|
||||
<Switch checked={grayUI} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
</OptimaPanelGroup>, [grayUI, showConversations, showSupport, toggleGrayUI, toggleShowConversations, toggleShowSupport]);
|
||||
|
||||
</>, [grayUI, showConversations, showSupport, toggleGrayUI, toggleShowConversations, toggleShowSupport]);
|
||||
|
||||
usePluggableOptimaLayout(null, null, menuItems, 'CallUI');
|
||||
useSetOptimaAppMenu(menuItems, 'CallUI-Contacts');
|
||||
|
||||
|
||||
return <>
|
||||
@@ -282,7 +261,7 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
borderRadius: '50%',
|
||||
pointerEvents: 'none',
|
||||
backgroundColor: 'background.popup',
|
||||
animation: `${niceShadowKeyframes} 5s infinite`,
|
||||
animation: `${animationShadowRingLimey} 5s infinite`,
|
||||
}}>
|
||||
<CallIcon />
|
||||
</IconButton>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import { Box, Card, ListDivider, ListItemDecorator, MenuItem, Switch, Typography } from '@mui/joy';
|
||||
import ArrowBackIcon from '@mui/icons-material/ArrowBack';
|
||||
@@ -7,23 +7,31 @@ import CallEndIcon from '@mui/icons-material/CallEnd';
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import MicNoneIcon from '@mui/icons-material/MicNone';
|
||||
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
|
||||
import { ScrollToBottom } from '../chat/components/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '../chat/components/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { useChatLLMDropdown } from '../chat/components/useLLMDropdown';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { useChatLLMDropdown } from '../chat/components/layout-bar/useLLMDropdown';
|
||||
|
||||
import { EXPERIMENTAL_speakTextStream } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { SystemPurposeId, SystemPurposes } from '../../data';
|
||||
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
|
||||
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
|
||||
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
|
||||
|
||||
import type { OptimaBarControlMethods } from '~/common/layout/optima/bar/OptimaBarDropdown';
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { Link } from '~/common/components/Link';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition';
|
||||
import { conversationTitle, createDMessage, DMessage, useChatStore } from '~/common/state/store-chats';
|
||||
import { OptimaPanelGroup } from '~/common/layout/optima/panel/OptimaPanelGroup';
|
||||
import { OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/speechrecognition/useSpeechRecognition';
|
||||
import { conversationTitle, remapMessagesSysToUsr } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, messageFragmentsReduceText } from '~/common/stores/chat/chat.message';
|
||||
import { createErrorContentFragment } from '~/common/stores/chat/chat.fragments';
|
||||
import { launchAppChat, navigateToIndex } from '~/common/app.routes';
|
||||
import { playSoundUrl, usePlaySoundUrl } from '~/common/util/audioUtils';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { usePlayUrl } from '~/common/util/audio/usePlayUrl';
|
||||
import { useSetOptimaAppMenu } from '~/common/layout/optima/useOptima';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
import { CallAvatar } from './components/CallAvatar';
|
||||
@@ -48,7 +56,7 @@ function CallMenuItems(props: {
|
||||
|
||||
const handleChangeVoiceToggle = () => props.setOverride(!props.override);
|
||||
|
||||
return <>
|
||||
return <OptimaPanelGroup title='Call'>
|
||||
|
||||
<MenuItem onClick={handlePushToTalkToggle}>
|
||||
<ListItemDecorator>{props.pushToTalk ? <MicNoneIcon /> : <MicIcon />}</ListItemDecorator>
|
||||
@@ -57,7 +65,7 @@ function CallMenuItems(props: {
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={handleChangeVoiceToggle}>
|
||||
<ListItemDecorator><RecordVoiceOverIcon /></ListItemDecorator>
|
||||
<ListItemDecorator><RecordVoiceOverTwoToneIcon /></ListItemDecorator>
|
||||
Change Voice
|
||||
<Switch checked={props.override} onChange={handleChangeVoiceToggle} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
@@ -78,7 +86,7 @@ function CallMenuItems(props: {
|
||||
Voice Calls Feedback
|
||||
</MenuItem>
|
||||
|
||||
</>;
|
||||
</OptimaPanelGroup>;
|
||||
}
|
||||
|
||||
|
||||
@@ -95,11 +103,12 @@ export function Telephone(props: {
|
||||
const [personaTextInterim, setPersonaTextInterim] = React.useState<string | null>(null);
|
||||
const [pushToTalk, setPushToTalk] = React.useState(true);
|
||||
const [stage, setStage] = React.useState<'ring' | 'declined' | 'connected' | 'ended'>('ring');
|
||||
const llmDropdownRef = React.useRef<OptimaBarControlMethods>(null);
|
||||
const responseAbortController = React.useRef<AbortController | null>(null);
|
||||
|
||||
// external state
|
||||
const { chatLLMId, chatLLMDropdown } = useChatLLMDropdown();
|
||||
const { chatTitle, reMessages } = useChatStore(state => {
|
||||
const { chatLLMId, chatLLMDropdown } = useChatLLMDropdown(llmDropdownRef);
|
||||
const { chatTitle, reMessages } = useChatStore(useShallow(state => {
|
||||
const conversation = props.callIntent.conversationId
|
||||
? state.conversations.find(conversation => conversation.id === props.callIntent.conversationId) ?? null
|
||||
: null;
|
||||
@@ -107,7 +116,7 @@ export function Telephone(props: {
|
||||
chatTitle: conversation ? conversationTitle(conversation) : null,
|
||||
reMessages: conversation ? conversation.messages : null,
|
||||
};
|
||||
}, shallow);
|
||||
}));
|
||||
const persona = SystemPurposes[props.callIntent.personaId as SystemPurposeId] ?? undefined;
|
||||
const personaCallStarters = persona?.call?.starters ?? undefined;
|
||||
const personaVoiceId = overridePersonaVoice ? undefined : (persona?.voices?.elevenLabs?.voiceId ?? undefined);
|
||||
@@ -118,12 +127,12 @@ export function Telephone(props: {
|
||||
const onSpeechResultCallback = React.useCallback((result: SpeechResult) => {
|
||||
setSpeechInterim(result.done ? null : { ...result });
|
||||
if (result.done) {
|
||||
const transcribed = result.transcript.trim();
|
||||
if (transcribed.length >= 1)
|
||||
setCallMessages(messages => [...messages, createDMessage('user', transcribed)]);
|
||||
const userSpeechTranscribed = result.transcript.trim();
|
||||
if (userSpeechTranscribed.length >= 1)
|
||||
setCallMessages(messages => [...messages, createDMessageTextContent('user', userSpeechTranscribed)]); // [state] append user:speech
|
||||
}
|
||||
}, []);
|
||||
const { isSpeechEnabled, isRecording, isRecordingAudio, isRecordingSpeech, startRecording, stopRecording, toggleRecording } = useSpeechRecognition(onSpeechResultCallback, 1000);
|
||||
const { recognitionState, startRecognition, stopRecognition, toggleRecognition } = useSpeechRecognition('webSpeechApi', onSpeechResultCallback, 1000);
|
||||
|
||||
// derived state
|
||||
const isRinging = stage === 'ring';
|
||||
@@ -136,17 +145,23 @@ export function Telephone(props: {
|
||||
|
||||
// pickup / hangup
|
||||
React.useEffect(() => {
|
||||
!isRinging && playSoundUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
!isRinging && AudioPlayer.playUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
}, [isRinging, isConnected]);
|
||||
|
||||
// ringtone
|
||||
usePlaySoundUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
usePlayUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
|
||||
|
||||
/// Shortcuts
|
||||
|
||||
useGlobalShortcuts('Telephone', React.useMemo(() => [
|
||||
{ key: 'm', ctrl: true, action: toggleRecognition },
|
||||
], [toggleRecognition]));
|
||||
|
||||
/// CONNECTED
|
||||
|
||||
const handleCallStop = () => {
|
||||
stopRecording();
|
||||
stopRecognition(false);
|
||||
setStage('ended');
|
||||
};
|
||||
|
||||
@@ -169,9 +184,10 @@ export function Telephone(props: {
|
||||
const phoneMessages = personaCallStarters || ['Hello?', 'Hey!'];
|
||||
const firstMessage = phoneMessages[Math.floor(Math.random() * phoneMessages.length)];
|
||||
|
||||
setCallMessages([createDMessage('assistant', firstMessage)]);
|
||||
setCallMessages([createDMessageTextContent('assistant', firstMessage)]); // [state] set assistant:hello message
|
||||
|
||||
// fire/forget
|
||||
void EXPERIMENTAL_speakTextStream(firstMessage, personaVoiceId);
|
||||
void elevenLabsSpeakText(firstMessage, personaVoiceId, true, true);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [isConnected, personaCallStarters, personaVoiceId]);
|
||||
@@ -179,22 +195,30 @@ export function Telephone(props: {
|
||||
// [E] persona streaming response - upon new user message
|
||||
React.useEffect(() => {
|
||||
// only act when we have a new user message
|
||||
if (!isConnected || callMessages.length < 1 || callMessages[callMessages.length - 1].role !== 'user')
|
||||
if (!isConnected || callMessages.length < 1)
|
||||
return;
|
||||
switch (callMessages[callMessages.length - 1].text) {
|
||||
|
||||
// Voice commands
|
||||
const lastUserMessage = callMessages[callMessages.length - 1];
|
||||
if (lastUserMessage.role !== 'user')
|
||||
return;
|
||||
switch (messageFragmentsReduceText(lastUserMessage.fragments)) {
|
||||
// do not respond
|
||||
case 'Stop.':
|
||||
return;
|
||||
|
||||
// command: close the call
|
||||
case 'Goodbye.':
|
||||
setStage('ended');
|
||||
setTimeout(launchAppChat, 2000);
|
||||
return;
|
||||
|
||||
// command: regenerate answer
|
||||
case 'Retry.':
|
||||
case 'Try again.':
|
||||
setCallMessages(messages => messages.slice(0, messages.length - 2));
|
||||
return;
|
||||
|
||||
// command: restart chat
|
||||
case 'Restart.':
|
||||
setCallMessages([]);
|
||||
@@ -204,43 +228,57 @@ export function Telephone(props: {
|
||||
// bail if no llm selected
|
||||
if (!chatLLMId) return;
|
||||
|
||||
// temp fix: when the chat has no messages, only assume a single system message
|
||||
const chatMessages: { role: VChatMessageIn['role'], text: string }[] = (reMessages && reMessages.length > 0)
|
||||
? reMessages
|
||||
: personaSystemMessage
|
||||
? [{ role: 'system', text: personaSystemMessage }]
|
||||
: [];
|
||||
|
||||
// 'prompt' for a "telephone call"
|
||||
// FIXME: can easily run ouf of tokens - if this gets traction, we'll fix it
|
||||
const callPrompt: VChatMessageIn[] = [
|
||||
{ role: 'system', content: 'You are having a phone call. Your response style is brief and to the point, and according to your personality, defined below.' },
|
||||
...chatMessages.map(message => ({ role: message.role, content: message.text })),
|
||||
{ role: 'system', content: 'You are now on the phone call related to the chat above. Respect your personality and answer with short, friendly and accurate thoughtful lines.' },
|
||||
...callMessages.map(message => ({ role: message.role, content: message.text })),
|
||||
// Call Message Generation Prompt
|
||||
const callSystemInstruction = createDMessageTextContent('system', 'You are having a phone call. Your response style is brief and to the point, and according to your personality, defined below.');
|
||||
const reMessagesRemapSysToUsr = remapMessagesSysToUsr(reMessages);
|
||||
const callGenerationInputHistory: DMessage[] = [
|
||||
// Chat messages, including the system prompt which is casted to a user message
|
||||
// TODO: when upgrading to dynamic personas, we need to inject the persona message instead - not rely on reMessages, as messages[0] !== 'system'
|
||||
...(reMessagesRemapSysToUsr ? reMessagesRemapSysToUsr : [createDMessageTextContent('user', personaSystemMessage)]),
|
||||
// Call system prompt 2, to indicate the call has started
|
||||
createDMessageTextContent('user', '**You are now on the phone call related to the chat above**.\nRespect your personality and answer with short, friendly and accurate thoughtful brief lines.'),
|
||||
// Call history
|
||||
...callMessages,
|
||||
];
|
||||
|
||||
|
||||
// perform completion
|
||||
responseAbortController.current = new AbortController();
|
||||
let finalText = '';
|
||||
let error: any | null = null;
|
||||
setPersonaTextInterim('💭...');
|
||||
llmStreamingChatGenerate(chatLLMId, callPrompt, null, null, responseAbortController.current.signal, ({ textSoFar }) => {
|
||||
const text = textSoFar?.trim();
|
||||
if (text) {
|
||||
finalText = text;
|
||||
setPersonaTextInterim(text);
|
||||
}
|
||||
|
||||
aixChatGenerateContent_DMessage_FromConversation(
|
||||
chatLLMId,
|
||||
callSystemInstruction,
|
||||
callGenerationInputHistory,
|
||||
'call',
|
||||
callMessages[0].id,
|
||||
{ abortSignal: responseAbortController.current.signal },
|
||||
(update: AixChatGenerateContent_DMessage, _isDone: boolean) => {
|
||||
const updatedText = messageFragmentsReduceText(update.fragments).trim();
|
||||
if (updatedText)
|
||||
setPersonaTextInterim(finalText = updatedText);
|
||||
},
|
||||
).then((status) => {
|
||||
|
||||
// whether status.outcome === 'success' or not, we get a valid DMessage, eventually with Error Fragments inside
|
||||
const fullMessage = createDMessageFromFragments('assistant', status.lastDMessage.fragments);
|
||||
fullMessage.generator = status.lastDMessage.generator;
|
||||
setCallMessages(messages => [...messages, fullMessage]); // [state] append assistant:call_response
|
||||
|
||||
// fire/forget
|
||||
if (status.outcome === 'success' && finalText?.length >= 1)
|
||||
void elevenLabsSpeakText(finalText, personaVoiceId, true, true);
|
||||
|
||||
}).catch((err: DOMException) => {
|
||||
if (err?.name !== 'AbortError')
|
||||
error = err;
|
||||
if (err?.name !== 'AbortError') {
|
||||
// create an error message to explain the exception
|
||||
const errorMesage = createDMessageFromFragments('assistant', [createErrorContentFragment(err.message || err.toString())]);
|
||||
setCallMessages(messages => [...messages, errorMesage]); // [state] append assistant:call_response-ERROR
|
||||
}
|
||||
}).finally(() => {
|
||||
setPersonaTextInterim(null);
|
||||
if (finalText || error)
|
||||
setCallMessages(messages => [...messages, createDMessage('assistant', finalText + (error ? ` (ERROR: ${error.message || error.toString()})` : ''))]);
|
||||
// fire/forget
|
||||
if (finalText?.length >= 1)
|
||||
void EXPERIMENTAL_speakTextStream(finalText, personaVoiceId);
|
||||
});
|
||||
|
||||
return () => {
|
||||
@@ -250,7 +288,7 @@ export function Telephone(props: {
|
||||
}, [isConnected, callMessages, chatLLMId, personaVoiceId, personaSystemMessage, reMessages]);
|
||||
|
||||
// [E] Message interrupter
|
||||
const abortTrigger = isConnected && isRecordingSpeech;
|
||||
const abortTrigger = isConnected && recognitionState.hasSpeech;
|
||||
React.useEffect(() => {
|
||||
if (abortTrigger && responseAbortController.current) {
|
||||
responseAbortController.current.abort();
|
||||
@@ -261,16 +299,16 @@ export function Telephone(props: {
|
||||
|
||||
|
||||
// [E] continuous speech recognition (reload)
|
||||
const shouldStartRecording = isConnected && !pushToTalk && speechInterim === null && !isRecordingAudio;
|
||||
const shouldStartRecording = isConnected && !pushToTalk && speechInterim === null && !recognitionState.hasAudio;
|
||||
React.useEffect(() => {
|
||||
if (shouldStartRecording)
|
||||
startRecording();
|
||||
}, [shouldStartRecording, startRecording]);
|
||||
startRecognition();
|
||||
}, [shouldStartRecording, startRecognition]);
|
||||
|
||||
|
||||
// more derived state
|
||||
const personaName = persona?.title ?? 'Unknown';
|
||||
const isMicEnabled = isSpeechEnabled;
|
||||
const isMicEnabled = recognitionState.isAvailable;
|
||||
const isTTSEnabled = true;
|
||||
const isEnabled = isMicEnabled && isTTSEnabled;
|
||||
|
||||
@@ -284,10 +322,11 @@ export function Telephone(props: {
|
||||
, [overridePersonaVoice, pushToTalk],
|
||||
);
|
||||
|
||||
usePluggableOptimaLayout(null, chatLLMDropdown, menuItems, 'CallUI');
|
||||
useSetOptimaAppMenu(menuItems, 'CallUI-Call');
|
||||
|
||||
|
||||
return <>
|
||||
<OptimaToolbarIn>{chatLLMDropdown}</OptimaToolbarIn>
|
||||
|
||||
<Typography
|
||||
level='h1'
|
||||
@@ -331,28 +370,15 @@ export function Telephone(props: {
|
||||
padding: 0, // move this to the ScrollToBottom component
|
||||
}}>
|
||||
|
||||
<ScrollToBottom
|
||||
// bootToBottom
|
||||
stickToBottom
|
||||
sx={{
|
||||
// allows the content to be scrolled (all browsers)
|
||||
overflowY: 'auto',
|
||||
// actually make sure this scrolls & fills
|
||||
height: '100%',
|
||||
<ScrollToBottom stickToBottomInitial>
|
||||
|
||||
// content
|
||||
display: 'grid',
|
||||
padding: 1,
|
||||
}}
|
||||
>
|
||||
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
<Box sx={{ minHeight: '100%', p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
|
||||
{/* Call Messages [] */}
|
||||
{callMessages.map((message) =>
|
||||
<CallMessage
|
||||
key={message.id}
|
||||
text={message.text}
|
||||
text={messageFragmentsReduceText(message.fragments)}
|
||||
variant={message.role === 'assistant' ? 'solid' : 'soft'}
|
||||
color={message.role === 'assistant' ? 'neutral' : 'primary'}
|
||||
role={message.role}
|
||||
@@ -370,10 +396,10 @@ export function Telephone(props: {
|
||||
)}
|
||||
|
||||
{/* Listening... */}
|
||||
{isRecording && (
|
||||
{recognitionState.isActive && (
|
||||
<CallMessage
|
||||
text={<>{speechInterim?.transcript.trim() || null}{speechInterim?.interimTranscript.trim() ? <i> {speechInterim.interimTranscript}</i> : null}</>}
|
||||
variant={(isRecordingSpeech || !!speechInterim?.transcript) ? 'soft' : 'outlined'}
|
||||
variant={(recognitionState.hasSpeech || !!speechInterim?.transcript) ? 'soft' : 'outlined'}
|
||||
color='primary'
|
||||
role='user'
|
||||
/>
|
||||
@@ -399,11 +425,11 @@ export function Telephone(props: {
|
||||
{isConnected && <CallButton Icon={CallEndIcon} text='Hang up' color='danger' variant='soft' onClick={handleCallStop} />}
|
||||
{isConnected && (pushToTalk ? (
|
||||
<CallButton
|
||||
Icon={MicIcon} onClick={toggleRecording}
|
||||
text={isRecordingSpeech ? 'Listening...' : isRecording ? 'Listening' : 'Push To Talk'}
|
||||
variant={isRecordingSpeech ? 'solid' : isRecording ? 'soft' : 'outlined'}
|
||||
Icon={MicIcon} onClick={toggleRecognition}
|
||||
text={recognitionState.hasSpeech ? 'Listening...' : recognitionState.isActive ? 'Listening' : 'Push To Talk'}
|
||||
variant={recognitionState.hasSpeech ? 'solid' : recognitionState.isActive ? 'soft' : 'outlined'}
|
||||
color='primary'
|
||||
sx={!isRecording ? { backgroundColor: 'background.surface' } : undefined}
|
||||
sx={!recognitionState.isActive ? { backgroundColor: 'background.surface' } : undefined}
|
||||
/>
|
||||
) : null
|
||||
// <CallButton disabled={true} Icon={MicOffIcon} onClick={() => setMicMuted(muted => !muted)}
|
||||
@@ -419,9 +445,9 @@ export function Telephone(props: {
|
||||
|
||||
{/* DEBUG state */}
|
||||
{avatarClickCount > 10 && (avatarClickCount % 2 === 0) && (
|
||||
<Card variant='outlined' sx={{ maxHeight: '25dvh', overflow: 'auto', whiteSpace: 'pre', py: 0, width: '100%' }}>
|
||||
Special commands: Stop, Retry, Try Again, Restart, Goodbye.
|
||||
{JSON.stringify({ isSpeechEnabled, isRecordingAudio, speechInterim }, null, 2)}
|
||||
<Card variant='outlined' sx={{ maxHeight: '25dvh', fontSize: 'sm', overflow: 'auto', whiteSpace: 'pre', py: 0, width: '100%' }}>
|
||||
Special commands: Stop, Retry, Try Again, Restart, Goodbye.<br />
|
||||
{JSON.stringify({ ...recognitionState, speechInterim }, null, 2)}
|
||||
</Card>
|
||||
)}
|
||||
|
||||
|
||||
@@ -1,19 +1,8 @@
|
||||
import * as React from 'react';
|
||||
import { keyframes } from '@emotion/react';
|
||||
|
||||
import { Avatar, Box } from '@mui/joy';
|
||||
|
||||
|
||||
const cssScaleKeyframes = keyframes`
|
||||
0% {
|
||||
transform: scale(1);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.2);
|
||||
}
|
||||
100% {
|
||||
transform: scale(1);
|
||||
}`;
|
||||
import { animationScalePulse } from '~/common/util/animUtils';
|
||||
|
||||
|
||||
export function CallAvatar(props: { symbol: string, imageUrl?: string, isRinging?: boolean, onClick: () => void }) {
|
||||
@@ -34,7 +23,7 @@ export function CallAvatar(props: { symbol: string, imageUrl?: string, isRinging
|
||||
<Box
|
||||
sx={{
|
||||
...(props.isRinging
|
||||
? { animation: `${cssScaleKeyframes} 1.4s ease-in-out infinite` }
|
||||
? { animation: `${animationScalePulse} 1.4s ease-in-out infinite` }
|
||||
: {}),
|
||||
}}
|
||||
>
|
||||
|
||||
@@ -3,13 +3,13 @@ import * as React from 'react';
|
||||
import { Chip, ColorPaletteProp, VariantProp } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
|
||||
import type { VChatMessageIn } from '~/modules/llms/llm.client';
|
||||
import type { DMessage } from '~/common/stores/chat/chat.message';
|
||||
|
||||
|
||||
export function CallMessage(props: {
|
||||
text?: string | React.JSX.Element,
|
||||
variant?: VariantProp, color?: ColorPaletteProp,
|
||||
role: VChatMessageIn['role'],
|
||||
role: DMessage['role'],
|
||||
sx?: SxProps,
|
||||
}) {
|
||||
const isUserMessage = props.role === 'user';
|
||||
|
||||
@@ -3,18 +3,18 @@ import ClearIcon from '@mui/icons-material/Clear';
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsAlter: ICommandsProvider = {
|
||||
id: 'chat-alter',
|
||||
id: 'cmd-chat-alter',
|
||||
rank: 25,
|
||||
|
||||
getCommands: () => [{
|
||||
primary: '/assistant',
|
||||
alternatives: ['/a'],
|
||||
arguments: ['text'],
|
||||
arguments: ['text...'],
|
||||
description: 'Injects assistant response',
|
||||
}, {
|
||||
primary: '/system',
|
||||
alternatives: ['/s'],
|
||||
arguments: ['text'],
|
||||
arguments: ['text...'],
|
||||
description: 'Injects system message',
|
||||
}, {
|
||||
primary: '/clear',
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
|
||||
import { getUXLabsChatBeam } from '~/common/state/store-ux-labs';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsBeam: ICommandsProvider = {
|
||||
id: 'ass-beam',
|
||||
rank: 9,
|
||||
|
||||
getCommands: () => getUXLabsChatBeam() ? [{
|
||||
primary: '/beam',
|
||||
arguments: ['prompt'],
|
||||
description: 'Best of multiple replies',
|
||||
Icon: ChatBeamIcon,
|
||||
}] : [],
|
||||
|
||||
};
|
||||
@@ -1,16 +0,0 @@
|
||||
import LanguageIcon from '@mui/icons-material/Language';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsBrowse: ICommandsProvider = {
|
||||
id: 'ass-browse',
|
||||
rank: 20,
|
||||
|
||||
getCommands: () => [{
|
||||
primary: '/browse',
|
||||
arguments: ['URL'],
|
||||
description: 'Assistant will download the web page',
|
||||
Icon: LanguageIcon,
|
||||
}],
|
||||
|
||||
};
|
||||
@@ -1,9 +1,13 @@
|
||||
import FormatPaintIcon from '@mui/icons-material/FormatPaint';
|
||||
import FormatPaintTwoToneIcon from '@mui/icons-material/FormatPaintTwoTone';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export function textToDrawCommand(text: string): string {
|
||||
return `/draw ${text}`;
|
||||
}
|
||||
|
||||
export const CommandsDraw: ICommandsProvider = {
|
||||
id: 'ass-t2i',
|
||||
id: 'cmd-ass-t2i',
|
||||
rank: 10,
|
||||
|
||||
getCommands: () => [{
|
||||
@@ -11,7 +15,7 @@ export const CommandsDraw: ICommandsProvider = {
|
||||
alternatives: ['/imagine', '/img'],
|
||||
arguments: ['prompt'],
|
||||
description: 'Assistant will draw the text',
|
||||
Icon: FormatPaintIcon,
|
||||
Icon: FormatPaintTwoToneIcon,
|
||||
}],
|
||||
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ import PsychologyIcon from '@mui/icons-material/Psychology';
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsReact: ICommandsProvider = {
|
||||
id: 'ass-react',
|
||||
id: 'cmd-mode-react',
|
||||
rank: 15,
|
||||
|
||||
getCommands: () => [{
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
import { ChatCommand, ICommandsProvider } from './ICommandsProvider';
|
||||
import type { ChatCommand, ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
import { CommandsAlter } from './CommandsAlter';
|
||||
import { CommandsBeam } from './CommandsBeam';
|
||||
import { CommandsBrowse } from './CommandsBrowse';
|
||||
import { CommandsDraw } from './CommandsDraw';
|
||||
import { CommandsHelp } from './CommandsHelp';
|
||||
import { CommandsReact } from './CommandsReact';
|
||||
|
||||
|
||||
export type CommandsProviderId = 'ass-beam' | 'ass-browse' | 'ass-t2i' | 'ass-react' | 'chat-alter' | 'cmd-help';
|
||||
export type CommandsProviderId = 'cmd-ass-t2i' | 'cmd-chat-alter' | 'cmd-help' | 'cmd-mode-react';
|
||||
|
||||
type TextCommandPiece =
|
||||
| { type: 'text'; value: string; }
|
||||
| { type: 'cmd'; providerId: CommandsProviderId, command: string; params?: string, isError?: boolean };
|
||||
| { type: 'nocmd'; value: string; }
|
||||
| { type: 'cmd'; providerId: CommandsProviderId, command: string; params?: string, isErrorNoArgs?: boolean };
|
||||
|
||||
|
||||
const ChatCommandsProviders: Record<CommandsProviderId, ICommandsProvider> = {
|
||||
'ass-beam': CommandsBeam,
|
||||
'ass-browse': CommandsBrowse,
|
||||
'ass-react': CommandsReact,
|
||||
'ass-t2i': CommandsDraw,
|
||||
'chat-alter': CommandsAlter,
|
||||
'cmd-ass-t2i': CommandsDraw,
|
||||
'cmd-chat-alter': CommandsAlter,
|
||||
'cmd-help': CommandsHelp,
|
||||
'cmd-mode-react': CommandsReact,
|
||||
};
|
||||
|
||||
export function findAllChatCommands(): ChatCommand[] {
|
||||
@@ -31,16 +27,25 @@ export function findAllChatCommands(): ChatCommand[] {
|
||||
.flat();
|
||||
}
|
||||
|
||||
export function helpPrettyChatCommands() {
|
||||
return findAllChatCommands()
|
||||
.map(cmd => ` - ${cmd.primary}` + (cmd.alternatives?.length ? ` (${cmd.alternatives.join(', ')})` : '') + `: ${cmd.description}`)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
export function extractChatCommand(input: string): TextCommandPiece[] {
|
||||
const inputTrimmed = input.trim();
|
||||
|
||||
// quick exit: command does not start with '/'
|
||||
if (!inputTrimmed.startsWith('/'))
|
||||
return [{ type: 'text', value: input }];
|
||||
return [{ type: 'nocmd', value: input }];
|
||||
|
||||
// Find the first space to separate the command from its parameters (if any)
|
||||
const firstSpaceIndex = inputTrimmed.indexOf(' ');
|
||||
const potentialCommand = inputTrimmed.substring(0, firstSpaceIndex >= 0 ? firstSpaceIndex : inputTrimmed.length);
|
||||
const commandMatch = inputTrimmed.match(/^\/\S+/);
|
||||
const potentialCommand = commandMatch ? commandMatch[0] : inputTrimmed;
|
||||
|
||||
const textAfterCommand = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
|
||||
// Check if the potential command is an actual command
|
||||
for (const provider of Object.values(ChatCommandsProviders)) {
|
||||
@@ -48,22 +53,33 @@ export function extractChatCommand(input: string): TextCommandPiece[] {
|
||||
if (cmd.primary === potentialCommand || cmd.alternatives?.includes(potentialCommand)) {
|
||||
|
||||
// command needs arguments: take the rest of the input as parameters
|
||||
if (cmd.arguments?.length) {
|
||||
const params = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
return [{ type: 'cmd', providerId: provider.id, command: potentialCommand, params: params || undefined, isError: !params || undefined }];
|
||||
}
|
||||
if (cmd.arguments?.length) return [{
|
||||
type: 'cmd',
|
||||
providerId: provider.id,
|
||||
command: potentialCommand,
|
||||
params: textAfterCommand || undefined,
|
||||
isErrorNoArgs: !textAfterCommand,
|
||||
}];
|
||||
|
||||
// command without arguments, treat any text after as a separate text piece
|
||||
const pieces: TextCommandPiece[] = [{ type: 'cmd', providerId: provider.id, command: potentialCommand, params: undefined }];
|
||||
const textAfterCommand = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
if (textAfterCommand)
|
||||
pieces.push({ type: 'text', value: textAfterCommand });
|
||||
const pieces: TextCommandPiece[] = [{
|
||||
type: 'cmd',
|
||||
providerId: provider.id,
|
||||
command: potentialCommand,
|
||||
params: undefined,
|
||||
}];
|
||||
textAfterCommand && pieces.push({
|
||||
type: 'nocmd',
|
||||
value: textAfterCommand,
|
||||
});
|
||||
return pieces;
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No command found, return the entire input as text
|
||||
return [{ type: 'text', value: input }];
|
||||
return [{
|
||||
type: 'nocmd',
|
||||
value: input,
|
||||
}];
|
||||
}
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Modal, ModalClose } from '@mui/joy';
|
||||
|
||||
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
|
||||
import { BeamView } from '~/modules/beam/BeamView';
|
||||
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
|
||||
|
||||
/*const overlaySx: SxProps = {
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
|
||||
}*/
|
||||
|
||||
|
||||
export function ChatBeamWrapper(props: {
|
||||
beamStore: BeamStoreApi,
|
||||
isMobile: boolean,
|
||||
inlineSx?: SxProps,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const isMaximized = useBeamStore(props.beamStore, state => state.isMaximized);
|
||||
|
||||
const handleUnMaximize = React.useCallback(() => {
|
||||
props.beamStore.getState().setIsMaximized(false);
|
||||
}, [props.beamStore]);
|
||||
|
||||
// memo the beamview
|
||||
const beamView = React.useMemo(() => (
|
||||
<BeamView
|
||||
beamStore={props.beamStore}
|
||||
isMobile={props.isMobile}
|
||||
showExplainer
|
||||
/>
|
||||
), [props.beamStore, props.isMobile]);
|
||||
|
||||
return isMaximized ? (
|
||||
<Modal open onClose={handleUnMaximize}>
|
||||
<Box sx={{
|
||||
backgroundColor: 'background.level1',
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
}}>
|
||||
<ScrollToBottom disableAutoStick>
|
||||
{beamView}
|
||||
</ScrollToBottom>
|
||||
<ModalClose sx={{ color: 'white', backgroundColor: 'background.surface', boxShadow: 'xs', mr: 2 }} />
|
||||
</Box>
|
||||
</Modal>
|
||||
) : (
|
||||
<Box sx={props.inlineSx}>
|
||||
{beamView}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -1,371 +0,0 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
|
||||
import { Box, Dropdown, IconButton, ListDivider, ListItem, ListItemButton, ListItemDecorator, Menu, MenuButton, MenuItem, Tooltip, Typography } from '@mui/joy';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import CheckIcon from '@mui/icons-material/Check';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import FileDownloadOutlinedIcon from '@mui/icons-material/FileDownloadOutlined';
|
||||
import FileUploadOutlinedIcon from '@mui/icons-material/FileUploadOutlined';
|
||||
import FolderIcon from '@mui/icons-material/Folder';
|
||||
import MoreVertIcon from '@mui/icons-material/MoreVert';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { DFolder, useFolderStore } from '~/common/state/store-folders';
|
||||
import { DebounceInputMemo } from '~/common/components/DebounceInput';
|
||||
import { FoldersToggleOff } from '~/common/components/icons/FoldersToggleOff';
|
||||
import { FoldersToggleOn } from '~/common/components/icons/FoldersToggleOn';
|
||||
import { PageDrawerHeader } from '~/common/layout/optima/components/PageDrawerHeader';
|
||||
import { PageDrawerList } from '~/common/layout/optima/components/PageDrawerList';
|
||||
import { capitalizeFirstLetter } from '~/common/util/textUtils';
|
||||
import { themeScalingMap, themeZIndexOverMobileDrawer } from '~/common/app.theme';
|
||||
import { useOptimaDrawers } from '~/common/layout/optima/useOptimaDrawers';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
import { ChatDrawerItemMemo, FolderChangeRequest } from './ChatDrawerItem';
|
||||
import { ChatFolderList } from './folders/ChatFolderList';
|
||||
import { ChatNavGrouping, useChatNavRenderItems } from './useChatNavRenderItems';
|
||||
import { ClearFolderText } from './folders/useFolderDropdown';
|
||||
import { useChatShowRelativeSize } from '../store-app-chat';
|
||||
|
||||
|
||||
// this is here to make shallow comparisons work on the next hook
|
||||
const noFolders: DFolder[] = [];
|
||||
|
||||
/*
|
||||
* Lists folders and returns the active folder
|
||||
*/
|
||||
export const useFolders = (activeFolderId: string | null) => useFolderStore(({ enableFolders, folders, toggleEnableFolders }) => {
|
||||
|
||||
// finds the active folder if any
|
||||
const activeFolder = (enableFolders && activeFolderId)
|
||||
? folders.find(folder => folder.id === activeFolderId) ?? null
|
||||
: null;
|
||||
|
||||
return {
|
||||
activeFolder,
|
||||
allFolders: enableFolders ? folders : noFolders,
|
||||
enableFolders,
|
||||
toggleEnableFolders,
|
||||
};
|
||||
}, shallow);
|
||||
|
||||
|
||||
export const ChatDrawerMemo = React.memo(ChatDrawer);
|
||||
|
||||
function ChatDrawer(props: {
|
||||
isMobile: boolean,
|
||||
activeConversationId: DConversationId | null,
|
||||
activeFolderId: string | null,
|
||||
chatPanesConversationIds: DConversationId[],
|
||||
disableNewButton: boolean,
|
||||
onConversationActivate: (conversationId: DConversationId) => void,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string | null) => void,
|
||||
onConversationNew: (forceNoRecycle: boolean) => void,
|
||||
onConversationsDelete: (conversationIds: DConversationId[], bypassConfirmation: boolean) => void,
|
||||
onConversationsExportDialog: (conversationId: DConversationId | null, exportAll: boolean) => void,
|
||||
onConversationsImportDialog: () => void,
|
||||
setActiveFolderId: (folderId: string | null) => void,
|
||||
}) {
|
||||
|
||||
const { onConversationActivate, onConversationBranch, onConversationNew, onConversationsDelete, onConversationsExportDialog } = props;
|
||||
|
||||
// local state
|
||||
const [navGrouping, setNavGrouping] = React.useState<ChatNavGrouping>('date');
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = React.useState('');
|
||||
const [folderChangeRequest, setFolderChangeRequest] = React.useState<FolderChangeRequest | null>(null);
|
||||
|
||||
// external state
|
||||
const { closeDrawer, closeDrawerOnMobile } = useOptimaDrawers();
|
||||
const { showRelativeSize, toggleRelativeSize } = useChatShowRelativeSize();
|
||||
const { activeFolder, allFolders, enableFolders, toggleEnableFolders } = useFolders(props.activeFolderId);
|
||||
const { filteredChatsCount, filteredChatIDs, filteredChatsAreEmpty, filteredChatsBarBasis, filteredChatsIncludeActive, renderNavItems } = useChatNavRenderItems(
|
||||
props.activeConversationId, props.chatPanesConversationIds, debouncedSearchQuery, activeFolder, allFolders, navGrouping, showRelativeSize,
|
||||
);
|
||||
const { contentScaling, showSymbols } = useUIPreferencesStore(state => ({
|
||||
contentScaling: state.contentScaling,
|
||||
showSymbols: state.zenMode !== 'cleaner',
|
||||
}), shallow);
|
||||
|
||||
|
||||
// New/Activate/Delete Conversation
|
||||
|
||||
const isMultiPane = props.chatPanesConversationIds.length >= 2;
|
||||
const disableNewButton = props.disableNewButton && filteredChatsIncludeActive;
|
||||
const newButtonDontRecycle = isMultiPane || !filteredChatsIncludeActive;
|
||||
|
||||
const handleButtonNew = React.useCallback(() => {
|
||||
onConversationNew(newButtonDontRecycle);
|
||||
closeDrawerOnMobile();
|
||||
}, [closeDrawerOnMobile, newButtonDontRecycle, onConversationNew]);
|
||||
|
||||
const handleConversationActivate = React.useCallback((conversationId: DConversationId, closeMenu: boolean) => {
|
||||
onConversationActivate(conversationId);
|
||||
if (closeMenu)
|
||||
closeDrawerOnMobile();
|
||||
}, [closeDrawerOnMobile, onConversationActivate]);
|
||||
|
||||
const handleConversationsDeleteFiltered = React.useCallback(() => {
|
||||
!!filteredChatIDs?.length && onConversationsDelete(filteredChatIDs, false);
|
||||
}, [filteredChatIDs, onConversationsDelete]);
|
||||
|
||||
const handleConversationDeleteNoConfirmation = React.useCallback((conversationId: DConversationId) => {
|
||||
conversationId && onConversationsDelete([conversationId], true);
|
||||
}, [onConversationsDelete]);
|
||||
|
||||
const handleConversationsExport = React.useCallback(() => {
|
||||
props.activeConversationId && onConversationsExportDialog(props.activeConversationId, true);
|
||||
}, [onConversationsExportDialog, props.activeConversationId]);
|
||||
|
||||
|
||||
// Folder change request
|
||||
|
||||
const handleConversationFolderChange = React.useCallback((folderChangeRequest: FolderChangeRequest) => setFolderChangeRequest(folderChangeRequest), []);
|
||||
|
||||
const handleConversationFolderCancel = React.useCallback(() => setFolderChangeRequest(null), []);
|
||||
|
||||
const handleConversationFolderSet = React.useCallback((conversationId: DConversationId, nextFolderId: string | null) => {
|
||||
// Remove conversation from existing folders
|
||||
const { addConversationToFolder, folders, removeConversationFromFolder } = useFolderStore.getState();
|
||||
folders.forEach(folder => folder.conversationIds.includes(conversationId) && removeConversationFromFolder(folder.id, conversationId));
|
||||
|
||||
// Add conversation to the selected folder
|
||||
nextFolderId && addConversationToFolder(nextFolderId, conversationId);
|
||||
|
||||
// Close the menu
|
||||
setFolderChangeRequest(null);
|
||||
}, []);
|
||||
|
||||
|
||||
// memoize the group dropdown
|
||||
const groupingComponent = React.useMemo(() => (
|
||||
<Dropdown>
|
||||
<MenuButton
|
||||
aria-label='View options'
|
||||
slots={{ root: IconButton }}
|
||||
slotProps={{ root: { size: 'sm' } }}
|
||||
>
|
||||
<MoreVertIcon sx={{ fontSize: 'xl' }} />
|
||||
</MenuButton>
|
||||
<Menu placement='bottom-start' sx={{ minWidth: 180, zIndex: themeZIndexOverMobileDrawer /* need to be on top of the Modal on Mobile */ }}>
|
||||
<ListItem>
|
||||
<Typography level='body-sm'>Group By</Typography>
|
||||
</ListItem>
|
||||
{(['date', 'persona'] as const).map(_gName => (
|
||||
<MenuItem
|
||||
key={'group-' + _gName}
|
||||
aria-label={`Group by ${_gName}`}
|
||||
selected={navGrouping === _gName}
|
||||
onClick={() => setNavGrouping(grouping => grouping === _gName ? false : _gName)}
|
||||
>
|
||||
<ListItemDecorator>{navGrouping === _gName && <CheckIcon />}</ListItemDecorator>
|
||||
{capitalizeFirstLetter(_gName)}
|
||||
</MenuItem>
|
||||
))}
|
||||
<ListDivider />
|
||||
<ListItem>
|
||||
<Typography level='body-sm'>Show</Typography>
|
||||
</ListItem>
|
||||
<MenuItem onClick={toggleRelativeSize}>
|
||||
<ListItemDecorator>{showRelativeSize && <CheckIcon />}</ListItemDecorator>
|
||||
Relative Size
|
||||
</MenuItem>
|
||||
</Menu>
|
||||
</Dropdown>
|
||||
), [navGrouping, showRelativeSize, toggleRelativeSize]);
|
||||
|
||||
|
||||
return <>
|
||||
|
||||
{/* Drawer Header */}
|
||||
<PageDrawerHeader title='Chats' onClose={closeDrawer}>
|
||||
<Tooltip title={enableFolders ? 'Hide Folders' : 'Use Folders'}>
|
||||
<IconButton onClick={toggleEnableFolders}>
|
||||
{enableFolders ? <FoldersToggleOn /> : <FoldersToggleOff />}
|
||||
</IconButton>
|
||||
</Tooltip>
|
||||
</PageDrawerHeader>
|
||||
|
||||
{/* Folders List */}
|
||||
{/*<Box sx={{*/}
|
||||
{/* display: 'grid',*/}
|
||||
{/* gridTemplateRows: !enableFolders ? '0fr' : '1fr',*/}
|
||||
{/* transition: 'grid-template-rows 0.42s cubic-bezier(.17,.84,.44,1)',*/}
|
||||
{/* '& > div': {*/}
|
||||
{/* padding: enableFolders ? 2 : 0,*/}
|
||||
{/* transition: 'padding 0.42s cubic-bezier(.17,.84,.44,1)',*/}
|
||||
{/* overflow: 'hidden',*/}
|
||||
{/* },*/}
|
||||
{/*}}>*/}
|
||||
{enableFolders && (
|
||||
<ChatFolderList
|
||||
folders={allFolders}
|
||||
contentScaling={contentScaling}
|
||||
activeFolderId={props.activeFolderId}
|
||||
onFolderSelect={props.setActiveFolderId}
|
||||
/>
|
||||
)}
|
||||
{/*</Box>*/}
|
||||
|
||||
{/* Chats List */}
|
||||
<PageDrawerList variant='plain' noTopPadding noBottomPadding tallRows>
|
||||
|
||||
{enableFolders && <ListDivider sx={{ mb: 0 }} />}
|
||||
|
||||
{/* Search Input Field */}
|
||||
<DebounceInputMemo
|
||||
minChars={2}
|
||||
onDebounce={setDebouncedSearchQuery}
|
||||
debounceTimeout={300}
|
||||
placeholder='Search...'
|
||||
aria-label='Search'
|
||||
endDecorator={groupingComponent}
|
||||
sx={{ m: 2 }}
|
||||
/>
|
||||
|
||||
{/* New Chat Button */}
|
||||
<ListItem sx={{ mx: '0.25rem', mb: 0.5 }}>
|
||||
<ListItemButton
|
||||
// variant='outlined'
|
||||
variant={disableNewButton ? undefined : 'outlined'}
|
||||
disabled={disableNewButton}
|
||||
onClick={handleButtonNew}
|
||||
sx={{
|
||||
// ...PageDrawerTallItemSx,
|
||||
px: 'calc(var(--ListItem-paddingX) - 0.25rem)',
|
||||
|
||||
// text size
|
||||
fontSize: 'sm',
|
||||
fontWeight: 'lg',
|
||||
|
||||
// style
|
||||
borderRadius: 'md',
|
||||
boxShadow: (disableNewButton || props.isMobile) ? 'none' : 'sm',
|
||||
backgroundColor: 'background.popup',
|
||||
transition: 'box-shadow 0.2s',
|
||||
}}
|
||||
>
|
||||
<ListItemDecorator><AddIcon sx={{ '--Icon-fontSize': 'var(--joy-fontSize-xl)', pl: '0.125rem' }} /></ListItemDecorator>
|
||||
New chat
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
|
||||
{/*<ListDivider sx={{ mt: 0 }} />*/}
|
||||
|
||||
{/* List of Chat Titles (and actions) */}
|
||||
<Box sx={{ flex: 1, overflowY: 'auto', ...themeScalingMap[contentScaling].chatDrawerItemSx }}>
|
||||
{/*<ListItem sticky sx={{ justifyContent: 'space-between', boxShadow: 'sm' }}>*/}
|
||||
{/* <Typography level='body-sm'>*/}
|
||||
{/* Conversations*/}
|
||||
{/* </Typography>*/}
|
||||
{/* <ToggleButtonGroup variant='soft' size='sm' value={grouping} onChange={(_event, newValue) => newValue && setGrouping(newValue)}>*/}
|
||||
{/* <IconButton value='off'>*/}
|
||||
{/* <AccessTimeIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/* <IconButton value='persona'>*/}
|
||||
{/* <PersonIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/* </ToggleButtonGroup>*/}
|
||||
{/*</ListItem>*/}
|
||||
|
||||
{renderNavItems.map((item, idx) => item.type === 'nav-item-chat-data' ? (
|
||||
<ChatDrawerItemMemo
|
||||
key={'nav-chat-' + item.conversationId}
|
||||
item={item}
|
||||
showSymbols={showSymbols}
|
||||
bottomBarBasis={filteredChatsBarBasis}
|
||||
onConversationActivate={handleConversationActivate}
|
||||
onConversationBranch={onConversationBranch}
|
||||
onConversationDelete={handleConversationDeleteNoConfirmation}
|
||||
onConversationExport={onConversationsExportDialog}
|
||||
onConversationFolderChange={handleConversationFolderChange}
|
||||
/>
|
||||
) : item.type === 'nav-item-group' ? (
|
||||
<Typography key={'nav-divider-' + idx} level='body-xs' sx={{ textAlign: 'center', my: 'calc(var(--ListItem-minHeight) / 4)' }}>
|
||||
{item.title}
|
||||
</Typography>
|
||||
) : item.type === 'nav-item-info-message' ? (
|
||||
<Typography key={'nav-info-' + idx} level='body-xs' sx={{ textAlign: 'center', my: 'calc(var(--ListItem-minHeight) / 2)' }}>
|
||||
{item.message}
|
||||
</Typography>
|
||||
) : null,
|
||||
)}
|
||||
</Box>
|
||||
|
||||
<ListDivider sx={{ my: 0 }} />
|
||||
|
||||
<Box sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
<ListItemButton onClick={props.onConversationsImportDialog} sx={{ flex: 1 }}>
|
||||
<ListItemDecorator>
|
||||
<FileUploadOutlinedIcon />
|
||||
</ListItemDecorator>
|
||||
Import
|
||||
{/*<OpenAIIcon sx={{ ml: 'auto' }} />*/}
|
||||
</ListItemButton>
|
||||
|
||||
<ListItemButton disabled={filteredChatsAreEmpty} onClick={handleConversationsExport} sx={{ flex: 1 }}>
|
||||
<ListItemDecorator>
|
||||
<FileDownloadOutlinedIcon />
|
||||
</ListItemDecorator>
|
||||
Export
|
||||
</ListItemButton>
|
||||
</Box>
|
||||
|
||||
<ListItemButton disabled={filteredChatsAreEmpty} onClick={handleConversationsDeleteFiltered}>
|
||||
<ListItemDecorator>
|
||||
<DeleteOutlineIcon />
|
||||
</ListItemDecorator>
|
||||
Delete {filteredChatsCount >= 2 ? `all ${filteredChatsCount} chats` : 'chat'}
|
||||
</ListItemButton>
|
||||
|
||||
</PageDrawerList>
|
||||
|
||||
|
||||
{/* [Menu] Chat Item Folder Change */}
|
||||
{!!folderChangeRequest?.anchorEl && (
|
||||
<CloseableMenu
|
||||
bigIcons
|
||||
open anchorEl={folderChangeRequest.anchorEl} onClose={handleConversationFolderCancel}
|
||||
placement='bottom-start'
|
||||
zIndex={themeZIndexOverMobileDrawer /* need to be on top of the Modal on Mobile */}
|
||||
sx={{ minWidth: 200 }}
|
||||
>
|
||||
|
||||
{/* Folder Assignment Buttons */}
|
||||
{allFolders.map(folder => {
|
||||
const isRequestFolder = folder === folderChangeRequest.currentFolder;
|
||||
return (
|
||||
<ListItem
|
||||
key={folder.id}
|
||||
variant={isRequestFolder ? 'soft' : 'plain'}
|
||||
onClick={() => handleConversationFolderSet(folderChangeRequest.conversationId, folder.id)}
|
||||
>
|
||||
<ListItemButton>
|
||||
<ListItemDecorator>
|
||||
<FolderIcon sx={{ color: folder.color }} />
|
||||
</ListItemDecorator>
|
||||
{folder.title}
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Remove Folder Assignment */}
|
||||
{!!folderChangeRequest.currentFolder && (
|
||||
<ListItem onClick={() => handleConversationFolderSet(folderChangeRequest.conversationId, null)}>
|
||||
<ListItemButton>
|
||||
<ListItemDecorator>
|
||||
<ClearIcon />
|
||||
</ListItemDecorator>
|
||||
{ClearFolderText}
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
)}
|
||||
|
||||
</CloseableMenu>
|
||||
)}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
|
||||
import { useChatLLMDropdown } from './useLLMDropdown';
|
||||
import { usePersonaIdDropdown } from './usePersonaDropdown';
|
||||
import { useFolderDropdown } from './folders/useFolderDropdown';
|
||||
|
||||
|
||||
export function ChatDropdowns(props: {
|
||||
conversationId: DConversationId | null
|
||||
}) {
|
||||
|
||||
// state
|
||||
const { chatLLMDropdown } = useChatLLMDropdown();
|
||||
const { personaDropdown } = usePersonaIdDropdown(props.conversationId);
|
||||
const { folderDropdown } = useFolderDropdown(props.conversationId);
|
||||
|
||||
return <>
|
||||
|
||||
{/* Persona selector */}
|
||||
{personaDropdown}
|
||||
|
||||
{/* Model selector */}
|
||||
{chatLLMDropdown}
|
||||
|
||||
{/* Folder selector */}
|
||||
{folderDropdown}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,28 +1,37 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, List } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
|
||||
import type { SystemPurposeExample } from '../../../data';
|
||||
|
||||
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
|
||||
import type { ConversationHandler } from '~/common/chats/ConversationHandler';
|
||||
import { InlineError } from '~/common/components/InlineError';
|
||||
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { ShortcutKeyName, useGlobalShortcut } from '~/common/components/useGlobalShortcut';
|
||||
import { createDMessage, DConversationId, DMessage, getConversation, useChatStore } from '~/common/state/store-chats';
|
||||
import type { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
|
||||
import { DConversationId, excludeSystemMessages } from '~/common/stores/chat/chat.conversation';
|
||||
import { ShortcutKey, useGlobalShortcuts } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { convertFilesToDAttachmentFragments } from '~/common/attachment-drafts/attachment.pipeline';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP } from '~/common/stores/chat/chat.message';
|
||||
import { createTextContentFragment, DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
|
||||
import { openFileForAttaching } from '~/common/components/ButtonAttachFiles';
|
||||
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
|
||||
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
|
||||
import { useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useEphemerals } from '~/common/chats/EphemeralsStore';
|
||||
import { useChatOverlayStore } from '~/common/chat-overlay/store-perchat_vanilla';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useScrollToBottom } from '~/common/scroll-to-bottom/useScrollToBottom';
|
||||
|
||||
import { CMLZeroConversation } from './messages-list/CMLZeroConversation';
|
||||
import { ChatMessage, ChatMessageMemo } from './message/ChatMessage';
|
||||
import { CleanerMessage, MessagesSelectionHeader } from './message/CleanerMessage';
|
||||
import { Ephemerals } from './Ephemerals';
|
||||
import { PersonaSelector } from './persona-selector/PersonaSelector';
|
||||
import { useChatShowSystemMessages } from '../store-app-chat';
|
||||
import { useScrollToBottom } from './scroll-to-bottom/useScrollToBottom';
|
||||
import { useChatAutoSuggestHTMLUI, useChatShowSystemMessages } from '../store-app-chat';
|
||||
|
||||
|
||||
const stableNoMessages: DMessage[] = [];
|
||||
|
||||
/**
|
||||
* A list of ChatMessages
|
||||
*/
|
||||
@@ -30,11 +39,15 @@ export function ChatMessageList(props: {
|
||||
conversationId: DConversationId | null,
|
||||
conversationHandler: ConversationHandler | null,
|
||||
capabilityHasT2I: boolean,
|
||||
chatLLMAntPromptCaching: boolean,
|
||||
chatLLMContextTokens: number | null,
|
||||
chatLLMSupportsImages: boolean,
|
||||
fitScreen: boolean,
|
||||
isMobile: boolean,
|
||||
isMessageSelectionMode: boolean,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string) => void,
|
||||
onConversationExecuteHistory: (conversationId: DConversationId, history: DMessage[], chatEffectBeam: boolean) => Promise<void>,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string, addSplitPane: boolean) => void,
|
||||
onConversationExecuteHistory: (conversationId: DConversationId) => Promise<void>,
|
||||
onConversationNew: (forceNoRecycle: boolean, isIncognito: boolean) => void,
|
||||
onTextDiagram: (diagramConfig: DiagramConfig | null) => void,
|
||||
onTextImagine: (conversationId: DConversationId, selectedText: string) => Promise<void>,
|
||||
onTextSpeak: (selectedText: string) => Promise<void>,
|
||||
@@ -49,84 +62,163 @@ export function ChatMessageList(props: {
|
||||
|
||||
// external state
|
||||
const { notifyBooting } = useScrollToBottom();
|
||||
const { openPreferencesTab } = useOptimaLayout();
|
||||
const danger_experimentalHtmlWebUi = useChatAutoSuggestHTMLUI();
|
||||
const [showSystemMessages] = useChatShowSystemMessages();
|
||||
const optionalTranslationWarning = useBrowserTranslationWarning();
|
||||
const { conversationMessages, historyTokenCount, editMessage, deleteMessage, setMessages } = useChatStore(state => {
|
||||
const conversation = state.conversations.find(conversation => conversation.id === props.conversationId);
|
||||
const { conversationMessages, historyTokenCount } = useChatStore(useShallow(({ conversations }) => {
|
||||
const conversation = conversations.find(conversation => conversation.id === props.conversationId);
|
||||
return {
|
||||
conversationMessages: conversation ? conversation.messages : [],
|
||||
conversationMessages: conversation ? conversation.messages : stableNoMessages,
|
||||
historyTokenCount: conversation ? conversation.tokenCount : 0,
|
||||
deleteMessage: state.deleteMessage,
|
||||
editMessage: state.editMessage,
|
||||
setMessages: state.setMessages,
|
||||
};
|
||||
}, shallow);
|
||||
const ephemerals = useEphemerals(props.conversationHandler);
|
||||
}));
|
||||
const { _composerInReferenceToCount, ephemerals } = useChatOverlayStore(props.conversationHandler?.conversationOverlayStore ?? null, useShallow(state => ({
|
||||
_composerInReferenceToCount: state.inReferenceTo?.length ?? 0,
|
||||
ephemerals: state.ephemerals?.length ? state.ephemerals : null,
|
||||
})));
|
||||
const { mayWork: isSpeakable } = useCapabilityElevenLabs();
|
||||
|
||||
// derived state
|
||||
const { conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
|
||||
|
||||
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
|
||||
const composerCanAddInReferenceTo = _composerInReferenceToCount < 5;
|
||||
const composerHasInReferenceto = _composerInReferenceToCount > 0;
|
||||
|
||||
// text actions
|
||||
|
||||
const handleRunExample = React.useCallback(async (text: string) => {
|
||||
conversationId && await onConversationExecuteHistory(conversationId, [...conversationMessages, createDMessage('user', text)], false);
|
||||
}, [conversationId, conversationMessages, onConversationExecuteHistory]);
|
||||
const handleRunExample = React.useCallback(async (example: SystemPurposeExample) => {
|
||||
if (!conversationId || !conversationHandler) return;
|
||||
|
||||
// Simple Example Prompt (User text message)
|
||||
if (typeof example === 'string') {
|
||||
conversationHandler.messageAppend(createDMessageTextContent('user', example)); // [chat] append user:persona question
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
return;
|
||||
}
|
||||
|
||||
// User-Action Example Prompts (User text message + File attachments)
|
||||
switch (example.action) {
|
||||
case 'require-data-attachment':
|
||||
await openFileForAttaching(true, async (filesWithHandle) => {
|
||||
|
||||
// Retrieve fully-fledged Attachment Fragments (converted/extracted, with sources, mimes, etc.) from the selected files
|
||||
const attachmentFragments = await convertFilesToDAttachmentFragments('file-open', filesWithHandle, {
|
||||
hintAddImages: props.chatLLMSupportsImages,
|
||||
});
|
||||
|
||||
// Create a User message with the prompt and the attachment fragments
|
||||
if (attachmentFragments.length) {
|
||||
conversationHandler.messageAppend(createDMessageFromFragments('user', [ // [chat] append user:persona question + attachment(s)
|
||||
createTextContentFragment(example.prompt),
|
||||
...attachmentFragments,
|
||||
]));
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory, props.chatLLMSupportsImages]);
|
||||
|
||||
const handleMessageContinue = React.useCallback(async (_messageId: DMessageId /* Ignored for now */) => {
|
||||
if (conversationId && conversationHandler) {
|
||||
conversationHandler.messageAppend(createDMessageTextContent('user', 'Continue')); // [chat] append user:Continue
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory]);
|
||||
|
||||
|
||||
// message menu methods proxy
|
||||
|
||||
const handleConversationBranch = React.useCallback((messageId: string) => {
|
||||
conversationId && onConversationBranch(conversationId, messageId);
|
||||
const handleMessageAssistantFrom = React.useCallback(async (messageId: DMessageId, offset: number) => {
|
||||
if (conversationId && conversationHandler) {
|
||||
conversationHandler.historyTruncateTo(messageId, offset);
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory]);
|
||||
|
||||
const handleMessageBeam = React.useCallback(async (messageId: DMessageId) => {
|
||||
// Message option menu Beam
|
||||
if (!conversationId || !props.conversationHandler || !props.conversationHandler.isValid()) return;
|
||||
const inputHistory = props.conversationHandler.historyViewHeadOrThrow('chat-beam-message');
|
||||
if (!inputHistory.length) return;
|
||||
|
||||
// TODO: replace the Persona and Auto-Cache-hint in the history?
|
||||
|
||||
// truncate the history to the given message (may or may not have more after)
|
||||
const truncatedHistory = inputHistory.slice(0, inputHistory.findIndex(m => m.id === messageId) + 1);
|
||||
const lastTruncatedMessage = truncatedHistory[truncatedHistory.length - 1];
|
||||
if (!lastTruncatedMessage) return;
|
||||
|
||||
// assistant: do an in-place beam
|
||||
if (lastTruncatedMessage.role === 'assistant') {
|
||||
if (truncatedHistory.length >= 2)
|
||||
props.conversationHandler.beamInvoke(truncatedHistory.slice(0, -1), [lastTruncatedMessage], lastTruncatedMessage.id);
|
||||
} else if (lastTruncatedMessage.role === 'user') {
|
||||
// user: truncate and append (but if the next message is an assistant message, import it)
|
||||
const possibleNextMessage = inputHistory[truncatedHistory.length];
|
||||
if (possibleNextMessage?.role === 'assistant')
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [possibleNextMessage], null);
|
||||
else
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [], null);
|
||||
}
|
||||
}, [conversationId, props.conversationHandler]);
|
||||
|
||||
const handleMessageBranch = React.useCallback((messageId: DMessageId) => {
|
||||
conversationId && onConversationBranch(conversationId, messageId, true);
|
||||
}, [conversationId, onConversationBranch]);
|
||||
|
||||
const handleConversationRestartFrom = React.useCallback(async (messageId: string, offset: number, chatEffectBeam: boolean) => {
|
||||
const messages = getConversation(conversationId)?.messages;
|
||||
if (messages) {
|
||||
const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + offset + 1);
|
||||
conversationId && await onConversationExecuteHistory(conversationId, truncatedHistory, chatEffectBeam);
|
||||
}
|
||||
}, [conversationId, onConversationExecuteHistory]);
|
||||
const handleMessageTruncate = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.historyTruncateTo(messageId, 0);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleConversationTruncate = React.useCallback((messageId: string) => {
|
||||
const messages = getConversation(conversationId)?.messages;
|
||||
if (conversationId && messages) {
|
||||
const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + 1);
|
||||
setMessages(conversationId, truncatedHistory);
|
||||
}
|
||||
}, [conversationId, setMessages]);
|
||||
const handleMessageDelete = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.messagesDelete([messageId]);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageDelete = React.useCallback((messageId: string) => {
|
||||
conversationId && deleteMessage(conversationId, messageId);
|
||||
}, [conversationId, deleteMessage]);
|
||||
const handleMessageAppendFragment = React.useCallback((messageId: DMessageId, fragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentAppend(messageId, fragment, false, false);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageEdit = React.useCallback((messageId: string, newText: string) => {
|
||||
conversationId && editMessage(conversationId, messageId, { text: newText }, true);
|
||||
}, [conversationId, editMessage]);
|
||||
const handleMessageDeleteFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId) => {
|
||||
props.conversationHandler?.messageFragmentDelete(messageId, fragmentId, false, true);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleTextDiagram = React.useCallback(async (messageId: string, text: string) => {
|
||||
const handleMessageReplaceFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId, newFragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentReplace(messageId, fragmentId, newFragment, false);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageToggleUserFlag = React.useCallback((messageId: DMessageId, userFlag: DMessageUserFlag, _maxPerConversation?: number) => {
|
||||
props.conversationHandler?.messageToggleUserFlag(messageId, userFlag, true /* touch */);
|
||||
// Note: we don't support 'maxPerConversation' yet, which is supposed to turn off the flag from the beginning if it's too numerous
|
||||
// if (_maxPerConversation) {
|
||||
// ...
|
||||
// }
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleAddInReferenceTo = React.useCallback((item: DMetaReferenceItem) => {
|
||||
props.conversationHandler?.overlayActions.addInReferenceTo(item);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleTextDiagram = React.useCallback(async (messageId: DMessageId, text: string) => {
|
||||
conversationId && onTextDiagram({ conversationId: conversationId, messageId, text });
|
||||
}, [conversationId, onTextDiagram]);
|
||||
|
||||
const handleTextImagine = React.useCallback(async (text: string) => {
|
||||
if (!capabilityHasT2I)
|
||||
return openPreferencesTab(PreferencesTab.Draw);
|
||||
return optimaOpenPreferences('draw');
|
||||
if (conversationId) {
|
||||
setIsImagining(true);
|
||||
await onTextImagine(conversationId, text);
|
||||
setIsImagining(false);
|
||||
}
|
||||
}, [capabilityHasT2I, conversationId, onTextImagine, openPreferencesTab]);
|
||||
}, [capabilityHasT2I, conversationId, onTextImagine]);
|
||||
|
||||
const handleTextSpeak = React.useCallback(async (text: string) => {
|
||||
if (!isSpeakable)
|
||||
return openPreferencesTab(PreferencesTab.Voice);
|
||||
return optimaOpenPreferences('voice');
|
||||
setIsSpeaking(true);
|
||||
await onTextSpeak(text);
|
||||
setIsSpeaking(false);
|
||||
}, [isSpeakable, onTextSpeak, openPreferencesTab]);
|
||||
}, [isSpeakable, onTextSpeak]);
|
||||
|
||||
|
||||
// operate on the local selection set
|
||||
@@ -139,36 +231,43 @@ export function ChatMessageList(props: {
|
||||
setSelectedMessages(newSelected);
|
||||
};
|
||||
|
||||
const handleSelectMessage = (messageId: string, selected: boolean) => {
|
||||
const handleSelectMessage = (messageId: DMessageId, selected: boolean) => {
|
||||
const newSelected = new Set(selectedMessages);
|
||||
selected ? newSelected.add(messageId) : newSelected.delete(messageId);
|
||||
setSelectedMessages(newSelected);
|
||||
};
|
||||
|
||||
const handleSelectionDelete = () => {
|
||||
if (conversationId)
|
||||
for (const selectedMessage of selectedMessages)
|
||||
deleteMessage(conversationId, selectedMessage);
|
||||
const handleSelectionDelete = React.useCallback(() => {
|
||||
props.conversationHandler?.messagesDelete(Array.from(selectedMessages));
|
||||
setSelectedMessages(new Set());
|
||||
};
|
||||
}, [props.conversationHandler, selectedMessages]);
|
||||
|
||||
useGlobalShortcut(props.isMessageSelectionMode && ShortcutKeyName.Esc, false, false, false, () => {
|
||||
props.setIsMessageSelectionMode(false);
|
||||
});
|
||||
const handleSelectionHide = React.useCallback(() => {
|
||||
for (let selectedMessage of Array.from(selectedMessages))
|
||||
props.conversationHandler?.messageSetUserFlag(selectedMessage, MESSAGE_FLAG_AIX_SKIP, true, true);
|
||||
setSelectedMessages(new Set());
|
||||
}, [props.conversationHandler, selectedMessages]);
|
||||
|
||||
const { isMessageSelectionMode, setIsMessageSelectionMode } = props;
|
||||
|
||||
useGlobalShortcuts('ChatMessageList_Selection', React.useMemo(() => !isMessageSelectionMode ? [] : [
|
||||
{ key: ShortcutKey.Esc, action: () => setIsMessageSelectionMode(false), description: 'Close Cleanup', level: 10 - 1 },
|
||||
], [isMessageSelectionMode, setIsMessageSelectionMode]));
|
||||
|
||||
|
||||
// text-diff functionality: only diff the last message and when it's complete (not typing), and they're similar in size
|
||||
// text-diff functionality: only diff the last complete message, and they're similar in size
|
||||
|
||||
const { diffTargetMessage, diffPrevText } = React.useMemo(() => {
|
||||
const [msgB, msgA] = conversationMessages.filter(m => m.role === 'assistant').reverse();
|
||||
if (msgB?.text && msgA?.text && !msgB?.typing) {
|
||||
const textA = msgA.text, textB = msgB.text;
|
||||
const lenA = textA.length, lenB = textB.length;
|
||||
if (lenA > 80 && lenB > 80 && lenA > lenB / 3 && lenB > lenA / 3)
|
||||
return { diffTargetMessage: msgB, diffPrevText: textA };
|
||||
}
|
||||
return { diffTargetMessage: undefined, diffPrevText: undefined };
|
||||
}, [conversationMessages]);
|
||||
// const { diffTargetMessage, diffPrevText } = React.useMemo(() => {
|
||||
// const [msgB, msgA] = conversationMessages.filter(m => m.role === 'assistant').reverse();
|
||||
// const textB = msgB ? singleTextOrThrow(msgB) : undefined;
|
||||
// const textA = msgA ? singleTextOrThrow(msgA) : undefined;
|
||||
// if (textB && textA && !msgB?.pendingIncomplete) {
|
||||
// const lenA = textA.length, lenB = textB.length;
|
||||
// if (lenA > 80 && lenB > 80 && lenA > lenB / 3 && lenB > lenA / 3)
|
||||
// return { diffTargetMessage: msgB, diffPrevText: textA };
|
||||
// }
|
||||
// return { diffTargetMessage: undefined, diffPrevText: undefined };
|
||||
// }, [conversationMessages]);
|
||||
|
||||
|
||||
// scroll to the very bottom of a new chat
|
||||
@@ -178,30 +277,39 @@ export function ChatMessageList(props: {
|
||||
}, [conversationId, notifyBooting]);
|
||||
|
||||
|
||||
// style memo
|
||||
const listSx: SxProps = React.useMemo(() => ({
|
||||
p: 0,
|
||||
...props.sx,
|
||||
|
||||
// fix for the double-border on the last message (one by the composer, one to the bottom of the message)
|
||||
// marginBottom: '-1px',
|
||||
|
||||
// layout
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
}), [props.sx]);
|
||||
|
||||
|
||||
// no conversation: sine qua non
|
||||
if (!conversationId)
|
||||
return <CMLZeroConversation onConversationNew={props.onConversationNew} />;
|
||||
|
||||
|
||||
// no content: show the persona selector
|
||||
|
||||
const filteredMessages = conversationMessages
|
||||
.filter(m => m.role !== 'system' || showSystemMessages); // hide the System message if the user choses to
|
||||
const filteredMessages = excludeSystemMessages(conversationMessages, showSystemMessages);
|
||||
|
||||
|
||||
if (!filteredMessages.length)
|
||||
return (
|
||||
<Box sx={{ ...props.sx }}>
|
||||
{conversationId
|
||||
? <PersonaSelector conversationId={conversationId} runExample={handleRunExample} />
|
||||
: <InlineError severity='info' error='Select a conversation' sx={{ m: 2 }} />}
|
||||
<PersonaSelector conversationId={conversationId} isMobile={props.isMobile} runExample={handleRunExample} />
|
||||
</Box>
|
||||
);
|
||||
|
||||
return (
|
||||
<List sx={{
|
||||
p: 0, ...(props.sx || {}),
|
||||
// this makes sure that the the window is scrolled to the bottom (column-reverse)
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
// fix for the double-border on the last message (one by the composer, one to the bottom of the message)
|
||||
// marginBottom: '-1px',
|
||||
}}>
|
||||
<List role='chat-messages-list' sx={listSx}>
|
||||
|
||||
{optionalTranslationWarning}
|
||||
|
||||
@@ -212,13 +320,14 @@ export function ChatMessageList(props: {
|
||||
onClose={() => props.setIsMessageSelectionMode(false)}
|
||||
onSelectAll={handleSelectAll}
|
||||
onDeleteMessages={handleSelectionDelete}
|
||||
onHideMessages={handleSelectionHide}
|
||||
/>
|
||||
)}
|
||||
|
||||
{filteredMessages.map((message, idx, { length: count }) => {
|
||||
{filteredMessages.map((message, idx) => {
|
||||
|
||||
// Optimization: if the component is going to change (e.g. the message is typing), we don't want to memoize it to not throw garbage in memory
|
||||
const ChatMessageMemoOrNot = message.typing ? ChatMessage : ChatMessageMemo;
|
||||
// Optimization: only memo complete components, or we'd be memoizing garbage
|
||||
const ChatMessageMemoOrNot = !message.pendingIncomplete ? ChatMessageMemo : ChatMessage;
|
||||
|
||||
return props.isMessageSelectionMode ? (
|
||||
|
||||
@@ -234,33 +343,43 @@ export function ChatMessageList(props: {
|
||||
<ChatMessageMemoOrNot
|
||||
key={'msg-' + message.id}
|
||||
message={message}
|
||||
diffPreviousText={message === diffTargetMessage ? diffPrevText : undefined}
|
||||
// diffPreviousText={message === diffTargetMessage ? diffPrevText : undefined}
|
||||
fitScreen={props.fitScreen}
|
||||
isBottom={idx === count - 1}
|
||||
hasInReferenceTo={composerHasInReferenceto}
|
||||
isMobile={props.isMobile}
|
||||
isBottom={idx === filteredMessages.length - 1}
|
||||
isImagining={isImagining}
|
||||
isSpeaking={isSpeaking}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationRestartFrom={handleConversationRestartFrom}
|
||||
onConversationTruncate={handleConversationTruncate}
|
||||
showAntPromptCaching={props.chatLLMAntPromptCaching}
|
||||
showUnsafeHtmlCode={danger_experimentalHtmlWebUi}
|
||||
onAddInReferenceTo={!composerCanAddInReferenceTo ? undefined : handleAddInReferenceTo}
|
||||
onMessageAssistantFrom={handleMessageAssistantFrom}
|
||||
onMessageBeam={handleMessageBeam}
|
||||
onMessageBranch={handleMessageBranch}
|
||||
onMessageContinue={handleMessageContinue}
|
||||
onMessageDelete={handleMessageDelete}
|
||||
onMessageEdit={handleMessageEdit}
|
||||
onMessageFragmentAppend={handleMessageAppendFragment}
|
||||
onMessageFragmentDelete={handleMessageDeleteFragment}
|
||||
onMessageFragmentReplace={handleMessageReplaceFragment}
|
||||
onMessageToggleUserFlag={handleMessageToggleUserFlag}
|
||||
onMessageTruncate={handleMessageTruncate}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={handleTextImagine}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
onTextImagine={capabilityHasT2I ? handleTextImagine : undefined}
|
||||
onTextSpeak={isSpeakable ? handleTextSpeak : undefined}
|
||||
/>
|
||||
|
||||
);
|
||||
},
|
||||
)}
|
||||
|
||||
{!!ephemerals.length && (
|
||||
{/* Render ephemerals (sidebar ReAct output widgets) at the bottom */}
|
||||
{!!ephemerals?.length && !!conversationHandler && (
|
||||
<Ephemerals
|
||||
ephemerals={ephemerals}
|
||||
conversationId={props.conversationId}
|
||||
conversationHandler={conversationHandler}
|
||||
sx={{
|
||||
mt: 'auto',
|
||||
overflowY: 'auto',
|
||||
minHeight: 64,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -1,151 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, IconButton, ListDivider, ListItemDecorator, MenuItem, Switch, Tooltip } from '@mui/joy';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import CheckBoxOutlineBlankOutlinedIcon from '@mui/icons-material/CheckBoxOutlineBlankOutlined';
|
||||
import CheckBoxOutlinedIcon from '@mui/icons-material/CheckBoxOutlined';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import CompressIcon from '@mui/icons-material/Compress';
|
||||
import ForkRightIcon from '@mui/icons-material/ForkRight';
|
||||
import HorizontalSplitIcon from '@mui/icons-material/HorizontalSplit';
|
||||
import HorizontalSplitOutlinedIcon from '@mui/icons-material/HorizontalSplitOutlined';
|
||||
import SettingsSuggestOutlinedIcon from '@mui/icons-material/SettingsSuggestOutlined';
|
||||
import VerticalSplitIcon from '@mui/icons-material/VerticalSplit';
|
||||
import VerticalSplitOutlinedIcon from '@mui/icons-material/VerticalSplitOutlined';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useOptimaDrawers } from '~/common/layout/optima/useOptimaDrawers';
|
||||
|
||||
import { useChatShowSystemMessages } from '../store-app-chat';
|
||||
import { usePaneDuplicateOrClose } from './panes/usePanesManager';
|
||||
|
||||
|
||||
export function ChatPageMenuItems(props: {
|
||||
isMobile: boolean,
|
||||
conversationId: DConversationId | null,
|
||||
disableItems: boolean,
|
||||
hasConversations: boolean,
|
||||
isMessageSelectionMode: boolean,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string | null) => void,
|
||||
onConversationClear: (conversationId: DConversationId) => void,
|
||||
onConversationFlatten: (conversationId: DConversationId) => void,
|
||||
// onConversationNew: (forceNoRecycle: boolean) => void,
|
||||
setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const { closePageMenu } = useOptimaDrawers();
|
||||
const { canAddPane, isMultiPane, duplicateFocusedPane, removeOtherPanes } = usePaneDuplicateOrClose();
|
||||
const [showSystemMessages, setShowSystemMessages] = useChatShowSystemMessages();
|
||||
|
||||
|
||||
const handleIncreaseMultiPane = React.useCallback((event?: React.MouseEvent) => {
|
||||
event?.stopPropagation();
|
||||
|
||||
// create a new pane with the current conversation
|
||||
duplicateFocusedPane();
|
||||
|
||||
// load a brand new conversation inside
|
||||
// FIXME: still testing this
|
||||
// props.onConversationNew(true);
|
||||
}, [duplicateFocusedPane]);
|
||||
|
||||
const handleToggleMultiPane = React.useCallback((_event: React.MouseEvent) => {
|
||||
if (isMultiPane)
|
||||
removeOtherPanes();
|
||||
else
|
||||
handleIncreaseMultiPane(undefined);
|
||||
}, [handleIncreaseMultiPane, isMultiPane, removeOtherPanes]);
|
||||
|
||||
|
||||
const closeMenu = (event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
closePageMenu();
|
||||
};
|
||||
|
||||
const handleConversationClear = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationClear(props.conversationId);
|
||||
};
|
||||
|
||||
const handleConversationBranch = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationBranch(props.conversationId, null);
|
||||
};
|
||||
|
||||
const handleConversationFlatten = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationFlatten(props.conversationId);
|
||||
};
|
||||
|
||||
const handleToggleMessageSelectionMode = (event: React.MouseEvent) => {
|
||||
closeMenu(event);
|
||||
props.setIsMessageSelectionMode(!props.isMessageSelectionMode);
|
||||
};
|
||||
|
||||
const handleToggleSystemMessages = () => setShowSystemMessages(!showSystemMessages);
|
||||
|
||||
|
||||
return <>
|
||||
|
||||
{/* System Message(s) */}
|
||||
<MenuItem onClick={handleToggleSystemMessages}>
|
||||
<ListItemDecorator><SettingsSuggestOutlinedIcon /></ListItemDecorator>
|
||||
System messages
|
||||
<Switch checked={showSystemMessages} onChange={handleToggleSystemMessages} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
{/* Un /Split */}
|
||||
<MenuItem onClick={handleToggleMultiPane}>
|
||||
<ListItemDecorator>{props.isMobile
|
||||
? (isMultiPane ? <HorizontalSplitIcon /> : <HorizontalSplitOutlinedIcon />)
|
||||
: (isMultiPane ? <VerticalSplitIcon /> : <VerticalSplitOutlinedIcon />)
|
||||
}</ListItemDecorator>
|
||||
{/* Unsplit / Split text*/}
|
||||
{isMultiPane ? 'Unsplit' : props.isMobile ? 'Split Down' : 'Split Right'}
|
||||
{/* '+' */}
|
||||
{isMultiPane && (
|
||||
<Tooltip title='Add Another Split'>
|
||||
<IconButton
|
||||
size='sm'
|
||||
variant='outlined'
|
||||
disabled={!canAddPane}
|
||||
onClick={handleIncreaseMultiPane}
|
||||
sx={{ ml: 'auto', /*mr: '2px',*/ my: '-0.25rem' /* absorb the menuItem padding */ }}
|
||||
>
|
||||
<AddIcon />
|
||||
</IconButton>
|
||||
</Tooltip>
|
||||
)}
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem disabled={props.disableItems} onClick={handleConversationBranch}>
|
||||
<ListItemDecorator><ForkRightIcon /></ListItemDecorator>
|
||||
Branch
|
||||
</MenuItem>
|
||||
|
||||
<ListDivider />
|
||||
|
||||
<MenuItem disabled={props.disableItems} onClick={handleToggleMessageSelectionMode} sx={props.isMessageSelectionMode ? { fontWeight: 'lg' } : {}}>
|
||||
<ListItemDecorator>{props.isMessageSelectionMode ? <CheckBoxOutlinedIcon /> : <CheckBoxOutlineBlankOutlinedIcon />}</ListItemDecorator>
|
||||
Cleanup ...
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem disabled={props.disableItems} onClick={handleConversationFlatten}>
|
||||
<ListItemDecorator><CompressIcon color='success' /></ListItemDecorator>
|
||||
Compress ...
|
||||
</MenuItem>
|
||||
|
||||
<ListDivider />
|
||||
|
||||
<MenuItem disabled={props.disableItems} onClick={handleConversationClear}>
|
||||
<ListItemDecorator><ClearIcon /></ListItemDecorator>
|
||||
<Box sx={{ flexGrow: 1, display: 'flex', justifyContent: 'space-between', gap: 1 }}>
|
||||
Reset Chat
|
||||
{!props.disableItems && <KeyStroke combo='Ctrl + Alt + X' />}
|
||||
</Box>
|
||||
</MenuItem>
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,14 +1,22 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Grid, IconButton, Sheet, styled, Typography } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import MaximizeIcon from '@mui/icons-material/Maximize';
|
||||
import MinimizeIcon from '@mui/icons-material/Minimize';
|
||||
import VerticalSplitIcon from '@mui/icons-material/VerticalSplit';
|
||||
import VerticalSplitOutlinedIcon from '@mui/icons-material/VerticalSplitOutlined';
|
||||
|
||||
import { ConversationManager } from '~/common/chats/ConversationHandler';
|
||||
import { DConversationId } from '~/common/state/store-chats';
|
||||
import { DEphemeral } from '~/common/chats/EphemeralsStore';
|
||||
import { lineHeightChatTextMd } from '~/common/app.theme';
|
||||
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
|
||||
|
||||
import type { DEphemeral } from '~/common/chat-overlay/store-perchat-ephemerals_slice';
|
||||
import { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
|
||||
import { adjustContentScaling, ContentScaling, lineHeightChatTextMd } from '~/common/app.theme';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
|
||||
// State Pane
|
||||
|
||||
const StateLine = styled(Typography)(({ theme }) => ({
|
||||
textOverflow: 'ellipsis',
|
||||
@@ -46,8 +54,7 @@ function ObjectRenderer({ name }: { name: string }) {
|
||||
return <StateLine><b>{name}</b>: <i>object not displayed</i></StateLine>;
|
||||
}
|
||||
|
||||
|
||||
function StateRenderer(props: { state: object }) {
|
||||
function StateRenderer(props: { state: object, contentScaling: ContentScaling }) {
|
||||
if (typeof props.state !== 'object')
|
||||
return <pre>Developer Warning: state is not an object: {JSON.stringify(props.state, null, 2)}</pre>;
|
||||
|
||||
@@ -55,10 +62,17 @@ function StateRenderer(props: { state: object }) {
|
||||
|
||||
return (
|
||||
<Box>
|
||||
<Typography fontSize='smaller' sx={{ mb: 1 }}>
|
||||
## Internal State
|
||||
</Typography>
|
||||
<Sheet sx={{ p: 1 }}>
|
||||
<ScaledTextBlockRenderer
|
||||
text='**Internal State**'
|
||||
contentScaling={props.contentScaling}
|
||||
textRenderVariant='markdown'
|
||||
/>
|
||||
<Box sx={{
|
||||
mt: 1,
|
||||
p: 1,
|
||||
borderRadius: 'md',
|
||||
background: 'linear-gradient(180deg, var(--joy-palette-success-softHoverBg), transparent)',
|
||||
}}>
|
||||
{!entries && <Typography level='body-sm'>No state variables</Typography>}
|
||||
{entries.map(([key, value]) =>
|
||||
isPrimitive(value)
|
||||
@@ -69,97 +83,164 @@ function StateRenderer(props: { state: object }) {
|
||||
? <ObjectRenderer key={'state-' + key} name={key} />
|
||||
: <Typography key={'state-' + key} level='body-sm'>{key}: {value}</Typography>,
|
||||
)}
|
||||
</Sheet>
|
||||
</Box>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
function EphemeralItem({ conversationId, ephemeral }: { conversationId: string, ephemeral: DEphemeral }) {
|
||||
const leftPaneSx = {
|
||||
// <pre> looks
|
||||
overflowWrap: 'anywhere',
|
||||
whiteSpace: 'break-spaces',
|
||||
// 'undo' some of the github-markdown CSS customizations
|
||||
'.markdown-body': { mx: '0!important' },
|
||||
'.markdown-body p': { mb: 0 },
|
||||
};
|
||||
|
||||
const rightPaneSx = {
|
||||
borderLeft: { md: `1px dashed` },
|
||||
borderTop: { xs: `1px dashed`, md: 'none' },
|
||||
};
|
||||
|
||||
|
||||
function EphemeralItem(props: {
|
||||
ephemeral: DEphemeral,
|
||||
conversationHandler: ConversationHandler,
|
||||
contentScaling: ContentScaling,
|
||||
}) {
|
||||
|
||||
const { ephemeral, conversationHandler } = props;
|
||||
|
||||
// Event handlers
|
||||
const handleDelete = React.useCallback(() => {
|
||||
ConversationManager.getHandler(conversationId).ephemeralsStore.delete(ephemeral.id);
|
||||
}, [conversationId, ephemeral.id]);
|
||||
conversationHandler.overlayActions.ephemeralsDelete(ephemeral.id);
|
||||
}, [conversationHandler, ephemeral.id]);
|
||||
|
||||
return <Box
|
||||
sx={{
|
||||
p: { xs: 1, md: 2 },
|
||||
position: 'relative',
|
||||
// border: (i < ephemerals.length - 1) ? `2px solid ${theme.palette.divider}` : undefined,
|
||||
'&:hover > button': { opacity: 1 },
|
||||
}}>
|
||||
const handleToggleMinimized = React.useCallback(() => {
|
||||
conversationHandler.overlayActions.ephemeralsToggleMinimized(ephemeral.id);
|
||||
}, [conversationHandler, ephemeral.id]);
|
||||
|
||||
{/* Title */}
|
||||
{ephemeral.title && <Typography level='title-sm' sx={{ mb: 1.5 }}>
|
||||
{ephemeral.title} Development Tools
|
||||
</Typography>}
|
||||
|
||||
{/* Vertical | split */}
|
||||
<Grid container spacing={2}>
|
||||
|
||||
{/* Left pane (console) */}
|
||||
<Grid xs={12} md={ephemeral.state ? 6 : 12}>
|
||||
<Typography fontSize='smaller' sx={{ overflowWrap: 'anywhere', whiteSpace: 'break-spaces', lineHeight: lineHeightChatTextMd }}>
|
||||
{ephemeral.text}
|
||||
</Typography>
|
||||
</Grid>
|
||||
|
||||
{/* Right pane (state) */}
|
||||
{!!ephemeral.state && <Grid
|
||||
xs={12} md={6}
|
||||
sx={{
|
||||
borderLeft: { md: `1px dashed` },
|
||||
borderTop: { xs: `1px dashed`, md: 'none' },
|
||||
}}>
|
||||
<StateRenderer state={ephemeral.state} />
|
||||
</Grid>}
|
||||
</Grid>
|
||||
|
||||
{/* Close button (right of title) */}
|
||||
<IconButton
|
||||
size='sm'
|
||||
onClick={handleDelete}
|
||||
sx={{
|
||||
position: 'absolute', top: 8, right: 8,
|
||||
opacity: { xs: 1, sm: 0.5 }, transition: 'opacity 0.3s',
|
||||
}}>
|
||||
<CloseRoundedIcon />
|
||||
</IconButton>
|
||||
|
||||
</Box>;
|
||||
}
|
||||
|
||||
// const dashedBorderSVG = encodeURIComponent(`
|
||||
// <svg xmlns='http://www.w3.org/2000/svg' width='100%' height='100%'>
|
||||
// <rect x='0' y='0' width='100%' height='100%' fill='none' stroke='currentColor' stroke-width='2' stroke-dasharray='16, 2' />
|
||||
// </svg>
|
||||
// `);
|
||||
const handleToggleShowState = React.useCallback(() => {
|
||||
conversationHandler.overlayActions.ephemeralsToggleShowStatePane(ephemeral.id);
|
||||
}, [conversationHandler, ephemeral.id]);
|
||||
|
||||
|
||||
export function Ephemerals(props: { ephemerals: DEphemeral[], conversationId: DConversationId | null, sx?: SxProps }) {
|
||||
// global state
|
||||
// const ephemerals = useChatStore(state => {
|
||||
// const conversation = state.conversations.find(conversation => conversation.id === props.conversationId);
|
||||
// return conversation ? conversation.ephemerals : [];
|
||||
// }, shallow);
|
||||
|
||||
const ephemerals = props.ephemerals;
|
||||
// if (!ephemerals?.length) return null;
|
||||
const showStatePane = ephemeral.showStatePane && !!ephemeral.state;
|
||||
|
||||
return (
|
||||
<Sheet
|
||||
variant='soft' color='success' invertedColors
|
||||
sx={{
|
||||
borderTop: '1px solid',
|
||||
borderTopColor: 'divider',
|
||||
// backgroundImage: `url("data:image/svg+xml,${dashedBorderSVG.replace('currentColor', '%23A1E8A1')}")`,
|
||||
// backgroundSize: '100% 100%',
|
||||
// backgroundRepeat: 'no-repeat',
|
||||
...(props.sx || {}),
|
||||
<Box sx={{
|
||||
borderTop: '1px solid',
|
||||
borderTopColor: 'divider',
|
||||
// border: (i < ephemerals.length - 1) ? `2px solid ${theme.palette.divider}` : undefined,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
}}>
|
||||
|
||||
{/* Top Line - Title and Buttons */}
|
||||
<Box sx={{
|
||||
py: 1,
|
||||
px: { xs: 1, md: 2 },
|
||||
backgroundColor: 'success.softHoverBg',
|
||||
display: 'flex',
|
||||
gap: 1,
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
|
||||
{ephemerals.map((ephemeral, i) =>
|
||||
props.conversationId && <EphemeralItem key={`ephemeral-${i}`} conversationId={props.conversationId} ephemeral={ephemeral} />)}
|
||||
<Typography level='title-sm' sx={{ flex: 1, color: 'success.solidBg' }}>
|
||||
{ephemeral.title} Internal Monologue
|
||||
</Typography>
|
||||
|
||||
{/* Show State */}
|
||||
{!ephemeral.minimized && (
|
||||
<IconButton
|
||||
size='sm'
|
||||
variant={ephemeral.showStatePane ? 'solid' : 'outlined'}
|
||||
onClick={handleToggleShowState}
|
||||
>
|
||||
{ephemeral.showStatePane ? <VerticalSplitIcon /> : <VerticalSplitOutlinedIcon />}
|
||||
</IconButton>
|
||||
)}
|
||||
|
||||
{/* Minimize/Expand Button */}
|
||||
<IconButton
|
||||
size='sm'
|
||||
variant={'outlined'}
|
||||
onClick={handleToggleMinimized}
|
||||
>
|
||||
{ephemeral.minimized ? <MaximizeIcon /> : <MinimizeIcon />}
|
||||
</IconButton>
|
||||
|
||||
{/* Close */}
|
||||
<IconButton
|
||||
size='sm'
|
||||
variant={ephemeral.done ? 'solid' : 'outlined'}
|
||||
onClick={handleDelete}
|
||||
>
|
||||
<CloseRoundedIcon />
|
||||
</IconButton>
|
||||
|
||||
</Box>
|
||||
|
||||
{/* Content */}
|
||||
{!ephemeral.minimized && <Box sx={{
|
||||
py: 1,
|
||||
px: { xs: 1, md: 2 },
|
||||
}}>
|
||||
|
||||
{/* Content Grid */}
|
||||
<Grid container spacing={2} sx={{ mt: 0.5 }}>
|
||||
|
||||
{/* Left pane (log) */}
|
||||
<Grid xs={12} md={showStatePane ? 6 : 12}>
|
||||
{/* New renderer, with */}
|
||||
<Box sx={leftPaneSx}>
|
||||
<ScaledTextBlockRenderer
|
||||
text={ephemeral.text}
|
||||
contentScaling={props.contentScaling}
|
||||
textRenderVariant='markdown'
|
||||
/>
|
||||
</Box>
|
||||
</Grid>
|
||||
|
||||
{/* Right pane (state) */}
|
||||
{showStatePane && (
|
||||
<Grid xs={12} md={6} sx={rightPaneSx}>
|
||||
<StateRenderer
|
||||
state={ephemeral.state}
|
||||
contentScaling={props.contentScaling}
|
||||
/>
|
||||
</Grid>
|
||||
)}
|
||||
|
||||
</Grid>
|
||||
</Box>}
|
||||
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
/**
 * Vertical list of 'ephemeral' panels attached to a conversation,
 * rendered inside an inverted-colors success Sheet.
 *
 * Fix: removed the unused index parameter from the map callback
 * (items are keyed by `ephemeral.id`, so the index was dead).
 */
export function Ephemerals(props: {
  ephemerals: DEphemeral[],
  conversationHandler: ConversationHandler,
  sx?: SxProps
}) {

  // external state - content scaling, stepped down by one for a denser layout
  const adjContentScaling = useUIPreferencesStore(state => adjustContentScaling(state.contentScaling, -1));

  return (
    <Sheet variant='soft' color='success' invertedColors sx={props.sx}>

      {props.ephemerals.map((ephemeral) => (
        <EphemeralItem
          key={ephemeral.id}
          ephemeral={ephemeral}
          conversationHandler={props.conversationHandler}
          contentScaling={adjContentScaling}
        />
      ))}

    </Sheet>
  );
}
|
||||
|
||||
@@ -0,0 +1,228 @@
|
||||
import * as React from 'react';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, IconButton, styled, Typography } from '@mui/joy';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import MinimizeIcon from '@mui/icons-material/Minimize';
|
||||
|
||||
// import { isMacUser } from '~/common/util/pwaUtils';
|
||||
import type { ShortcutObject } from '~/common/components/shortcuts/useGlobalShortcuts';
|
||||
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
|
||||
import { GoodTooltip } from '~/common/components/GoodTooltip';
|
||||
import { useGlobalShortcutsStore } from '~/common/components/shortcuts/store-global-shortcuts';
|
||||
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
|
||||
// configuration
// gates the minimize/expand toggle branch in StatusBar; when false the close button is shown instead
const COMPOSER_ENABLE_MINIMIZE = false;


// Tooltip content for the shortcut-bar close button, telling the user how to re-enable the bar
const hideButtonTooltip = (
  <Box sx={{ px: 1, py: 0.75, lineHeight: '1.5rem' }}>
    Hide Shortcuts<br />
    Enable again in Settings > Labs
  </Box>
);

// Compact sizing/coloring for the close and minimize IconButtons in the status bar
const hideButtonSx: SxProps = {
  '--IconButton-size': '28px',
  '--Icon-fontSize': '16px',
  '--Icon-color': 'var(--joy-palette-text-tertiary)',
  mr: -0.5,
};
|
||||
|
||||
// const animateAppear = keyframes`
|
||||
// from {
|
||||
// opacity: 0;
|
||||
// transform: translateY(10px);
|
||||
// }
|
||||
// to {
|
||||
// opacity: 1;
|
||||
// transform: translateY(0);
|
||||
// }
|
||||
// `;
|
||||
|
||||
// Whole status bar: a thin bottom-bordered strip laid out as a horizontal row of shortcuts
const StatusBarContainer = styled(Box)({
  borderBottom: '1px solid',
  // borderBottomColor: 'var(--joy-palette-divider)',
  borderBottomColor: 'rgba(var(--joy-palette-neutral-mainChannel) / 0.1)',
  // borderTopColor: 'rgba(var(--joy-palette-neutral-mainChannel, 99 107 116) / 0.4)',
  // backgroundColor: 'var(--joy-palette-background-surface)',
  // paddingBlock: '0.25rem',
  paddingInline: '0.5rem',
  // layout
  display: 'flex',
  flexFlow: 'row nowrap',
  columnGap: '1.5rem', // space between shortcuts
  lineHeight: '1em',
  // animation: `${animateAppear} 0.3s ease-out`,
  // transition: 'all 0.2s ease',
  // '&:hover': {
  //   backgroundColor: 'var(--joy-palette-background-level1)',
  // },
});

// One clickable shortcut row (key caps + description); dimmed and click-inert when aria-disabled
const ShortcutContainer = styled(Box)({
  display: 'flex',
  alignItems: 'center',
  whiteSpace: 'nowrap',
  gap: '2px', // space between modifiers
  marginBlock: '0.25rem',
  // transition: 'transform 0.2s ease',
  // '&:hover': {
  //   transform: 'scale(1.05)',
  // },
  // highlight all the key caps when hovering anywhere on the row
  '&:hover > div': {
    backgroundColor: 'var(--joy-palette-background-level1)',
  },
  cursor: 'pointer',
  [`&[aria-disabled="true"]`]: {
    opacity: 0.5,
    pointerEvents: 'none',
  }
});

// A single 'key cap' chip (e.g. a modifier glyph or a letter), small bordered box with a subtle shadow
const ShortcutKey = styled(Box)({
  fontSize: 'var(--joy-fontSize-xs)',
  fontWeight: 'var(--joy-fontWeight-md)',
  border: '1px solid',
  borderColor: 'var(--joy-palette-neutral-outlinedBorder)',
  borderRadius: 'var(--joy-radius-xs)',
  // backgroundColor: 'var(--joy-palette-neutral-outlinedBorder)',
  backgroundColor: 'var(--joy-palette-background-popup)',
  // boxShadow: 'inset 2px 0px 4px -2px var(--joy-palette-background-backdrop)',
  boxShadow: 'var(--joy-shadow-xs)',
  // minWidth: '1rem',
  paddingBlock: '1px',
  paddingInline: '4px',
  // pointerEvents: 'none',
  cursor: 'pointer',
  // slow fade-back after the hover highlight (set by ShortcutContainer) is removed
  transition: 'background-color 1s ease',
});
|
||||
|
||||
|
||||
// Display mac-style shortcuts on windows as well
|
||||
const displayMacModifiers = true;
|
||||
|
||||
function _platformAwareModifier(symbol: 'Ctrl' | 'Alt' | 'Shift') {
|
||||
switch (symbol) {
|
||||
case 'Ctrl':
|
||||
return displayMacModifiers ? '⌃' : 'Ctrl';
|
||||
case 'Shift':
|
||||
return displayMacModifiers ? '⇧' : '⇧';
|
||||
case 'Alt':
|
||||
return displayMacModifiers ? '⌥' /* Option */ : 'Alt';
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Renders one shortcut as a row of key caps (modifiers + key) followed by its
 * description and an optional end-decorator icon. Clicking the row invokes
 * the shortcut's action, unless the shortcut is disabled.
 */
function ShortcutItem(props: { shortcut: ShortcutObject }) {

  const handleClicked = React.useCallback(() => {
    // '_specialPrintShortcuts' is a sentinel action value, not a callable — skip it
    if (props.shortcut.action !== '_specialPrintShortcuts')
      props.shortcut.action();
  }, [props.shortcut]);

  return (
    // click handler is withheld entirely when disabled (CSS also sets pointer-events: none)
    <ShortcutContainer onClick={!props.shortcut.disabled ? handleClicked : undefined} aria-disabled={props.shortcut.disabled}>
      {!!props.shortcut.ctrl && <ShortcutKey>{_platformAwareModifier('Ctrl')}</ShortcutKey>}
      {!!props.shortcut.shift && <ShortcutKey>{_platformAwareModifier('Shift')}</ShortcutKey>}
      {/*{!!props.shortcut.altForNonMac && <ShortcutKey onClick={handleClicked}>{_platformAwareModifier('Alt')}</ShortcutKey>}*/}
      {/* special display forms for Escape and Enter; all other keys are uppercased */}
      <ShortcutKey>{props.shortcut.key === 'Escape' ? 'Esc' : props.shortcut.key === 'Enter' ? '↵' : props.shortcut.key.toUpperCase()}</ShortcutKey>
      <Typography level='body-xs'>{props.shortcut.description}</Typography>
      {props.shortcut.endDecoratorIcon && <props.shortcut.endDecoratorIcon sx={{ fontSize: 'md' }} />}
    </ShortcutContainer>
  );
}
|
||||
|
||||
|
||||
/**
 * Bottom status bar listing the currently-active keyboard shortcuts.
 * Gated behind the `labsShowShortcutBar` UX Labs option — renders null when off.
 * Shows either a close button (with confirmation) or, when minimize support is
 * enabled, a minimize/expand toggle driven by the props.
 */
export function StatusBar(props: { toggleMinimized?: () => void, isMinimized?: boolean }) {

  // state (modifiers pressed/not)
  const { showPromisedOverlay } = useOverlayComponents();
  // const [ctrlPressed, setCtrlPressed] = React.useState(false);
  // const [shiftPressed, setShiftPressed] = React.useState(false);

  // external state
  const labsShowShortcutBar = useUXLabsStore(state => state.labsShowShortcutBar);
  const shortcuts = useGlobalShortcutsStore(useShallow(state => {
    // only shortcuts that carry a description are displayed; none at all when the bar is off
    let visibleShortcuts = !labsShowShortcutBar ? [] : state.getAllShortcuts().filter(shortcut => !!shortcut.description);
    // when higher-level (nested) shortcut scopes exist, show only the topmost level
    // note: Math.max of an empty list is -Infinity, which safely fails the > 0 check
    const maxLevel = Math.max(...visibleShortcuts.map(s => s.level ?? 0));
    if (maxLevel > 0)
      visibleShortcuts = visibleShortcuts.filter(s => s.level === maxLevel);
    visibleShortcuts.sort((a, b) => {
      // if they don't have a 'shift', they are sorted first
      if (a.shift !== b.shift)
        return a.shift ? 1 : -1;
      // (Hack) If the description is 'Beam', it goes last
      if (a.description === 'Beam Edit')
        return 1;
      // alphabetical for the rest
      return a.key.localeCompare(b.key);
    });
    return visibleShortcuts;
  }));

  // handlers
  const handleHideShortcuts = React.useCallback((event: React.MouseEvent) => {
    // shift-click: debug-dump the registered shortcut groups instead of hiding
    if (event.shiftKey) {
      console.log('shortcutGroups', useGlobalShortcutsStore.getState().shortcutGroups);
      return;
    }
    // ask for confirmation, then persist the hidden preference; dialog dismissal is ignored
    showPromisedOverlay('shortcuts-confirm-close', {}, ({ onResolve, onUserReject }) =>
      <ConfirmationModal
        open onClose={onUserReject} onPositive={() => onResolve(true)}
        confirmationText='Remove productivity tips and shortcuts? You can add it back in Settings > Labs.'
        positiveActionText='Remove'
      />,
    ).then(() => useUXLabsStore.getState().setLabsShowShortcutBar(false)).catch(() => null /* ignore closure */);
  }, [showPromisedOverlay]);

  // React to modifiers
  // React.useEffect(() => {
  //   const handleKeyDown = (e: KeyboardEvent) => {
  //     if (e.key === 'Control') setCtrlPressed(true);
  //     if (e.key === 'Shift') setShiftPressed(true);
  //   };
  //   const handleKeyUp = (e: KeyboardEvent) => {
  //     if (e.key === 'Control') setCtrlPressed(false);
  //     if (e.key === 'Shift') setShiftPressed(false);
  //   };
  //   window.addEventListener('keydown', handleKeyDown);
  //   window.addEventListener('keyup', handleKeyUp);
  //   return () => {
  //     window.removeEventListener('keydown', handleKeyDown);
  //     window.removeEventListener('keyup', handleKeyUp);
  //   };
  // }, []);

  // feature is off: render nothing
  if (!labsShowShortcutBar)
    return null;

  return (
    <StatusBarContainer aria-label='Status bar'>

      {(!props.toggleMinimized || !COMPOSER_ENABLE_MINIMIZE) && !props.isMinimized ? (
        // Close Button
        <GoodTooltip variantOutlined arrow placement='top' title={hideButtonTooltip}>
          <IconButton size='sm' sx={hideButtonSx} onClick={handleHideShortcuts}>
            <CloseRoundedIcon />
          </IconButton>
        </GoodTooltip>
      ) : (
        // Minimize / Maximize Button - note the Maximize icon would be more correct, but also less discoverable
        <IconButton size='sm' sx={hideButtonSx} onClick={props.toggleMinimized}>
          {props.isMinimized ? <ExpandLessIcon /> : <MinimizeIcon />}
        </IconButton>
      )}

      {/* Show all shortcuts */}
      {shortcuts.map((shortcut, idx) => (
        <ShortcutItem key={shortcut.key + idx} shortcut={shortcut} />
      ))}

    </StatusBarContainer>
  );
}
|
||||
@@ -1,137 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Alert, Box, Sheet, Typography } from '@mui/joy';
|
||||
|
||||
import { ConversationHandler } from '~/common/chats/ConversationHandler';
|
||||
import { useBeam } from '~/common/chats/BeamStore';
|
||||
import { useLLMSelect } from '~/common/components/forms/useLLMSelect';
|
||||
|
||||
|
||||
/**
 * Early Beam UI: shows the conversation's beam configuration, an LLM selector,
 * the last history message, and a placeholder candidates grid.
 *
 * NOTE(review): this looks like work-in-progress scaffolding — debug purple
 * border, dummy 'a'/'b' Sheets, and `candidates`/`allChatLlm` are read but
 * never used. Confirm before relying on this component.
 */
export function Beam(props: {
  conversationHandler: ConversationHandler | null,
  isMobile: boolean,
  sx?: SxProps
}) {

  // state
  const { config, candidates } = useBeam(props.conversationHandler);

  // external state
  const [allChatLlm, allChatLlmComponent] = useLLMSelect(true, 'Beam LLM');

  // no beam config for this conversation: nothing to show
  // (note: this early return is after all hooks, so hook order stays stable)
  if (!config)
    return null;

  // last message of the beamed history, or null when the history is empty
  const lastMessage = config.history.slice(-1)[0] ?? null;

  return (
    <Box sx={{ ...props.sx, px: 2, display: 'flex', flexDirection: 'column', gap: 2 }}>

      {/* Issues */}
      {!!config.configError && (
        <Alert>
          {config.configError}
        </Alert>
      )}

      {/* Models, [x] all same, */}
      <Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'start', gap: 2 }}>
        <Box sx={{ minWidth: 200 }}>
          {allChatLlmComponent}
        </Box>

        {!!lastMessage && (
          <Box sx={{
            backgroundColor: 'background.surface',
            boxShadow: 'xs',
            borderRadius: 'lg',
            borderTopRightRadius: 0,
            borderTopLeftRadius: 0,
            py: 1,
            px: 1,
            mb: 'auto',


            flex: 1,
          }}>
            {lastMessage.text}
          </Box>
          // <ChatMessageMemo
          //   message={lastMessage}
          //   fitScreen={props.isMobile}
          //   sx={{
          //     borderRadius: 'lg',
          //     borderBottomRightRadius: lastMessage.role === 'assistant' ? undefined : 0,
          //     borderBottomLeftRadius: lastMessage.role === 'user' ? undefined : 0,
          //     boxShadow: 'xs',
          //     my: 2,
          //     px: 0,
          //     py: 1,
          //     alignSelf: 'self-end',
          //     flex: 1,
          //     maxHeight: '5rem',
          //     overflow: 'hidden',
          //   }}
          // />
        )}
      </Box>

      {/* Grid */}
      {/* NOTE(review): debug styling (purple border) and placeholder Sheets below */}
      <Box sx={{
        // my: 'auto',
        // display: 'flex', flexDirection: 'column', alignItems: 'center',
        border: '1px solid purple',
        minHeight: '300px',

        // layout
        display: 'grid',
        gridTemplateColumns: props.isMobile ? 'repeat(auto-fit, minmax(320px, 1fr))' : 'repeat(auto-fit, minmax(400px, 1fr))',
        gap: { xs: 2, md: 2 },
      }}>
        <Sheet sx={{ minHeight: '50%' }}>
          b
        </Sheet>
        <Sheet>
          a
        </Sheet>
        <Sheet>
          a
        </Sheet>
        <Sheet>
          a
        </Sheet>
      </Box>

      {/* Auto-Gatherer: All-in-one, Best-Of */}
      <Box>
        Gatherer
      </Box>


      <Box sx={{ flex: 1 }}>
        <Typography level='body-sm' sx={{ whiteSpace: 'break-spaces' }}>
          {/*{JSON.stringify(config, null, 2)}*/}
        </Typography>
      </Box>

      <Box sx={{
        height: '100%',
        borderRadius: 'lg',
        borderBottomLeftRadius: 0,
        backgroundColor: 'background.surface',
        boxShadow: 'lg',
        m: 2,
        p: '0.25rem 1rem',
      }}>

      </Box>

      <Box>
        a
      </Box>


    </Box>
  );
}
|
||||
@@ -1,25 +1,71 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, IconButton, Modal, ModalClose, Option, Select, Sheet, Typography } from '@mui/joy';
|
||||
import CameraAltIcon from '@mui/icons-material/CameraAlt';
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Button, ButtonGroup, IconButton, Modal, ModalClose, Option, Select, Sheet, Tooltip, Typography } from '@mui/joy';
|
||||
import AddRoundedIcon from '@mui/icons-material/AddRounded';
|
||||
import CameraEnhanceIcon from '@mui/icons-material/CameraEnhance';
|
||||
import CameraFrontIcon from '@mui/icons-material/CameraFront';
|
||||
import CameraRearIcon from '@mui/icons-material/CameraRear';
|
||||
import DownloadIcon from '@mui/icons-material/Download';
|
||||
import InfoIcon from '@mui/icons-material/Info';
|
||||
import FlipCameraAndroidOutlinedIcon from '@mui/icons-material/FlipCameraAndroidOutlined';
|
||||
import InfoOutlinedIcon from '@mui/icons-material/InfoOutlined';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
|
||||
import { InlineError } from '~/common/components/InlineError';
|
||||
import { downloadVideoFrameAsPNG, renderVideoFrameAsPNGFile } from '~/common/util/videoUtils';
|
||||
import { Is } from '~/common/util/pwaUtils';
|
||||
import { animationBackgroundCameraFlash } from '~/common/util/animUtils';
|
||||
import { downloadVideoFrame, renderVideoFrameAsFile } from '~/common/util/videoUtils';
|
||||
import { useCameraCapture } from '~/common/components/useCameraCapture';
|
||||
|
||||
|
||||
// configuration
|
||||
const DEBUG_NO_CAMERA_OPTION = false;
|
||||
const FLASH_DURATION_MS = 600;
|
||||
const ADD_COOLDOWN_MS = 300;
|
||||
|
||||
|
||||
const captureButtonContainerSx: SxProps = {
|
||||
display: 'flex',
|
||||
gap: 1,
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
};
|
||||
|
||||
const captureButtonGroupSx: SxProps = {
|
||||
'--ButtonGroup-separatorColor': 'none !important',
|
||||
// '--ButtonGroup-separatorSize': '2px',
|
||||
borderRadius: '3rem',
|
||||
// boxShadow: 'md',
|
||||
boxShadow: '0 8px 12px -6px rgb(var(--joy-palette-neutral-darkChannel) / 50%)',
|
||||
};
|
||||
|
||||
const captureButtonSx: SxProps = {
|
||||
backgroundColor: 'neutral.solidHoverBg',
|
||||
pl: 3.25,
|
||||
pr: 4.5,
|
||||
py: 1.5,
|
||||
minWidth: { md: 200 },
|
||||
'&:hover': {
|
||||
backgroundColor: 'neutral.plainHoverColor',
|
||||
},
|
||||
};
|
||||
|
||||
const addButtonSx: SxProps = {
|
||||
pl: 2.5,
|
||||
pr: 2,
|
||||
};
|
||||
|
||||
|
||||
export function CameraCaptureModal(props: {
|
||||
onCloseModal: () => void,
|
||||
onAttachImage: (file: File) => void
|
||||
onCloseModal: () => void;
|
||||
onAttachImage: (file: File) => void;
|
||||
// onOCR: (ocrText: string) => void }
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [showInfo, setShowInfo] = React.useState(false);
|
||||
// const [ocrProgress/*, setOCRProgress*/] = React.useState<number | null>(null);
|
||||
const [isFlashing, setIsFlashing] = React.useState(false); // For flash effect
|
||||
const [isAddButtonDisabled, setIsAddButtonDisabled] = React.useState(false); // Cooldown state
|
||||
|
||||
// external state
|
||||
const {
|
||||
@@ -39,29 +85,28 @@ export function CameraCaptureModal(props: {
|
||||
onCloseModal();
|
||||
}, [onCloseModal, resetVideo]);
|
||||
|
||||
/*const handleVideoOCRClicked = async () => {
|
||||
if (!videoRef.current) return;
|
||||
const renderedFrame = renderVideoFrameToCanvas(videoRef.current);
|
||||
|
||||
setOCRProgress(0);
|
||||
const { recognize } = await import('tesseract.js');
|
||||
const result = await recognize(renderedFrame, undefined, {
|
||||
logger: m => {
|
||||
// noinspection SuspiciousTypeOfGuard
|
||||
if (typeof m.progress === 'number')
|
||||
setOCRProgress(m.progress);
|
||||
},
|
||||
errorHandler: e => console.error(e),
|
||||
});
|
||||
setOCRProgress(null);
|
||||
stopAndClose();
|
||||
props.onOCR(result.data.text);
|
||||
};*/
|
||||
const handleFlashEffect = React.useCallback((cooldownMs: number) => {
|
||||
// Flash effect
|
||||
setIsFlashing(true);
|
||||
setTimeout(() => {
|
||||
setIsFlashing(false);
|
||||
}, FLASH_DURATION_MS); // Flash duration in milliseconds
|
||||
|
||||
// Cooldown
|
||||
if (cooldownMs) {
|
||||
setIsAddButtonDisabled(true);
|
||||
setTimeout(() => {
|
||||
setIsAddButtonDisabled(false);
|
||||
}, cooldownMs);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleVideoSnapClicked = React.useCallback(async () => {
|
||||
if (!videoRef.current) return;
|
||||
try {
|
||||
const file = await renderVideoFrameAsPNGFile(videoRef.current, 'camera');
|
||||
// handleFlashEffect(0); // Trigger flash
|
||||
const file = await renderVideoFrameAsFile(videoRef.current, 'camera', 'image/jpeg', 0.95);
|
||||
onAttachImage(file);
|
||||
stopAndClose();
|
||||
} catch (error) {
|
||||
@@ -69,96 +114,250 @@ export function CameraCaptureModal(props: {
|
||||
}
|
||||
}, [onAttachImage, stopAndClose, videoRef]);
|
||||
|
||||
const handleVideoDownloadClicked = React.useCallback(() => {
|
||||
const handleVideoAddClicked = React.useCallback(async () => {
|
||||
if (!videoRef.current) return;
|
||||
downloadVideoFrameAsPNG(videoRef.current, 'camera');
|
||||
try {
|
||||
handleFlashEffect(ADD_COOLDOWN_MS); // Trigger flash and cooldown
|
||||
const file = await renderVideoFrameAsFile(videoRef.current, 'camera', 'image/jpeg', 0.95);
|
||||
onAttachImage(file);
|
||||
} catch (error) {
|
||||
console.error('Error capturing video frame:', error);
|
||||
}
|
||||
}, [handleFlashEffect, onAttachImage, videoRef]);
|
||||
|
||||
const handleVideoDownloadClicked = React.useCallback(async () => {
|
||||
if (!videoRef.current) return;
|
||||
await downloadVideoFrame(videoRef.current, 'camera', 'image/jpeg', 0.98);
|
||||
}, [videoRef]);
|
||||
|
||||
|
||||
// Reduced set of cameras
|
||||
|
||||
const displayCameras = React.useMemo(() => {
|
||||
// iOS/English: "Front Camera", "Back Camera"
|
||||
if (Is.OS.iOS) {
|
||||
let reducedCameras = cameras.filter((device) => ['Front Camera', 'Back Camera'].includes(device.label));
|
||||
if (reducedCameras.length > 0)
|
||||
return reducedCameras;
|
||||
}
|
||||
return cameras;
|
||||
}, [cameras]);
|
||||
|
||||
const { canSwitchCameras, isFrontCamera, isBackCamera } = React.useMemo(() => {
|
||||
|
||||
// determine if the current device is a front or back camera
|
||||
let isFrontCamera = false;
|
||||
let isBackCamera = false;
|
||||
if (cameraIdx !== -1) {
|
||||
const currentDevice = displayCameras[cameraIdx];
|
||||
if (currentDevice) {
|
||||
isFrontCamera = currentDevice.label.includes('Front Camera') || currentDevice.label.toLowerCase().includes('front');
|
||||
isBackCamera = currentDevice.label.includes('Back Camera') || currentDevice.label.toLowerCase().includes('back');
|
||||
}
|
||||
}
|
||||
|
||||
// quick out if we only have 1 or 0 cameras
|
||||
if (displayCameras.length <= 1)
|
||||
return { canSwitchCameras: false, isFrontCamera, isBackCamera };
|
||||
|
||||
// use a reduction to find both the front and back cameras
|
||||
const foundCameras = displayCameras.reduce((acc, device) => {
|
||||
if (acc.front && acc.back) return acc;
|
||||
if (device.label.includes('Front Camera')) acc.front = true;
|
||||
else if (device.label.toLowerCase().includes('front')) acc.front = true;
|
||||
if (device.label.includes('Back Camera')) acc.back = true;
|
||||
else if (device.label.toLowerCase().includes('back')) acc.back = true;
|
||||
return acc;
|
||||
}, { front: false, back: false });
|
||||
|
||||
return { canSwitchCameras: (foundCameras.front && foundCameras.back) || displayCameras.length === 2, isFrontCamera, isBackCamera };
|
||||
}, [cameraIdx, displayCameras]);
|
||||
|
||||
const handleCameraSwitch = React.useCallback(() => {
|
||||
|
||||
// safety checks: has multiple cameras, and current camera is valid
|
||||
if (displayCameras.length <= 1 || cameraIdx === -1) return;
|
||||
const currentCamera = displayCameras[cameraIdx] || undefined;
|
||||
if (!currentCamera) return;
|
||||
|
||||
// finds the camera to switch to
|
||||
let nextIdx: number | undefined = undefined;
|
||||
|
||||
// iOS
|
||||
if (currentCamera.label.includes('Front Camera'))
|
||||
nextIdx = displayCameras.findIndex((device) => device.label.includes('Back Camera'));
|
||||
else if (currentCamera.label.includes('Back Camera'))
|
||||
nextIdx = displayCameras.findIndex((device) => device.label.includes('Front Camera'));
|
||||
|
||||
// Android
|
||||
if (nextIdx === undefined && currentCamera.label.includes('facing front'))
|
||||
nextIdx = displayCameras.map((device) => device.label).findLastIndex((label) => label.includes('facing back'));
|
||||
else if (nextIdx === undefined && currentCamera.label.includes('facing back'))
|
||||
nextIdx = displayCameras.map((device) => device.label).findLastIndex((label) => label.includes('facing front'));
|
||||
|
||||
// Generic: if we have 2 cameras, flip to the other one
|
||||
if (nextIdx === undefined && displayCameras.length === 2)
|
||||
nextIdx = cameraIdx === 0 ? 1 : 0;
|
||||
|
||||
// if we found a valid camera, switch to it
|
||||
if (nextIdx !== undefined && nextIdx !== -1)
|
||||
setCameraIdx(nextIdx);
|
||||
}, [cameraIdx, displayCameras, setCameraIdx]);
|
||||
|
||||
|
||||
return (
|
||||
<Modal open onClose={stopAndClose} sx={{ display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
|
||||
<Modal
|
||||
open
|
||||
onClose={stopAndClose}
|
||||
sx={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
}}
|
||||
slotProps={{
|
||||
backdrop: {
|
||||
sx: {
|
||||
backdropFilter: 'none', // using none because this is heavy
|
||||
// backdropFilter: 'blur(4px)',
|
||||
// backgroundColor: 'rgba(11 13 14 / 0.75)',
|
||||
backgroundColor: 'rgba(var(--joy-palette-neutral-darkChannel) / 0.5)',
|
||||
},
|
||||
},
|
||||
}}
|
||||
>
|
||||
|
||||
<Box sx={{
|
||||
display: 'flex', flexDirection: 'column', m: 1,
|
||||
borderRadius: 'md', overflow: 'hidden',
|
||||
boxShadow: 'sm',
|
||||
boxShadow: 'lg',
|
||||
}}>
|
||||
|
||||
{/* Top bar */}
|
||||
<Sheet variant='solid' invertedColors sx={{ zIndex: 10, display: 'flex', justifyContent: 'space-between', p: 1 }}>
|
||||
<Select
|
||||
variant='solid' color='neutral'
|
||||
value={cameraIdx} onChange={(_event: any, value: number | null) => setCameraIdx(value === null ? -1 : value)}
|
||||
indicator={<KeyboardArrowDownIcon />}
|
||||
>
|
||||
<Option value={-1}>
|
||||
No Camera
|
||||
</Option>
|
||||
{cameras.map((device: MediaDeviceInfo, camIndex) => (
|
||||
<Option key={'video-dev-' + camIndex} value={camIndex}>
|
||||
{device.label}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
<Sheet variant='solid' invertedColors={true} sx={{
|
||||
p: 1,
|
||||
backgroundColor: 'neutral.800',
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
}}>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Select
|
||||
size='sm'
|
||||
variant={displayCameras.length > 1 ? 'soft' : 'plain'}
|
||||
color='neutral'
|
||||
value={cameraIdx} onChange={(_event: any, value: number | null) => setCameraIdx(value === null ? -1 : value)}
|
||||
indicator={<KeyboardArrowDownIcon />}
|
||||
sx={{ background: 'transparent' }}
|
||||
slotProps={{ listbox: { size: 'md' } }}
|
||||
>
|
||||
{(!displayCameras.length || DEBUG_NO_CAMERA_OPTION) && (
|
||||
<Option key='video-dev-none' value={-1}>
|
||||
No Camera
|
||||
</Option>
|
||||
)}
|
||||
{displayCameras.map((device: MediaDeviceInfo, camIndex) => (
|
||||
<Option key={'video-dev-' + camIndex} value={camIndex}>
|
||||
{/*{device.label?.includes('Face') ? <CameraFrontIcon />*/}
|
||||
{/* : device.label?.includes('tual') ? <CameraRearIcon />*/}
|
||||
{/* : null}*/}
|
||||
{device.label
|
||||
?.replace('camera2 ', 'Camera ')
|
||||
.replace('facing front', 'Front')
|
||||
.replace('facing back', 'Back')}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
|
||||
<ModalClose onClick={stopAndClose} sx={{ position: 'static' }} />
|
||||
{canSwitchCameras && (
|
||||
<IconButton size='sm' onClick={handleCameraSwitch}>
|
||||
{isFrontCamera ? <CameraRearIcon /> : isBackCamera ? <CameraFrontIcon /> : <FlipCameraAndroidOutlinedIcon />}
|
||||
</IconButton>
|
||||
)}
|
||||
</Box>
|
||||
|
||||
<ModalClose size='lg' onClick={stopAndClose} sx={{ position: 'static' }} />
|
||||
</Sheet>
|
||||
|
||||
{/* (main) Video */}
|
||||
<Box sx={{ position: 'relative' }}>
|
||||
<Box sx={{ position: 'relative', backgroundColor: 'background.level3' }}>
|
||||
<video
|
||||
ref={videoRef} autoPlay playsInline
|
||||
style={{
|
||||
display: 'block', width: '100%', maxHeight: 'calc(100vh - 200px)',
|
||||
display: 'block',
|
||||
width: !Is.Browser.Safari ? '100%' : undefined,
|
||||
marginLeft: 'auto', marginRight: 'auto',
|
||||
maxHeight: 'calc(100vh - 200px)',
|
||||
background: '#8888', //opacity: ocrProgress !== null ? 0.5 : 1,
|
||||
}}
|
||||
/>
|
||||
|
||||
{showInfo && !!info && <Typography
|
||||
sx={{
|
||||
position: 'absolute', top: 0, left: 0, right: 0, bottom: 0, zIndex: 1,
|
||||
background: 'rgba(0,0,0,0.5)', color: 'white',
|
||||
whiteSpace: 'pre', overflowY: 'scroll',
|
||||
}}>
|
||||
{info}
|
||||
</Typography>}
|
||||
{/* Flash overlay */}
|
||||
{isFlashing && (
|
||||
<Box
|
||||
sx={{
|
||||
position: 'absolute', inset: 0, zIndex: 2,
|
||||
animation: `${animationBackgroundCameraFlash} ${FLASH_DURATION_MS / 1000}s`,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showInfo && !!info && (
|
||||
<Typography
|
||||
sx={{
|
||||
position: 'absolute', inset: 0, zIndex: 1, /* camera info on top of video */
|
||||
background: 'rgba(0,0,0,0.5)', color: 'white',
|
||||
whiteSpace: 'pre', overflowY: 'scroll',
|
||||
}}>
|
||||
{info}
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
{/*{ocrProgress !== null && <CircularProgress sx={{ position: 'absolute', top: 'calc(50% - 34px / 2)', left: 'calc(50% - 34px / 2)', zIndex: 2 }} />}*/}
|
||||
</Box>
|
||||
|
||||
{/* Bottom controls (zoom, ocr, download) & progress */}
|
||||
<Sheet variant='soft' sx={{ display: 'flex', flexDirection: 'column', zIndex: 20, gap: 1, p: 1 }}>
|
||||
|
||||
{/* Bottom controls (zoom, download) & progress */}
|
||||
<Sheet
|
||||
variant='soft'
|
||||
sx={{
|
||||
p: 1,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
gap: 1,
|
||||
}}
|
||||
>
|
||||
{!!error && <InlineError error={error} />}
|
||||
|
||||
{zoomControl}
|
||||
|
||||
{/*{ocrProgress !== null && <LinearProgress color='primary' determinate value={100 * ocrProgress} sx={{ px: 2 }} />}*/}
|
||||
|
||||
<Box sx={{ display: 'flex', gap: 1, justifyContent: 'space-between' }}>
|
||||
<Box paddingBottom={zoomControl ? 1 : undefined} sx={captureButtonContainerSx}>
|
||||
|
||||
{/* Info */}
|
||||
<IconButton size='lg' disabled={!info} variant='soft' onClick={() => setShowInfo(info => !info)} sx={{ zIndex: 30 }}>
|
||||
<InfoIcon />
|
||||
<IconButton disabled={!info} onClick={() => setShowInfo((prev) => !prev)}>
|
||||
<InfoOutlinedIcon />
|
||||
</IconButton>
|
||||
|
||||
{/*<Button disabled={ocrProgress !== null} fullWidth variant='solid' size='lg' onClick={handleVideoOCRClicked} sx={{ flex: 1, maxWidth: 260 }}>*/}
|
||||
{/* Extract Text*/}
|
||||
{/*</Button>*/}
|
||||
|
||||
{/* Capture */}
|
||||
<Button
|
||||
fullWidth
|
||||
variant='solid' color='neutral'
|
||||
onClick={handleVideoSnapClicked}
|
||||
endDecorator={<CameraAltIcon />}
|
||||
sx={{ flex: 1, maxWidth: 200, py: 2, borderRadius: '3rem' }}
|
||||
>
|
||||
Capture
|
||||
</Button>
|
||||
<ButtonGroup variant='solid' sx={captureButtonGroupSx}>
|
||||
<Tooltip disableInteractive arrow placement='top' title='Add to message'>
|
||||
<IconButton size='sm' disabled={isAddButtonDisabled} onClick={handleVideoAddClicked} sx={addButtonSx}>
|
||||
<AddRoundedIcon />
|
||||
</IconButton>
|
||||
</Tooltip>
|
||||
<Button size='lg' onClick={handleVideoSnapClicked} endDecorator={<CameraEnhanceIcon />} sx={captureButtonSx}>
|
||||
Capture
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
|
||||
{/* Download */}
|
||||
<IconButton size='lg' variant='soft' onClick={handleVideoDownloadClicked}>
|
||||
<IconButton onClick={handleVideoDownloadClicked}>
|
||||
<DownloadIcon />
|
||||
</IconButton>
|
||||
|
||||
</Box>
|
||||
</Sheet>
|
||||
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
import { ChatModeId } from '../../AppChat';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
|
||||
// Descriptor for one entry of the chat-mode menu (see ChatModeItems / ChatModeMenu)
interface ChatModeDescription {
  label: string;
  description: string | React.JSX.Element;
  shortcut?: string;
  requiresTTI?: boolean; // if set, ChatModeMenu appends 'Unconfigured' when no Text-To-Image capability is present
}

// Static descriptors for every selectable chat mode, keyed by ChatModeId
const ChatModeItems: { [key in ChatModeId]: ChatModeDescription } = {
  'generate-text': {
    label: 'Chat',
    description: 'Persona replies',
  },
  'append-user': {
    label: 'Write',
    description: 'Appends a message',
    shortcut: 'Alt + Enter',
  },
  'generate-image': {
    label: 'Draw',
    description: 'AI Image Generation',
    requiresTTI: true,
  },
  // gated by the labsChatBeam UX Labs option in ChatModeMenu
  'generate-text-beam': {
    label: 'Best-Of', // Best of, Auto-Prime, Top Pick, Select Best
    description: 'Smarter: best of multiple replies',
  },
  'generate-react': {
    label: 'Reason + Act', // · α
    description: 'Answers questions in multiple steps',
  },
};
|
||||
|
||||
|
||||
function fixNewLineShortcut(shortcut: string, enterIsNewLine: boolean) {
|
||||
if (shortcut === 'ENTER')
|
||||
return enterIsNewLine ? 'Shift + Enter' : 'Enter';
|
||||
return shortcut;
|
||||
}
|
||||
|
||||
/**
 * Popup menu to choose the chat mode (Chat / Write / Draw / Best-Of / ReAct).
 * Renders one radio row per mode with its description and keyboard shortcut;
 * the Beam ('generate-text-beam') mode is listed only when the corresponding
 * UX Labs option is enabled.
 */
export function ChatModeMenu(props: {
  anchorEl: HTMLAnchorElement | null, onClose: () => void,
  chatModeId: ChatModeId, onSetChatModeId: (chatMode: ChatModeId) => void
  capabilityHasTTI: boolean,
}) {

  // external state
  const labsChatBeam = useUXLabsStore(state => state.labsChatBeam);
  const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline);

  return (
    <CloseableMenu
      placement='top-end'
      open anchorEl={props.anchorEl} onClose={props.onClose}
      sx={{ minWidth: 320 }}
    >

      {/*<MenuItem color='neutral' selected>*/}
      {/*  Conversation Mode*/}
      {/*</MenuItem>*/}
      {/**/}
      {/*<ListDivider />*/}

      {/* ChatMode items */}
      {Object.entries(ChatModeItems)
        .filter(([key, data]) => key !== 'generate-text-beam' || labsChatBeam)
        .map(([key, data]) =>
          <MenuItem key={'chat-mode-' + key} onClick={() => props.onSetChatModeId(key as ChatModeId)}>
            <Box sx={{ flexGrow: 1, display: 'flex', flexDirection: 'row', alignItems: 'center', gap: 2 }}>
              <Radio checked={key === props.chatModeId} />
              <Box sx={{ flexGrow: 1 }}>
                <Typography>{data.label}</Typography>
                {/* flag modes that need a Text-To-Image service when none is configured */}
                <Typography level='body-xs'>{data.description}{(data.requiresTTI && !props.capabilityHasTTI) ? 'Unconfigured' : ''}</Typography>
              </Box>
              {/* shortcut hint: selected mode shows the (newline-aware) Enter combo, others show their own shortcut */}
              {(key === props.chatModeId || !!data.shortcut) && (
                <KeyStroke combo={fixNewLineShortcut((key === props.chatModeId) ? 'ENTER' : data.shortcut ? data.shortcut : 'ENTER', enterIsNewline)} />
              )}
            </Box>
          </MenuItem>)}

    </CloseableMenu>
  );
}
|
||||
@@ -1,113 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Badge, Box, ColorPaletteProp, Tooltip } from '@mui/joy';
|
||||
|
||||
|
||||
function alignRight(value: number, columnSize: number = 7) {
|
||||
const str = value.toLocaleString();
|
||||
return str.padStart(columnSize);
|
||||
}
|
||||
|
||||
|
||||
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number): {
|
||||
color: ColorPaletteProp, message: string, remainingTokens: number
|
||||
} {
|
||||
const usedTokens = directTokens + (historyTokens || 0) + (responseMaxTokens || 0);
|
||||
const remainingTokens = tokenLimit - usedTokens;
|
||||
const gteLimit = (remainingTokens <= 0 && tokenLimit > 0);
|
||||
|
||||
// message
|
||||
let message: string = gteLimit ? '⚠️ ' : '';
|
||||
|
||||
// no limit: show used tokens only
|
||||
if (!tokenLimit) {
|
||||
message += `Requested: ${usedTokens.toLocaleString()} tokens`;
|
||||
}
|
||||
// has full information (d + i < l)
|
||||
else if (historyTokens || responseMaxTokens) {
|
||||
message +=
|
||||
`${Math.abs(remainingTokens).toLocaleString()} ${remainingTokens >= 0 ? 'available' : 'excess'} message tokens\n\n` +
|
||||
` = Model max tokens: ${alignRight(tokenLimit)}\n` +
|
||||
` - This message: ${alignRight(directTokens)}\n` +
|
||||
` - History: ${alignRight(historyTokens || 0)}\n` +
|
||||
` - Max response: ${alignRight(responseMaxTokens || 0)}`;
|
||||
}
|
||||
// Cleaner mode: d + ? < R (total is the remaining in this case)
|
||||
else {
|
||||
message +=
|
||||
`${(tokenLimit + usedTokens).toLocaleString()} available tokens after deleting this\n\n` +
|
||||
` = Currently free: ${alignRight(tokenLimit)}\n` +
|
||||
` + This message: ${alignRight(usedTokens)}`;
|
||||
}
|
||||
|
||||
const color: ColorPaletteProp =
|
||||
(tokenLimit && remainingTokens < 0)
|
||||
? 'danger'
|
||||
: remainingTokens < tokenLimit / 4
|
||||
? 'warning'
|
||||
: 'primary';
|
||||
|
||||
return { color, message, remainingTokens };
|
||||
}
|
||||
|
||||
|
||||
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.JSX.Element }) =>
|
||||
<Tooltip
|
||||
placement={props.placement}
|
||||
variant={props.color !== 'primary' ? 'solid' : 'soft'} color={props.color}
|
||||
title={props.message
|
||||
? <Box sx={{ p: 2, whiteSpace: 'pre' }}>
|
||||
{props.message}
|
||||
</Box>
|
||||
: null
|
||||
}
|
||||
sx={{
|
||||
fontFamily: 'code',
|
||||
boxShadow: 'xl',
|
||||
}}
|
||||
>
|
||||
{props.children}
|
||||
</Tooltip>;
|
||||
|
||||
|
||||
/**
|
||||
* Simple little component to show the token count (and a tooltip on hover)
|
||||
*/
|
||||
export const TokenBadgeMemo = React.memo(TokenBadge);
|
||||
|
||||
function TokenBadge(props: {
|
||||
direct: number, history?: number, responseMax?: number, limit: number,
|
||||
showExcess?: boolean, absoluteBottomRight?: boolean, inline?: boolean,
|
||||
}) {
|
||||
|
||||
const { message, color, remainingTokens } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax);
|
||||
|
||||
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
|
||||
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
|
||||
? Math.abs(remainingTokens)
|
||||
: props.direct;
|
||||
|
||||
return (
|
||||
<Badge
|
||||
variant='solid' color={color} max={100000}
|
||||
invisible={!props.direct && remainingTokens >= 0}
|
||||
badgeContent={
|
||||
<TokenTooltip color={color} message={message}>
|
||||
<span>{value.toLocaleString()}</span>
|
||||
</TokenTooltip>
|
||||
}
|
||||
sx={{
|
||||
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
|
||||
cursor: 'help',
|
||||
}}
|
||||
slotProps={{
|
||||
badge: {
|
||||
sx: {
|
||||
fontFamily: 'code',
|
||||
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,179 @@
|
||||
import * as React from 'react';
|
||||
import { Controller, useFieldArray, useForm } from 'react-hook-form';
|
||||
|
||||
import { Box, Button, FormControl, FormHelperText, IconButton, Input, Stack, Typography } from '@mui/joy';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import LanguageRoundedIcon from '@mui/icons-material/LanguageRounded';
|
||||
import YouTubeIcon from '@mui/icons-material/YouTube';
|
||||
|
||||
import { extractYoutubeVideoIDFromURL } from '~/modules/youtube/youtube.utils';
|
||||
|
||||
import { GoodModal } from '~/common/components/modals/GoodModal';
|
||||
import { addSnackbar } from '~/common/components/snackbar/useSnackbarsStore';
|
||||
import { asValidURL } from '~/common/util/urlUtils';
|
||||
|
||||
|
||||
// configuration
|
||||
const MAX_URLS = 5;
|
||||
|
||||
type WebInputData = {
|
||||
url: string,
|
||||
// attachImages?: boolean,
|
||||
}
|
||||
|
||||
type WebInputModalInputs = {
|
||||
links: WebInputData[];
|
||||
}
|
||||
|
||||
|
||||
function WebInputModal(props: {
|
||||
onClose: () => void,
|
||||
onWebLinks: (urls: WebInputData[]) => void,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const { control: formControl, handleSubmit: formHandleSubmit, formState: { isValid: formIsValid, isDirty: formIsDirty } } = useForm<WebInputModalInputs>({
|
||||
values: { links: [{ url: '' }] },
|
||||
// mode: 'onChange', // validate on change
|
||||
});
|
||||
const { fields: formFields, append: formFieldsAppend, remove: formFieldsRemove } = useFieldArray({ control: formControl, name: 'links' });
|
||||
|
||||
// derived
|
||||
const urlFieldCount = formFields.length;
|
||||
|
||||
|
||||
// handlers
|
||||
|
||||
const { onClose, onWebLinks } = props;
|
||||
|
||||
const handleClose = React.useCallback(() => onClose(), [onClose]);
|
||||
|
||||
const handleSubmit = React.useCallback(({ links }: WebInputModalInputs) => {
|
||||
// clean and prefix URLs
|
||||
const cleanUrls = links.reduce((acc, { url, ...linkRest }) => {
|
||||
const trimmed = (url || '').trim();
|
||||
if (trimmed) {
|
||||
// this form uses a 'relaxed' URL validation, meaning one can write 'big-agi.com' and we'll assume https://
|
||||
const relaxedUrl = asValidURL(trimmed, true);
|
||||
if (relaxedUrl)
|
||||
acc.push({ url: relaxedUrl, ...linkRest });
|
||||
}
|
||||
return acc;
|
||||
}, [] as WebInputData[]);
|
||||
if (!cleanUrls.length) {
|
||||
addSnackbar({ key: 'invalid-urls', message: 'Please enter at least one valid web address', type: 'issue', overrides: { autoHideDuration: 2000 } });
|
||||
return;
|
||||
}
|
||||
onWebLinks(cleanUrls);
|
||||
handleClose();
|
||||
}, [handleClose, onWebLinks]);
|
||||
|
||||
|
||||
return (
|
||||
<GoodModal
|
||||
open
|
||||
onClose={handleClose}
|
||||
title='Add Web Content'
|
||||
titleStartDecorator={<LanguageRoundedIcon />}
|
||||
closeText={'Cancel'}
|
||||
// unfilterBackdrop
|
||||
// themedColor='neutral'
|
||||
hideBottomClose
|
||||
>
|
||||
<Box fontSize='md'>
|
||||
Enter web page addresses to import their content.
|
||||
</Box>
|
||||
<Typography level='body-sm'>
|
||||
Works on most websites and for YouTube videos (e.g., youtube.com/...) the transcript will be imported.
|
||||
{/*You can add up to {MAX_URLS} URLs.*/}
|
||||
</Typography>
|
||||
|
||||
<form onSubmit={formHandleSubmit(handleSubmit)}>
|
||||
<Stack spacing={1}>
|
||||
{formFields.map((field, index) => (
|
||||
<Controller
|
||||
key={field.id}
|
||||
control={formControl}
|
||||
name={`links.${index}.url`}
|
||||
rules={{ required: 'Please enter a valid URL' }}
|
||||
render={({ field: { value, onChange }, fieldState: { error } }) => (
|
||||
<FormControl error={!!error}>
|
||||
<Box sx={{ display: 'flex', gap: 1 }}>
|
||||
<Input
|
||||
autoFocus={index === 0}
|
||||
required={index === 0}
|
||||
placeholder='https://...'
|
||||
endDecorator={extractYoutubeVideoIDFromURL(value) ? <YouTubeIcon sx={{ color: 'red' }} /> : undefined}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
sx={{ flex: 1 }}
|
||||
/>
|
||||
{urlFieldCount > 1 && (
|
||||
<IconButton
|
||||
size='sm'
|
||||
variant='plain'
|
||||
color='neutral'
|
||||
onClick={() => formFieldsRemove(index)}
|
||||
>
|
||||
<DeleteOutlineIcon />
|
||||
</IconButton>
|
||||
)}
|
||||
</Box>
|
||||
{error && <FormHelperText>{error.message}</FormHelperText>}
|
||||
</FormControl>
|
||||
)}
|
||||
/>
|
||||
))}
|
||||
</Stack>
|
||||
|
||||
{/* Add a new link */}
|
||||
<Box sx={{ display: 'flex', justifyContent: 'space-between', gap: 1, mt: 2.5 }}>
|
||||
|
||||
{formIsDirty && <Button
|
||||
color='neutral'
|
||||
variant='soft'
|
||||
disabled={urlFieldCount >= MAX_URLS}
|
||||
onClick={() => formFieldsAppend({ url: '' })}
|
||||
startDecorator={<AddIcon />}
|
||||
>
|
||||
Another
|
||||
{/*{urlFieldCount >= MAX_URLS ? 'Enough URLs' : urlFieldCount === 1 ? 'Add URL' : urlFieldCount === 2 ? 'Add another' : urlFieldCount === 3 ? 'And another one' : urlFieldCount === 4 ? 'Why stopping' : 'Just one more'}*/}
|
||||
</Button>}
|
||||
|
||||
<Button
|
||||
variant='solid'
|
||||
type='submit'
|
||||
disabled={!formIsValid || !formIsDirty}
|
||||
sx={{ minWidth: 160, ml: 'auto' }}
|
||||
>
|
||||
Add {urlFieldCount > 1 ? `(${urlFieldCount})` : ''}
|
||||
</Button>
|
||||
|
||||
</Box>
|
||||
</form>
|
||||
|
||||
</GoodModal>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
export function useWebInputModal(onAttachWebLinks: (urls: WebInputData[]) => void) {
|
||||
|
||||
// state
|
||||
const [open, setOpen] = React.useState(false);
|
||||
|
||||
const openWebInputDialog = React.useCallback(() => setOpen(true), []);
|
||||
|
||||
const webInputDialogComponent = React.useMemo(() => open && (
|
||||
<WebInputModal
|
||||
onClose={() => setOpen(false)}
|
||||
onWebLinks={onAttachWebLinks}
|
||||
/>
|
||||
), [onAttachWebLinks, open]);
|
||||
|
||||
return {
|
||||
openWebInputDialog,
|
||||
webInputDialogComponent,
|
||||
};
|
||||
}
|
||||
@@ -2,40 +2,46 @@ import * as React from 'react';
|
||||
|
||||
import { Box, ListItem, ListItemButton, ListItemDecorator, Sheet, Typography } from '@mui/joy';
|
||||
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
|
||||
import type { ActileItem } from './ActileProvider';
|
||||
import { CloseablePopup } from '~/common/components/CloseablePopup';
|
||||
|
||||
import type { ActileItem, ActileProvider } from './ActileProvider';
|
||||
|
||||
export function ActilePopup(props: {
|
||||
anchorEl: HTMLElement | null,
|
||||
onClose: () => void,
|
||||
title?: string,
|
||||
items: ActileItem[],
|
||||
activeItemIndex: number | undefined,
|
||||
itemsByProvider: { provider: ActileProvider, items: ActileItem[] }[],
|
||||
activeItemIndex: number,
|
||||
activePrefixLength: number,
|
||||
onItemClick: (item: ActileItem) => void,
|
||||
children?: React.ReactNode
|
||||
}) {
|
||||
|
||||
const hasAnyIcon = props.items.some(item => !!item.Icon);
|
||||
// We need to keep track of the overall item index to correctly match with activeItemIndex
|
||||
const itemIndices = React.useMemo(() => {
|
||||
const indices: { providerKey: string, itemKey: string, isActive: boolean }[] = [];
|
||||
let indexCounter = 0;
|
||||
props.itemsByProvider.forEach(({ provider, items }) => {
|
||||
items.forEach((item) => {
|
||||
indices.push({
|
||||
providerKey: provider.key,
|
||||
itemKey: item.key,
|
||||
isActive: indexCounter === props.activeItemIndex,
|
||||
});
|
||||
indexCounter += 1;
|
||||
});
|
||||
});
|
||||
return indices;
|
||||
}, [props.itemsByProvider, props.activeItemIndex]);
|
||||
|
||||
return (
|
||||
<CloseableMenu
|
||||
noTopPadding noBottomPadding
|
||||
open anchorEl={props.anchorEl} onClose={props.onClose}
|
||||
sx={{ minWidth: 320 }}
|
||||
<CloseablePopup
|
||||
menu anchorEl={props.anchorEl} onClose={props.onClose}
|
||||
maxHeightGapPx={320}
|
||||
minWidth={320}
|
||||
noBottomPadding
|
||||
noTopPadding
|
||||
>
|
||||
|
||||
{!!props.title && (
|
||||
<Sheet variant='soft' sx={{ p: 1, borderBottom: '1px solid', borderBottomColor: 'neutral.softActiveBg' }}>
|
||||
<Typography level='title-sm'>
|
||||
{props.title}
|
||||
</Typography>
|
||||
</Sheet>
|
||||
)}
|
||||
|
||||
{!props.items.length && (
|
||||
{!props.itemsByProvider.length && (
|
||||
<ListItem variant='soft' color='warning'>
|
||||
<Typography level='body-md'>
|
||||
No matching command
|
||||
@@ -43,46 +49,65 @@ export function ActilePopup(props: {
|
||||
</ListItem>
|
||||
)}
|
||||
|
||||
{props.items.map((item, idx) => {
|
||||
const isActive = idx === props.activeItemIndex;
|
||||
const labelBold = item.label.slice(0, props.activePrefixLength);
|
||||
const labelNormal = item.label.slice(props.activePrefixLength);
|
||||
return (
|
||||
<ListItem
|
||||
key={item.id}
|
||||
variant={isActive ? 'soft' : undefined}
|
||||
color={isActive ? 'primary' : undefined}
|
||||
onClick={() => props.onItemClick(item)}
|
||||
>
|
||||
<ListItemButton color='primary'>
|
||||
{hasAnyIcon && (
|
||||
<ListItemDecorator>
|
||||
{item.Icon ? <item.Icon /> : null}
|
||||
</ListItemDecorator>
|
||||
)}
|
||||
<Box>
|
||||
{props.itemsByProvider.map(({ provider, items }) => (
|
||||
<React.Fragment key={provider.key}>
|
||||
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography level='title-sm' color={isActive ? 'primary' : undefined}>
|
||||
<span style={{ textDecoration: 'underline' }}><b>{labelBold}</b></span>{labelNormal}
|
||||
</Typography>
|
||||
{item.argument && <Typography level='body-sm'>
|
||||
{item.argument}
|
||||
{/* Provider Label */}
|
||||
<Sheet variant='soft' sx={{ p: 1, borderBottom: '1px solid', borderBottomColor: 'neutral.softActiveBg' }}>
|
||||
<Typography level='title-sm'>
|
||||
{provider.label}
|
||||
</Typography>
|
||||
</Sheet>
|
||||
|
||||
{/* Items */}
|
||||
{items.map((item) => {
|
||||
const index = itemIndices.findIndex(idx => idx.providerKey === provider.key && idx.itemKey === item.key);
|
||||
const isActive = itemIndices[index]?.isActive;
|
||||
|
||||
const labelBold = item.label.slice(0, props.activePrefixLength);
|
||||
const labelNormal = item.label.slice(props.activePrefixLength);
|
||||
|
||||
return (
|
||||
<ListItem
|
||||
key={`${provider.key}-${item.key}`}
|
||||
variant={isActive ? 'soft' : undefined}
|
||||
color={isActive ? 'primary' : undefined}
|
||||
onClick={() => props.onItemClick(item)}
|
||||
>
|
||||
<ListItemButton color='primary'>
|
||||
{item.Icon && (
|
||||
<ListItemDecorator>
|
||||
<item.Icon />
|
||||
</ListItemDecorator>
|
||||
)}
|
||||
|
||||
{/* Item*/}
|
||||
<Box>
|
||||
|
||||
{/* Item main text */}
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography level='title-sm' color={isActive ? 'primary' : undefined}>
|
||||
<span style={{ textDecoration: 'underline' }}><b>{labelBold}</b></span>{labelNormal}
|
||||
</Typography>
|
||||
{item.argument && <Typography level='body-sm'>
|
||||
{item.argument}
|
||||
</Typography>}
|
||||
</Box>
|
||||
|
||||
{/* Item description */}
|
||||
{!!item.description && <Typography level='body-xs'>
|
||||
{item.description}
|
||||
</Typography>}
|
||||
|
||||
</Box>
|
||||
|
||||
{!!item.description && <Typography level='body-xs'>
|
||||
{item.description}
|
||||
</Typography>}
|
||||
</Box>
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
);
|
||||
},
|
||||
)}
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
);
|
||||
})}
|
||||
</React.Fragment>
|
||||
))}
|
||||
|
||||
{props.children}
|
||||
|
||||
</CloseableMenu>
|
||||
</CloseablePopup>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +1,27 @@
|
||||
import type { FunctionComponent } from 'react';
|
||||
|
||||
export interface ActileProvider<TItem extends ActileItem = ActileItem> {
|
||||
|
||||
// Unique key for the provider
|
||||
readonly key: 'pcmd' | 'pstrmsg' | 'pattlbl';
|
||||
|
||||
// Label for display
|
||||
get label(): string;
|
||||
|
||||
// Interface for the provider
|
||||
fastCheckTriggerText: (trailingText: string) => boolean;
|
||||
fetchItems: () => ActileProviderItems<TItem>;
|
||||
onItemSelect: (item: ActileItem) => void;
|
||||
|
||||
}
|
||||
|
||||
export type ActileProviderItems<TItem extends ActileItem = ActileItem> = Promise<{ searchPrefix: string, items: TItem[] }>;
|
||||
|
||||
export interface ActileItem {
|
||||
id: string;
|
||||
key: string;
|
||||
providerKey: ActileProvider['key'];
|
||||
label: string;
|
||||
argument?: string;
|
||||
description?: string;
|
||||
Icon?: FunctionComponent;
|
||||
}
|
||||
|
||||
type ActileProviderIds = 'actile-commands' | 'actile-attach-reference';
|
||||
|
||||
export interface ActileProvider {
|
||||
id: ActileProviderIds;
|
||||
title: string;
|
||||
searchPrefix: string;
|
||||
|
||||
checkTriggerText: (trailingText: string) => boolean;
|
||||
|
||||
fetchItems: () => Promise<ActileItem[]>;
|
||||
onItemSelect: (item: ActileItem) => void;
|
||||
}
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
//import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
|
||||
|
||||
/*export const providerAttachReference: ActileProvider = {
|
||||
id: 'actile-attach-reference',
|
||||
title: 'Attach Reference',
|
||||
searchPrefix: '@',
|
||||
|
||||
checkTriggerText: (trailingText: string) =>
|
||||
trailingText.endsWith(' @'),
|
||||
|
||||
fetchItems: async () => {
|
||||
return [{
|
||||
id: 'test-1',
|
||||
label: 'Attach This',
|
||||
description: 'Attach this to the message',
|
||||
Icon: undefined,
|
||||
}];
|
||||
},
|
||||
|
||||
onItemSelect: (item: ActileItem) => {
|
||||
console.log('Selected item:', item);
|
||||
},
|
||||
};*/
|
||||
@@ -0,0 +1,37 @@
|
||||
import type { ActileItem, ActileProvider, ActileProviderItems } from './ActileProvider';
|
||||
|
||||
import type { AttachmentDraftsStoreApi } from '~/common/attachment-drafts/store-attachment-drafts_slice';
|
||||
|
||||
export interface AttachmentLabelItem extends ActileItem {
|
||||
// nothing to do do here, this is really just a label
|
||||
}
|
||||
|
||||
export const providerAttachmentLabels = (
|
||||
attachmentsStoreApi: AttachmentDraftsStoreApi | null,
|
||||
onLabelSelect: (item: ActileItem, searchPrefix: string) => void,
|
||||
): ActileProvider<AttachmentLabelItem> => ({
|
||||
|
||||
key: 'pattlbl',
|
||||
|
||||
get label() {
|
||||
return 'Attachment Labels';
|
||||
},
|
||||
|
||||
// Uses '@' as the trigger
|
||||
fastCheckTriggerText: (trailingText: string) => trailingText === '@' || trailingText.endsWith(' @'),
|
||||
|
||||
fetchItems: async (): ActileProviderItems<AttachmentLabelItem> => ({
|
||||
searchPrefix: '',
|
||||
items: attachmentsStoreApi?.getState()?.attachmentDrafts.map(draft => ({
|
||||
key: draft.id,
|
||||
providerKey: 'pattlbl',
|
||||
label: draft.label,
|
||||
argument: undefined,
|
||||
description: 'name',
|
||||
Icon: undefined,
|
||||
} as AttachmentLabelItem)) ?? [],
|
||||
}),
|
||||
|
||||
onItemSelect: item => onLabelSelect(item as AttachmentLabelItem, '@'),
|
||||
|
||||
});
|
||||
@@ -1,24 +1,35 @@
|
||||
import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
import { findAllChatCommands } from '../../../commands/commands.registry';
|
||||
|
||||
import type { ActileItem, ActileProvider, ActileProviderItems } from './ActileProvider';
|
||||
|
||||
export const providerCommands = (onItemSelect: (item: ActileItem) => void): ActileProvider => ({
|
||||
id: 'actile-commands',
|
||||
title: 'Chat Commands',
|
||||
searchPrefix: '/',
|
||||
|
||||
checkTriggerText: (trailingText: string) =>
|
||||
trailingText.trim() === '/',
|
||||
export const providerCommands = (
|
||||
onCommandSelect: (item: ActileItem, searchPrefix: string) => void,
|
||||
): ActileProvider => ({
|
||||
|
||||
fetchItems: async () => {
|
||||
return findAllChatCommands().map((cmd) => ({
|
||||
id: cmd.primary,
|
||||
key: 'pcmd',
|
||||
|
||||
get label() {
|
||||
return 'Chat Commands';
|
||||
},
|
||||
|
||||
fastCheckTriggerText: (trailingText: string) => {
|
||||
// only the literal '/' is a trigger
|
||||
return trailingText === '/';
|
||||
},
|
||||
|
||||
fetchItems: async (): ActileProviderItems => ({
|
||||
searchPrefix: '/',
|
||||
items: findAllChatCommands().map((cmd) => ({
|
||||
key: cmd.primary,
|
||||
providerKey: 'pcmd',
|
||||
label: cmd.primary,
|
||||
argument: cmd.arguments?.join(' ') ?? undefined,
|
||||
description: cmd.description,
|
||||
Icon: cmd.Icon,
|
||||
}));
|
||||
},
|
||||
} satisfies ActileItem)),
|
||||
}),
|
||||
|
||||
onItemSelect: (item) => onCommandSelect(item as ActileItem, '/'),
|
||||
|
||||
onItemSelect,
|
||||
});
|
||||
@@ -0,0 +1,53 @@
|
||||
import { conversationTitle, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { MESSAGE_FLAG_STARRED, messageFragmentsReduceText, messageHasUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
|
||||
import type { ActileItem, ActileProvider, ActileProviderItems } from './ActileProvider';
|
||||
|
||||
|
||||
export interface StarredMessageItem extends ActileItem {
|
||||
conversationId: DConversationId,
|
||||
messageId: string,
|
||||
}
|
||||
|
||||
export const providerStarredMessages = (onMessageSelect: (item: StarredMessageItem) => void): ActileProvider<StarredMessageItem> => ({
|
||||
|
||||
key: 'pstrmsg',
|
||||
|
||||
get label() {
|
||||
return 'Starred Messages';
|
||||
},
|
||||
|
||||
// only the literal '@' at start of chat, or ' @' at end of chat
|
||||
fastCheckTriggerText: (trailingText: string) => trailingText === '@' || trailingText.endsWith(' @'),
|
||||
|
||||
// finds all the starred messages in all the conversations - this could be heavy
|
||||
fetchItems: async (): ActileProviderItems<StarredMessageItem> => {
|
||||
const { conversations } = useChatStore.getState();
|
||||
|
||||
const starredMessages: StarredMessageItem[] = [];
|
||||
conversations.forEach((conversation) => {
|
||||
conversation.messages.forEach((message) => {
|
||||
messageHasUserFlag(message, MESSAGE_FLAG_STARRED) && starredMessages.push({
|
||||
key: message.id,
|
||||
providerKey: 'pstrmsg',
|
||||
// data
|
||||
conversationId: conversation.id,
|
||||
messageId: message.id,
|
||||
// looks
|
||||
label: conversationTitle(conversation) + ' - ' + messageFragmentsReduceText(message.fragments).slice(0, 32) + '...',
|
||||
// description: message.text.slice(32, 100),
|
||||
Icon: undefined,
|
||||
} satisfies StarredMessageItem);
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
searchPrefix: '',
|
||||
items: starredMessages,
|
||||
};
|
||||
},
|
||||
|
||||
onItemSelect: item => onMessageSelect(item as StarredMessageItem),
|
||||
|
||||
});
|
||||
@@ -1,5 +1,6 @@
|
||||
import * as React from 'react';
|
||||
import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
|
||||
import type { ActileItem, ActileProvider } from './ActileProvider';
|
||||
import { ActilePopup } from './ActilePopup';
|
||||
|
||||
|
||||
@@ -7,63 +8,74 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
|
||||
// state
|
||||
const [popupOpen, setPopupOpen] = React.useState(false);
|
||||
const [provider, setProvider] = React.useState<ActileProvider | null>(null);
|
||||
|
||||
const [items, setItems] = React.useState<ActileItem[]>([]);
|
||||
const [itemsByProvider, setItemsByProvider] = React.useState<{ provider: ActileProvider, items: ActileItem[] }[]>([]);
|
||||
const [activeSearchString, setActiveSearchString] = React.useState<string>('');
|
||||
const [activeItemIndex, setActiveItemIndex] = React.useState<number>(0);
|
||||
|
||||
|
||||
// derived state
|
||||
const activeItems = React.useMemo(() => {
|
||||
const activeItemsByProvider = React.useMemo(() => {
|
||||
const search = activeSearchString.trim().toLowerCase();
|
||||
return items.filter(item => item.label.toLowerCase().startsWith(search));
|
||||
}, [items, activeSearchString]);
|
||||
const activeItem = activeItemIndex >= 0 && activeItemIndex < activeItems.length ? activeItems[activeItemIndex] : null;
|
||||
return itemsByProvider.map(({ provider, items }) => ({
|
||||
provider,
|
||||
items: items.filter(item => item.label?.toLowerCase().startsWith(search)),
|
||||
})).filter(({ items }) => items.length > 0);
|
||||
}, [itemsByProvider, activeSearchString]);
|
||||
|
||||
const flatActiveItems = React.useMemo(() => {
|
||||
return activeItemsByProvider.flatMap(({ items }) => items);
|
||||
}, [activeItemsByProvider]);
|
||||
const totalItems = flatActiveItems.length;
|
||||
const activeItem = totalItems > 0 && activeItemIndex >= 0 && activeItemIndex < totalItems ? flatActiveItems[activeItemIndex] : null;
|
||||
|
||||
const handleClose = React.useCallback(() => {
|
||||
setPopupOpen(false);
|
||||
setProvider(null);
|
||||
setItems([]);
|
||||
setItemsByProvider([]);
|
||||
setActiveSearchString('');
|
||||
setActiveItemIndex(0);
|
||||
}, []);
|
||||
|
||||
const handlePopupItemClicked = React.useCallback((item: ActileItem) => {
|
||||
const provider = providers.find(p => p.key === item.providerKey);
|
||||
provider?.onItemSelect(item);
|
||||
handleClose();
|
||||
}, [handleClose, provider]);
|
||||
}, [providers, handleClose]);
|
||||
|
||||
const handleEnterKey = React.useCallback(() => {
|
||||
activeItem && handlePopupItemClicked(activeItem);
|
||||
if (activeItem)
|
||||
handlePopupItemClicked(activeItem);
|
||||
}, [activeItem, handlePopupItemClicked]);
|
||||
|
||||
|
||||
const actileInterceptTextChange = React.useCallback((trailingText: string) => {
|
||||
for (const provider of providers) {
|
||||
if (provider.checkTriggerText(trailingText)) {
|
||||
setProvider(provider);
|
||||
setPopupOpen(true);
|
||||
setActiveSearchString(provider.searchPrefix);
|
||||
provider
|
||||
.fetchItems()
|
||||
.then(items => setItems(items))
|
||||
.catch(error => {
|
||||
handleClose();
|
||||
console.error('Failed to fetch popup items:', error);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
// Collect all providers whose trigger matches
|
||||
const matchingProviders = providers.filter(provider => provider.fastCheckTriggerText(trailingText));
|
||||
|
||||
if (matchingProviders.length > 0) {
|
||||
// Fetch items from all matching providers
|
||||
Promise.all(matchingProviders.map(provider =>
|
||||
provider.fetchItems().then(({ searchPrefix, items }) => ({
|
||||
provider,
|
||||
searchPrefix,
|
||||
items: items.map(item => ({ ...item, providerKey: provider.key })),
|
||||
})),
|
||||
)).then((results) => {
|
||||
// Filter out empty results
|
||||
results = results.filter(result => result.items.length > 0);
|
||||
if (results.length) {
|
||||
setPopupOpen(true);
|
||||
setItemsByProvider(results.map(result => ({ provider: result.provider, items: result.items })));
|
||||
setActiveSearchString(results[0].searchPrefix); // Assuming all search prefixes are the same
|
||||
setActiveItemIndex(0);
|
||||
}
|
||||
}).catch(error => {
|
||||
handleClose();
|
||||
console.error('Failed to fetch popup items:', error);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}, [handleClose, providers]);
|
||||
|
||||
|
||||
const actileInterceptKeydown = React.useCallback((_event: React.KeyboardEvent<HTMLTextAreaElement>): boolean => {
|
||||
|
||||
// Popup open: Intercept
|
||||
|
||||
const { key, currentTarget, ctrlKey, metaKey } = _event;
|
||||
|
||||
if (popupOpen) {
|
||||
@@ -72,11 +84,11 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
handleClose();
|
||||
} else if (key === 'ArrowUp') {
|
||||
_event.preventDefault();
|
||||
setActiveItemIndex((prevIndex) => (prevIndex > 0 ? prevIndex - 1 : activeItems.length - 1));
|
||||
setActiveItemIndex((prevIndex) => (prevIndex > 0 ? prevIndex - 1 : totalItems - 1));
|
||||
} else if (key === 'ArrowDown') {
|
||||
_event.preventDefault();
|
||||
setActiveItemIndex((prevIndex) => (prevIndex < activeItems.length - 1 ? prevIndex + 1 : 0));
|
||||
} else if (key === 'Enter' || key === 'ArrowRight' || key === 'Tab' || (key === ' ' && activeItems.length === 1)) {
|
||||
setActiveItemIndex((prevIndex) => (prevIndex < totalItems - 1 ? prevIndex + 1 : 0));
|
||||
} else if (key === 'Enter' || key === 'ArrowRight' || key === 'Tab' || (key === ' ' && totalItems === 1)) {
|
||||
_event.preventDefault();
|
||||
handleEnterKey();
|
||||
} else if (key === 'Backspace') {
|
||||
@@ -92,26 +104,24 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
const trailingText = (currentTarget.value || '') + key;
|
||||
return actileInterceptTextChange(trailingText);
|
||||
|
||||
}, [actileInterceptTextChange, activeItems.length, handleClose, handleEnterKey, popupOpen]);
|
||||
|
||||
}, [actileInterceptTextChange, handleClose, handleEnterKey, popupOpen, totalItems]);
|
||||
|
||||
const actileComponent = React.useMemo(() => {
|
||||
return !popupOpen ? null : (
|
||||
<ActilePopup
|
||||
anchorEl={anchorRef.current}
|
||||
onClose={handleClose}
|
||||
title={provider?.title}
|
||||
items={activeItems}
|
||||
itemsByProvider={activeItemsByProvider}
|
||||
activeItemIndex={activeItemIndex}
|
||||
activePrefixLength={activeSearchString.length}
|
||||
onItemClick={handlePopupItemClicked}
|
||||
/>
|
||||
);
|
||||
}, [activeItemIndex, activeItems, activeSearchString.length, anchorRef, handleClose, handlePopupItemClicked, popupOpen, provider?.title]);
|
||||
}, [activeItemIndex, activeItemsByProvider, activeSearchString.length, anchorRef, handleClose, handlePopupItemClicked, popupOpen]);
|
||||
|
||||
return {
|
||||
actileComponent,
|
||||
actileInterceptKeydown,
|
||||
actileInterceptTextChange,
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||