Compare commits
1497 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| 5c44804d50 | |||
| 8589376c66 | |||
| d53a8b4941 | |||
| af819da623 | |||
| 3addc4e2ac | |||
| 7ff7e489ab | |||
| 95aa0da014 | |||
| b12637267b | |||
| 3a44f70db9 | |||
| 92206d9740 | |||
| bddd91df2a | |||
| 144ead8cfe | |||
| 185f8e7f44 | |||
| 1538cd83af | |||
| 027f7deb3a | |||
| 4043a6098b | |||
| 92b913be98 | |||
| 8505ba6b84 | |||
| c6973f6b4e | |||
| 94eddaff3f | |||
| f38be4aff3 | |||
| 3ea78fcf9f | |||
| 78cfcc6206 | |||
| 9c5d4a18ce | |||
| aa48b4d596 | |||
| 265acd9345 | |||
| 34ec1d5671 | |||
| 4a1f4f0a01 | |||
| 850528820f | |||
| 4dc8197c51 | |||
| 42e97eed4c | |||
| 065f30ac38 | |||
| 9e705a12b1 | |||
| b8144f0748 | |||
| e5b5faad3e | |||
| f840c1d424 | |||
| eabd268874 | |||
| 06aadc543a | |||
| 2a410f52b5 | |||
| eb7a32ed16 | |||
| 14118d3056 | |||
| c8b3d8ad9b | |||
| a097b32d5c | |||
| 0a88a9cee6 | |||
| bef1c0c5fc | |||
| 52e6ef436f | |||
| ad0617de90 | |||
| 1753c1a40a | |||
| 13b7004959 | |||
| 3b9a21bbf7 | |||
| 5f0beb9d00 | |||
| 8411a73589 | |||
| 009a3751c0 | |||
| adef88e358 | |||
| f8b9df7bf0 | |||
| c6fa3e1d24 | |||
| ae24dd1e28 | |||
| 1efca7dd48 | |||
| 3178f4e7e9 | |||
| e00f61dcd0 | |||
| 6a5774aae7 | |||
| 5119061861 | |||
| fdfbae334a | |||
| e3fce43e62 | |||
| 9251f8ff0e | |||
| 18ef40f6f4 | |||
| 46887d1d9f | |||
| 632d10e9e3 | |||
| 9fa33eea73 | |||
| 2c4c13bc2c | |||
| 33f8a4eb3a | |||
| aa7959a970 | |||
| 7471bc0bb2 | |||
| b257f75e53 | |||
| 455e279216 | |||
| 7fd359852a | |||
| 82ecfdbd37 | |||
| 478452983f | |||
| 5c1a7d485f | |||
| 39c4ce9240 | |||
| da49585df5 | |||
| 0b9bee02fe | |||
| 00e5d1ae27 | |||
| b290d63926 | |||
| 1b5438cc6c | |||
| 17323facce | |||
| bc9dedeea4 | |||
| 1b3a383b53 | |||
| 4e0a535402 | |||
| 0005db1b33 | |||
| 5cd74031be | |||
| facb85b5da | |||
| 5f97d17837 | |||
| af722e09f8 | |||
| 959edf6010 | |||
| d08f183394 | |||
| da541ae182 | |||
| 4582c4c03d | |||
| 8c7d70d434 | |||
| fcf9f9e562 | |||
| 7bb0fb294a | |||
| 2e7b5ba5f0 | |||
| 6b017f3678 | |||
| a303d00900 | |||
| aaa351dca4 | |||
| ee5fb5361c | |||
| aaffcdbfeb | |||
| a8fefb5a90 | |||
| 8e3b07fa49 | |||
| 36ac618e88 | |||
| ab0eeae1e3 | |||
| f74adffa12 | |||
| 8f23f41e2f | |||
| 7d04844c6a | |||
| c301dcc226 | |||
| 8dd4ece730 | |||
| 75bd68f9fe | |||
| 96af022afa | |||
| c570c68f1b | |||
| 21a226a486 | |||
| 2695cb8e46 | |||
| 2207405ebc | |||
| 3802123147 | |||
| c6c630f5c6 | |||
| 7c76a17c08 | |||
| 5ba7723fa0 | |||
| 87ff07c850 | |||
| 71e1a2eeec | |||
| 88fba0f53a | |||
| 07260a8e06 | |||
| c1d155b569 | |||
| 7e7cfe1db1 | |||
| d27a44ab7f | |||
| 2adcca1cda | |||
| cf854b7262 | |||
| ecb0e07312 | |||
| 7d6d7e619b | |||
| 8b2b88c7cb | |||
| 9af1a6a16b | |||
| 34caa16e39 | |||
| 976426dbd3 | |||
| d1ac9adc7e | |||
| 513edf90f7 | |||
| 60d47510ab | |||
| 5b7b9837f0 | |||
| 333c3327c4 | |||
| 9723c98940 | |||
| 97604f3c5b | |||
| 044f18da46 | |||
| 53946b9523 | |||
| fd8f88c5e4 | |||
| e7d15ce2b0 | |||
| ff1d98a87e | |||
| accc68cd28 | |||
| b2c7bc980f | |||
| 75fbe8d5d8 | |||
| 13ebf3b3aa | |||
| 916d3812db | |||
| 90610c819b | |||
| a5f6f62559 | |||
| bfb3501dec | |||
| c0513c50b1 | |||
| bcf4baf004 | |||
| 53bf948a04 | |||
| 2186d91f89 | |||
| aaf856a503 | |||
| 8af625b7dc | |||
| 4690891757 | |||
| bb3e17c0fa | |||
| 7965df5ff2 | |||
| 5b5f0a5a8d | |||
| fdb087a39b | |||
| 97749378d6 | |||
| 63dc2301ff | |||
| 5659c0bc70 | |||
| 1e288ab0fd | |||
| 4f058a0174 | |||
| 7284114565 | |||
| 0b2592dbd7 | |||
| edfaf6f002 | |||
| da3990b614 | |||
| 25740ae13c | |||
| fb4c05f698 | |||
| a0c4e37c94 | |||
| 278caf6f0c | |||
| 2ce0c61f83 | |||
| afb25324a7 | |||
| ba1b761c08 | |||
| 0e2d4af617 | |||
| 1b0b54a072 | |||
| 9c629d3c5c | |||
| 173af4e459 | |||
| c0f12c0a5d | |||
| 390605fe66 | |||
| e4bd5f865c | |||
| b31c891772 | |||
| 08e4016972 | |||
| aea7eb6ba3 | |||
| 5496750085 | |||
| 4b9709898c | |||
| 705daac737 | |||
| a802b32f47 | |||
| 8b8db5e447 | |||
| 3ee44599c7 | |||
| 2955a41ed5 | |||
| a52802c882 | |||
| b46c70512a | |||
| 18f91e2eeb | |||
| 9296984569 | |||
| 7b835d9855 | |||
| ce23b9169b | |||
| 47a535d309 | |||
| 6342801aa0 | |||
| 50c00f5516 | |||
| 4a49678fb6 | |||
| 0f10b8f677 | |||
| d8433b79cc | |||
| f94f640212 | |||
| 5cf779757f | |||
| d49acf379e | |||
| b9bff4abc0 | |||
| 6fc4dbe9d1 | |||
| cca8132a2c | |||
| 91654ca219 | |||
| 547d7eca59 | |||
| b86bf31baa | |||
| 5b5b4efe42 | |||
| e9fb65edba | |||
| cc1cba9aa8 | |||
| a765c566c8 | |||
| 63e9022b84 | |||
| 368a995e7f | |||
| c844c66b5a | |||
| 73b18313e9 | |||
| bdd68dc6c9 | |||
| 3901b94382 | |||
| 82ac276338 | |||
| 02c9f3ebdb | |||
| 364ad63877 | |||
| 5fc4196d01 | |||
| 3a1e10bd21 | |||
| 73519ec562 | |||
| bf9c9916b1 | |||
| 01d017c6cd | |||
| ca98ab02d8 | |||
| 347804a02e | |||
| 4c80f8dbf4 | |||
| 73ee96040f | |||
| 6180da1333 | |||
| 2756ff6ad0 | |||
| e57491b812 | |||
| 9d8ae538d9 | |||
| dd7defd2c7 | |||
| e79ec45b5b | |||
| 1a138bbc16 | |||
| b067165471 | |||
| 6fbcbb9399 | |||
| aaf77b4e20 | |||
| f5cc2e952b | |||
| eeab362567 | |||
| 834205c426 | |||
| fbad8ca62e | |||
| 1e4c6f13c5 | |||
| b7c2b3d4cb | |||
| 0d5b7d36f1 | |||
| 059886fede | |||
| db7dd0ca43 | |||
| f4c611b47d | |||
| 39c32646c5 | |||
| 1720fffbdc | |||
| b4d8e39d56 | |||
| 6c51cd0d1d | |||
| cb9cdc508a | |||
| 7d037a206f | |||
| ace10ab4be | |||
| bc0a7b6ac3 | |||
| e77e2045e3 | |||
| abbd55c740 | |||
| bf5e80a462 | |||
| 121deaae5f | |||
| 80317232ba | |||
| 22f815dcd1 | |||
| fb96c3ab47 | |||
| 3b15ad51a1 | |||
| 11c41e7381 | |||
| 358d8a54ff | |||
| 3c8fedce68 | |||
| 5066336c75 | |||
| 1744b5b9d0 | |||
| 0807744577 | |||
| 59f871d3ec | |||
| fed351a2fc | |||
| 0c15476dd2 | |||
| 94ef76c67e | |||
| bd5bf6f94f | |||
| 1fbf454c3c | |||
| 07b62fe5c1 | |||
| 7fbf6ee2e8 | |||
| ba66fc30c5 | |||
| 45b7ed3220 | |||
| 20f1c4c0ae | |||
| 97b6fc5e2b | |||
| 44d8c30187 | |||
| e3957bf08b | |||
| acfe0aba21 | |||
| 6247b5411b | |||
| 5cc0b0a011 | |||
| 1fed2fb18c | |||
| 8a0e7a4e3d | |||
| 29a784c6c6 | |||
| 409a3ee194 | |||
| 54caa3e01a | |||
| e1a723a39f | |||
| 463ea35d7c | |||
| f751c91c68 | |||
| ad24c8771a | |||
| 6f82e2c3ed | |||
| f4b39071f0 | |||
| 621c968f3f | |||
| aeb129e422 | |||
| 3050b546ac | |||
| 1429726ba6 | |||
| 4075581acd | |||
| 56774fd974 | |||
| 5e674d2299 | |||
| 06f5b6d6ff | |||
| b25b4e6c8f | |||
| 645e07dba8 | |||
| 46181fcaa2 | |||
| 8d7ae425f9 | |||
| 7d572334a1 | |||
| 5dab6f68e6 | |||
| d1c595d8db | |||
| eaa2635b51 | |||
| dc2d226ddb | |||
| 336a4e1f35 | |||
| 4d3b6b4f43 | |||
| a12601b49c | |||
| 15a895064e | |||
| 8bd1507ace | |||
| 89d7ec5d0b | |||
| 670e57735a | |||
| fa703c25e8 | |||
| f58161b1d1 | |||
| 8db2a37a59 | |||
| bfdb9c2624 | |||
| 240e984737 | |||
| fe128c18b1 | |||
| b208d8c40d | |||
| 556641e1f4 | |||
| 464eb671db | |||
| 12b8f1e3ef | |||
| ab199afe0d | |||
| fe1a498da0 | |||
| 4f9d55eb42 | |||
| 70f450f547 | |||
| 28fc7deefc | |||
| 428babf856 | |||
| b824ddf2e3 | |||
| 2396966740 | |||
| 23ca49128a | |||
| ec6bdede20 | |||
| 4ada2013d2 | |||
| 79afef6bc1 | |||
| e7000df89f | |||
| 59f77a64ea | |||
| 8be152666e | |||
| 10488854ce | |||
| 6586aafed8 | |||
| 4568a60be3 | |||
| 193bc8bb8e | |||
| ce381b7690 | |||
| b238428816 | |||
| 0ac37f50cf | |||
| 54b9389b77 | |||
| a183c26e51 | |||
| 01a03d164c | |||
| cdff1fde2d | |||
| c38b9998a6 | |||
| 77c1a335ad | |||
| 07a0fe6249 | |||
| 204bc46976 | |||
| b910506519 | |||
| 3cef39da17 | |||
| 3aea29bcb5 | |||
| dd0d19168b | |||
| 6727fcd111 | |||
| 9d347f4a5a | |||
| 084e48ddc2 | |||
| 31e89ce9a1 | |||
| baad3ae1c3 | |||
| 7c099cab94 | |||
| 811875dd2e | |||
| 127443d550 | |||
| d2064605bf | |||
| 4c6fb61ca8 | |||
| 608ba8bcb4 | |||
| b53c054dee | |||
| 05aa4b547f | |||
| 6afb61d25d | |||
| a7ce5c1ca6 | |||
| 952bd2bd93 | |||
| f9d33d4888 | |||
| 81d99f19d4 | |||
| 454a4257da | |||
| e513b42786 | |||
| b607e3c034 | |||
| d5c3f5012b | |||
| 21d045be59 | |||
| a9c1c34dc9 | |||
| 44ab0483b6 | |||
| 9eb0cc0b62 | |||
| 2db74867f5 | |||
| fd30baafb8 | |||
| 3623eef47f | |||
| 7b07bb7884 | |||
| 7946cd6614 | |||
| 51b6e30986 | |||
| 002df7b0f9 | |||
| 564cf0fed0 | |||
| dee9492d4c | |||
| 6ae026f7c5 | |||
| 6bcbe286f3 | |||
| 6f35f72607 | |||
| 3a7aa75538 | |||
| e4e7ac260a | |||
| b8aaa4bb42 | |||
| 7793e2694b | |||
| 83f2c72f29 | |||
| 1caeaee7f0 | |||
| f354134234 | |||
| 66219d30e0 | |||
| b9e3942ed8 | |||
| 2354cdc1d1 | |||
| d929438df9 | |||
| 1acaed1de7 | |||
| 16195f8a55 | |||
| d7fc8c178f | |||
| 2894e16706 | |||
| c2340f3432 | |||
| 3b7b3106db | |||
| cff92819f9 | |||
| 2f981d852b | |||
| 8eef74d776 | |||
| 60e46204dc | |||
| 6a5d783435 | |||
| 0223e076c4 | |||
| ce80c78319 | |||
| cc0085ae61 | |||
| f28e243b9d | |||
| 2e4532593f | |||
| 1f10905a03 | |||
| 88762db484 | |||
| 3b5ab0ac70 | |||
| 8903c9296b | |||
| 97858a3c94 | |||
| 0ec3e83518 | |||
| 8c007b5bf7 | |||
| 768236b0e2 | |||
| 495d78b885 | |||
| 34b1e515fe | |||
| 2ac1789312 | |||
| 79edbd3fa5 | |||
| f50d9994e2 | |||
| 1603d3085f | |||
| ccf7036f33 | |||
| a0a1a5e3c1 | |||
| fbf9120859 | |||
| 8a770beec3 | |||
| 6b31669765 | |||
| 26d72fc2d8 | |||
| 5eb56d0994 | |||
| dbc4a922d5 | |||
| 141f423842 | |||
| 667f2433ab | |||
| fd930ef548 | |||
| 7eadfb1a63 | |||
| 67cb07ac92 | |||
| 96d28c43fc | |||
| e57e3f5f0a | |||
| 7b99bd71da | |||
| 861a037321 | |||
| 84cbe6c434 | |||
| 2cbb811523 | |||
| 8ef4faa10f | |||
| f6a1c9bf52 | |||
| 5d9f6fb4f5 | |||
| 66840a8ecd | |||
| a8ee6b255a | |||
| bd73d1c533 | |||
| e33c0ebc42 | |||
| 57e4a35fee | |||
| d490b57410 | |||
| 0416602e5f | |||
| ddc27b2eb9 | |||
| 374deb147b | |||
| d2eabd1ad0 | |||
| efbc625cc3 | |||
| 91ae0b8cb0 | |||
| ddc5741b00 | |||
| 4729aca6b0 | |||
| bb4fc3a70c | |||
| 5d8084b650 | |||
| f316b892f5 | |||
| cbda1d7cd0 | |||
| 2f8e879976 | |||
| cc0ac5ae3c | |||
| 0185d24fb3 | |||
| 97dbdc9c31 | |||
| a07c66c9a3 | |||
| 308bd25bc0 | |||
| 70066a03b6 | |||
| a7f3872af3 | |||
| 22e10e675a | |||
| 89679e946d | |||
| 1d1bb9d3df | |||
| 8faf2b2595 | |||
| e47ad9700e | |||
| 372b19a057 | |||
| cbe156a868 | |||
| 181a3881e2 | |||
| 3eef03b303 | |||
| ad56e3165c | |||
| b1a96b6e75 | |||
| 56419b1b4e | |||
| 372f14a9c5 | |||
| e1ec56a120 | |||
| 5bb11249d6 | |||
| 9fbcca1ff2 | |||
| 323f2b2c3e | |||
| b971d38dd5 | |||
| 278f479a3a | |||
| 03aea5678d | |||
| b62b8ee7e6 | |||
| 63f55551e5 | |||
| b185fbc57d | |||
| ceb9d58e72 | |||
| a0bb515a4f | |||
| 2cfac2f18b | |||
| d412f538b2 | |||
| 94f90ad861 | |||
| 4a402e7937 | |||
| c226d6c391 | |||
| 67410e6c59 | |||
| 419c361147 | |||
| 3769a53ffa | |||
| ec4aaa3bfb | |||
| be52680fcd | |||
| 9d41ab9339 | |||
| f126fc3087 | |||
| 764377037c | |||
| 8e09eaab45 | |||
| 6523da186c | |||
| 6471fd8b6f | |||
| 247a74881a | |||
| 3ef09f0a5f | |||
| b924d331f9 | |||
| 14041b6012 | |||
| 2c6cc5ecec | |||
| ac022b1df0 | |||
| 0a2081de08 | |||
| 64a8e554c7 | |||
| 082d29fd2f | |||
| ba5cf9d002 | |||
| 57a55318df | |||
| e70f4f7a59 | |||
| 1d217fad67 | |||
| e95d46f085 | |||
| f4577878e1 | |||
| 1bd1e5c8e3 | |||
| c975dee965 | |||
| 9d690f4219 | |||
| 29ddb3f58d | |||
| 8626bc0b1c | |||
| c362cf6596 | |||
| 97264fc5ff | |||
| 494c4409c1 | |||
| d46e366c81 | |||
| 6afe33ee9c | |||
| 903c9e1cc3 | |||
| 3ef43fc3f5 | |||
| b1c3be05dd | |||
| efee23b4a7 | |||
| 06b67a7586 | |||
| 889a2dbf9d | |||
| 2f80fcc888 | |||
| f7ee479c1d | |||
| 94fa0981fe | |||
| 4c74afe438 | |||
| f76cea22de | |||
| 3d49110808 | |||
| 88a4579f7a | |||
| 241bde0333 | |||
| 73c7867cd6 | |||
| b35254f7ad | |||
| 213e78c956 | |||
| 7bf552c491 | |||
| 3bf9923f86 | |||
| a6a8a28f59 | |||
| 56a8e452bf | |||
| 6bec0bf70d | |||
| 5dc9c8f90e | |||
| e3290e12b1 | |||
| 9f37ce9e42 | |||
| 8904c0c811 | |||
| b0d021b7f2 | |||
| 0175f3b8a1 | |||
| 0fa9d5bf62 | |||
| 4919e38e3e | |||
| 2e99533f96 | |||
| f095645d89 | |||
| 757c83142e | |||
| 36d274ca9f | |||
| ec11b61f67 | |||
| 7765271d63 | |||
| 7c2464bba7 | |||
| 17e010f93c | |||
| 452d630a2a | |||
| f317a3e38f | |||
| f56195058e | |||
| 2e93dbb10c | |||
| f862456d73 | |||
| d99b0b2137 | |||
| 1d390f9aa7 | |||
| 514beb7940 | |||
| c7bdfce734 | |||
| e5fe4b06ad | |||
| 89b7c265d3 | |||
| 698c31943e | |||
| b70060d46e | |||
| 6ddc5ef53e | |||
| 212023c7e4 | |||
| b687f23c95 | |||
| 7a05d01554 | |||
| 78e3a57857 | |||
| 79d0c96b20 | |||
| 21ed38a20e | |||
| d8b1f99114 | |||
| b0fb1b9890 | |||
| a63932cff2 | |||
| 0b22165d2a | |||
| 41b1951abe | |||
| 353431e54c | |||
| 7b232dd7d8 | |||
| d32adf9dbf | |||
| 940d490217 | |||
| 46e41e38cf | |||
| 276ff8f995 | |||
| 030837fccf | |||
| a7d38aefb1 | |||
| 230a0d7caf | |||
| 6e14e43c78 | |||
| e6389f08be | |||
| a4edeb098e | |||
| 093c536415 | |||
| 7479b50fea | |||
| ebce36d043 | |||
| 77bab1aa74 | |||
| ebcac3405c | |||
| d2781a6f87 | |||
| f5954f5bb3 | |||
| 6baf694d6f | |||
| cb3b586d4d | |||
| f68789ab20 | |||
| 0c6a3f1917 | |||
| 05fccaf982 | |||
| 7340b9ecc2 | |||
| 78eb4ebe0b | |||
| b1453a34ec | |||
| c357e9e2f5 | |||
| 98717bf8a9 | |||
| d7077ada0e | |||
| 64f63ed1d3 | |||
| 2a27f6c30d | |||
| 9fdddeaba8 | |||
| 2cfa5e93e4 | |||
| 778ac14344 | |||
| 85fcf8be61 | |||
| b31eb09015 | |||
| 5154dd1740 | |||
| 274f11ef1d | |||
| aeb1acf458 | |||
| a204f4a58e | |||
| 8e4a57aa01 | |||
| 797ed0a553 | |||
| 663bc0d471 | |||
| 8d7e2d2c46 | |||
| 19d96bb30b | |||
| 47f2f20d9c | |||
| 12c7c634c0 | |||
| 9a322c150a | |||
| 1a3bc4f666 | |||
| d4881b1ce5 | |||
| a2ad2df473 | |||
| 541c5bd1c3 | |||
| b744e9673b | |||
| bb94b7c5c6 | |||
| e9ff57d5e1 | |||
| 179245457c | |||
| 1493f74691 | |||
| 4857503ed3 | |||
| a0e38b4f0c | |||
| 1d62cad9e9 | |||
| 855761020c | |||
| 0950d06dfb | |||
| 1496402325 | |||
| 77e2c4babb | |||
| a465082984 | |||
| 025fdac686 | |||
| 6bde5ec64c | |||
| f099a9ec39 | |||
| 5bfcef92ee | |||
| 79a8fbd881 | |||
| 7f96a14cf6 | |||
| 5fe6d70713 | |||
| dcba4dd4bc | |||
| ccbe77913b | |||
| 2844cb81c2 | |||
| d86e8e5920 | |||
| 9665fa1eb4 | |||
| 2788ef679b | |||
| e1a88e1fd8 | |||
| 32163c5302 | |||
| 2d3d5efe87 | |||
| e1bbba392c | |||
| ed642c856b | |||
| 927e462f7a | |||
| e250499a3b | |||
| 91d96a6639 | |||
| 104ec4c87c | |||
| 0a7e8436c3 | |||
| 9e597e0a28 | |||
| 01fbb5d47c | |||
| 6517d16337 | |||
| 0e636adf28 | |||
| 0bb281237b | |||
| 2b224376c2 | |||
| e510b369d7 | |||
| a0de1f7230 | |||
| 4591132269 | |||
| a03de8d490 | |||
| 27bcfec17e | |||
| f6dbec3e1d | |||
| aebc45f705 | |||
| 310c60b9d9 | |||
| bcba67c209 | |||
| fc013aed52 | |||
| 8ad41c059b | |||
| 8eaf8db850 | |||
| 896883766c | |||
| 258dacf3ed | |||
| 242243f485 | |||
| a18436dce1 | |||
| 5323cbc00e | |||
| ddd3b137ac | |||
| 94550088e5 | |||
| 1375ca6f5c | |||
| e4c4fe0495 | |||
| 2fa5277e56 | |||
| b73ad8fdc1 | |||
| 9cc281e65e | |||
| d62107d39b | |||
| 4a8d20ad72 | |||
| 5acb72c39b | |||
| 67e8236a60 | |||
| 18b8853f82 | |||
| 65c7df7938 | |||
| 15678cdfa2 | |||
| 6cd6c62046 | |||
| dbf92805a2 | |||
| 11fc9a7b85 | |||
| 8bc970ff57 | |||
| a16eefd97b | |||
| ca5e5b820c | |||
| f73ad52441 | |||
| 729ec1d1bf | |||
| 4adb30b861 | |||
| 999f6de45f | |||
| 70686502b4 | |||
| d17a980151 | |||
| 7fa5947030 | |||
| de8f120fd4 | |||
| 9b54603264 | |||
| 698c77d7ba | |||
| 18d83a4d18 | |||
| 8e849d93b2 | |||
| 4ca42f028b | |||
| 3118337879 | |||
| db4490affb | |||
| 51ab79384e | |||
| 3ee30a252d | |||
| b883566ebb | |||
| ac78fb85b8 | |||
| 0d2b11d0c4 | |||
| 5b610c88c1 | |||
| bf444ce043 | |||
| c91c027dab | |||
| 81fd87c510 | |||
| 9da174a962 | |||
| 84f54a7e65 | |||
| baeecf1464 | |||
| f2fdd39c96 | |||
| 53b074d78e | |||
| f4fc1e6775 | |||
| dba791b8db | |||
| 750fa02621 | |||
| 7a67816111 | |||
| 613625644e | |||
| 0e25071ef0 | |||
| ed1932cd26 | |||
| 67b89213d0 | |||
| 814f142c5f | |||
| 16cd3e7d5a | |||
| c5dcb8faef | |||
| 6b46c022f9 | |||
| 88ef05fc72 | |||
| 445ea367fc | |||
| c819554f43 | |||
| bbc8a79ded | |||
| 3d181bc10d | |||
| ba5478f382 | |||
| 136c993c8d | |||
| 6cf18ea4e8 | |||
| fe7f56c82e | |||
| 6c580f1e43 | |||
| f171cd4f03 | |||
| ea109e6c30 | |||
| f514eed226 | |||
| 274ba80149 | |||
| 46b4dfc458 | |||
| 4af8f4ff6a | |||
| df5810d695 | |||
| d9ad96c374 | |||
| 06cc93fd82 | |||
| 41da63765f | |||
| 3975411c78 | |||
| fc2e75ef61 | |||
| ef0f2dd3d0 | |||
| 548c3c5d72 | |||
| d2e3a0cb8e | |||
| 9cdace6f81 | |||
| 12f020570e | |||
| bef2551eec | |||
| 7e20f8c189 | |||
| 56e8390e55 | |||
| 89fff16385 | |||
| 2cf15a24eb | |||
| 512e867034 | |||
| ce8c55c3c7 | |||
| 8e0d904d9a | |||
| 6c846a8ae7 | |||
| 5004469fe9 | |||
| 14d0af74ed | |||
| 5a76cf9486 | |||
| 82901ccd02 | |||
| 1dc9d66673 | |||
| a0cbfaf390 | |||
| 9a01ae61ef | |||
| 91837d5acd | |||
| 1b9ebdda22 | |||
| b6f6177af3 | |||
| d35486196b | |||
| 1603637e3b | |||
| 8f20840169 | |||
| 4fff2394de | |||
| afb74e68ee | |||
| d5fa7844c5 | |||
| b8470cd640 | |||
| 9a23f573a6 | |||
| efe8fa0fda | |||
| 2d16e8bb4f | |||
| bbd95eebff | |||
| ceb00b4e93 | |||
| cc60d26d1c | |||
| ba3ff739f6 | |||
| 6062647705 | |||
| 070c1c2de9 | |||
| d3aaa69409 | |||
| 0ac7753e35 | |||
| eba9d53d2e | |||
| d04d4ec8e7 | |||
| c7c3efcbe7 | |||
| 2b8d53a44c | |||
| ef6b573e08 | |||
| 61eedd41df | |||
| b265bcda20 | |||
| d703d32a1f | |||
| aab9334404 | |||
| c2570f6955 | |||
| 8e936a6334 | |||
| 46bfc22869 | |||
| db1620dd56 | |||
| e59f8a42a3 | |||
| 17d18bd85d | |||
| fb256cf578 | |||
| 1b6b5db76d | |||
| 41647ca83a | |||
| 07d2a17a87 | |||
| 6d744dfb7e | |||
| b9b946c35f | |||
| 17adfe2117 | |||
| 1e5e21102d | |||
| 4af992222f | |||
| a9447c6a11 | |||
| db71323313 | |||
| b9b2748e05 | |||
| 387231f743 | |||
| 2216a89aa3 | |||
| 4faa6326fa | |||
| cb22b3d9a1 | |||
| 152a3873bd | |||
| adc2760a89 | |||
| dde64acb06 | |||
| 008adbd8bc | |||
| 0e4866a5a2 | |||
| 5cb96cae3a | |||
| 8cbb82a67f | |||
| 848ddbe477 | |||
| 083c1cde8b | |||
| b792971062 | |||
| 07dde8f4b1 | |||
| 01f94127dd | |||
| 4d457b4e9e | |||
| 8ac93ff2da | |||
| ef33a4b08e | |||
| fdd3b25a27 | |||
| 4dc979da08 | |||
| 8f426e03c4 | |||
| 40cd085bf8 | |||
| 6aa75fc5d1 | |||
| eae5920f9d | |||
| 2f6bfa37cc | |||
| 9d6fd9b9b8 | |||
| 260cd67c96 | |||
| aff76e2d18 | |||
| 52e4343045 | |||
| 1ffbb135c6 | |||
| c3ec522261 | |||
| 4538839376 | |||
| 834edd3a71 | |||
| 581c3d9593 | |||
| 0c672fbaa5 | |||
| 6d96b9a312 | |||
| 691791ccd0 | |||
| f4299121d5 | |||
| 1adfb7eedd | |||
| 33ad583d15 | |||
| a7e2fe2277 | |||
| 5a479d5863 | |||
| 873ff034d2 | |||
| 61d3537617 | |||
| ae068a3f64 | |||
| f7402cd6f5 | |||
| c53f9c8020 | |||
| 798b4d57f4 | |||
| 98d428fb34 | |||
| 3ac5ace216 | |||
| 444a1a7ab9 | |||
| 43ea4bd4b5 | |||
| 6a9272e40a | |||
| 10589a11aa | |||
| a88f898bc0 | |||
| 7a84038b04 | |||
| 111c40732d | |||
| 69bb78c8be | |||
| ad3b327d69 | |||
| dc27f38534 | |||
| 5b0816cb92 | |||
| 57f6955303 | |||
| 78915f878d | |||
| 6ced6d626b | |||
| ee3cb819b4 | |||
| cc17b1d19d | |||
| 2c83240d47 | |||
| 54f18ff120 | |||
| 5e1fe363c3 | |||
| 3d2ec507e1 | |||
| 1dd7af3c8b | |||
| 06ec1fcebf | |||
| 86cb863fd4 | |||
| d5ef1288d8 | |||
| f3354c498d | |||
| 9557141b38 | |||
| 3144b66e73 | |||
| 6dbefa3d2f | |||
| c8f3b139e8 | |||
| 288663325d | |||
| 49947ee01d | |||
| fa7a45ebc7 | |||
| 9a074c222f | |||
| 4e0d7b6ed9 | |||
| 1f3defb04c | |||
| 6c52c43460 | |||
| deae2879f1 | |||
| 5b255a7d8b | |||
| 6e06c24b7a | |||
| 2fde1efdd3 | |||
| aeb29d983a | |||
| c8a7123da9 | |||
| 5c22061415 | |||
| 9a0fda8c02 | |||
| 2f9a17c44a | |||
| 50559015d8 | |||
| a8d4e143c2 | |||
| 2a6c69538d | |||
| 0ba5d61353 | |||
| d436ec5790 | |||
| 759b822b92 | |||
| 9df45af698 | |||
| 3474e81446 | |||
| e1f07eb957 | |||
| 71ff1b98be | |||
| 9b370dfa88 | |||
| 0be0661750 | |||
| eaa7230af7 | |||
| 11cb000481 | |||
| 8ae3554a58 | |||
| dfd4736386 | |||
| feb793c9fa | |||
| ee962fde08 | |||
| c08dd96de3 | |||
| b52f771133 | |||
| 4631232551 | |||
| df7f5047aa | |||
| 467d14324d | |||
| cbdce08e96 | |||
| d6bf8f8854 | |||
| 4599da3ded | |||
| 6d50952b2e | |||
| 7066947809 | |||
| e2924aacab | |||
| 1e86d2503f | |||
| eb67eee53a | |||
| dfdad45963 | |||
| 4735508d87 | |||
| c43c47eab8 | |||
| fafb2dc6b9 | |||
| 140e99c465 | |||
| 7ba1974390 | |||
| 51b8510f17 | |||
| 5d6949d471 | |||
| 8e9d0c1fd1 | |||
| 3852a3b779 | |||
| 8b4ba96936 | |||
| 0c17e18491 | |||
| 2bdbab3afc | |||
| b97499a95e | |||
| a70ac57872 | |||
| a9cf457024 | |||
| e5c938ac37 | |||
| edad54efa2 | |||
| f88426758f | |||
| 77a28eb810 | |||
| f834b27562 | |||
| 984e257cc5 | |||
| 729e7612bc | |||
| 59fadeae57 | |||
| bfbf7a298a | |||
| aad5d3bd65 | |||
| 504f19c445 | |||
| 19c47eb442 | |||
| ab6043df60 | |||
| 3305549a0f | |||
| c24c3cb571 | |||
| 952999258b | |||
| 0713eaa52c | |||
| 8fee689f60 | |||
| 75ddb17fed | |||
| 0c6a74626c | |||
| 41e3d0eaf9 | |||
| 8b9cfebd42 | |||
| 16badee259 | |||
| 9d5171dd36 | |||
| e0c0e81b7d | |||
| fd4e8985fc | |||
| 1d9b8503c0 | |||
| b3ef7b914d | |||
| 2f59e12e20 | |||
| 30e8652c2a | |||
| 5ee6aceb60 | |||
| 6940b6a6d1 | |||
| 4e33ce9415 | |||
| 944e22bde6 | |||
| 6054fa0a26 | |||
| 4db13cfed4 | |||
| 6a6adda2e0 | |||
| 4afa55c0db | |||
| bc120bfb2b | |||
| 88966699e7 | |||
| 9a5db3dcfb | |||
| 392aa1e654 | |||
| f2b32e47ff | |||
| 58136d0181 | |||
| 02733e55cb | |||
| 60df8456a7 | |||
| 6d0ecc805c | |||
| a0e9dd24a3 | |||
| d1eb89057d | |||
| 161c6dc83a | |||
| 54848b8a7e | |||
| 990563c604 | |||
| 8489ca8c8d | |||
| b57e2c89e3 | |||
| 66bedf78ac | |||
| 592c5cce60 | |||
| 2ccf9a4e92 | |||
| ed333c0513 | |||
| 89b65b7009 | |||
| 0cc2d346af | |||
| 5f81e78bc4 | |||
| 554b5fd4b5 | |||
| a58c3a6a52 | |||
| 6147f1131b | |||
| 26552aa996 | |||
| 17cc31f376 | |||
| 41f7a63392 | |||
| 70474ce517 | |||
| 365f144c57 | |||
| ff1e1c249f | |||
| e3ed6f802d | |||
| b5ed078260 | |||
| 64310292da | |||
| 2656d0dfa5 | |||
| 70a7f0aaf4 | |||
| d405dcaa3a | |||
| 5ecef67855 | |||
| 8f6d9f8c31 | |||
| 8662437b1a | |||
| ce3e5629e7 | |||
| d4c487534d | |||
| 2b9577b87d | |||
| 6a0f8564f3 | |||
| e9f74946e3 | |||
| e043ab8710 | |||
| 79dd2f5f6b | |||
| 76e6ca8f0c | |||
| 0f310e866f | |||
| 1f66221bbd | |||
| 635b70fb6c | |||
| d113801b18 | |||
| ac74efed4a | |||
| 52e1dc2fb2 | |||
| 7564fd5e03 | |||
| 96810328ee | |||
| 5603a98df9 | |||
| 5c800e35f2 | |||
| dd15eecdf1 | |||
| b6cb68bfcf | |||
| 07c5143f1e | |||
| e8c0cf3306 | |||
| 5e86d16442 | |||
| 5ff246a241 | |||
| 58d54682ab | |||
| 5ab547d434 | |||
| 96a5868543 | |||
| 0422c03efe | |||
| 2745c7295e | |||
| 82f6ec5839 | |||
| 8e1a155cff | |||
| 521578c4aa | |||
| a04f5f8c94 | |||
| fb6f96689b | |||
| 69a12d45f3 | |||
| bf4dd37a1b | |||
| b1230a9758 | |||
| 23621c57ed | |||
| 5f49a9f8ef | |||
| c5b31c3975 | |||
| 74dbe11d4a | |||
| 64b18c0a0a | |||
| 7c6cec8eea | |||
| 2b1869e1b3 | |||
| 87e5a155ba | |||
| d5c7071f1b | |||
| 04eb2210e6 | |||
| 4748b00be1 | |||
| 18968ba985 | |||
| 59b300b71e | |||
| 5916ef74f9 | |||
| f5602723c7 | |||
| 59795dcd22 | |||
| 127a5cbf96 | |||
| 2b040664cb | |||
| 4ffbdfd16c | |||
| e200cbf312 | |||
| f4edd192fd | |||
| dd07167087 | |||
| 81aa8468a7 | |||
| 871e72b655 | |||
| 9825d8e2f3 | |||
| 58c5569beb | |||
| c975511c74 | |||
| e3c52fb1f9 | |||
| 397517e666 | |||
| 09088febe8 | |||
| bbf5dc078e | |||
| 14d57aa622 | |||
| bcfc4921ca | |||
| cff70ebadd | |||
| 4b9c958d65 | |||
| 7dc7116a2f | |||
| 92a2c93644 | |||
| 7be0d88794 | |||
| ff6ca01813 | |||
| ce0dca86ac | |||
| 6c51a36dbc | |||
| 72bb31881a | |||
| c6fcad03cd | |||
| 70de7133a9 | |||
| ef36751eac | |||
| dee1461b9c | |||
| 3b775fc817 | |||
| da52eff9d3 | |||
| a7efaa7720 | |||
| a42587c498 | |||
| d29265f042 | |||
| c305b44c41 | |||
| 32ff65be1c | |||
| b550cbdfc7 | |||
| f767ad81ce | |||
| 35d04055ac | |||
| c7fe75829f | |||
| 8299b4c148 | |||
| 5bb84f8930 | |||
| 047c9a2f07 | |||
| 8c11925444 | |||
| 1cbb4fd11a | |||
| 0a8d9ebd55 | |||
| 386724655e | |||
| 7b37b9e204 | |||
| 3b02612124 | |||
| 32b040cbcf | |||
| 75a15a12a6 | |||
| 0cb7be8381 | |||
| 20d3c267a3 | |||
| 84313ffa8c | |||
| be66ce0f32 | |||
| 12c1194009 | |||
| 82b83a39dd | |||
| ac617de4ae | |||
| b6731c9afa | |||
| 3a7ece6508 | |||
| 2c69d2805d | |||
| 87b03c67ec | |||
| 569b08288e | |||
| 049fa90832 | |||
| f23347de7e | |||
| 0272283f94 | |||
| 64640c1331 | |||
| ff1471cfe8 | |||
| aae3783f67 | |||
| 053aa12a91 | |||
| 17a006db8f | |||
| 56d912da3d | |||
| 3c60284e6e | |||
| 76ddff4820 | |||
| 1bd6dc0a1a | |||
| 5c7d289123 | |||
| 8f6d646a1f | |||
| c42123fe2a | |||
| 58bd84b600 | |||
| 621eb4a54c | |||
| 9073cff1c1 | |||
| d69516df5c | |||
| 7322280d3d | |||
| 5f79569ea9 | |||
| fe8b8472b7 | |||
| cb2b1a89b5 | |||
| 6ece7b884a | |||
| 04fc9264cb | |||
| 016c2df942 | |||
| bf6a2b60b9 | |||
| 5093e70552 | |||
| 3bd50e1b45 | |||
| 793383f70d | |||
| 3b84e42932 | |||
| 09efc9b148 | |||
| 90c2542486 | |||
| 9259fa3b6d | |||
| 0c8f102830 | |||
| 02972a0fb6 | |||
| 2a4a65f129 | |||
| e16270e1ec | |||
| 201a884828 | |||
| 2a32139be3 | |||
| 7955bf2b86 | |||
| a5d70e4ca3 | |||
| 12eb08ee08 | |||
| fe74583bae | |||
| b8b1dd2cfb | |||
| 9723b328c3 | |||
| edc3ab6d00 | |||
| 0e243cd167 | |||
| b8e0064381 | |||
| 018c77901d | |||
| 5849fd9c94 | |||
| 6a5d1eb5c2 | |||
| fc70857fae | |||
| 5cd6fe23d8 | |||
| beffcdcba9 | |||
| cdd39457ff | |||
| 937b2806ef | |||
| 34552190c6 | |||
| 7e762d5ddc | |||
| 8e78b21a5c | |||
| ae85fdf59f | |||
| e39dc428cc | |||
| cc178efacb | |||
| 8a7a3afc10 | |||
| e0f1689125 | |||
| 3acdd75863 | |||
| 1ca5ff726c | |||
| 464051c319 | |||
| 548859fa65 | |||
| f57c10508f | |||
| b7f53d965f | |||
| 28b1090fd7 | |||
| 566bf8d38e | |||
| 663306bd3b | |||
| 165a5e60d3 | |||
| 3b01a26eed | |||
| 65f997a2ba | |||
| c1217ed8ed | |||
| 6ae76c553f | |||
| 141096eace | |||
| c4003a888a | |||
| d1c22e12a7 | |||
| 9461cab182 | |||
| dcceead4ca | |||
| ae8ac5111c | |||
| 1e35fceb61 | |||
| 88d0ffd712 | |||
| 6cbc3fbf28 | |||
| 4eb6f6da9d | |||
| 5bc320385f | |||
| 3d39a35c03 | |||
| 5ca9475bb6 | |||
| f12386c614 | |||
| 485dd0d91f | |||
| fc137176bd | |||
| b34fe2f9f6 | |||
| 3b7916c536 | |||
| d11a2b59ee | |||
| 63d1ec4c30 | |||
| 4ed49be67e | |||
| 3a0749c5b2 | |||
| 63470adc0f | |||
| 0bbfad4b41 | |||
| f9cb97ca49 | |||
| b63636cf2f | |||
| 54b388c9ae | |||
| d233f0946f | |||
| 671ac36946 | |||
| e6ba217302 | |||
| b9a18a5442 | |||
| f8d0f25f72 | |||
| 2213c61760 | |||
| e7edffa237 | |||
| fd83aca7a4 | |||
| bdc2d07747 | |||
| 1953f7d31a | |||
| 054ed80bbe | |||
| 13b64e65c3 | |||
| ee9ee72505 | |||
| 1b631a91b3 | |||
| 118d2cb2ad | |||
| b6acfa9d49 | |||
| 4798ba3fd0 | |||
| 14608f97da | |||
| 901d590159 | |||
| 28e71d4ac7 | |||
| 7f958c9e66 | |||
| 910f0c5556 | |||
| 427ef8c108 | |||
| 2efdfca7e5 | |||
| bc113b08f7 | |||
| 262a6d2560 | |||
| f9224aa25d | |||
| 6d0f7949f8 | |||
| 1a679bcf90 | |||
| 5de34fe3af | |||
| 420b4565dd | |||
| 27eb9adb16 | |||
| c4277b9ef0 | |||
| ec39c58474 | |||
| 3ce2e86a66 | |||
| d62757d94a | |||
| 7ba315c796 | |||
| 75e909e0e7 | |||
| 285c6a3fac | |||
| 9bcdbf8db6 | |||
| ae9d85d2cd | |||
| ad3191fcaf | |||
| d6c98bd304 | |||
| 52c1be20d9 | |||
| 69fb879439 | |||
| 135153464a | |||
| 87e556d6c4 | |||
| 46866ac061 | |||
| 9f222caadf | |||
| f82ac7a476 | |||
| 4fa5d875e9 | |||
| e2b1c6aff0 | |||
| 16b25fcc1f | |||
| 17cd765d00 | |||
| 1ea8b42e5f | |||
| 6b5a207522 | |||
| 85d5fef3fb | |||
| e9a77abd83 | |||
| 9d2857d41e | |||
| 62e71307d0 | |||
| f517f12b7e | |||
| 510b1d178d | |||
| 890e8afd47 | |||
| c25ce6db9d | |||
| ec789de1d1 | |||
| e96ac16d85 | |||
| 9d6fe97b11 | |||
| 8e90552fec | |||
| 71c8d5527e | |||
| 9fef95303a | |||
| 8458da826e | |||
| df59f5eb6b | |||
| 7c0ec8677f | |||
| 2e23026690 | |||
| 7bc110820e | |||
| d3cddd5b60 | |||
| 24cff721dc | |||
| 054df44e05 | |||
| 2dc3af3761 | |||
| 3d9bf70c85 | |||
| 30f4f6e7b8 | |||
| c5c71859f9 | |||
| b1a12d88a1 | |||
| 78d06e79a5 | |||
| 7580f1526f | |||
| 198e76c291 | |||
| f47bb1484c | |||
| 91f5136e29 | |||
| da3be58eec | |||
| 94432b496b | |||
| eab2550b88 | |||
| 179a496737 | |||
| 8f62c2ab78 | |||
| 9eaee22e3b | |||
| 2bdfe8399d | |||
| 001570464c | |||
| 90e77010bb | |||
| 6b73294186 | |||
| 101237aa75 | |||
| 8d3377aeb3 | |||
| 3ad350b10b | |||
| ce00480d99 | |||
| 2e7f2b6004 | |||
| aad0eae1b2 | |||
| be3e64b1aa | |||
| c089ea7499 | |||
| 190010b3e3 | |||
| 4dcdc175ee | |||
| 35fe54c713 | |||
| fd22d55835 | |||
| c978d78bd4 | |||
| fb488596b8 | |||
| 9edfa48e23 | |||
| 25360c5fba | |||
| e8ed346f20 | |||
| 507a35a826 | |||
| e604cf97ae | |||
| 510753ae1c | |||
| 828dfb56a2 | |||
| 843a8dcd69 | |||
| 53255d5524 | |||
| 0f8a5149b5 | |||
| 442d7e5fb5 | |||
| 11011d5367 | |||
| b80afca458 | |||
| a93d9aab08 | |||
| 721d31d98d | |||
| 8d83cff966 | |||
| 7643ee7749 | |||
| 78b0d5eb96 | |||
| 517252240a | |||
| 173635cfd1 | |||
| 051a05435e | |||
| cb367596d1 | |||
| 37de238f92 | |||
| b977c0e31c | |||
| f58c4ec8d7 | |||
| 48b0815363 | |||
| 4f15c9f749 | |||
| 7dd5175063 | |||
| cb9c6739cb | |||
| e541430891 | |||
| 60057716ae | |||
| f684442cc0 | |||
| d4246d305e | |||
| d13fafb2da |
@@ -1,7 +1,12 @@
|
||||
# big-AGI non-code files
|
||||
/docs/
|
||||
/dist/
|
||||
README.md
|
||||
|
||||
# Ignore build and log files
|
||||
Dockerfile
|
||||
/.dockerignore
|
||||
|
||||
# Node build artifacts
|
||||
/node_modules
|
||||
/.pnp
|
||||
|
||||
@@ -21,8 +21,9 @@ assignees: enricoros
|
||||
- [ ] Create a temporary tag `git tag v1.2.3 && git push opensource --tags`
|
||||
- [ ] Create a [New Draft GitHub Release](https://github.com/enricoros/big-agi/releases/new), and generate the automated changelog (for new contributors)
|
||||
- [ ] Update the release version in package.json, and `npm i`
|
||||
- [ ] Update in-app News [src/apps/news/news.data.tsx](/src/apps/news/news.data.tsx)
|
||||
- [ ] Update the in-app News version number
|
||||
- [ ] Update in-app Cover graphics
|
||||
- [ ] Update the README.md with the new release
|
||||
- [ ] Copy the highlights to the [docs/changelog.md](/docs/changelog.md)
|
||||
- Release:
|
||||
@@ -79,11 +80,32 @@ I need the following from you:
|
||||
|
||||
1. a table summarizing all the new features in 1.2.3 with the following columns: 4 words description (exactly what it is), short description, usefulness (what it does for the user), significance, link to the issue number (not the commit), which will be used for the artifacts later
|
||||
2. then double-check the git log to see if there are any features of significance that are not in the table
|
||||
3. then score each feature in terms of importance for users (1-10), relative impact of the feature (1-10, where 10 applies to the broadest user base), and novelty and uniqueness (1-10, where 10 is truly unique and novel from what exists already)
|
||||
4. then improve the table, in decreasing order of importance for features, fixing any detail that's missing, in particular check if there are commits of significance from a user or developer point of view, which are not contained in the table
|
||||
5. then I want you then to update the news.data.tsx for the new release
|
||||
```
|
||||
|
||||
### release name
|
||||
|
||||
```markdown
|
||||
please brainstorm 10 different names for this release. see the former names here: https://big-agi.com/blog
|
||||
```
|
||||
|
||||
You can follow with 'What do you think of Modelmorphic?' or another selected name
|
||||
|
||||
### cover images
|
||||
|
||||
```markdown
|
||||
Great, now I need to generate images for this. Before I used the following prompts (2 releases before).
|
||||
|
||||
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is using a computer with split screen made of origami, split keyboard and is wearing origami sunglasses with very different split reflections. Split halves are very contrasting. Close up photography, bokeh, white background.
|
||||
import coverV113 from '../../../public/images/covers/release-cover-v1.13.0.png';
|
||||
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is calling on a 3D origami old-school pink telephone and the camera is zooming on the telephone. Close up photography, bokeh, white background.
|
||||
import coverV112 from '../../../public/images/covers/release-cover-v1.12.0.png';
|
||||
|
||||
What can I do now as far as images? Give me 4 prompt ideas with the same style as looks as the former, but different scene or action
|
||||
```
|
||||
|
||||
### Readme (and Changelog)
|
||||
|
||||
```markdown
|
||||
|
||||
@@ -32,6 +32,12 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
|
||||
with:
|
||||
@@ -49,12 +55,15 @@ jobs:
|
||||
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/main-stable' }}
|
||||
type=ref,event=tag # Use the tag name as a tag for tag builds
|
||||
type=semver,pattern={{version}} # Generate semantic versioning tags for tag builds
|
||||
type=sha # Just in case none of the above applies
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# Frontend Build: ignore API files disabled for this build
|
||||
/app/**/*.backup
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
@@ -10,6 +13,7 @@
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/dist/
|
||||
/out/
|
||||
|
||||
# production
|
||||
|
||||
@@ -2,22 +2,28 @@
|
||||
FROM node:18-alpine AS base
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
|
||||
# Dependencies
|
||||
FROM base AS deps
|
||||
WORKDIR /app
|
||||
|
||||
# Dependency files
|
||||
COPY package*.json ./
|
||||
COPY prisma ./prisma
|
||||
COPY src/server/prisma ./src/server/prisma
|
||||
|
||||
# Install dependencies, including dev (release builds should use npm ci)
|
||||
ENV NODE_ENV development
|
||||
RUN npm ci
|
||||
|
||||
|
||||
# Builder
|
||||
FROM base AS builder
|
||||
WORKDIR /app
|
||||
|
||||
# Optional argument to configure GA4 at build time (see: docs/deploy-analytics.md)
|
||||
ARG NEXT_PUBLIC_GA4_MEASUREMENT_ID
|
||||
ENV NEXT_PUBLIC_GA4_MEASUREMENT_ID=${NEXT_PUBLIC_GA4_MEASUREMENT_ID}
|
||||
|
||||
# Copy development deps and source
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
@@ -29,6 +35,7 @@ RUN npm run build
|
||||
# Reduce installed packages to production-only
|
||||
RUN npm prune --production
|
||||
|
||||
|
||||
# Runner
|
||||
FROM base AS runner
|
||||
WORKDIR /app
|
||||
@@ -38,9 +45,10 @@ RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
|
||||
# Copy Built app
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/public public
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next .next
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/node_modules node_modules
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next ./.next
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/src/server/prisma ./src/server/prisma
|
||||
|
||||
# Minimal ENV for production
|
||||
ENV NODE_ENV production
|
||||
|
||||
@@ -1,27 +1,94 @@
|
||||
# BIG-AGI 🧠✨
|
||||
|
||||
Welcome to big-AGI 👋, the GPT application for professionals that need function, form,
|
||||
simplicity, and speed. Powered by the latest models from 11 vendors and
|
||||
open-source model servers, `big-AGI` offers best-in-class Voice and Chat with AI Personas,
|
||||
visualizations, coding, drawing, calling, and quite more -- all in a polished UX.
|
||||
Welcome to big-AGI, the AI suite for professionals that need function, form,
|
||||
simplicity, and speed. Powered by the latest models from 12 vendors and
|
||||
open-source servers, `big-AGI` offers best-in-class Chats,
|
||||
[Beams](https://github.com/enricoros/big-AGI/issues/470),
|
||||
and [Calls](https://github.com/enricoros/big-AGI/issues/354) with AI personas,
|
||||
visualizations, coding, drawing, side-by-side chatting, and more -- all wrapped in a polished UX.
|
||||
|
||||
Pros use big-AGI. 🚀 Developers love big-AGI. 🤖
|
||||
Stay ahead of the curve with big-AGI. 🚀 Pros & Devs love big-AGI. 🤖
|
||||
|
||||
[](https://big-agi.com)
|
||||
|
||||
Or fork & run on Vercel
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-agi&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-agi)
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
## 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2)
|
||||
## 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [installation](docs/installation.md) 👉 [documentation](docs/README.md)
|
||||
|
||||
big-AGI is an open book; our **[public roadmap](https://github.com/users/enricoros/projects/4/views/2)**
|
||||
shows the current developments and future ideas.
|
||||
> Note: bigger better features (incl. Beam-2) are being cooked outside of `main`.
|
||||
|
||||
- Got a suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
- Want to contribute? [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
|
||||
[//]: # (big-AGI is an open book; see the **[ready-to-ship and future ideas](https://github.com/users/enricoros/projects/4/views/2)** in our open roadmap)
|
||||
|
||||
## What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline
|
||||
### What's New in 1.16.1...1.16.3 · Jun 20, 2024 (patch releases)
|
||||
|
||||
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
|
||||
- 1.16.2: Improve web downloads, as text, markdown, or HTML
|
||||
- 1.16.2: Proper support for Gemini models
|
||||
- 1.16.2: Added the latest Mistral model
|
||||
- 1.16.2: Tokenizer support for gpt-4o
|
||||
- 1.16.2: Updates to Beam
|
||||
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
|
||||
|
||||
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
|
||||
|
||||
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
|
||||
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
|
||||
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
|
||||
- Major enhancements to the Auto-Diagrams tool
|
||||
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
|
||||
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
|
||||
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
|
||||
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
|
||||
|
||||
#### 3,000 Commits Milestone · April 7, 2024
|
||||
|
||||

|
||||
|
||||
- 🥇 Today we <b>celebrate commit 3000</b> in just over one year, and going stronger 🚀
|
||||
- 📢️ Thanks everyone for your support and words of love for Big-AGI, we are committed to creating the best AI experiences for everyone.
|
||||
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
|
||||
- Enhanced the default Persona
|
||||
- Fixes to Gemini models and SVGs, improvements to UI and icons
|
||||
- 1.15.1: Support for Gemini Pro 1.5 and OpenAI Turbo models
|
||||
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.14.1 · March 7, 2024 · Modelmorphic</summary>
|
||||
|
||||
- **Anthropic** [Claude-3](https://www.anthropic.com/news/claude-3-family) model family support. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- New **[Perplexity](https://www.perplexity.ai/)** and **[Groq](https://groq.com/)** integration (thanks @Penagwin). [#407](https://github.com/enricoros/big-AGI/issues/407), [#427](https://github.com/enricoros/big-AGI/issues/427)
|
||||
- **[LocalAI](https://localai.io/models/)** deep integration, including support for [model galleries](https://github.com/enricoros/big-AGI/issues/411)
|
||||
- **Mistral** Large and Google **Gemini 1.5** support
|
||||
- Performance optimizations: runs [much faster](https://twitter.com/enricoros/status/1756553038293303434?utm_source=localhost:3000&utm_medium=big-agi), saves lots of power, reduces memory usage
|
||||
- Enhanced UX with auto-sizing charts, refined search and folder functionalities, perfected scaling
|
||||
- And with more UI improvements, documentation, bug fixes (20 tickets), and developer enhancements
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind</summary>
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385686b13
|
||||
|
||||
- **Side-by-Side Split Windows**: multitask with parallel conversations. [#208](https://github.com/enricoros/big-AGI/issues/208)
|
||||
- **Multi-Chat Mode**: message everyone, all at once. [#388](https://github.com/enricoros/big-AGI/issues/388)
|
||||
- **Export tables as CSV**: big thanks to @aj47. [#392](https://github.com/enricoros/big-AGI/pull/392)
|
||||
- Adjustable text size: customize density. [#399](https://github.com/enricoros/big-AGI/issues/399)
|
||||
- Dev2 Persona Technology Preview
|
||||
- Better looking chats with improved spacing, fonts, and menus
|
||||
- More: new video player, [LM Studio tutorial](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-lmstudio.md) (thanks @aj47), [MongoDB support](https://github.com/enricoros/big-AGI/blob/main/docs/deploy-database.md) (thanks @ranfysvalle02), and speedups
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline</summary>
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/95ceb03c-945d-4fdd-9a9f-3317beb54f3f
|
||||
|
||||
@@ -34,7 +101,10 @@ https://github.com/enricoros/big-AGI/assets/32999/95ceb03c-945d-4fdd-9a9f-3317be
|
||||
- Paste tables from Excel [#286](https://github.com/enricoros/big-AGI/issues/286)
|
||||
- Ollama model updates and context window detection fixes [#309](https://github.com/enricoros/big-AGI/issues/309)
|
||||
|
||||
### What's New in 1.11.0 · Jan 16, 2024 · Singularity
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.11.0 · Jan 16, 2024 · Singularity</summary>
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cfcb110c68
|
||||
|
||||
@@ -45,7 +115,10 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
- Enable adding up to five custom OpenAI-compatible endpoints
|
||||
- Developer enhancements: new 'Actiles' framework
|
||||
|
||||
### What's New in 1.10.0 · Jan 6, 2024 · The Year of AGI
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>What's New in 1.10.0 · Jan 6, 2024 · The Year of AGI</summary>
|
||||
|
||||
- **New UI**: for both desktop and mobile, sets the stage for future scale. [#201](https://github.com/enricoros/big-AGI/issues/201)
|
||||
- **Conversation Folders**: enhanced conversation organization. [#321](https://github.com/enricoros/big-AGI/issues/321)
|
||||
@@ -54,115 +127,94 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
- Large performance optimizations
|
||||
- Developer enhancements: new UI framework, updated documentation for proxy settings on browserless/docker
|
||||
|
||||
</details>
|
||||
|
||||
For full details and former releases, check out the [changelog](docs/changelog.md).
|
||||
|
||||
## ✨ Key Features 👊
|
||||
## 👉 Key Features ✨
|
||||
|
||||

|
||||
[More](docs/pixels/big-AGI-compo2b.png), [screenshots](docs/pixels).
|
||||
|  |  |  |  |  |
|
||||
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
|
||||
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
|
||||
|
||||
- **AI Personas**: Tailor your AI interactions with customizable personas
|
||||
- **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface
|
||||
- **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads
|
||||
- **Multiple AI Models**: Choose from a variety of leading AI providers
|
||||
- **Privacy First**: Self-host and use your own API keys for full control
|
||||
- **Advanced Tools**: Execute code, import PDFs, and summarize documents
|
||||
- **Seamless Integrations**: Enhance functionality with various third-party services
|
||||
- **Open Roadmap**: Contribute to the progress of big-AGI
|
||||

|
||||
|
||||
## 💖 Support
|
||||
You can easily configure 100s of AI models in big-AGI:
|
||||
|
||||
| **AI models** | _supported vendors_ |
|
||||
|:--------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) · [Oobabooga](https://github.com/oobabooga/text-generation-webui) |
|
||||
| Local Servers | [LM Studio](https://lmstudio.ai/) |
|
||||
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
|
||||
| Language services | [Anthropic](https://anthropic.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) |
|
||||
| Image services | [Prodia](https://prodia.com/) (SDXL) |
|
||||
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
|
||||
|
||||
Add extra functionality with these integrations:
|
||||
|
||||
| **More** | _integrations_ |
|
||||
|:-------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
|
||||
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
|
||||
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
|
||||
| Sharing | [Paste.gg](https://paste.gg/) (Paste chats) |
|
||||
| Tracking | [Helicone](https://www.helicone.ai) (LLM Observability) |
|
||||
|
||||
[//]: # (- [x] **Flow-state UX** for uncompromised productivity)
|
||||
|
||||
[//]: # (- [x] **AI Personas**: Tailor your AI interactions with customizable personas)
|
||||
|
||||
[//]: # (- [x] **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface)
|
||||
|
||||
[//]: # (- [x] **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads)
|
||||
|
||||
[//]: # (- [x] **Privacy First**: Self-host and use your own API keys for full control)
|
||||
|
||||
[//]: # (- [x] **Advanced Tools**: Execute code, import PDFs, and summarize documents)
|
||||
|
||||
[//]: # (- [x] **Seamless Integrations**: Enhance functionality with various third-party services)
|
||||
|
||||
[//]: # (- [x] **Open Roadmap**: Contribute to the progress of big-AGI)
|
||||
|
||||
<br/>
|
||||
|
||||
## 🚀 Installation
|
||||
|
||||
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
|
||||
The guide covers various installation options, whether you're spinning it up on
|
||||
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
|
||||
through Docker.
|
||||
|
||||
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
|
||||
to set up big-AGI quickly and easily.
|
||||
|
||||
[](docs/installation.md)
|
||||
|
||||
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
|
||||
|
||||
<br/>
|
||||
|
||||
# 🌟 Get Involved!
|
||||
|
||||
[//]: # ([](https://discord.gg/MkH4qj2Jp9))
|
||||
[](https://discord.gg/MkH4qj2Jp9)
|
||||
|
||||
* Enjoy the hosted open-source app on [big-AGI.com](https://big-agi.com)
|
||||
* [Chat with us](https://discord.gg/MkH4qj2Jp9)
|
||||
* Deploy your [fork](https://github.com/enricoros/big-agi/fork) for your friends and family
|
||||
* send PRs! ...
|
||||
🎭[Editing Personas](https://github.com/enricoros/big-agi/issues/35),
|
||||
🧩[Reasoning Systems](https://github.com/enricoros/big-agi/issues/36),
|
||||
🌐[Community Templates](https://github.com/enricoros/big-agi/issues/35),
|
||||
and [your big-IDEAs](https://github.com/enricoros/big-agi/issues/new?labels=RFC&body=Describe+the+idea)
|
||||
- [ ] 📢️ [**Chat with us** on Discord](https://discord.gg/MkH4qj2Jp9)
|
||||
- [ ] ⭐ **Give us a star** on GitHub 👆
|
||||
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
|
||||
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
|
||||
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
|
||||
|
||||
<br/>
|
||||
|
||||
## 🧩 Develop
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/stargazers))
|
||||
|
||||

|
||||

|
||||

|
||||
[//]: # ([](https://github.com/enricoros/big-agi/network))
|
||||
|
||||
Clone this repo, install the dependencies (all locally), and run the development server (which auto-watches the
|
||||
files for changes):
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/pulls))
|
||||
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-agi.git
|
||||
cd big-agi
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/LICENSE))
|
||||
|
||||
The development app will be running on `http://localhost:3000`. Development builds have the advantage of not requiring
|
||||
a build step, but can be slower than production builds. Also, development builds won't have timeout on edge functions.
|
||||
---
|
||||
|
||||
## 🌐 Deploy manually
|
||||
|
||||
The _production_ build of the application is optimized for performance and is performed by the `npm run build` command,
|
||||
after installing the required dependencies.
|
||||
|
||||
```bash
|
||||
# .. repeat the steps above up to `npm install`, then:
|
||||
npm run build
|
||||
next start --port 3000
|
||||
```
|
||||
|
||||
The app will be running on the specified port, e.g. `http://localhost:3000`.
|
||||
|
||||
Want to deploy with username/password? See the [Authentication](docs/deploy-authentication.md) guide.
|
||||
|
||||
## 🐳 Deploy with Docker
|
||||
|
||||
For more detailed information on deploying with Docker, please refer to the [docker deployment documentation](docs/deploy-docker.md).
|
||||
|
||||
Build and run:
|
||||
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
docker run -d -p 3000:3000 big-agi
|
||||
```
|
||||
|
||||
Or run the official container:
|
||||
|
||||
- manually: `docker run -d -p 3000:3000 ghcr.io/enricoros/big-agi`
|
||||
- or, with docker-compose: `docker-compose up` or see [the documentation](docs/deploy-docker.md) for a composer file with integrated browsing
|
||||
|
||||
## ☁️ Deploy on Cloudflare Pages
|
||||
|
||||
Please refer to the [Cloudflare deployment documentation](docs/deploy-cloudflare.md).
|
||||
|
||||
## 🚀 Deploy on Vercel
|
||||
|
||||
Create your GitHub fork, create a Vercel project over that fork, and deploy it. Or press the button below for convenience.
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-agi&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-agi)
|
||||
|
||||
## Integrations:
|
||||
|
||||
* Local models: Ollama, Oobabooga, LocalAi, etc.
|
||||
* [ElevenLabs](https://elevenlabs.io/) Voice Synthesis (bring your own voice too) - Settings > Text To Speech
|
||||
* [Helicone](https://www.helicone.ai/) LLM Observability Platform - Models > OpenAI > Advanced > API Host: 'oai.hconeai.com'
|
||||
* [Paste.gg](https://paste.gg/) Paste Sharing - Chat Menu > Share via paste.gg
|
||||
* [Prodia](https://prodia.com/) Image Generation - Settings > Image Generation > Api Key & Model
|
||||
|
||||
<br/>
|
||||
|
||||
This project is licensed under the MIT License.
|
||||
|
||||
[](https://github.com/enricoros/big-agi/stargazers)
|
||||
[](https://github.com/enricoros/big-agi/network)
|
||||
[](https://github.com/enricoros/big-agi/pulls)
|
||||
[](https://github.com/enricoros/big-agi/LICENSE)
|
||||
|
||||
[//]: # ([](https://github.com/enricoros/big-agi/issues))
|
||||
|
||||
Made with 💙
|
||||
2023-2024 · Enrico Ros x [big-AGI](https://big-agi.com) · License: [MIT](LICENSE) · Made with 💙
|
||||
|
||||
@@ -1,52 +1,2 @@
|
||||
import { createEmptyReadableStream, safeErrorString, serverFetchOrThrow } from '~/server/wire';
|
||||
|
||||
import { elevenlabsAccess, elevenlabsVoiceId, ElevenlabsWire, speechInputSchema } from '~/modules/elevenlabs/elevenlabs.router';
|
||||
|
||||
|
||||
/* NOTE: Why does this file even exist?
|
||||
|
||||
This file is a workaround for a limitation in tRPC; it does not support ArrayBuffer responses,
|
||||
and that would force us to use base64 encoding for the audio data, which would be a waste of
|
||||
bandwidth. So instead, we use this file to make the request to ElevenLabs, and then return the
|
||||
response as an ArrayBuffer. Unfortunately this means duplicating the code in the server-side
|
||||
and client-side vs. the tRPC implementation. So at least we recycle the input structures.
|
||||
|
||||
*/
|
||||
const handler = async (req: Request) => {
|
||||
try {
|
||||
|
||||
// construct the upstream request
|
||||
const {
|
||||
elevenKey, text, voiceId, nonEnglish,
|
||||
streaming, streamOptimization,
|
||||
} = speechInputSchema.parse(await req.json());
|
||||
const path = `/v1/text-to-speech/${elevenlabsVoiceId(voiceId)}` + (streaming ? `/stream?optimize_streaming_latency=${streamOptimization || 1}` : '');
|
||||
const { headers, url } = elevenlabsAccess(elevenKey, path);
|
||||
const body: ElevenlabsWire.TTSRequest = {
|
||||
text: text,
|
||||
...(nonEnglish && { model_id: 'eleven_multilingual_v1' }),
|
||||
};
|
||||
|
||||
// elevenlabs POST
|
||||
const upstreamResponse: Response = await serverFetchOrThrow(url, 'POST', headers, body);
|
||||
|
||||
// NOTE: this is disabled, as we pass-through what we get upstream for speed, as it is not worthy
|
||||
// to wait for the entire audio to be downloaded before we send it to the client
|
||||
// if (!streaming) {
|
||||
// const audioArrayBuffer = await upstreamResponse.arrayBuffer();
|
||||
// return new NextResponse(audioArrayBuffer, { status: 200, headers: { 'Content-Type': 'audio/mpeg' } });
|
||||
// }
|
||||
|
||||
// stream the data to the client
|
||||
const audioReadableStream = upstreamResponse.body || createEmptyReadableStream();
|
||||
return new Response(audioReadableStream, { status: 200, headers: { 'Content-Type': 'audio/mpeg' } });
|
||||
|
||||
} catch (error: any) {
|
||||
const fetchOrVendorError = safeErrorString(error) + (error?.cause ? ' · ' + error.cause : '');
|
||||
console.log(`api/elevenlabs/speech: fetch issue: ${fetchOrVendorError}`);
|
||||
return new Response(`[Issue] elevenlabs: ${fetchOrVendorError}`, { status: 500 });
|
||||
}
|
||||
};
|
||||
|
||||
export const runtime = 'edge';
|
||||
export { handler as POST };
|
||||
export { elevenLabsHandler as POST } from '~/modules/elevenlabs/elevenlabs.server';
|
||||
@@ -5,13 +5,13 @@ import { createTRPCFetchContext } from '~/server/api/trpc.server';
|
||||
|
||||
const handlerEdgeRoutes = (req: Request) =>
|
||||
fetchRequestHandler({
|
||||
router: appRouterEdge,
|
||||
endpoint: '/api/trpc-edge',
|
||||
router: appRouterEdge,
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? '<no-path>'}:`, error)
|
||||
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
|
||||
@@ -5,15 +5,21 @@ import { createTRPCFetchContext } from '~/server/api/trpc.server';
|
||||
|
||||
const handlerNodeRoutes = (req: Request) =>
|
||||
fetchRequestHandler({
|
||||
router: appRouterNode,
|
||||
endpoint: '/api/trpc-node',
|
||||
router: appRouterNode,
|
||||
req,
|
||||
createContext: createTRPCFetchContext,
|
||||
onError:
|
||||
process.env.NODE_ENV === 'development'
|
||||
? ({ path, error }) => console.error(`❌ tRPC-node failed on ${path ?? '<no-path>'}:`, error)
|
||||
? ({ path, error }) => console.error(`❌ tRPC-node failed on ${path ?? 'unk-path'}: ${error.message}`)
|
||||
: undefined,
|
||||
});
|
||||
|
||||
|
||||
// NOTE: the following statement breaks the build on non-pro deployments, and conditionals don't work either
|
||||
// so we resorted to raising the timeout from 10s to 25s in the vercel.json file instead
|
||||
// export const maxDuration = 25;
|
||||
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
|
||||
@@ -0,0 +1,59 @@
|
||||
# big-AGI Documentation
|
||||
|
||||
Find all the information you need to get started, configure, and effectively use big-AGI.
|
||||
|
||||
[//]: # (## Quick Start)
|
||||
|
||||
[//]: # (- **[Introduction](big-agi.md)**: Overview of big-AGI's features.)
|
||||
|
||||
## Configuration Guides
|
||||
|
||||
Detailed guides to configure your big-AGI interface and models.
|
||||
|
||||
👉 The following applies to the users of big-AGI.com, as the public instance is empty and to be configured by the user.
|
||||
|
||||
- **Cloud Model Services**:
|
||||
- **[Azure OpenAI](config-azure-openai.md)**
|
||||
- **[OpenRouter](config-openrouter.md)**
|
||||
- easy API key: **Anthropic**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **Perplexity**, **TogetherAI**
|
||||
|
||||
|
||||
- **Local Model Servers**:
|
||||
- **[LocalAI](config-local-localai.md)**
|
||||
- **[LM Studio](config-local-lmstudio.md)**
|
||||
- **[Ollama](config-local-ollama.md)**
|
||||
- **[Oobabooga](config-local-oobabooga.md)**
|
||||
|
||||
|
||||
- **Advanced Feature Configuration**:
|
||||
- **[Browse](config-feature-browse.md)**: Enable web page download through third-party services or your own cloud (advanced)
|
||||
  - **ElevenLabs API**: Voice and custom voice generation, only requires their API key
|
||||
- **Google Search API**: guide not yet available, see the Google options in '[Environment Variables](environment-variables.md)'
|
||||
- **Prodia API**: Stable Diffusion XL image generation, only requires their API key, alternative to DALL·E
|
||||
|
||||
## Deployment
|
||||
|
||||
System integrators, administrators, whitelabelers: instead of using the public big-AGI instance on get.big-agi.com, you can deploy your own instance.
|
||||
|
||||
Step-by-step deployment and system configuration instructions.
|
||||
|
||||
- **[Installation](installation.md)**: Set up your own instance of big-AGI and related products
|
||||
- build from source or use pre-built
|
||||
- locally, in the public cloud, or on your own servers
|
||||
|
||||
|
||||
- **Advanced Customizations**:
|
||||
- **[Source code alterations guide](customizations.md)**: source code primer and alterations guidelines
|
||||
- **[Basic Authentication](deploy-authentication.md)**: Optional, adds a username and password wall
|
||||
- **[Database Setup](deploy-database.md)**: Optional, enables "Chat Link Sharing"
|
||||
- **[Environment Variables](environment-variables.md)**: 📌 Pre-configures models and services
|
||||
|
||||
## Support and Community
|
||||
|
||||
Join our community or get support:
|
||||
|
||||
- Visit our [GitHub repository](https://github.com/enricoros/big-AGI) for source code and issue tracking
|
||||
- Check the latest updates and features on [Changelog](changelog.md) or the in-app [News](https://get.big-agi.com/news)
|
||||
- Connect with us and other users on [Discord](https://discord.gg/MkH4qj2Jp9) for discussions, help, and sharing your experiences with big-AGI
|
||||
|
||||
Thank you for choosing big-AGI. We're excited to see what you'll build.
|
||||
@@ -5,12 +5,72 @@ by release.
|
||||
|
||||
- For the live roadmap, please see [the GitHub project](https://github.com/users/enricoros/projects/4/views/2)
|
||||
|
||||
### 1.13.0 - Feb 2024
|
||||
### 1.17.0 - Jun 2024
|
||||
|
||||
- milestone: [1.13.0](https://github.com/enricoros/big-agi/milestone/13)
|
||||
- milestone: [1.17.0](https://github.com/enricoros/big-agi/milestone/17)
|
||||
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
|
||||
|
||||
## What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline
|
||||
### What's New in 1.16.1...1.16.3 · Jun 20, 2024 (patch releases)
|
||||
|
||||
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
|
||||
- 1.16.2: Improve web downloads, as text, markdown, or HTML
|
||||
- 1.16.2: Proper support for Gemini models
|
||||
- 1.16.2: Added the latest Mistral model
|
||||
- 1.16.2: Tokenizer support for gpt-4o
|
||||
- 1.16.2: Updates to Beam
|
||||
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
|
||||
|
||||
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
|
||||
|
||||
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
|
||||
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
|
||||
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
|
||||
- Major enhancements to the Auto-Diagrams tool
|
||||
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
|
||||
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
|
||||
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
|
||||
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
|
||||
- Developers: update the LLMs data structures
|
||||
|
||||
### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
|
||||
|
||||
- Support for the newly released Gemini Pro 1.5 models
|
||||
- Support for the new OpenAI 2024-04-09 Turbo models
|
||||
- Resilience fixes after the large success of 1.15.0
|
||||
|
||||
### What's New in 1.15.0 · April 1, 2024 · Beam
|
||||
|
||||
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
|
||||
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
|
||||
- Enhanced the default Persona
|
||||
- Fixes to Gemini models and SVGs, improvements to UI and icons
|
||||
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
|
||||
|
||||
### What's New in 1.14.1 · March 7, 2024 · Modelmorphic
|
||||
|
||||
- **Anthropic** [Claude-3](https://www.anthropic.com/news/claude-3-family) model family support. [#443](https://github.com/enricoros/big-AGI/issues/443)
|
||||
- New **[Perplexity](https://www.perplexity.ai/)** and **[Groq](https://groq.com/)** integration (thanks @Penagwin). [#407](https://github.com/enricoros/big-AGI/issues/407), [#427](https://github.com/enricoros/big-AGI/issues/427)
|
||||
- **[LocalAI](https://localai.io/models/)** deep integration, including support for [model galleries](https://github.com/enricoros/big-AGI/issues/411)
|
||||
- **Mistral** Large and Google **Gemini 1.5** support
|
||||
- Performance optimizations: runs [much faster](https://twitter.com/enricoros/status/1756553038293303434?utm_source=localhost:3000&utm_medium=big-agi), saves lots of power, reduces memory usage
|
||||
- Enhanced UX with auto-sizing charts, refined search and folder functionalities, perfected scaling
|
||||
- And with more UI improvements, documentation, bug fixes (20 tickets), and developer enhancements
|
||||
- [Release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.14.0), and changes [v1.13.1...v1.14.0](https://github.com/enricoros/big-AGI/compare/v1.13.1...v1.14.0) (233 commits, 8,000+ lines changed)
|
||||
|
||||
### What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385686b13
|
||||
|
||||
- **Side-by-Side Split Windows**: multitask with parallel conversations. [#208](https://github.com/enricoros/big-AGI/issues/208)
|
||||
- **Multi-Chat Mode**: message everyone, all at once. [#388](https://github.com/enricoros/big-AGI/issues/388)
|
||||
- **Export tables as CSV**: big thanks to @aj47. [#392](https://github.com/enricoros/big-AGI/pull/392)
|
||||
- Adjustable text size: customize density. [#399](https://github.com/enricoros/big-AGI/issues/399)
|
||||
- Dev2 Persona Technology Preview
|
||||
- Better looking chats with improved spacing, fonts, and menus
|
||||
- More: new video player, [LM Studio tutorial](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-lmstudio.md) (thanks @aj47), [MongoDB support](https://github.com/enricoros/big-AGI/blob/main/docs/deploy-database.md) (thanks @ranfysvalle02), and speedups
|
||||
|
||||
### What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline
|
||||
|
||||
https://github.com/enricoros/big-AGI/assets/32999/95ceb03c-945d-4fdd-9a9f-3317beb54f3f
|
||||
|
||||
@@ -69,7 +129,7 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
|
||||
- **Attachments System Overhaul**: Drag, paste, link, snap, text, images, PDFs and more. [#251](https://github.com/enricoros/big-agi/issues/251)
|
||||
- **Desktop Webcam Capture**: Image capture now available as Labs feature. [#253](https://github.com/enricoros/big-agi/issues/253)
|
||||
- **Independent Browsing**: Full browsing support with Browserless. [Learn More](https://github.com/enricoros/big-agi/blob/main/docs/config-browse.md)
|
||||
- **Independent Browsing**: Full browsing support with Browserless. [Learn More](https://github.com/enricoros/big-agi/blob/main/docs/config-feature-browse.md)
|
||||
- **Overheat LLMs**: Push the creativity with higher LLM temperatures. [#256](https://github.com/enricoros/big-agi/issues/256)
|
||||
- **Model Options Shortcut**: Quick adjust with `Ctrl+Shift+O`
|
||||
- Optimized Voice Input and Performance
|
||||
@@ -78,7 +138,7 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
|
||||
|
||||
### What's New in 1.6.0 - Nov 28, 2023 · Surf's Up
|
||||
|
||||
- **Web Browsing**: Download web pages within chats - [browsing guide](https://github.com/enricoros/big-agi/blob/main/docs/config-browse.md)
|
||||
- **Web Browsing**: Download web pages within chats - [browsing guide](https://github.com/enricoros/big-agi/blob/main/docs/config-feature-browse.md)
|
||||
- **Branching Discussions**: Create new conversations from any message
|
||||
- **Keyboard Navigation**: Swift chat navigation with new shortcuts (e.g. ctrl+alt+left/right)
|
||||
- **Performance Boost**: Faster rendering for a smoother experience
|
||||
@@ -152,7 +212,7 @@ For Developers:
|
||||
- **[Install Mobile APP](../docs/pixels/feature_pwa.png)** 📲 looks like native (@harlanlewis)
|
||||
- **[UI language](../docs/pixels/feature_language.png)** with auto-detect, and future app language! (@tbodyston)
|
||||
- **PDF Summarization** 🧩🤯 - ask questions to a PDF! (@fredliubojin)
|
||||
- **Code Execution: [Codepen](https://codepen.io/)/[Replit](https://replit.com/)** 💻 (@harlanlewis)
|
||||
- **Code Execution: [Codepen](https://codepen.io/)** 💻 (@harlanlewis)
|
||||
- **[SVG Drawing](../docs/pixels/feature_svg_drawing.png)** - draw with AI 🎨
|
||||
- Chats: multiple chats, AI titles, Import/Export, Selection mode
|
||||
- Rendering: Markdown, SVG, improved Code blocks
|
||||
|
||||
@@ -20,6 +20,9 @@ If you have an `API Endpoint` and `API Key`, you can configure big-AGI as follow
|
||||
The deployed models are now available in the application. If you don't have a configured
|
||||
Azure OpenAI service instance, continue with the next section.
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
## Setting Up Azure
|
||||
|
||||
### Step 1: Azure Account & Subscription
|
||||
|
||||
@@ -3,11 +3,16 @@
|
||||
Allows users to load web pages across various components of `big-AGI`. This feature is supported by Puppeteer-based
|
||||
browsing services, which are the most common way to render web pages in a headless environment.
|
||||
|
||||
Once configured, the Browsing service provides this functionality:
|
||||
Once configured, the Browsing service provides the following functionality:
|
||||
|
||||
- **Paste a URL**: Simply paste/drag a URL into the chat, and `big-AGI` will load and attach the page (very effective)
|
||||
- **Use /browse**: Type `/browse [URL]` in the chat to command `big-AGI` to load the specified web page
|
||||
- **ReAct**: ReAct will automatically use the `loadURL()` function whenever a URL is encountered
|
||||
- ✅ **Paste a URL**: Simply paste/drag a URL into the chat, and `big-AGI` will load and attach the page (very effective)
|
||||
- ✅ **Use /browse**: Type `/browse [URL]` in the chat to command `big-AGI` to load the specified web page
|
||||
- ✅ **ReAct**: ReAct will automatically use the `loadURL()` function whenever a URL is encountered
|
||||
|
||||
It does not yet support the following functionality:
|
||||
|
||||
- ✖️ **Auto-browsing by LLMs**: if an LLM encounters a URL, it will NOT load the page and will likely respond
|
||||
that it cannot browse the web - No technical limitation, we just haven't gotten to implementing this outside of `/react` yet
|
||||
|
||||
First of all, you need to procure a Puppeteer web browsing service endpoint. `big-AGI` supports services like:
|
||||
|
||||
@@ -63,7 +68,7 @@ The chat agent won't be able to access the web sites if the browserless containe
|
||||
- MAX_CONCURRENT_SESSIONS=10
|
||||
```
|
||||
|
||||
You can then add the proyy lines to your `.env` file.
|
||||
You can then add the proxy lines to your `.env` file.
|
||||
|
||||
```
|
||||
https_proxy=http://PROXY-IP:PROXY-PORT
|
||||
@@ -109,3 +114,5 @@ If you encounter any issues or have questions about configuring the browse funct
|
||||
---
|
||||
|
||||
Enjoy the enhanced browsing experience within `big-AGI` and explore the web without ever leaving your chat!
|
||||
|
||||
Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md))
|
||||
@@ -0,0 +1,54 @@
|
||||
# Integrating LM Studio with big-AGI
|
||||
|
||||
Quickly set up LM Studio with big-AGI to run local and open LLMs on your computer for enhanced privacy and control over AI interactions.
|
||||
|
||||
## Video Tutorial
|
||||
|
||||
For a visual step-by-step guide, watch our [YouTube tutorial](https://www.youtube.com/watch?v=MqXzxVokMDk).
|
||||
|
||||
[](http://www.youtube.com/watch?v=MqXzxVokMDk "Running big-AGI locally with LM Studio")
|
||||
|
||||
|
||||
## Quick Setup Guide
|
||||
|
||||
### Installing big-AGI
|
||||
|
||||
Clone and set up big-AGI:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-agi.git && cd big-agi
|
||||
npm install # Or: yarn install
|
||||
npm run dev # Or: yarn dev
|
||||
# If missing dependencies:
|
||||
npm install @mui/material # Or: yarn add @mui/material
|
||||
```
|
||||
|
||||
### Configuring LM Studio
|
||||
|
||||
Ensure LM Studio is running (default: [http://localhost:1234](http://localhost:1234)).
|
||||
Check the URL and modify if different.
|
||||
1. Download local models in LM Studio
|
||||
2. Start the LM Studio server
|
||||
3. Optionally, check the logs
|
||||
|
||||
### Integration in big-AGI
|
||||
|
||||
1. In big-AGI, navigate to **Models** > **Add** > **LM Studio**
|
||||
2. Enter the API URL: `http://localhost:1234` (modify if different)
|
||||
3. Refresh by clicking on the `Models` button to load models from LM Studio
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- **Missing @mui/material**: Execute `npm install @mui/material` or `yarn add @mui/material`
|
||||
- **Connection Issues**: Check LM Studio's URL and ensure it's operational
|
||||
|
||||
|
||||
## Further Assistance
|
||||
|
||||
Advanced configurations and more:
|
||||
|
||||
- big-AGI Community: [Discord](https://discord.gg/MkH4qj2Jp9)
|
||||
- LM Studio: [LM Studio home page](https://lmstudio.ai/)
|
||||
@@ -1,34 +1,64 @@
|
||||
# Local LLM integration with `localai`
|
||||
# Run your models with `LocalAI` x `big-AGI`
|
||||
|
||||
Integrate local Large Language Models (LLMs) with [LocalAI](https://localai.io).
|
||||
[LocalAI](https://localai.io) lets you run your AI models locally, or in the cloud. It supports text, image, asr, speech, and more models.
|
||||
|
||||
_Last updated Nov 7, 2023_
|
||||
We are deepening the integration between the two products. As of the time of writing, we integrate the following features:
|
||||
|
||||
## Instructions
|
||||
- ✅ [Text generation](https://localai.io/features/text-generation/) with GPTs
|
||||
- ✅ [Function calling](https://localai.io/features/openai-functions/) by GPTs 🆕
|
||||
- ✅ [Model Gallery](https://localai.io/models/) to list and install models
|
||||
- ✖️ [Vision API](https://localai.io/features/gpt-vision/) for image chats
|
||||
- ✖️ [Image generation](https://localai.io/features/image-generation) with stable diffusion
|
||||
- ✖️ [Audio to Text](https://localai.io/features/audio-to-text/)
|
||||
- ✖️ [Text to Audio](https://localai.io/features/text-to-audio/)
|
||||
- ✖️ [Embeddings generation](https://localai.io/features/embeddings/)
|
||||
- ✖️ [Constrained grammars](https://localai.io/features/constrained_grammars/) (JSON output)
|
||||
- ✖️ Voice cloning 🆕
|
||||
|
||||
_Last updated Feb 21, 2024_
|
||||
|
||||
## Guide
|
||||
|
||||
### LocalAI installation and configuration
|
||||
|
||||
Follow the guide at: https://localai.io/basics/getting_started/
|
||||
|
||||
For instance with [Use luna-ai-llama2 with docker compose](https://localai.io/basics/getting_started/#example-use-luna-ai-llama2-model-with-docker-compose):
|
||||
- verify it works by browsing to [http://localhost:8080/v1/models](http://localhost:8080/v1/models)
|
||||
(or the IP:Port of the machine, if running remotely) and seeing listed the model(s) you downloaded
|
||||
listed in the JSON response.
|
||||
|
||||
- clone LocalAI
|
||||
- get the model
|
||||
- copy the prompt template
|
||||
- start docker
|
||||
- -> the server will be listening on `localhost:8080`
|
||||
- verify it works by going to [http://localhost:8080/v1/models](http://localhost:8080/v1/models) on
|
||||
your browser and seeing listed the model you downloaded
|
||||
|
||||
### Integrating LocalAI with big-AGI
|
||||
### Integration: chat with LocalAI
|
||||
|
||||
- Go to Models > Add a model source of type: **LocalAI**
|
||||
- Enter the address: `http://localhost:8080` (default)
|
||||
- If running remotely, replace localhost with the IP of the machine. Make sure to use the **IP:Port** format
|
||||
- Load the models
|
||||
- Select model & Chat
|
||||
- Enter the default address: `http://localhost:8080`, or the address of your localAI cloud instance
|
||||

|
||||
- If running remotely, replace localhost with the IP of the machine. Make sure to use the **IP:Port** format
|
||||
- Load the models (click on `Models 🔄`)
|
||||
- Select the model and chat
|
||||
|
||||
> NOTE: LocalAI does not list details about the models. Every model is assumed to be
|
||||
> capable of chatting, and with a context window of 4096 tokens.
|
||||
> Please update the [src/modules/llms/transports/server/openai/models.data.ts](../src/modules/llms/server/openai/models.data.ts)
|
||||
> file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
### Integration: Models Gallery
|
||||
|
||||
If the running LocalAI instance is configured with a [Model Gallery](https://localai.io/models/):
|
||||
|
||||
- Go to Models > LocalAI
|
||||
- Click on `Gallery Admin`
|
||||
- Select the models to install, and view installation progress
|
||||

|
||||
|
||||
## Troubleshooting
|
||||
|
||||
##### Unknown Context Window Size
|
||||
|
||||
At the time of writing, LocalAI does not publish the model `context window size`.
|
||||
Every model is assumed to be capable of chatting, and with a context window of 4096 tokens.
|
||||
Please update the [src/modules/llms/transports/server/openai/models.data.ts](../src/modules/llms/server/openai/models.data.ts)
|
||||
file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
|
||||
|
||||
# 🤝 Support
|
||||
|
||||
- Hop into the [LocalAI Discord](https://discord.gg/uJAeKSAGDy) for support and questions
|
||||
- Hop into the [big-AGI Discord](https://discord.gg/MkH4qj2Jp9) for questions
|
||||
- For big-AGI support, please open an issue in our [big-AGI issue tracker](https://bit.ly/agi-request)
|
||||
|
||||
@@ -13,7 +13,7 @@ _Last updated Dec 16, 2023_
|
||||
|
||||
1. **Ensure Ollama API Server is Running**: Follow the official instructions to get Ollama up and running on your machine
|
||||
- For detailed instructions on setting up the Ollama API server, please refer to the
|
||||
[Ollama download page](https://ollama.ai/download) and [instructions for linux](https://github.com/jmorganca/ollama/blob/main/docs/linux.md).
|
||||
[Ollama download page](https://ollama.ai/download) and [instructions for linux](https://github.com/jmorganca/ollama/blob/main/docs/linux.md).
|
||||
2. **Add Ollama as a Model Source**: In `big-AGI`, navigate to the **Models** section, select **Add a model source**, and choose **Ollama**
|
||||
3. **Enter Ollama Host URL**: Provide the Ollama Host URL where the API server is accessible (e.g., `http://localhost:11434`)
|
||||
4. **Refresh Model List**: Once connected, refresh the list of available models to include the Ollama models
|
||||
@@ -22,6 +22,9 @@ _Last updated Dec 16, 2023_
|
||||
you'll have to press the 'Pull' button again, until a green message appears.
|
||||
5. **Chat with Ollama models**: select an Ollama model and begin chatting with AI personas
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
**Visual Configuration Guide**:
|
||||
|
||||
* After adding the `Ollama` model vendor, entering the IP address of an Ollama server, and refreshing models:<br/>
|
||||
@@ -37,7 +40,7 @@ _Last updated Dec 16, 2023_
|
||||
|
||||
### ⚠️ Network Troubleshooting
|
||||
|
||||
If you get errors about the server having trouble connecting with Ollama, please see
|
||||
If you get errors about the server having trouble connecting with Ollama, please see
|
||||
[this message](https://github.com/enricoros/big-AGI/issues/276#issuecomment-1858591483) on Issue #276.
|
||||
|
||||
And in brief, make sure the Ollama endpoint is accessible from the servers where you run big-AGI (which could
|
||||
@@ -69,15 +72,19 @@ Then, edit the nginx configuration file `/etc/nginx/sites-enabled/default` and a
|
||||
|
||||
```nginx
|
||||
location /ollama/ {
|
||||
proxy_pass http://localhost:11434;
|
||||
proxy_pass http://127.0.0.1:11434/;
|
||||
|
||||
# Disable buffering for the streaming responses (SSE)
|
||||
proxy_set_header Connection '';
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
|
||||
# Disable buffering for the streaming responses
|
||||
chunked_transfer_encoding off;
|
||||
proxy_buffering off;
|
||||
proxy_cache off;
|
||||
|
||||
# Longer timeouts
|
||||
proxy_read_timeout 3600;
|
||||
proxy_connect_timeout 3600;
|
||||
proxy_send_timeout 3600;
|
||||
}
|
||||
```
|
||||
|
||||
@@ -25,15 +25,15 @@ This guide assumes that **big-AGI** is already installed on your system. Note th
|
||||
- Stop the Web UI as we need to modify the startup flags to enable the OpenAI API
|
||||
2. Enable the **openai extension**
|
||||
- Edit `CMD_FLAGS.txt`
|
||||
- Make sure that `--listen --api` is present and uncommented
|
||||
- Make sure that `--listen --api` is present and uncommented
|
||||
3. Restart text-generation-webui
|
||||
- Double-click on "start"
|
||||
- You should see something like:
|
||||
- You should see something like:
|
||||
```
|
||||
2023-12-07 21:51:21 INFO:Loading the extension "openai"...
|
||||
2023-12-07 21:51:21 INFO:OpenAI-compatible API URL:
|
||||
|
||||
http://0.0.0.0:5000
|
||||
|
||||
http://0.0.0.0:5000
|
||||
...
|
||||
INFO: Uvicorn running on http://0.0.0.0:5000 (Press CTRL+C to quit)
|
||||
Running on local URL: http://0.0.0.0:7860
|
||||
|
||||
@@ -22,6 +22,9 @@ This document details the process of integrating OpenRouter with big-AGI.
|
||||

|
||||
4. OpenAI GPT4-32k and other models will now be accessible and selectable in the application.
|
||||
|
||||
In addition to using the UI, configuration can also be done using
|
||||
[environment variables](environment-variables.md).
|
||||
|
||||
### Pricing
|
||||
|
||||
OpenRouter independently manages its service and pricing and is not affiliated with big-AGI.
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
# Customizing and Creating Derivative Applications
|
||||
|
||||
This document outlines how to develop applications derived from big-AGI.
|
||||
|
||||
## Manual Customization
|
||||
|
||||
Application customization _requires manual code modifications or the use of environment variables_. Currently, **there is no admin panel for "managed" deployment customization** for enterprise use cases.
|
||||
|
||||
| Required Code Alteration | Not Required |
|
||||
|---------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------|
|
||||
| - Persona changes<br>- UI theme customization<br>- Feature additions or modifications | - Setting API keys in [environment variables](environment-variables.md)<br>- Toggling features with environment variables |
|
||||
| Apply these to the source code before building the application | Set these post-build on local machines or cloud deployment, before application launch |
|
||||
|
||||
<br/>
|
||||
|
||||
## Code Alterations
|
||||
|
||||
Start by creating a fork of the [big-AGI repository](https://github.com/enricoros/big-AGI) on GitHub for a personal development space.
|
||||
Understand the Architecture: big-AGI uses Next.js, React for the front end, and Node.js (Next.js edge functions) for the back end.
|
||||
|
||||
### Add Authentication
|
||||
|
||||
This necessitates a code change (file renaming) before build initiation, detailed in [deploy-authentication.md](deploy-authentication.md).
|
||||
|
||||
### Increase Vercel Functions Timeout
|
||||
|
||||
For long-running operations, Vercel allows paid deployments to increase the timeout on Functions.
|
||||
Note that this applies to old-style Vercel Functions (based on Node.js) and not the new Edge Functions.
|
||||
|
||||
At time of writing, big-AGI has only 2 operations that run on Node.js Functions:
|
||||
browsing (fetching web pages) and sharing. They both can exceed 10 seconds, especially
|
||||
when fetching large pages or waiting for slow websites to finish loading.
|
||||
|
||||
We provide `vercel_PRODUCTION.json` to raise the duration to 25 seconds (from a default of 10), to use it,
|
||||
make sure to rename it to `vercel.json` before build.
|
||||
|
||||
From the Vercel Project > Settings > General > Build & Development Settings,
|
||||
you can for instance set the build command to:
|
||||
|
||||
```bash
|
||||
mv vercel_PRODUCTION.json vercel.json; next build
|
||||
```
|
||||
|
||||
### Change the Personas
|
||||
|
||||
Edit the `src/data.ts` file to customize personas. This file houses the default personas. You can add, remove, or modify these to meet your project's needs.
|
||||
|
||||
- [ ] Modify `src/data.ts` to alter default personas
|
||||
|
||||
### Change the UI
|
||||
|
||||
Adapt the UI to match your project's aesthetic, incorporate new features, or exclude unnecessary ones.
|
||||
|
||||
- [ ] Adjust `src/common/app.theme.ts` for theme changes: colors, spacing, button appearance, animations, etc
|
||||
- [ ] Modify `src/common/app.config.tsx` to alter the application's name
|
||||
- [ ] Update `src/common/app.nav.tsx` to revise the navigation bar
|
||||
|
||||
## Testing & Deployment
|
||||
|
||||
Test your application thoroughly using local development (refer to README.md for local build instructions). Deploy using your preferred hosting service. big-AGI supports deployment on platforms like Vercel, Docker, or any Node.js-compatible service, especially those supporting NextJS's "Edge Runtime."
|
||||
|
||||
- [deploy-cloudflare.md](deploy-cloudflare.md): for Cloudflare Workers deployment
|
||||
- [deploy-docker.md](deploy-docker.md): for Docker deployment instructions and examples
|
||||
|
||||
## Debugging
|
||||
|
||||
We introduced the `/info/debug` page that provides a detailed overview of the application's environment, including the API keys, environment variables, and other configuration settings.
|
||||
|
||||
<br/>
|
||||
|
||||
## Community Projects - Share Your Project
|
||||
|
||||
After deployment, share your project with the community. We will link to your project to help others discover and learn from your work.
|
||||
|
||||
| Project | Features | GitHub |
|
||||
|----------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|
|
||||
| 🚀 CoolAGI: Where AI meets Imagination<br/> | Code Interpreter, Vision, Mind maps, Web Searches, Advanced Data Analytics, Large Data Handling and more! | [nextgen-user/CoolAGI](https://github.com/nextgen-user/CoolAGI) |
|
||||
| HL-GPT | Fully remodeled UI | [harlanlewis/nextjs-chatgpt-app](https://github.com/harlanlewis/nextjs-chatgpt-app) |
|
||||
|
||||
For public projects, update your README.md with your modifications and submit a pull request to add your project to our list, aiding in its discovery.
|
||||
|
||||
<br/>
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Stay Updated**: Frequently merge updates from the main big-AGI repository to incorporate bug fixes and new features.
|
||||
- **Keep It Open Source**: Consider maintaining your derivative as open source to foster community contributions.
|
||||
- **Engage with the Community**: Leverage platforms like GitHub, Discord, or Reddit for feedback, collaboration, and project promotion.
|
||||
|
||||
Developing a derivative application is an opportunity to explore new possibilities with AI and share your innovations with the global community. We look forward to seeing your contributions.
|
||||
@@ -0,0 +1,63 @@
|
||||
# big-AGI Analytics
|
||||
|
||||
The open-source big-AGI project provides support for the following analytics services:
|
||||
|
||||
- **Vercel Analytics**: automatic when deployed to Vercel
|
||||
- **Google Analytics 4**: manual setup required
|
||||
|
||||
The following is a quick overview of the Analytics options for the deployers of this open-source project.
|
||||
big-AGI is deployed in many large-scale and enterprise settings through various means (custom builds, Docker, Vercel, Cloudflare, etc.),
|
||||
and this guide is for its customization.
|
||||
|
||||
## Service Configuration
|
||||
|
||||
### Vercel Analytics
|
||||
|
||||
- Why: understand coarse traction, and identify deployment issues - all without tracking individual users
|
||||
- What: top pages, top referrers, country of origin, operating system, browser, and page speed metrics
|
||||
|
||||
Vercel Analytics and Speed Insights are local API endpoints deployed to your domain, so everything stays within your
|
||||
domain. Furthermore, the Vercel Analytics service is privacy-friendly, and does not track individual users.
|
||||
|
||||
This service is available to system administrators and is automatically enabled when deploying to Vercel.
|
||||
The code that activates Vercel Analytics is located in the `src/pages/_app.tsx` file:
|
||||
|
||||
```tsx
|
||||
const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) => <>
|
||||
...
|
||||
{isVercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{isVercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
...
|
||||
</>;
|
||||
```
|
||||
|
||||
When big-AGI is served on Vercel hosts, the ```process.env.NEXT_PUBLIC_VERCEL_URL``` environment variable is trueish, and
|
||||
analytics will be sent by default to the Vercel Analytics service which is deployed by Vercel IF configured from the
|
||||
Vercel project dashboard.
|
||||
|
||||
In summary: to turn it on: activate the `Analytics` service in the Vercel project dashboard.
|
||||
|
||||
### Google Analytics 4
|
||||
|
||||
- Why: user engagement and retention, performance insights, personalization, content optimization
|
||||
- What: https://support.google.com/analytics/answer/11593727
|
||||
|
||||
Google Analytics 4 (GA4) is a powerful tool for understanding user behavior and engagement.
|
||||
This can help optimize big-AGI, by understanding which features are needed/used and which aren't.
|
||||
|
||||
To enable Google Analytics 4, you need to set the `NEXT_PUBLIC_GA4_MEASUREMENT_ID` environment variable
|
||||
before starting the local build or the docker build (i.e. at build time), at which point the
|
||||
server/container will be able to report analytics to your Google Analytics 4 property.
|
||||
|
||||
As of Feb 27, 2024, this feature is in development.
|
||||
|
||||
## Configurations
|
||||
|
||||
| Scope | Default | Description / Instructions |
|
||||
|-----------------------------------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------|
|
||||
| Your source builds of big-AGI | None | **Vercel**: enable Vercel Analytics from the dashboard. · **Google Analytics**: set environment variable at build time. |
|
||||
| Your docker builds of big-AGI | None | **Vercel**: n/a. · **Google Analytics**: set environment variable at `docker build` time. |
|
||||
| [big-agi.com](https://big-agi.com) | Vercel + Google | The main website ([privacy policy](https://big-agi.com/privacy)) hosted for free for anyone. |
|
||||
| [official Docker packages](https://github.com/enricoros/big-AGI/pkgs/container/big-agi) | Google Analytics | **Vercel**: n/a · **Google Analytics**: set to the big-agi.com Google Analytics for analytics and improvements. |
|
||||
|
||||
Note: this information is updated as of Feb 27, 2024 and can change at any time.
|
||||
@@ -0,0 +1,66 @@
|
||||
**Connecting Your Database for Enhanced Features:**
|
||||
|
||||
This guide outlines the database options and setup steps for enabling features like Chat Link Sharing in your application.
|
||||
|
||||
### Choose Your Database:
|
||||
|
||||
**1. Serverless Postgres (default):**
|
||||
|
||||
- Available on Vercel, Neon, and other platforms.
|
||||
- Less feature-rich but a suitable option depending on your needs.
|
||||
- **Connection String:** Replace placeholders with your Postgres credentials.
|
||||
- `postgres://USER:PASS@SOMEHOST.postgres.vercel-storage.com/SOMEDB?pgbouncer=true&connect_timeout=15`
|
||||
|
||||
**2. MongoDB Atlas (alternative):**
|
||||
|
||||
- **Highly Recommended:** More than a database, it's a data platform. MongoDB Atlas is a robust cloud-based platform that offers scalability, security, and a suite of developer tools. No need for a separate vector database, you can query your vector embeddings right within your operational database!
|
||||
- **Additional Features:** MongoDB Atlas is packed with unique features designed to streamline the development process such as: Atlas App Services, Atlas search (with vector search), Atlas charts, Data Federation, and more.
|
||||
- **Connection String:** Replace placeholders with your Atlas credentials.
|
||||
- `mongodb://USER:PASS@CLUSTER-NAME.mongodb.net/DATABASE-NAME?retryWrites=true&w=majority`
|
||||
|
||||
### Environment Variables:
|
||||
|
||||
#### Postgres:
|
||||
|
||||
| Variable | |
|
||||
|---------------------------------------|------------------------------------------------------------------------------------------------------|
|
||||
| `POSTGRES_PRISMA_URL` | `postgres://USER:PASS@SOMEHOST.postgres.vercel-storage.com/SOMEDB?pgbouncer=true&connect_timeout=15` |
|
||||
| `POSTGRES_URL_NON_POOLING` (optional) | URL for the Postgres database without pooling (specific use cases) |
|
||||
|
||||
#### MongoDB:
|
||||
|
||||
| Variable | |
|
||||
|-----------|------------------------------------------------------------------------------------------|
|
||||
| `MDB_URI` | `mongodb://USER:PASS@CLUSTER-NAME.mongodb.net/DATABASE-NAME?retryWrites=true&w=majority` |
|
||||
|
||||
### MongoDB Atlas + Prisma
|
||||
|
||||
When using MongoDB Atlas, you'll need to make the below changes to the file [`src/server/prisma/schema.prisma`](../src/server/prisma/schema.prisma).
|
||||
|
||||
```
|
||||
...
|
||||
datasource db {
|
||||
provider = "mongodb"
|
||||
url = env("MDB_URI")
|
||||
}
|
||||
|
||||
//
|
||||
// Storage of Linked Data
|
||||
//
|
||||
model LinkStorage {
|
||||
id String @id @default(uuid()) @map("_id")
|
||||
|
||||
// ...rest of file
|
||||
```
|
||||
|
||||
### Initial Setup Steps:
|
||||
|
||||
1. **Run `npx prisma db push`:** Create or update the database schema (run once after connecting).
|
||||
|
||||
### Additional Resources:
|
||||
|
||||
- Prisma documentation: [https://www.prisma.io/docs/](https://www.prisma.io/docs/)
|
||||
- MongoDB Atlas: [https://www.mongodb.com/atlas/database](https://www.mongodb.com/atlas/database)
|
||||
- Atlas App Services: [https://www.mongodb.com/docs/atlas/app-services/](https://www.mongodb.com/docs/atlas/app-services/)
|
||||
- Atlas vector search: [https://www.mongodb.com/products/platform/atlas-vector-search/](https://www.mongodb.com/products/platform/atlas-vector-search)
|
||||
- Atlas Data Federation: [https://www.mongodb.com/products/platform/atlas-data-federation](https://www.mongodb.com/products/platform/atlas-data-federation)
|
||||
@@ -9,7 +9,7 @@ Docker ensures faster development cycles, easier collaboration, and seamless env
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-agi.git
|
||||
cd big-agi
|
||||
```
|
||||
```
|
||||
2. **Build the Docker Image**: Build a local docker image from the provided Dockerfile:
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
@@ -50,7 +50,7 @@ docker-compose up -d
|
||||
### Make Local Services Visible to Docker 🌐
|
||||
|
||||
To make local services running on your host machine accessible to a Docker container, such as a
|
||||
[Browseless](./config-browse.md) service or a local API, you can follow this simplified guide:
|
||||
[Browseless](./config-feature-browse.md) service or a local API, you can follow this simplified guide:
|
||||
|
||||
| Operating System | Steps to Make Local Services Visible to Docker |
|
||||
|:------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
# Why big-AGI?
|
||||
Placeholder for a document that demonstrates the productivity and unique features of Big-AGI.
|
||||
|
||||
## Exclusive features
|
||||
- [x] Call AGI
|
||||
- [x] Continuous Voice mode
|
||||
- [x] Diagram generation
|
||||
- [ ] ...
|
||||
|
||||
## Productivity Features
|
||||
- [x] Multi-window to never wait
|
||||
- [x] Multi-Chat to explore different solutions
|
||||
- [x] Rendering of graphs, charts, mindmaps
|
||||
- [ ] ...
|
||||
@@ -12,10 +12,13 @@ Environment variables can be set by creating a `.env` file in the root directory
|
||||
The following is an example `.env` for copy-paste convenience:
|
||||
|
||||
```bash
|
||||
# Database
|
||||
# Database (Postgres)
|
||||
POSTGRES_PRISMA_URL=
|
||||
POSTGRES_URL_NON_POOLING=
|
||||
|
||||
# Database (MongoDB)
|
||||
MDB_URI=
|
||||
|
||||
# LLMs
|
||||
OPENAI_API_KEY=
|
||||
OPENAI_API_HOST=
|
||||
@@ -25,9 +28,13 @@ AZURE_OPENAI_API_KEY=
|
||||
ANTHROPIC_API_KEY=
|
||||
ANTHROPIC_API_HOST=
|
||||
GEMINI_API_KEY=
|
||||
GROQ_API_KEY=
|
||||
LOCALAI_API_HOST=
|
||||
LOCALAI_API_KEY=
|
||||
MISTRAL_API_KEY=
|
||||
OLLAMA_API_HOST=
|
||||
OPENROUTER_API_KEY=
|
||||
PERPLEXITY_API_KEY=
|
||||
TOGETHERAI_API_KEY=
|
||||
|
||||
# Model Observability: Helicone
|
||||
@@ -51,22 +58,22 @@ BACKEND_ANALYTICS=
|
||||
# Backend HTTP Basic Authentication (see `deploy-authentication.md` for turning on authentication)
|
||||
HTTP_BASIC_AUTH_USERNAME=
|
||||
HTTP_BASIC_AUTH_PASSWORD=
|
||||
|
||||
# Frontend variables
|
||||
NEXT_PUBLIC_GA4_MEASUREMENT_ID=
|
||||
NEXT_PUBLIC_PLANTUML_SERVER_URL=
|
||||
```
|
||||
|
||||
## Variables Documentation
|
||||
## Backend Variables
|
||||
|
||||
These variables are used only by the server-side code, at runtime. Define them before running the nextjs local server (in development or
|
||||
cloud deployment), or pass them to Docker (--env-file or -e) when starting the container.
|
||||
|
||||
### Database
|
||||
|
||||
To enable features such as Chat Link Sharing, you need to connect the backend to a database. We require
|
||||
serverless Postgres, which is available on Vercel, Neon and more.
|
||||
To enable Chat Link Sharing, you need to connect the backend to a database. We currently support Postgres and MongoDB.
|
||||
|
||||
Also make sure that you run `npx prisma db push` to create the initial schema on the database for the
|
||||
first time (or update it on a later stage).
|
||||
|
||||
| Variable | Description |
|
||||
|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `POSTGRES_PRISMA_URL` | The URL of the Postgres database used by Prisma - example: `postgres://USER:PASS@SOMEHOST.postgres.vercel-storage.com/SOMEDB?pgbouncer=true&connect_timeout=15` |
|
||||
| `POSTGRES_URL_NON_POOLING` | The URL of the Postgres database without pooling |
|
||||
For Database configuration see [deploy-database.md](deploy-database.md).
|
||||
|
||||
### LLMs
|
||||
|
||||
@@ -83,12 +90,16 @@ requiring the user to enter an API key
|
||||
| `ANTHROPIC_API_KEY` | The API key for Anthropic | Optional |
|
||||
| `ANTHROPIC_API_HOST` | Changes the backend host for the Anthropic vendor, to enable platforms such as [config-aws-bedrock.md](config-aws-bedrock.md) | Optional |
|
||||
| `GEMINI_API_KEY` | The API key for Google AI's Gemini | Optional |
|
||||
| `GROQ_API_KEY` | The API key for Groq Cloud | Optional |
|
||||
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
|
||||
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
|
||||
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
|
||||
| `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-ollama.md](config-ollama.md) | |
|
||||
| `OLLAMA_API_HOST`          | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md)                          |          |
|
||||
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
|
||||
| `PERPLEXITY_API_KEY` | The API key for Perplexity | Optional |
|
||||
| `TOGETHERAI_API_KEY` | The API key for Together AI | Optional |
|
||||
|
||||
### Model Observability: Helicone
|
||||
### LLM Observability: Helicone
|
||||
|
||||
Helicone provides observability to your LLM calls. It is a paid service, with a generous free tier.
|
||||
It is currently supported for:
|
||||
@@ -100,7 +111,7 @@ It is currently supported for:
|
||||
|--------------------|--------------------------|
|
||||
| `HELICONE_API_KEY` | The API key for Helicone |
|
||||
|
||||
### Specials
|
||||
### Features
|
||||
|
||||
Enable the app to Talk, Draw, and Google things up.
|
||||
|
||||
@@ -110,16 +121,31 @@ Enable the app to Talk, Draw, and Google things up.
|
||||
| `ELEVENLABS_API_KEY` | ElevenLabs API Key - used for calls, etc. |
|
||||
| `ELEVENLABS_API_HOST` | Custom host for ElevenLabs |
|
||||
| `ELEVENLABS_VOICE_ID` | Default voice ID for ElevenLabs |
|
||||
| **Text-To-Image** | [Prodia](https://prodia.com/) is a reliable image generation service |
|
||||
| `PRODIA_API_KEY` | Prodia API Key - used with '/imagine ...' |
|
||||
| **Google Custom Search** | [Google Programmable Search Engine](https://programmablesearchengine.google.com/about/) produces links to pages |
|
||||
| `GOOGLE_CLOUD_API_KEY` | Google Cloud API Key, used with the '/react' command - [Link to GCP](https://console.cloud.google.com/apis/credentials) |
|
||||
| `GOOGLE_CSE_ID` | Google Custom/Programmable Search Engine ID - [Link to PSE](https://programmablesearchengine.google.com/) |
|
||||
| **Text-To-Image** | [Prodia](https://prodia.com/) is a reliable image generation service |
|
||||
| `PRODIA_API_KEY` | Prodia API Key - used with '/imagine ...' |
|
||||
| **Browse** | |
|
||||
| `PUPPETEER_WSS_ENDPOINT` | Puppeteer WebSocket endpoint - used for browsing, etc. |
|
||||
| **Backend** | |
|
||||
| `PUPPETEER_WSS_ENDPOINT`   | Puppeteer WebSocket endpoint - used for browsing (page downloading), etc.                                           |
|
||||
| **Backend** | |
|
||||
| `BACKEND_ANALYTICS` | Semicolon-separated list of analytics flags (see backend.analytics.ts). Flags: `domain` logs the responding domain. |
|
||||
| `HTTP_BASIC_AUTH_USERNAME` | See the [Authentication](deploy-authentication.md) guide. Username for HTTP Basic Authentication. |
|
||||
| `HTTP_BASIC_AUTH_PASSWORD` | Password for HTTP Basic Authentication. |
|
||||
|
||||
### Frontend Variables
|
||||
|
||||
The value of these variables are passed to the frontend (Web UI) - make sure they do not contain secrets.
|
||||
|
||||
| Variable | Description |
|
||||
|:----------------------------------|:-----------------------------------------------------------------------------------------|
|
||||
| `NEXT_PUBLIC_GA4_MEASUREMENT_ID` | The measurement ID for Google Analytics 4. (see [deploy-analytics](deploy-analytics.md)) |
|
||||
| `NEXT_PUBLIC_PLANTUML_SERVER_URL` | The URL of the PlantUML server, used for rendering UML diagrams. (code in RenderCode.tsx) |
|
||||
|
||||
> Important: these variables must be set at build time, which is required by Next.js to pass them to the frontend.
|
||||
> This is in contrast to the backend variables, which can be set when starting the local server/container.
|
||||
|
||||
---
|
||||
|
||||
For a higher level overview of backend code and environment customization,
|
||||
see the [big-AGI Customization](customizations.md) guide.
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
# Installation Guide
|
||||
|
||||
Welcome to the big-AGI Installation Guide - Whether you're a developer
|
||||
eager to explore, a system integrator, or an enterprise looking for a
|
||||
white-label solution, this comprehensive guide ensures a smooth setup
|
||||
process for your own instance of big-AGI and related products.
|
||||
|
||||
**Try big-AGI** - You don't need to install anything if you want to play with big-AGI
|
||||
and have your API keys to various model services. You can access our free instance on [big-AGI.com](https://big-agi.com).
|
||||
The free instance runs the latest `main-stable` branch from this repository.
|
||||
|
||||
## 🧩 Build-your-own
|
||||
|
||||
If you want to change the code, have a deeper configuration,
|
||||
add your own models, or run your own instance, follow the steps below.
|
||||
|
||||
### Local Development
|
||||
|
||||
**Prerequisites:**
|
||||
|
||||
- Node.js and npm installed on your machine.
|
||||
|
||||
**Steps:**
|
||||
|
||||
1. Clone the big-AGI repository:
|
||||
```bash
|
||||
git clone https://github.com/enricoros/big-AGI.git
|
||||
cd big-AGI
|
||||
```
|
||||
2. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
3. Run the development server:
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
Your big-AGI instance is now running at `http://localhost:3000`.
|
||||
|
||||
### Local Production build
|
||||
|
||||
The production build is optimized for performance and follows
|
||||
the same steps 1 and 2 as for [local development](#local-development).
|
||||
|
||||
3. Build the production version:
|
||||
```bash
|
||||
# .. repeat the steps above up to `npm install`, then:
|
||||
npm run build
|
||||
```
|
||||
4. Start the production server (`npx` may be optional):
|
||||
```bash
|
||||
npx next start --port 3000
|
||||
```
|
||||
Your big-AGI production instance is on `http://localhost:3000`.
|
||||
|
||||
### Advanced Customization
|
||||
|
||||
Want to pre-enable models, customize the interface, or deploy with username/password or alter code to your needs?
|
||||
Check out the [Customizations Guide](README.md) for detailed instructions.
|
||||
|
||||
## ☁️ Cloud Deployment Options
|
||||
|
||||
To deploy big-AGI on a public server, you have several options. Choose the one that best fits your needs.
|
||||
|
||||
### Deploy on Vercel
|
||||
|
||||
Install big-AGI on Vercel with just a few clicks.
|
||||
|
||||
Create your GitHub fork, create a Vercel project over that fork, and deploy it. Or press the button below for convenience.
|
||||
|
||||
[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
|
||||
|
||||
### Deploy on Cloudflare
|
||||
|
||||
Deploy on Cloudflare's global network by installing big-AGI on
|
||||
Cloudflare Pages. Check out the [Cloudflare Installation Guide](deploy-cloudflare.md)
|
||||
for step-by-step instructions.
|
||||
|
||||
### Docker Deployments
|
||||
|
||||
Containerize your big-AGI installation using Docker for portability and scalability.
|
||||
Our [Docker Deployment Guide](deploy-docker.md) will walk you through the process,
|
||||
or follow the steps below for a quick start.
|
||||
|
||||
1. (optional) Build the Docker image - if you do not want to use the [pre-built Docker images](https://github.com/enricoros/big-AGI/pkgs/container/big-agi):
|
||||
```bash
|
||||
docker build -t big-agi .
|
||||
```
|
||||
2. Run the Docker container with either:
|
||||
```bash
|
||||
# 2A. if you built the image yourself:
|
||||
docker run -d -p 3000:3000 big-agi
|
||||
|
||||
# 2B. or use the pre-built image:
|
||||
docker run -d -p 3000:3000 ghcr.io/enricoros/big-agi
|
||||
|
||||
# 2C. or use docker-compose:
|
||||
docker-compose up
|
||||
```
|
||||
Access your big-AGI instance at `http://localhost:3000`.
|
||||
|
||||
### Midori AI Subsystem for Docker Deployment
|
||||
|
||||
Follow the instructions found on [Midori AI Subsystem Site](https://io.midori-ai.xyz/subsystem/manager/)
|
||||
for your host OS. After completing the setup process, install the Big-AGI docker backend to the Midori AI Subsystem.
|
||||
|
||||
## Enterprise-Grade Installation
|
||||
|
||||
For businesses seeking a fully-managed, scalable solution, consider our managed installations.
|
||||
Enjoy all the features of big-AGI without the hassle of infrastructure management. [hello@big-agi.com](mailto:hello@big-agi.com) to learn more.
|
||||
|
||||
## Support
|
||||
|
||||
Join our vibrant community of developers, researchers, and AI enthusiasts. Share your projects, get help, and collaborate with others.
|
||||
|
||||
- [Discord Community](https://discord.gg/MkH4qj2Jp9)
|
||||
- [Twitter](https://twitter.com/yourusername)
|
||||
|
||||
For any questions or inquiries, please don't hesitate to [reach out to our team](mailto:hello@big-agi.com).
|
||||
|
After Width: | Height: | Size: 303 KiB |
|
After Width: | Height: | Size: 89 KiB |
|
After Width: | Height: | Size: 206 KiB |
@@ -0,0 +1,5 @@
|
||||
From root:
|
||||
```bash
|
||||
BIG_AGI_BUILD=standalone next build
|
||||
electron . --enable-logging
|
||||
```
|
||||
@@ -0,0 +1,61 @@
|
||||
<!DOCTYPE html>
<html>
<head>
  <style>
    /* Loading/splash page: dark background, spinner centered in the viewport. */
    body {
      background: #2e2c29;
      display: flex;
      justify-content: center;
      align-items: center;
      height: 100vh;
      margin: 0;
      font-family: Arial, sans-serif;
    }

    /* Positioning context for the overlapping spinner and logo. */
    .loader-container {
      position: relative;
      width: 100px;
      height: 100px;
    }

    /* Outer ring, spins clockwise (2s period). */
    .spinner {
      position: absolute;
      top: 0;
      left: 0;
      border: 5px solid rgba(255, 255, 255, 0.3);
      border-top: 5px solid #3498db;
      border-radius: 50%;
      width: 100px;
      height: 100px;
      animation: spin 2s linear infinite;
    }

    /* App icon centered inside the ring, counter-rotating at a different
       period (3.33s) so the two motions stay visually distinct. */
    .logo {
      position: absolute;
      top: 15px;
      left: 15px;
      width: 80px;
      height: 80px;
      background: url('tray-icon.png') no-repeat center center;
      background-size: contain;
      animation: counter-spin 3.33s linear infinite;
    }

    @keyframes spin {
      0% { transform: rotate(0deg); }
      100% { transform: rotate(360deg); }
    }

    @keyframes counter-spin {
      0% { transform: rotate(360deg); }
      100% { transform: rotate(0deg); }
    }
  </style>
</head>
<body>
  <!-- Spinner ring with the logo overlaid in its center -->
  <div class="loader-container">
    <div class="spinner"></div>
    <div class="logo"></div>
  </div>
</body>
</html>
|
||||
@@ -0,0 +1,178 @@
|
||||
// Electron main process: boots the embedded Next.js server, then hosts it
// in a frameless BrowserWindow with tray integration and auto-updates.
const { app, BrowserWindow, Tray, Menu, ipcMain, screen, nativeTheme, shell } = require('electron');
const path = require('path');
const startServer = require('./server.js');
const { autoUpdater } = require('electron-updater');

let mainWindow; // main application window, created in createWindow()
let tray;       // system tray icon, created in createTray()
const port = 3000; // local port the embedded Next.js server listens on
|
||||
|
||||
/**
 * Starts the embedded Next.js server, creates the main BrowserWindow,
 * loads the local URL into it, and wires up window lifecycle, error,
 * and external-link handlers. On any failure the whole app quits.
 */
async function createWindow() {
  try {
    console.log('Starting server...');
    await startServer(port); // resolves once the HTTP server is listening
    console.log('Server started successfully');

    const { width, height } = screen.getPrimaryDisplay().workAreaSize;

    // // Set up a loading screen
    // loadingScreen = new BrowserWindow({
    //   // width: 150,
    //   // height: 150,
    //   frame: false,
    //   transparent: false,
    //   alwaysOnTop: true,
    //   webPreferences: {
    //     nodeIntegration: true,
    //   },
    //   backgroundColor: '#2e2c29',
    // });
    //
    // loadingScreen.loadFile(path.join(__dirname, 'loading.html'));
    // loadingScreen.center();
    // console.log('Loading screen created');

    console.log('Preload script path:', path.join(__dirname, 'preload.js'));

    mainWindow = new BrowserWindow({
      // Cap the window at 1280x800, or 80% of the work area if smaller.
      width: Math.min(1280, width * 0.8),
      height: Math.min(800, height * 0.8),
      minWidth: 430,
      minHeight: 600,
      webPreferences: {
        // Renderer is isolated; all privileged access goes through preload.js.
        nodeIntegration: false,
        contextIsolation: true,
        preload: path.join(__dirname, 'preload.js'),
        sandbox: false, // preload needs fs/path access (reads renderer.js)
        devTools: false,
      },
      backgroundColor: nativeTheme.shouldUseDarkColors ? '#1a1a1a' : '#ffffff',
      show: true,
      frame: false,
      titleBarStyle: 'hidden',
      icon: path.join(__dirname, 'tray-icon.png'),
      // New "insane" features:
      // transparent: true, // Enable window transparency
      vibrancy: 'under-window', // Add vibrancy effect (macOS only)
      visualEffectState: 'active', // Keep vibrancy active even when not focused (macOS only)
      roundedCorners: true, // Enable rounded corners (macOS only)
      // thickFrame: false, // Use a thinner frame on Windows
      autoHideMenuBar: true, // Auto-hide the menu bar, press Alt to show it
      scrollBounce: true, // Enable bounce effect when scrolling (macOS only)
    });

    mainWindow.removeMenu();
    mainWindow.setTitle('Your Professional App Name');

    console.log('Attempting to load main window URL...');
    await mainWindow.loadURL(`http://localhost:${port}`);
    console.log('Main window URL loaded successfully');

    // NOTE(review): this listener is registered only after loadURL has
    // resolved, so 'ready-to-show' may already have fired and the callback
    // may never run; the window is visible anyway because show:true is set
    // above — confirm which behavior is intended.
    mainWindow.once('ready-to-show', () => {
      console.log('Main window ready to show');
      // if (loadingScreen) {
      //   loadingScreen.close();
      // }
      mainWindow.show();
      mainWindow.focus();
    });

    createTray();
    autoUpdater.checkForUpdatesAndNotify();

    // Handle window state: closing the window hides it to the tray instead
    // of quitting, unless the app is actually shutting down.
    let isQuitting = false;
    mainWindow.on('close', (event) => {
      if (!isQuitting) {
        event.preventDefault();
        mainWindow.hide();
      }
    });

    app.on('before-quit', () => {
      isQuitting = true;
    });

    // Adjust window behavior: notify the renderer of maximize state changes.
    mainWindow.on('maximize', () => {
      mainWindow.webContents.send('window-maximized');
    });

    mainWindow.on('unmaximize', () => {
      mainWindow.webContents.send('window-unmaximized');
    });


    // Warn if preloads fail
    mainWindow.webContents.on('preload-error', (event, preloadPath, error) => {
      console.error('Preload error:', preloadPath, error);
    });

    mainWindow.webContents.on('did-fail-load', (event, errorCode, errorDescription) => {
      console.error('Failed to load:', errorCode, errorDescription);
    });


    // Handle external links: open them in the system browser, never in-app.
    mainWindow.webContents.setWindowOpenHandler(({ url }) => {
      shell.openExternal(url);
      return { action: 'deny' };
    });

  } catch (err) {
    console.error('Error in createWindow:', err);
    app.quit();
  }
}
|
||||
|
||||
/**
 * Creates the system tray icon with a minimal Show/Quit context menu.
 * A plain click on the tray icon toggles the main window's visibility.
 */
function createTray() {
  tray = new Tray(path.join(__dirname, 'tray-icon.png'));

  const trayMenu = Menu.buildFromTemplate([
    { label: 'Show App', click: () => mainWindow.show() },
    { type: 'separator' },
    { label: 'Quit', click: () => app.quit() },
  ]);

  tray.setToolTip('Your Professional App Name');
  tray.setContextMenu(trayMenu);

  // Toggle the main window on tray-icon click.
  tray.on('click', () => {
    if (mainWindow.isVisible()) {
      mainWindow.hide();
    } else {
      mainWindow.show();
    }
  });
}
|
||||
|
||||
// App lifecycle: create the window once Electron has finished initializing.
app.whenReady().then(() => {
  console.log('App is ready, creating window...');
  createWindow().catch((err) => {
    console.error('Failed to create window:', err);
    app.quit();
  });

  // macOS: re-create a window when the dock icon is clicked and none exist.
  app.on('activate', function() {
    if (BrowserWindow.getAllWindows().length === 0) createWindow();
  });
});

// Quit when all windows are closed, except on macOS where apps stay active.
app.on('window-all-closed', function() {
  if (process.platform !== 'darwin') app.quit();
});

// IPC handlers for window controls (frameless window: the renderer draws
// its own minimize/maximize/close buttons and drives them over IPC).
ipcMain.on('minimize-window', () => mainWindow.minimize());
ipcMain.on('maximize-window', () => {
  // Toggle between maximized and restored states.
  if (mainWindow.isMaximized()) {
    mainWindow.unmaximize();
  } else {
    mainWindow.maximize();
  }
});
ipcMain.on('close-window', () => mainWindow.close());


// Auto-updater events: forward update status to the renderer, which
// subscribes to these channels through the preload bridge.
autoUpdater.on('update-available', () => {
  mainWindow.webContents.send('update_available');
});

autoUpdater.on('update-downloaded', () => {
  mainWindow.webContents.send('update_downloaded');
});
|
||||
@@ -0,0 +1,36 @@
|
||||
// Electron preload script: runs in an isolated world before the page loads,
// and exposes a minimal, explicit API surface to the (untrusted) renderer.
const { contextBridge, desktopCapturer, ipcRenderer } = require('electron');
const { readFileSync } = require('fs');
const { join } = require('path');

// Main bridge: window.electron in the page
contextBridge.exposeInMainWorld('electron', {
  // fire-and-forget event channel, renderer -> main
  sendEvent: (event) => ipcRenderer.send('app-event', event),
  // auto-updater notifications, main -> renderer
  onUpdateAvailable: (callback) => ipcRenderer.on('update_available', callback),
  onUpdateDownloaded: (callback) => ipcRenderer.on('update_downloaded', callback),
});


// Screen Capture: inject renderer.js into the web page
// (renderer.js overrides navigator.mediaDevices.getDisplayMedia; it is read
// from disk here because the page itself cannot use Node APIs.)
window.addEventListener('DOMContentLoaded', () => {
  console.log('Screen Capture: Injecting renderer.js into the web page');
  const rendererScript = document.createElement('script');
  rendererScript.text = readFileSync(join(__dirname, 'renderer.js'), 'utf8');
  document.body.appendChild(rendererScript);
});

// Screen Capture: expose desktopCapturer to the web page
contextBridge.exposeInMainWorld('myCustomGetDisplayMedia', async () => {
  console.log('Screen Capture: Calling desktopCapturer.getSources');
  const sources = await desktopCapturer.getSources({
    types: ['window', 'screen'],
  });

  console.log('Available sources:', sources);

  // you should create some kind of UI to prompt the user
  // to select the correct source like Google Chrome does
  // this is just for testing purposes
  return sources[0];
});

console.log('Preload script loaded');
|
||||
@@ -0,0 +1,30 @@
|
||||
// https://github.com/aabuhijleh/override-getDisplayMedia/blob/main/renderer.js

// This file is required by the index.html file and will
// be executed in the renderer process for that window.
// No Node.js APIs are available in this process because
// `nodeIntegration` is turned off. Use `preload.js` to
// selectively enable features needed in the rendering
// process.

// override getDisplayMedia: route screen capture through the
// myCustomGetDisplayMedia bridge exposed by preload.js, which returns a
// desktopCapturer source (currently always the first one, with no picker UI).
navigator.mediaDevices.getDisplayMedia = async () => {
  const selectedSource = await globalThis.myCustomGetDisplayMedia();

  // create MediaStream from the chosen desktop source.
  // NOTE(review): the 'mandatory' constraint form is Chromium-specific and
  // the resolution is pinned to exactly 1280x720 — confirm both are intended.
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: selectedSource.id,
        minWidth: 1280,
        maxWidth: 1280,
        minHeight: 720,
        maxHeight: 720,
      },
    },
  });

  return stream;
};
|
||||
@@ -0,0 +1,71 @@
|
||||
// Standalone HTTP server that hosts the Next.js app for the Electron shell.
const { createServer } = require('http');
const { parse } = require('url');
const next = require('next');
const path = require('path');

// const dev = process.env.NODE_ENV !== 'production';
const dir = path.join(__dirname, '..'); // This points to the root of your project
const app = next({ dev: false, dir }); // always run Next in production mode
const handle = app.getRequestHandler(); // Next's catch-all request handler
|
||||
|
||||
/**
 * Prepares the Next.js app and starts a plain Node HTTP server in front
 * of it, with basic request logging and per-IP rate limiting.
 *
 * @param {number} port - TCP port to listen on.
 * @returns {Promise<import('http').Server>} resolves with the listening
 *   server; rejects if Next.js preparation or binding the port fails.
 */
function startServer(port) {
  return new Promise((resolve, reject) => {
    app.prepare()
      .then(() => {
        const server = createServer((req, res) => {
          // Basic request logging
          console.log(`${new Date().toISOString()} - ${req.method} ${req.url}`);

          // Simple rate limiting
          if (rateLimiter(req)) {
            res.statusCode = 429;
            res.end('Too Many Requests');
            return;
          }

          // Hand the request off to Next.js
          const parsedUrl = parse(req.url, true);
          handle(req, res, parsedUrl);
        });

        // BUGFIX: surface async bind errors (e.g. EADDRINUSE) as a rejection
        // instead of an unhandled 'error' event crashing the process.
        server.on('error', reject);

        server.listen(port, (err) => {
          if (err) {
            reject(err);
            return; // BUGFIX: previously fell through and resolved anyway
          }
          console.log(`> Ready on http://localhost:${port}`);
          resolve(server);
        });

        // Graceful shutdown
        process.on('SIGTERM', () => {
          console.log('SIGTERM signal received: closing HTTP server');
          server.close(() => {
            console.log('HTTP server closed');
          });
        });
      })
      .catch(err => reject(err));
  });
}
|
||||
|
||||
// Simple in-memory rate limiter
const MAX_REQUESTS_PER_MINUTE = 100;
const requestCounts = new Map(); // ip -> array of request timestamps (ms)

/**
 * Sliding-window (60s) rate limiter keyed by the remote IP address.
 *
 * @param {import('http').IncomingMessage} req - incoming HTTP request.
 * @returns {boolean} true when the caller has exceeded the per-minute budget
 *   (the request should be rejected), false otherwise.
 */
function rateLimiter(req) {
  const ip = req.socket.remoteAddress;
  const now = Date.now();
  const windowStart = now - 60000; // 1 minute ago

  // Keep only the timestamps still inside the window.
  const requestsInWindow = (requestCounts.get(ip) || [])
    .filter(timestamp => timestamp > windowStart);

  if (requestsInWindow.length >= MAX_REQUESTS_PER_MINUTE) {
    return true; // Rate limit exceeded
  }

  // BUGFIX: store the *pruned* list back, not the original ever-growing
  // array — previously stale timestamps were never removed, so memory
  // grew without bound for every active IP.
  requestsInWindow.push(now);
  requestCounts.set(ip, requestsInWindow);

  return false; // Rate limit not exceeded
}
|
||||
|
||||
module.exports = startServer;
|
||||
|
After Width: | Height: | Size: 993 B |
@@ -1,20 +1,33 @@
|
||||
// Non-default build types
|
||||
const buildType =
|
||||
process.env.BIG_AGI_BUILD === 'standalone' ? 'standalone'
|
||||
: process.env.BIG_AGI_BUILD === 'static' ? 'export'
|
||||
: undefined;
|
||||
|
||||
buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
let nextConfig = {
|
||||
reactStrictMode: true,
|
||||
|
||||
// Note: disabled to chech whether the project becomes slower with this
|
||||
// modularizeImports: {
|
||||
// '@mui/icons-material': {
|
||||
// transform: '@mui/icons-material/{{member}}',
|
||||
// },
|
||||
// },
|
||||
// [exports] https://nextjs.org/docs/advanced-features/static-html-export
|
||||
...buildType && {
|
||||
output: buildType,
|
||||
// distDir: 'dist',
|
||||
|
||||
// disable image optimization for exports
|
||||
images: { unoptimized: true },
|
||||
|
||||
// Optional: Change links `/me` -> `/me/` and emit `/me.html` -> `/me/index.html`
|
||||
// trailingSlash: true,
|
||||
},
|
||||
|
||||
// [puppeteer] https://github.com/puppeteer/puppeteer/issues/11052
|
||||
experimental: {
|
||||
serverComponentsExternalPackages: ['puppeteer-core'],
|
||||
},
|
||||
|
||||
webpack: (config, _options) => {
|
||||
webpack: (config, { isServer }) => {
|
||||
// @mui/joy: anything material gets redirected to Joy
|
||||
config.resolve.alias['@mui/material'] = '@mui/joy';
|
||||
|
||||
@@ -24,9 +37,28 @@ let nextConfig = {
|
||||
layers: true,
|
||||
};
|
||||
|
||||
// fix warnings for async functions in the browser (https://github.com/vercel/next.js/issues/64792)
|
||||
if (!isServer) {
|
||||
config.output.environment = { ...config.output.environment, asyncFunction: true };
|
||||
}
|
||||
|
||||
// prevent too many small chunks (40kb min) on 'client' packs (not 'server' or 'edge-server')
|
||||
// noinspection JSUnresolvedReference
|
||||
if (typeof config.optimization.splitChunks === 'object' && config.optimization.splitChunks.minSize) {
|
||||
// noinspection JSUnresolvedReference
|
||||
config.optimization.splitChunks.minSize = 40 * 1024;
|
||||
}
|
||||
|
||||
return config;
|
||||
},
|
||||
|
||||
// Note: disabled to check whether the project becomes slower with this
|
||||
// modularizeImports: {
|
||||
// '@mui/icons-material': {
|
||||
// transform: '@mui/icons-material/{{member}}',
|
||||
// },
|
||||
// },
|
||||
|
||||
// Uncomment the following to leave console messages in production
|
||||
// compiler: {
|
||||
// removeConsole: false,
|
||||
|
||||
@@ -1,77 +1,119 @@
|
||||
{
|
||||
"name": "big-agi",
|
||||
"version": "1.12.0",
|
||||
"version": "1.16.0",
|
||||
"private": true,
|
||||
"author": "Enrico Ros <enrico.ros@gmail.com>",
|
||||
"repository": "https://github.com/enricoros/big-agi",
|
||||
"main": "electron/main.js",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"dev": "node electron/server.js",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"start": "NODE_ENV=production node electron/server.js",
|
||||
"lint": "next lint",
|
||||
"postinstall": "prisma generate",
|
||||
"db:push": "prisma db push",
|
||||
"db:studio": "prisma studio",
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local"
|
||||
"vercel:env:pull": "npx vercel env pull .env.development.local",
|
||||
"electron": "electron .",
|
||||
"electron-dev": "concurrently \"npm run dev\" \"electron .\"",
|
||||
"electron-build": "next build && electron-builder",
|
||||
"electron-start": "npm run build && electron ."
|
||||
},
|
||||
"prisma": {
|
||||
"schema": "src/server/prisma/schema.prisma"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/cache": "^11.11.0",
|
||||
"@emotion/react": "^11.11.3",
|
||||
"@emotion/react": "^11.11.4",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@mui/icons-material": "^5.15.6",
|
||||
"@mui/joy": "^5.0.0-beta.24",
|
||||
"@next/bundle-analyzer": "^14.1.0",
|
||||
"@prisma/client": "^5.8.1",
|
||||
"@emotion/styled": "^11.11.5",
|
||||
"@mui/icons-material": "^5.16.0",
|
||||
"@mui/joy": "^5.0.0-beta.47",
|
||||
"@mui/material": "^5.16.0",
|
||||
"@next/bundle-analyzer": "^14.2.4",
|
||||
"@next/third-parties": "^14.2.4",
|
||||
"@prisma/client": "^5.16.1",
|
||||
"@sanity/diff-match-patch": "^3.1.1",
|
||||
"@t3-oss/env-nextjs": "^0.8.0",
|
||||
"@tanstack/react-query": "~4.36.1",
|
||||
"@trpc/client": "10.44.1",
|
||||
"@trpc/next": "10.44.1",
|
||||
"@trpc/react-query": "10.44.1",
|
||||
"@trpc/server": "10.44.1",
|
||||
"@vercel/analytics": "^1.1.2",
|
||||
"@vercel/speed-insights": "^1.0.8",
|
||||
"@t3-oss/env-nextjs": "^0.10.1",
|
||||
"@tanstack/react-query": "^5.50.1",
|
||||
"@trpc/client": "11.0.0-alpha-tmp-issues-5851-take-two.496",
|
||||
"@trpc/next": "11.0.0-alpha-tmp-issues-5851-take-two.496",
|
||||
"@trpc/react-query": "11.0.0-alpha-tmp-issues-5851-take-two.496",
|
||||
"@trpc/server": "11.0.0-alpha-tmp-issues-5851-take-two.496",
|
||||
"@vercel/analytics": "^1.3.1",
|
||||
"@vercel/speed-insights": "^1.0.12",
|
||||
"browser-fs-access": "^0.35.0",
|
||||
"eventsource-parser": "^1.1.1",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"dexie": "^4.0.7",
|
||||
"dexie-react-hooks": "^1.1.7",
|
||||
"electron-updater": "^6.2.1",
|
||||
"eventsource-parser": "^1.1.2",
|
||||
"idb-keyval": "^6.2.1",
|
||||
"next": "^14.1.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"next": "~14.2.4",
|
||||
"nprogress": "^0.2.0",
|
||||
"pdfjs-dist": "4.0.379",
|
||||
"pdfjs-dist": "4.4.168",
|
||||
"plantuml-encoder": "^1.4.0",
|
||||
"prismjs": "^1.29.0",
|
||||
"react": "^18.2.0",
|
||||
"react": "^18.3.1",
|
||||
"react-beautiful-dnd": "^13.1.1",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-csv": "^2.2.2",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-katex": "^3.0.1",
|
||||
"react-markdown": "^9.0.1",
|
||||
"react-resizable-panels": "^1.0.9",
|
||||
"react-player": "^2.16.0",
|
||||
"react-resizable-panels": "^2.0.20",
|
||||
"react-timeago": "^7.2.0",
|
||||
"rehype-katex": "^7.0.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"sharp": "^0.33.4",
|
||||
"superjson": "^2.2.1",
|
||||
"tesseract.js": "^5.0.4",
|
||||
"tiktoken": "^1.0.11",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4",
|
||||
"zustand": "^4.5.0"
|
||||
"tesseract.js": "^5.1.0",
|
||||
"tiktoken": "^1.0.15",
|
||||
"turndown": "^7.2.0",
|
||||
"zod": "^3.23.8",
|
||||
"zustand": "^4.5.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cloudflare/puppeteer": "^0.0.5",
|
||||
"@types/node": "^20.11.7",
|
||||
"@cloudflare/puppeteer": "0.0.11",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/nprogress": "^0.2.3",
|
||||
"@types/plantuml-encoder": "^1.4.2",
|
||||
"@types/prismjs": "^1.26.3",
|
||||
"@types/react": "^18.2.48",
|
||||
"@types/prismjs": "^1.26.4",
|
||||
"@types/react": "^18.3.3",
|
||||
"@types/react-beautiful-dnd": "^13.1.8",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@types/react-csv": "^1.1.10",
|
||||
"@types/react-dom": "^18.3.0",
|
||||
"@types/react-katex": "^3.0.4",
|
||||
"@types/react-timeago": "^4.1.7",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-next": "^14.1.0",
|
||||
"prettier": "^3.2.4",
|
||||
"prisma": "^5.8.1",
|
||||
"typescript": "^5.3.3"
|
||||
"@types/turndown": "^5.0.4",
|
||||
"concurrently": "^8.2.2",
|
||||
"electron": "^31.1.0",
|
||||
"electron-builder": "^24.13.3",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-next": "^14.2.4",
|
||||
"prettier": "^3.3.2",
|
||||
"prisma": "^5.16.1",
|
||||
"typescript": "^5.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^20.0.0 || ^18.0.0"
|
||||
},
|
||||
"build": {
|
||||
"appId": "com.yourcompany.yourappname",
|
||||
"productName": "Your App Name",
|
||||
"files": [
|
||||
"electron/**/*",
|
||||
".next/**/*",
|
||||
"public/**/*",
|
||||
"next.config.js"
|
||||
],
|
||||
"directories": {
|
||||
"buildResources": "electron"
|
||||
},
|
||||
"extraMetadata": {
|
||||
"main": "electron/main.js"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,9 @@
|
||||
import * as React from 'react';
|
||||
import Head from 'next/head';
|
||||
import { MyAppProps } from 'next/app';
|
||||
import { Analytics as VercelAnalytics } from '@vercel/analytics/react';
|
||||
import { Analytics as VercelAnalytics } from '@vercel/analytics/next';
|
||||
import { SpeedInsights as VercelSpeedInsights } from '@vercel/speed-insights/next';
|
||||
|
||||
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { apiQuery } from '~/common/util/trpc.client';
|
||||
|
||||
@@ -12,14 +11,17 @@ import 'katex/dist/katex.min.css';
|
||||
import '~/common/styles/CodePrism.css';
|
||||
import '~/common/styles/GithubMarkdown.css';
|
||||
import '~/common/styles/NProgress.css';
|
||||
import '~/common/styles/agi.effects.css';
|
||||
import '~/common/styles/app.styles.css';
|
||||
|
||||
import { ProviderBackendAndNoSSR } from '~/common/providers/ProviderBackendAndNoSSR';
|
||||
import { ProviderBackendCapabilities } from '~/common/providers/ProviderBackendCapabilities';
|
||||
import { ProviderBootstrapLogic } from '~/common/providers/ProviderBootstrapLogic';
|
||||
import { ProviderSingleTab } from '~/common/providers/ProviderSingleTab';
|
||||
import { ProviderSnacks } from '~/common/providers/ProviderSnacks';
|
||||
import { ProviderTRPCQueryClient } from '~/common/providers/ProviderTRPCQueryClient';
|
||||
import { ProviderTRPCQuerySettings } from '~/common/providers/ProviderTRPCQuerySettings';
|
||||
import { ProviderTheming } from '~/common/providers/ProviderTheming';
|
||||
import { hasGoogleAnalytics, OptionalGoogleAnalytics } from '~/common/components/GoogleAnalytics';
|
||||
import { isVercelFromFrontend } from '~/common/util/pwaUtils';
|
||||
|
||||
|
||||
const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) =>
|
||||
@@ -32,20 +34,22 @@ const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) =>
|
||||
|
||||
<ProviderTheming emotionCache={emotionCache}>
|
||||
<ProviderSingleTab>
|
||||
<ProviderBootstrapLogic>
|
||||
<ProviderTRPCQueryClient>
|
||||
<ProviderSnacks>
|
||||
<ProviderBackendAndNoSSR>
|
||||
<ProviderTRPCQuerySettings>
|
||||
<ProviderBackendCapabilities>
|
||||
{/* ^ SSR boundary */}
|
||||
<ProviderBootstrapLogic>
|
||||
<ProviderSnacks>
|
||||
<Component {...pageProps} />
|
||||
</ProviderBackendAndNoSSR>
|
||||
</ProviderSnacks>
|
||||
</ProviderTRPCQueryClient>
|
||||
</ProviderBootstrapLogic>
|
||||
</ProviderSnacks>
|
||||
</ProviderBootstrapLogic>
|
||||
</ProviderBackendCapabilities>
|
||||
</ProviderTRPCQuerySettings>
|
||||
</ProviderSingleTab>
|
||||
</ProviderTheming>
|
||||
|
||||
<VercelAnalytics debug={false} />
|
||||
<VercelSpeedInsights debug={false} sampleRate={1 / 10} />
|
||||
{isVercelFromFrontend && <VercelAnalytics debug={false} />}
|
||||
{isVercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
|
||||
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
|
||||
|
||||
</>;
|
||||
|
||||
|
||||
@@ -24,9 +24,9 @@ export default function MyDocument({ emotionStyleTags }: MyDocumentProps) {
|
||||
<link rel='shortcut icon' href='/favicon.ico' />
|
||||
<link rel='icon' type='image/png' sizes='32x32' href='/icons/favicon-32x32.png' />
|
||||
<link rel='icon' type='image/png' sizes='16x16' href='/icons/favicon-16x16.png' />
|
||||
<link rel='apple-touch-icon' sizes='180x180' href='/icons/apple-touch-icon.png' />
|
||||
<link rel='apple-touch-icon' sizes='180x180' href='/apple-touch-icon.png' />
|
||||
<link rel='manifest' href='/manifest.json' />
|
||||
<meta name='apple-mobile-web-app-capable' content='yes' />
|
||||
<meta name='mobile-web-app-capable' content='yes' />
|
||||
<meta name='apple-mobile-web-app-status-bar-style' content='black' />
|
||||
|
||||
{/* Opengraph */}
|
||||
|
||||
@@ -0,0 +1,10 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppBeam } from '../../src/apps/beam/AppBeam';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function BeamPage() {
|
||||
return withLayout({ type: 'optima' }, <AppBeam />);
|
||||
}
|
||||
@@ -1,16 +1,13 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppChat } from '../src/apps/chat/AppChat';
|
||||
import { useRedirectToNewsOnUpdates } from '../src/apps/news/news.hooks';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function IndexPage() {
|
||||
// show the News page if there are unseen updates
|
||||
useRedirectToNewsOnUpdates();
|
||||
|
||||
// TODO: This Index page will point to the Dashboard (or a landing page) soon
|
||||
// TODO: This Index page will point to the Dashboard (or a landing page)
|
||||
// For now it offers the chat experience, but this will change. #299
|
||||
|
||||
return withLayout({ type: 'optima' }, <AppChat />);
|
||||
|
||||
@@ -0,0 +1,169 @@
|
||||
import * as React from 'react';
|
||||
import { fileSave } from 'browser-fs-access';
|
||||
|
||||
import { Box, Button, Card, CardContent, Typography } from '@mui/joy';
|
||||
import DownloadIcon from '@mui/icons-material/Download';
|
||||
|
||||
import { AppPlaceholder } from '../../src/apps/AppPlaceholder';
|
||||
|
||||
import { getBackendCapabilities } from '~/modules/backend/store-backend-capabilities';
|
||||
import { getPlantUmlServerUrl } from '~/modules/blocks/code/RenderCode';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
// app config
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { ROUTE_APP_CHAT, ROUTE_INDEX } from '~/common/app.routes';
|
||||
|
||||
// apps access
|
||||
import { incrementalNewsVersion, useAppNewsStateStore } from '../../src/apps/news/news.version';
|
||||
|
||||
// capabilities access
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs, useCapabilityTextToImage } from '~/common/components/useCapabilities';
|
||||
|
||||
// stores access
|
||||
import { getLLMsDebugInfo } from '~/modules/llms/store-llms';
|
||||
import { useAppStateStore } from '~/common/state/store-appstate';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useFolderStore } from '~/common/state/store-folders';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
// utils access
|
||||
import { clientHostName, isChromeDesktop, isFirefox, isIPhoneUser, isMacUser, isPwa, isVercelFromFrontend } from '~/common/util/pwaUtils';
|
||||
import { getGA4MeasurementId } from '~/common/components/GoogleAnalytics';
|
||||
import { prettyTimestampForFilenames } from '~/common/util/timeUtils';
|
||||
import { supportsClipboardRead } from '~/common/util/clipboardUtils';
|
||||
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
|
||||
|
||||
|
||||
function DebugCard(props: { title: string, children: React.ReactNode }) {
|
||||
return (
|
||||
<Box>
|
||||
<Typography level='title-lg'>
|
||||
{props.title}
|
||||
</Typography>
|
||||
{props.children}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
function prettifyJsonString(jsonString: string, deleteChars: number, removeDoubleQuotes: boolean, removeTrailComma: boolean): string {
|
||||
return jsonString.split('\n').map(l => {
|
||||
if (deleteChars > 0)
|
||||
l = l.substring(deleteChars);
|
||||
if (removeDoubleQuotes)
|
||||
l = l.replaceAll('\"', '');
|
||||
if (removeTrailComma && l.endsWith(','))
|
||||
l = l.substring(0, l.length - 1);
|
||||
return l;
|
||||
}).join('\n').trim();
|
||||
}
|
||||
|
||||
function DebugJsonCard(props: { title: string, data: any }) {
|
||||
return (
|
||||
<DebugCard title={props.title}>
|
||||
<Typography level='body-sm' sx={{ whiteSpace: 'break-spaces', fontFamily: 'code', fontSize: { xs: 'xs' } }}>
|
||||
{prettifyJsonString(JSON.stringify(props.data, null, 2), 2, true, true)}
|
||||
</Typography>
|
||||
</DebugCard>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
function AppDebug() {
|
||||
|
||||
// state
|
||||
const [saved, setSaved] = React.useState(false);
|
||||
|
||||
// external state
|
||||
const backendCaps = getBackendCapabilities();
|
||||
const chatsCount = useChatStore.getState().conversations?.length;
|
||||
const uxLabsExperiments = Object.entries(useUXLabsStore.getState()).filter(([_k, v]) => v === true).map(([k, _]) => k).join(', ');
|
||||
const { folders, enableFolders } = useFolderStore.getState();
|
||||
const { lastSeenNewsVersion } = useAppNewsStateStore.getState();
|
||||
const { usageCount } = useAppStateStore.getState();
|
||||
|
||||
|
||||
// derived state
|
||||
const cClient = {
|
||||
// isBrowser,
|
||||
isChromeDesktop,
|
||||
isFirefox,
|
||||
isIPhone: isIPhoneUser,
|
||||
isMac: isMacUser,
|
||||
isPWA: isPwa(),
|
||||
supportsClipboardPaste: supportsClipboardRead,
|
||||
supportsScreenCapture,
|
||||
};
|
||||
const cProduct = {
|
||||
capabilities: {
|
||||
mic: useCapabilityBrowserSpeechRecognition(),
|
||||
elevenLabs: useCapabilityElevenLabs(),
|
||||
textToImage: useCapabilityTextToImage(),
|
||||
},
|
||||
models: getLLMsDebugInfo(),
|
||||
state: {
|
||||
chatsCount,
|
||||
foldersCount: folders?.length,
|
||||
foldersEnabled: enableFolders,
|
||||
newsCurrent: incrementalNewsVersion,
|
||||
newsSeen: lastSeenNewsVersion,
|
||||
labsActive: uxLabsExperiments,
|
||||
reloads: usageCount,
|
||||
},
|
||||
};
|
||||
const cBackend = {
|
||||
configuration: backendCaps,
|
||||
deployment: {
|
||||
home: Brand.URIs.Home,
|
||||
hostName: clientHostName(),
|
||||
isVercelFromFrontend,
|
||||
measurementId: getGA4MeasurementId(),
|
||||
plantUmlServerUrl: getPlantUmlServerUrl(),
|
||||
routeIndex: ROUTE_INDEX,
|
||||
routeChat: ROUTE_APP_CHAT,
|
||||
},
|
||||
};
|
||||
|
||||
const handleDownload = async () => {
|
||||
fileSave(
|
||||
new Blob([JSON.stringify({ client: cClient, agi: cProduct, backend: cBackend }, null, 2)], { type: 'application/json' }),
|
||||
{ fileName: `big-agi_debug_${prettyTimestampForFilenames()}.json`, extensions: ['.json'] },
|
||||
)
|
||||
.then(() => setSaved(true))
|
||||
.catch(e => console.error('Error saving debug.json', e));
|
||||
};
|
||||
|
||||
return (
|
||||
<AppPlaceholder title={`${Brand.Title.Common} Debug`}>
|
||||
<Box sx={{ display: 'grid', gap: 3, my: 3 }}>
|
||||
<Button
|
||||
variant={saved ? 'soft' : 'outlined'} color={saved ? 'success' : 'neutral'}
|
||||
onClick={handleDownload}
|
||||
endDecorator={<DownloadIcon />}
|
||||
sx={{
|
||||
backgroundColor: saved ? undefined : 'background.surface',
|
||||
boxShadow: 'sm',
|
||||
placeSelf: 'start',
|
||||
minWidth: 260,
|
||||
}}
|
||||
>
|
||||
Download debug JSON
|
||||
</Button>
|
||||
<Card>
|
||||
<CardContent sx={{ display: 'grid', gap: 3 }}>
|
||||
<DebugJsonCard title='Client' data={cClient} />
|
||||
<DebugJsonCard title='AGI' data={cProduct} />
|
||||
<DebugJsonCard title='Backend' data={cBackend} />
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Box>
|
||||
</AppPlaceholder>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
export default function DebugPage() {
|
||||
return withLayout({ type: 'plain' }, <AppDebug />);
|
||||
};
|
||||
@@ -13,7 +13,7 @@ import { withLayout } from '~/common/layout/withLayout';
|
||||
function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
|
||||
// external state
|
||||
const { data, isError, error, isLoading } = apiQuery.backend.exchangeOpenRouterKey.useQuery({ code: props.openRouterCode || '' }, {
|
||||
const { data, isError, error, isPending } = apiQuery.backend.exchangeOpenRouterKey.useQuery({ code: props.openRouterCode || '' }, {
|
||||
enabled: !!props.openRouterCode,
|
||||
refetchOnWindowFocus: false,
|
||||
staleTime: Infinity,
|
||||
@@ -56,7 +56,7 @@ function CallbackOpenRouterPage(props: { openRouterCode: string | undefined }) {
|
||||
Welcome Back
|
||||
</Typography>
|
||||
|
||||
{isLoading && <Typography level='body-sm'>Loading...</Typography>}
|
||||
{isPending && <Typography level='body-sm'>Loading...</Typography>}
|
||||
|
||||
{isErrorInput && <InlineError error='There was an issue retrieving the code from OpenRouter.' />}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppLinkChat } from '../../../src/apps/link/AppLinkChat';
|
||||
import { AppLinkChat } from '../../../src/apps/link-chat/AppLinkChat';
|
||||
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
@@ -77,9 +77,12 @@ function AppShareTarget() {
|
||||
setIsDownloading(true);
|
||||
callBrowseFetchPage(intentURL)
|
||||
.then(page => {
|
||||
if (page.stopReason !== 'error')
|
||||
queueComposerTextAndLaunchApp('\n\n```' + intentURL + '\n' + page.content + '\n```\n');
|
||||
else
|
||||
if (page.stopReason !== 'error') {
|
||||
let pageContent = page.content.markdown || page.content.text || page.content.html || '';
|
||||
if (pageContent)
|
||||
pageContent = '\n\n```' + intentURL + '\n' + pageContent + '\n```\n';
|
||||
queueComposerTextAndLaunchApp(pageContent);
|
||||
} else
|
||||
setErrorMessage('Could not read any data' + page.error ? ': ' + page.error : '');
|
||||
})
|
||||
.catch(error => setErrorMessage(error?.message || error || 'Unknown error'))
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppNews } from '../src/apps/news/AppNews';
|
||||
import { useMarkNewsAsSeen } from '../src/apps/news/news.hooks';
|
||||
import { markNewsAsSeen } from '../src/apps/news/news.version';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function NewsPage() {
|
||||
// 'touch' the last seen news version
|
||||
useMarkNewsAsSeen();
|
||||
React.useEffect(() => markNewsAsSeen(), []);
|
||||
|
||||
return withLayout({ type: 'optima', suspendAutoModelsSetup: true }, <AppNews />);
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { AppTokens } from '../src/apps/tokens/AppTokens';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function PersonasPage() {
|
||||
return withLayout({ type: 'optima' }, <AppTokens />);
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box } from '@mui/joy';
|
||||
|
||||
// import { AppWorkspace } from '../src/apps/personas/AppWorkspace';
|
||||
|
||||
import { withLayout } from '~/common/layout/withLayout';
|
||||
|
||||
|
||||
export default function PersonasPage() {
|
||||
return withLayout({ type: 'optima' }, <Box />);
|
||||
}
|
||||
|
Before Width: | Height: | Size: 7.3 KiB After Width: | Height: | Size: 7.3 KiB |
|
After Width: | Height: | Size: 254 KiB |
|
After Width: | Height: | Size: 270 KiB |
|
After Width: | Height: | Size: 270 KiB |
|
After Width: | Height: | Size: 348 KiB |
|
After Width: | Height: | Size: 248 KiB |
|
After Width: | Height: | Size: 180 KiB |
|
After Width: | Height: | Size: 191 KiB |
|
After Width: | Height: | Size: 11 KiB |
@@ -3,9 +3,16 @@
|
||||
"short_name": "big-AGI",
|
||||
"theme_color": "#32383E",
|
||||
"background_color": "#9FA6AD",
|
||||
"description": "Personal AGI App",
|
||||
"description": "Your Generative AI Suite",
|
||||
"categories": [
|
||||
"productivity",
|
||||
"AI",
|
||||
"tool",
|
||||
"utilities"
|
||||
],
|
||||
"display": "standalone",
|
||||
"start_url": "/",
|
||||
"start_url": "/?source=pwa",
|
||||
"scope": "/",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icons/icon-192x192.png",
|
||||
@@ -24,6 +31,17 @@
|
||||
"type": "image/png"
|
||||
}
|
||||
],
|
||||
"file_handlers": [
|
||||
{
|
||||
"action": "/link/share_target",
|
||||
"accept": {
|
||||
"application/big-agi": [
|
||||
".agi",
|
||||
".agi.json"
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"share_target": {
|
||||
"action": "/link/share_target",
|
||||
"method": "GET",
|
||||
@@ -33,5 +51,12 @@
|
||||
"text": "text",
|
||||
"url": "url"
|
||||
}
|
||||
}
|
||||
},
|
||||
"shortcuts": [
|
||||
{
|
||||
"name": "Call",
|
||||
"url": "/call",
|
||||
"description": "Call a Persona"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -9,13 +9,17 @@ import { useRouterRoute } from '~/common/app.routes';
|
||||
/**
|
||||
* https://github.com/enricoros/big-AGI/issues/299
|
||||
*/
|
||||
export function AppPlaceholder(props: { text?: string }) {
|
||||
export function AppPlaceholder(props: {
|
||||
title?: string | null,
|
||||
text?: React.ReactNode,
|
||||
children?: React.ReactNode,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const route = useRouterRoute();
|
||||
|
||||
// derived state
|
||||
const placeholderAppName = capitalizeFirstLetter(route.replace('/', '') || 'Home');
|
||||
const placeholderAppName = props.title || capitalizeFirstLetter(route.replace('/', '') || 'Home');
|
||||
|
||||
return (
|
||||
<Box sx={{
|
||||
@@ -25,21 +29,27 @@ export function AppPlaceholder(props: { text?: string }) {
|
||||
border: '1px solid blue',
|
||||
}}>
|
||||
|
||||
<Box sx={{
|
||||
my: 'auto',
|
||||
display: 'flex', flexDirection: 'column', alignItems: 'center',
|
||||
gap: 4,
|
||||
border: '1px solid red',
|
||||
}}>
|
||||
{(props.title !== null || !!props.text) && (
|
||||
<Box sx={{
|
||||
my: 'auto',
|
||||
display: 'flex', flexDirection: 'column', alignItems: 'center',
|
||||
gap: 4,
|
||||
border: '1px solid red',
|
||||
}}>
|
||||
|
||||
<Typography level='h1'>
|
||||
{placeholderAppName}
|
||||
</Typography>
|
||||
<Typography>
|
||||
{props.text || 'Intelligent applications to help you learn, think, and do'}
|
||||
</Typography>
|
||||
<Typography level='h1'>
|
||||
{placeholderAppName}
|
||||
</Typography>
|
||||
{!!props.text && (
|
||||
<Typography>
|
||||
{props.text}
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{props.children}
|
||||
|
||||
</Box>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
import * as React from 'react';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import { Box, Button, Typography } from '@mui/joy';
|
||||
|
||||
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
|
||||
import { BeamView } from '~/modules/beam/BeamView';
|
||||
import { createBeamVanillaStore } from '~/modules/beam/store-beam-vanilla';
|
||||
import { useModelsStore } from '~/modules/llms/store-llms';
|
||||
|
||||
import { createDConversation, DConversation } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageTextContent, DMessage } from '~/common/stores/chat/chat.message';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
|
||||
|
||||
function initTestConversation(): DConversation {
|
||||
const conversation = createDConversation();
|
||||
conversation.messages.push(createDMessageTextContent('system', 'You are a helpful assistant.')); // Beam Test - seed1
|
||||
conversation.messages.push(createDMessageTextContent('user', 'Hello, who are you? (please expand...)')); // Beam Test - seed2
|
||||
return conversation;
|
||||
}
|
||||
|
||||
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi = createBeamVanillaStore()): BeamStoreApi {
|
||||
beamStore.getState().open(messages, useModelsStore.getState().chatLLMId, (content) => alert(content));
|
||||
return beamStore;
|
||||
}
|
||||
|
||||
|
||||
export function AppBeam() {
|
||||
|
||||
// state
|
||||
const [showDebug, setShowDebug] = React.useState(false);
|
||||
|
||||
const [conversation, setConversation] = React.useState<DConversation>(() => initTestConversation());
|
||||
const [beamStoreApi] = React.useState(() => createBeamVanillaStore());
|
||||
|
||||
|
||||
// reinit the beam store if the conversation changes
|
||||
React.useEffect(() => {
|
||||
initTestBeamStore(conversation.messages, beamStoreApi);
|
||||
}, [beamStoreApi, conversation]);
|
||||
|
||||
|
||||
// external state
|
||||
const isMobile = useIsMobile();
|
||||
const { isOpen, beamState } = useBeamStore(beamStoreApi, useShallow(state => {
|
||||
return {
|
||||
isOpen: state.isOpen,
|
||||
beamState: showDebug ? state : null,
|
||||
};
|
||||
}));
|
||||
|
||||
|
||||
const handleClose = React.useCallback(() => {
|
||||
beamStoreApi.getState().terminateKeepingSettings();
|
||||
}, [beamStoreApi]);
|
||||
|
||||
|
||||
// layout
|
||||
usePluggableOptimaLayout(null, React.useMemo(() => <>
|
||||
{/* button to toggle debug info */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={() => setShowDebug(on => !on)}>
|
||||
{showDebug ? 'Hide' : 'Show'} debug
|
||||
</Button>
|
||||
|
||||
{/* 'open' */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={() => setConversation(initTestConversation())}>
|
||||
.open
|
||||
</Button>
|
||||
|
||||
{/* 'close' */}
|
||||
<Button size='sm' variant='plain' color='neutral' onClick={handleClose}>
|
||||
.close
|
||||
</Button>
|
||||
</>, [handleClose, showDebug]), null, 'AppBeam');
|
||||
|
||||
|
||||
return (
|
||||
<Box sx={{ flexGrow: 1, overflowY: 'auto', position: 'relative' }}>
|
||||
|
||||
{isOpen && (
|
||||
<BeamView
|
||||
beamStore={beamStoreApi}
|
||||
isMobile={isMobile}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showDebug && (
|
||||
<Typography level='body-xs' sx={{
|
||||
whiteSpace: 'pre',
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
zIndex: 1 /* debug on top of BeamView */,
|
||||
backdropFilter: 'blur(4px)',
|
||||
padding: '1rem',
|
||||
}}>
|
||||
{JSON.stringify(beamState, null, 2)
|
||||
// add an extra newline between first level properties (space, space, double quote) to make it more readable
|
||||
.split('\n').map(line => line.replace(/^\s\s"/g, '\n ')).join('\n')}
|
||||
</Typography>
|
||||
)}
|
||||
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -2,7 +2,7 @@ import * as React from 'react';
|
||||
|
||||
import { Container, Sheet } from '@mui/joy';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import type { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
|
||||
import { CallWizard } from './CallWizard';
|
||||
@@ -65,6 +65,8 @@ export function AppCall() {
|
||||
display: 'flex', flexDirection: 'column', alignItems: 'center',
|
||||
justifyContent: hasIntent ? 'space-evenly' : undefined,
|
||||
gap: hasIntent ? 1 : undefined,
|
||||
// shall force the contacts or telephone to stay within the container
|
||||
overflowY: hasIntent ? 'hidden' : undefined,
|
||||
}}>
|
||||
|
||||
{!hasIntent ? (
|
||||
|
||||
@@ -1,60 +1,22 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, Button, Card, CardContent, IconButton, ListItemDecorator, Typography } from '@mui/joy';
|
||||
import ArrowForwardIcon from '@mui/icons-material/ArrowForward';
|
||||
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
|
||||
import ChatIcon from '@mui/icons-material/Chat';
|
||||
import CheckIcon from '@mui/icons-material/Check';
|
||||
import CloseIcon from '@mui/icons-material/Close';
|
||||
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
|
||||
import WarningIcon from '@mui/icons-material/Warning';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
|
||||
|
||||
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { cssRainbowColorKeyframes } from '~/common/app.theme';
|
||||
import { animationColorRainbow } from '~/common/util/animUtils';
|
||||
import { navigateBack } from '~/common/app.routes';
|
||||
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useChatStore } from '~/common/state/store-chats';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useUICounter } from '~/common/state/store-ui';
|
||||
|
||||
|
||||
/*export const cssRainbowBackgroundKeyframes = keyframes`
|
||||
100%, 0% {
|
||||
background-color: rgb(128, 0, 0);
|
||||
}
|
||||
8% {
|
||||
background-color: rgb(102, 51, 0);
|
||||
}
|
||||
16% {
|
||||
background-color: rgb(64, 64, 0);
|
||||
}
|
||||
25% {
|
||||
background-color: rgb(38, 76, 0);
|
||||
}
|
||||
33% {
|
||||
background-color: rgb(0, 89, 0);
|
||||
}
|
||||
41% {
|
||||
background-color: rgb(0, 76, 41);
|
||||
}
|
||||
50% {
|
||||
background-color: rgb(0, 64, 64);
|
||||
}
|
||||
58% {
|
||||
background-color: rgb(0, 51, 102);
|
||||
}
|
||||
66% {
|
||||
background-color: rgb(0, 0, 128);
|
||||
}
|
||||
75% {
|
||||
background-color: rgb(63, 0, 128);
|
||||
}
|
||||
83% {
|
||||
background-color: rgb(76, 0, 76);
|
||||
}
|
||||
91% {
|
||||
background-color: rgb(102, 0, 51);
|
||||
}`;*/
|
||||
|
||||
function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: string, button?: React.JSX.Element }) {
|
||||
return (
|
||||
<Card sx={{ width: '100%' }}>
|
||||
@@ -67,7 +29,7 @@ function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: s
|
||||
{props.button}
|
||||
</Typography>
|
||||
<ListItemDecorator>
|
||||
{props.hasIssue ? <WarningIcon color='warning' /> : <CheckIcon color='success' />}
|
||||
{props.hasIssue ? <WarningRoundedIcon color='warning' /> : <CheckRoundedIcon color='success' />}
|
||||
</ListItemDecorator>
|
||||
</CardContent>
|
||||
</Card>
|
||||
@@ -122,9 +84,9 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
<Box sx={{ flexGrow: 0.5 }} />
|
||||
|
||||
<Typography level='title-lg' sx={{ fontSize: '3rem', fontWeight: 200, textAlign: 'center' }}>
|
||||
<Typography level='title-lg' sx={{ fontSize: '3rem', fontWeight: 'sm', textAlign: 'center' }}>
|
||||
Welcome to<br />
|
||||
<Box component='span' sx={{ animation: `${cssRainbowColorKeyframes} 15s linear infinite` }}>
|
||||
<Box component='span' sx={{ animation: `${animationColorRainbow} 15s linear infinite` }}>
|
||||
your first call
|
||||
</Box>
|
||||
</Typography>
|
||||
@@ -167,7 +129,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
|
||||
{/* Text to Speech status */}
|
||||
<StatusCard
|
||||
icon={<RecordVoiceOverIcon />}
|
||||
icon={<RecordVoiceOverTwoToneIcon />}
|
||||
text={
|
||||
(synthesis.mayWork ? 'Voice synthesis should be ready.' : 'There might be an issue with ElevenLabs voice synthesis.')
|
||||
+ (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
|
||||
@@ -208,7 +170,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
|
||||
// boxShadow: allGood ? 'md' : 'none',
|
||||
}}
|
||||
>
|
||||
{allGood ? <ArrowForwardIcon sx={{ fontSize: '1.5em' }} /> : <CloseIcon sx={{ fontSize: '1.5em' }} />}
|
||||
{allGood ? <ArrowForwardRoundedIcon sx={{ fontSize: '1.5em' }} /> : <CloseRoundedIcon sx={{ fontSize: '1.5em' }} />}
|
||||
</IconButton>
|
||||
</Box>
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { keyframes } from '@emotion/react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Avatar, Box, Card, CardContent, Chip, IconButton, Link as MuiLink, ListDivider, MenuItem, Sheet, Switch, Typography } from '@mui/joy';
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
|
||||
import { DConversation, DConversationId, conversationTitle } from '~/common/stores/chat/chat.conversation';
|
||||
import { GitHubProjectIssueCard } from '~/common/components/GitHubProjectIssueCard';
|
||||
import { conversationTitle, DConversation, DConversationId, useChatStore } from '~/common/state/store-chats';
|
||||
import { animationShadowRingLimey } from '~/common/util/animUtils';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
@@ -19,27 +19,6 @@ import { useAppCallStore } from './state/store-app-call';
|
||||
const COLLAPSED_COUNT = 2;
|
||||
|
||||
|
||||
export const niceShadowKeyframes = keyframes`
|
||||
100%, 0% {
|
||||
//background-color: rgb(102, 0, 51);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(183, 255, 0);
|
||||
}
|
||||
25% {
|
||||
//background-color: rgb(76, 0, 76);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(255, 251, 0);
|
||||
//scale: 1.2;
|
||||
}
|
||||
50% {
|
||||
//background-color: rgb(63, 0, 128);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgba(0, 255, 81);
|
||||
//scale: 0.8;
|
||||
}
|
||||
75% {
|
||||
//background-color: rgb(0, 0, 128);
|
||||
box-shadow: 1px 1px 0 white, 2px 2px 12px rgb(255, 153, 0);
|
||||
}`;
|
||||
|
||||
|
||||
const ContactCardAvatar = (props: { size: string, symbol?: string, imageUrl?: string, onClick?: () => void, sx?: SxProps }) =>
|
||||
<Avatar
|
||||
// variant='outlined'
|
||||
@@ -81,7 +60,7 @@ const ContactCardConversationCall = (props: { conversation: DConversation, onCon
|
||||
function CallContactCard(props: {
|
||||
persona: MockPersona,
|
||||
callGrayUI: boolean,
|
||||
conversations: DConversation[],
|
||||
conversations: Readonly<DConversation[]>,
|
||||
setCallIntent: (intent: AppCallIntent) => void,
|
||||
}) {
|
||||
|
||||
@@ -125,7 +104,6 @@ function CallContactCard(props: {
|
||||
sx={{
|
||||
mx: 'auto',
|
||||
mt: '-2.5rem',
|
||||
zIndex: 1,
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -211,7 +189,7 @@ function CallContactCard(props: {
|
||||
|
||||
|
||||
function useConversationsByPersona() {
|
||||
const conversations = useChatStore(state => state.conversations, shallow);
|
||||
const conversations = useChatStore(state => state.conversations);
|
||||
|
||||
return React.useMemo(() => {
|
||||
// group by personaId
|
||||
@@ -282,7 +260,7 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
|
||||
borderRadius: '50%',
|
||||
pointerEvents: 'none',
|
||||
backgroundColor: 'background.popup',
|
||||
animation: `${niceShadowKeyframes} 5s infinite`,
|
||||
animation: `${animationShadowRingLimey} 5s infinite`,
|
||||
}}>
|
||||
<CallIcon />
|
||||
</IconButton>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import { Box, Card, ListDivider, ListItemDecorator, MenuItem, Switch, Typography } from '@mui/joy';
|
||||
import ArrowBackIcon from '@mui/icons-material/ArrowBack';
|
||||
@@ -7,20 +7,24 @@ import CallEndIcon from '@mui/icons-material/CallEnd';
|
||||
import CallIcon from '@mui/icons-material/Call';
|
||||
import MicIcon from '@mui/icons-material/Mic';
|
||||
import MicNoneIcon from '@mui/icons-material/MicNone';
|
||||
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
|
||||
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
|
||||
|
||||
import { useChatLLMDropdown } from '../chat/components/applayout/useLLMDropdown';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { useChatLLMDropdown } from '../chat/components/layout-bar/useLLMDropdown';
|
||||
|
||||
import { EXPERIMENTAL_speakTextStream } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { SystemPurposeId, SystemPurposes } from '../../data';
|
||||
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
|
||||
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
|
||||
|
||||
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
|
||||
import { Link } from '~/common/components/Link';
|
||||
import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition';
|
||||
import { conversationTitle, createDMessage, DMessage, useChatStore } from '~/common/state/store-chats';
|
||||
import { conversationTitle } from '~/common/stores/chat/chat.conversation';
|
||||
import { createDMessageTextContent, DMessage, messageFragmentsReduceText, messageSingleTextOrThrow } from '~/common/stores/chat/chat.message';
|
||||
import { launchAppChat, navigateToIndex } from '~/common/app.routes';
|
||||
import { playSoundUrl, usePlaySoundUrl } from '~/common/util/audioUtils';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
|
||||
import type { AppCallIntent } from './AppCall';
|
||||
@@ -55,7 +59,7 @@ function CallMenuItems(props: {
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem onClick={handleChangeVoiceToggle}>
|
||||
<ListItemDecorator><RecordVoiceOverIcon /></ListItemDecorator>
|
||||
<ListItemDecorator><RecordVoiceOverTwoToneIcon /></ListItemDecorator>
|
||||
Change Voice
|
||||
<Switch checked={props.override} onChange={handleChangeVoiceToggle} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
@@ -97,7 +101,7 @@ export function Telephone(props: {
|
||||
|
||||
// external state
|
||||
const { chatLLMId, chatLLMDropdown } = useChatLLMDropdown();
|
||||
const { chatTitle, reMessages } = useChatStore(state => {
|
||||
const { chatTitle, reMessages } = useChatStore(useShallow(state => {
|
||||
const conversation = props.callIntent.conversationId
|
||||
? state.conversations.find(conversation => conversation.id === props.callIntent.conversationId) ?? null
|
||||
: null;
|
||||
@@ -105,7 +109,7 @@ export function Telephone(props: {
|
||||
chatTitle: conversation ? conversationTitle(conversation) : null,
|
||||
reMessages: conversation ? conversation.messages : null,
|
||||
};
|
||||
}, shallow);
|
||||
}));
|
||||
const persona = SystemPurposes[props.callIntent.personaId as SystemPurposeId] ?? undefined;
|
||||
const personaCallStarters = persona?.call?.starters ?? undefined;
|
||||
const personaVoiceId = overridePersonaVoice ? undefined : (persona?.voices?.elevenLabs?.voiceId ?? undefined);
|
||||
@@ -116,9 +120,9 @@ export function Telephone(props: {
|
||||
const onSpeechResultCallback = React.useCallback((result: SpeechResult) => {
|
||||
setSpeechInterim(result.done ? null : { ...result });
|
||||
if (result.done) {
|
||||
const transcribed = result.transcript.trim();
|
||||
if (transcribed.length >= 1)
|
||||
setCallMessages(messages => [...messages, createDMessage('user', transcribed)]);
|
||||
const userSpeechTranscribed = result.transcript.trim();
|
||||
if (userSpeechTranscribed.length >= 1)
|
||||
setCallMessages(messages => [...messages, createDMessageTextContent('user', userSpeechTranscribed)]); // [state] append user:speech
|
||||
}
|
||||
}, []);
|
||||
const { isSpeechEnabled, isRecording, isRecordingAudio, isRecordingSpeech, startRecording, stopRecording, toggleRecording } = useSpeechRecognition(onSpeechResultCallback, 1000);
|
||||
@@ -134,11 +138,11 @@ export function Telephone(props: {
|
||||
|
||||
// pickup / hangup
|
||||
React.useEffect(() => {
|
||||
!isRinging && playSoundUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
!isRinging && AudioPlayer.playUrl(isConnected ? '/sounds/chat-begin.mp3' : '/sounds/chat-end.mp3');
|
||||
}, [isRinging, isConnected]);
|
||||
|
||||
// ringtone
|
||||
usePlaySoundUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
AudioPlayer.usePlayUrl(isRinging ? '/sounds/chat-ringtone.mp3' : null, 300, 2800 * 2);
|
||||
|
||||
|
||||
/// CONNECTED
|
||||
@@ -167,7 +171,8 @@ export function Telephone(props: {
|
||||
const phoneMessages = personaCallStarters || ['Hello?', 'Hey!'];
|
||||
const firstMessage = phoneMessages[Math.floor(Math.random() * phoneMessages.length)];
|
||||
|
||||
setCallMessages([createDMessage('assistant', firstMessage)]);
|
||||
setCallMessages([createDMessageTextContent('assistant', firstMessage)]); // [state] set assistant:hello message
|
||||
|
||||
// fire/forget
|
||||
void EXPERIMENTAL_speakTextStream(firstMessage, personaVoiceId);
|
||||
|
||||
@@ -177,22 +182,30 @@ export function Telephone(props: {
|
||||
// [E] persona streaming response - upon new user message
|
||||
React.useEffect(() => {
|
||||
// only act when we have a new user message
|
||||
if (!isConnected || callMessages.length < 1 || callMessages[callMessages.length - 1].role !== 'user')
|
||||
if (!isConnected || callMessages.length < 1)
|
||||
return;
|
||||
switch (callMessages[callMessages.length - 1].text) {
|
||||
|
||||
// Voice commands
|
||||
const lastUserMessage = callMessages[callMessages.length - 1];
|
||||
if (lastUserMessage.role !== 'user')
|
||||
return;
|
||||
switch (messageFragmentsReduceText(lastUserMessage.fragments)) {
|
||||
// do not respond
|
||||
case 'Stop.':
|
||||
return;
|
||||
|
||||
// command: close the call
|
||||
case 'Goodbye.':
|
||||
setStage('ended');
|
||||
setTimeout(launchAppChat, 2000);
|
||||
return;
|
||||
|
||||
// command: regenerate answer
|
||||
case 'Retry.':
|
||||
case 'Try again.':
|
||||
setCallMessages(messages => messages.slice(0, messages.length - 2));
|
||||
return;
|
||||
|
||||
// command: restart chat
|
||||
case 'Restart.':
|
||||
setCallMessages([]);
|
||||
@@ -204,7 +217,7 @@ export function Telephone(props: {
|
||||
|
||||
// temp fix: when the chat has no messages, only assume a single system message
|
||||
const chatMessages: { role: VChatMessageIn['role'], text: string }[] = (reMessages && reMessages.length > 0)
|
||||
? reMessages
|
||||
? reMessages.map(message => ({ role: message.role, text: messageSingleTextOrThrow(message) }))
|
||||
: personaSystemMessage
|
||||
? [{ role: 'system', text: personaSystemMessage }]
|
||||
: [];
|
||||
@@ -215,15 +228,16 @@ export function Telephone(props: {
|
||||
{ role: 'system', content: 'You are having a phone call. Your response style is brief and to the point, and according to your personality, defined below.' },
|
||||
...chatMessages.map(message => ({ role: message.role, content: message.text })),
|
||||
{ role: 'system', content: 'You are now on the phone call related to the chat above. Respect your personality and answer with short, friendly and accurate thoughtful lines.' },
|
||||
...callMessages.map(message => ({ role: message.role, content: message.text })),
|
||||
...callMessages.map(message => ({ role: message.role, content: messageSingleTextOrThrow(message) })),
|
||||
];
|
||||
|
||||
// perform completion
|
||||
responseAbortController.current = new AbortController();
|
||||
let finalText = '';
|
||||
let error: any | null = null;
|
||||
llmStreamingChatGenerate(chatLLMId, callPrompt, null, null, responseAbortController.current.signal, (updatedMessage: Partial<DMessage>) => {
|
||||
const text = updatedMessage.text?.trim();
|
||||
setPersonaTextInterim('💭...');
|
||||
llmStreamingChatGenerate(chatLLMId, callPrompt, 'call', callMessages[0].id, null, null, responseAbortController.current.signal, ({ textSoFar }) => {
|
||||
const text = textSoFar?.trim();
|
||||
if (text) {
|
||||
finalText = text;
|
||||
setPersonaTextInterim(text);
|
||||
@@ -234,7 +248,7 @@ export function Telephone(props: {
|
||||
}).finally(() => {
|
||||
setPersonaTextInterim(null);
|
||||
if (finalText || error)
|
||||
setCallMessages(messages => [...messages, createDMessage('assistant', finalText + (error ? ` (ERROR: ${error.message || error.toString()})` : ''))]);
|
||||
setCallMessages(messages => [...messages, createDMessageTextContent('assistant', finalText + (error ? ` (ERROR: ${error.message || error.toString()})` : ''))]); // [state] append assistant:call_response
|
||||
// fire/forget
|
||||
if (finalText?.length >= 1)
|
||||
void EXPERIMENTAL_speakTextStream(finalText, personaVoiceId);
|
||||
@@ -313,52 +327,62 @@ export function Telephone(props: {
|
||||
|
||||
{/* Live Transcript, w/ streaming messages, audio indication, etc. */}
|
||||
{(isConnected || isEnded) && (
|
||||
<Card variant='soft' sx={{
|
||||
<Card variant='outlined' sx={{
|
||||
flexGrow: 1,
|
||||
maxHeight: '24%',
|
||||
minHeight: '15%',
|
||||
maxHeight: '28%',
|
||||
minHeight: '20%',
|
||||
width: '100%',
|
||||
|
||||
// style
|
||||
backgroundColor: 'background.surface',
|
||||
// backgroundColor: 'background.surface',
|
||||
borderRadius: 'lg',
|
||||
boxShadow: 'sm',
|
||||
// boxShadow: 'sm',
|
||||
|
||||
// children
|
||||
display: 'flex', flexDirection: 'column-reverse',
|
||||
overflow: 'auto',
|
||||
padding: 0, // move this to the ScrollToBottom component
|
||||
}}>
|
||||
|
||||
{/* Messages in reverse order, for auto-scroll from the bottom */}
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column-reverse', gap: 1 }}>
|
||||
<ScrollToBottom stickToBottomInitial>
|
||||
|
||||
{/* Listening... */}
|
||||
{isRecording && (
|
||||
<CallMessage
|
||||
text={<>{speechInterim?.transcript ? speechInterim.transcript + ' ' : ''}<i>{speechInterim?.interimTranscript}</i></>}
|
||||
variant={isRecordingSpeech ? 'solid' : 'outlined'}
|
||||
role='user'
|
||||
/>
|
||||
)}
|
||||
<Box sx={{ minHeight: '100%', p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
|
||||
{/* Persona streaming text... */}
|
||||
{!!personaTextInterim && (
|
||||
<CallMessage
|
||||
text={personaTextInterim}
|
||||
variant='solid' color='neutral'
|
||||
role='assistant'
|
||||
/>
|
||||
)}
|
||||
{/* Call Messages [] */}
|
||||
{callMessages.map((message) =>
|
||||
<CallMessage
|
||||
key={message.id}
|
||||
text={messageSingleTextOrThrow(message)}
|
||||
variant={message.role === 'assistant' ? 'solid' : 'soft'}
|
||||
color={message.role === 'assistant' ? 'neutral' : 'primary'}
|
||||
role={message.role}
|
||||
/>,
|
||||
)}
|
||||
|
||||
{/* Messages (last 6 messages, in reverse order) */}
|
||||
{callMessages.slice(-6).reverse().map((message) =>
|
||||
<CallMessage
|
||||
key={message.id}
|
||||
text={message.text}
|
||||
variant={message.role === 'assistant' ? 'solid' : 'soft'} color='neutral'
|
||||
role={message.role} />,
|
||||
)}
|
||||
</Box>
|
||||
{/* Persona streaming text... */}
|
||||
{!!personaTextInterim && (
|
||||
<CallMessage
|
||||
text={personaTextInterim}
|
||||
variant='outlined'
|
||||
color='neutral'
|
||||
role='assistant'
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Listening... */}
|
||||
{isRecording && (
|
||||
<CallMessage
|
||||
text={<>{speechInterim?.transcript.trim() || null}{speechInterim?.interimTranscript.trim() ? <i> {speechInterim.interimTranscript}</i> : null}</>}
|
||||
variant={(isRecordingSpeech || !!speechInterim?.transcript) ? 'soft' : 'outlined'}
|
||||
color='primary'
|
||||
role='user'
|
||||
/>
|
||||
)}
|
||||
|
||||
</Box>
|
||||
|
||||
{/* Visibility and actions are handled via Context */}
|
||||
<ScrollToBottomButton />
|
||||
|
||||
</ScrollToBottom>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
@@ -371,11 +395,15 @@ export function Telephone(props: {
|
||||
|
||||
{/* [Calling] Hang / PTT (mute not enabled yet) */}
|
||||
{isConnected && <CallButton Icon={CallEndIcon} text='Hang up' color='danger' variant='soft' onClick={handleCallStop} />}
|
||||
{isConnected && (pushToTalk
|
||||
? <CallButton Icon={MicIcon} onClick={toggleRecording}
|
||||
text={isRecordingSpeech ? 'Listening...' : isRecording ? 'Listening' : 'Push To Talk'}
|
||||
variant={isRecordingSpeech ? 'solid' : isRecording ? 'soft' : 'outlined'} sx={!isRecording ? { backgroundColor: 'background.surface' } : undefined} />
|
||||
: null
|
||||
{isConnected && (pushToTalk ? (
|
||||
<CallButton
|
||||
Icon={MicIcon} onClick={toggleRecording}
|
||||
text={isRecordingSpeech ? 'Listening...' : isRecording ? 'Listening' : 'Push To Talk'}
|
||||
variant={isRecordingSpeech ? 'solid' : isRecording ? 'soft' : 'outlined'}
|
||||
color='primary'
|
||||
sx={!isRecording ? { backgroundColor: 'background.surface' } : undefined}
|
||||
/>
|
||||
) : null
|
||||
// <CallButton disabled={true} Icon={MicOffIcon} onClick={() => setMicMuted(muted => !muted)}
|
||||
// text={micMuted ? 'Muted' : 'Mute'}
|
||||
// color={micMuted ? 'warning' : undefined} variant={micMuted ? 'solid' : 'outlined'} />
|
||||
|
||||
@@ -1,19 +1,8 @@
|
||||
import * as React from 'react';
|
||||
import { keyframes } from '@emotion/react';
|
||||
|
||||
import { Avatar, Box } from '@mui/joy';
|
||||
|
||||
|
||||
const cssScaleKeyframes = keyframes`
|
||||
0% {
|
||||
transform: scale(1);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.2);
|
||||
}
|
||||
100% {
|
||||
transform: scale(1);
|
||||
}`;
|
||||
import { animationScalePulse } from '~/common/util/animUtils';
|
||||
|
||||
|
||||
export function CallAvatar(props: { symbol: string, imageUrl?: string, isRinging?: boolean, onClick: () => void }) {
|
||||
@@ -34,7 +23,7 @@ export function CallAvatar(props: { symbol: string, imageUrl?: string, isRinging
|
||||
<Box
|
||||
sx={{
|
||||
...(props.isRinging
|
||||
? { animation: `${cssScaleKeyframes} 1.4s ease-in-out infinite` }
|
||||
? { animation: `${animationScalePulse} 1.4s ease-in-out infinite` }
|
||||
: {}),
|
||||
}}
|
||||
>
|
||||
|
||||
@@ -12,16 +12,22 @@ export function CallMessage(props: {
|
||||
role: VChatMessageIn['role'],
|
||||
sx?: SxProps,
|
||||
}) {
|
||||
const isUserMessage = props.role === 'user';
|
||||
return (
|
||||
<Chip
|
||||
color={props.color} variant={props.variant}
|
||||
sx={{
|
||||
alignSelf: props.role === 'user' ? 'end' : 'start',
|
||||
alignSelf: isUserMessage ? 'end' : 'start',
|
||||
whiteSpace: 'break-spaces',
|
||||
borderRadius: 'lg',
|
||||
mt: 'auto',
|
||||
...(isUserMessage ? {
|
||||
borderBottomRightRadius: 0,
|
||||
} : {
|
||||
borderBottomLeftRadius: 0,
|
||||
}),
|
||||
// boxShadow: 'md',
|
||||
py: 1,
|
||||
px: 1.5,
|
||||
...(props.sx || {}),
|
||||
}}
|
||||
>
|
||||
|
||||
@@ -1,293 +1,299 @@
|
||||
import * as React from 'react';
|
||||
import ForkRightIcon from '@mui/icons-material/ForkRight';
|
||||
import { Panel, PanelGroup } from 'react-resizable-panels';
|
||||
import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { useTheme } from '@mui/joy';
|
||||
|
||||
import { DEV_MODE_SETTINGS } from '../settings-modal/UxLabsSettings';
|
||||
import { DiagramConfig, DiagramsModal } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
import { FlattenerModal } from '~/modules/aifn/flatten/FlattenerModal';
|
||||
import { TradeConfig, TradeModal } from '~/modules/trade/TradeModal';
|
||||
import { downloadConversation, openAndLoadConversations } from '~/modules/trade/trade.client';
|
||||
import { getChatLLMId, useChatLLM } from '~/modules/llms/store-llms';
|
||||
import { imaginePromptFromText } from '~/modules/aifn/imagine/imaginePromptFromText';
|
||||
import { speakText } from '~/modules/elevenlabs/elevenlabs.client';
|
||||
import { useAreBeamsOpen } from '~/modules/beam/store-beam.hooks';
|
||||
import { useCapabilityTextToImage } from '~/modules/t2i/t2i.client';
|
||||
|
||||
import { Brand } from '~/common/app.config';
|
||||
import { ConfirmationModal } from '~/common/components/ConfirmationModal';
|
||||
import { GlobalShortcutItem, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
|
||||
import { GoodPanelResizeHandler } from '~/common/components/panes/GoodPanelResizeHandler';
|
||||
import { ConversationsManager } from '~/common/chats/ConversationsManager';
|
||||
import { DConversation, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { DMessageAttachmentFragment, DMessageContentFragment, duplicateDMessageFragments } from '~/common/stores/chat/chat.fragments';
|
||||
import { GlobalShortcutDefinition, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
|
||||
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
|
||||
import { PreferencesTab, useOptimaLayout, usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { addSnackbar, removeSnackbar } from '~/common/components/useSnackbarsStore';
|
||||
import { createDMessage, DConversationId, DMessage, getConversation, useConversation } from '~/common/state/store-chats';
|
||||
import { createDMessageFromFragments, createDMessageTextContent, DMessageMetadata, duplicateDMessageMetadata } from '~/common/stores/chat/chat.message';
|
||||
import { getConversation, getConversationSystemPurposeId, useConversation } from '~/common/stores/chat/store-chats';
|
||||
import { themeBgAppChatComposer } from '~/common/app.theme';
|
||||
import { useFolderStore } from '~/common/state/store-folders';
|
||||
import { useOptimaLayout, usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { useIsMobile } from '~/common/components/useMatchMedia';
|
||||
import { useRouterQuery } from '~/common/app.routes';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
import type { ComposerOutputMultiPart } from './components/composer/composer.types';
|
||||
import { ChatDrawerMemo } from './components/applayout/ChatDrawer';
|
||||
import { ChatDropdowns } from './components/applayout/ChatDropdowns';
|
||||
import { ChatMenuItems } from './components/applayout/ChatMenuItems';
|
||||
import { ChatBarAltBeam } from './components/layout-bar/ChatBarAltBeam';
|
||||
import { ChatBarAltTitle } from './components/layout-bar/ChatBarAltTitle';
|
||||
import { ChatBarDropdowns } from './components/layout-bar/ChatBarDropdowns';
|
||||
import { ChatBeamWrapper } from './components/ChatBeamWrapper';
|
||||
import { ChatDrawerMemo } from './components/layout-drawer/ChatDrawer';
|
||||
import { ChatMessageList } from './components/ChatMessageList';
|
||||
import { ChatPageMenuItems } from './components/layout-menu/ChatPageMenuItems';
|
||||
import { Composer } from './components/composer/Composer';
|
||||
import { Ephemerals } from './components/Ephemerals';
|
||||
import { ScrollToBottom } from './components/scroll-to-bottom/ScrollToBottom';
|
||||
import { ScrollToBottomButton } from './components/scroll-to-bottom/ScrollToBottomButton';
|
||||
import { usePanesManager } from './components/panes/usePanesManager';
|
||||
|
||||
import { extractChatCommand, findAllChatCommands } from './commands/commands.registry';
|
||||
import { runAssistantUpdatingState } from './editors/chat-stream';
|
||||
import { runBrowseUpdatingState } from './editors/browse-load';
|
||||
import { runImageGenerationUpdatingState } from './editors/image-generate';
|
||||
import { runReActUpdatingState } from './editors/react-tangent';
|
||||
import type { ChatExecuteMode } from './execute-mode/execute-mode.types';
|
||||
|
||||
/**
|
||||
* Mode: how to treat the input from the Composer
|
||||
*/
|
||||
export type ChatModeId =
|
||||
| 'generate-text'
|
||||
| 'append-user'
|
||||
| 'generate-image'
|
||||
| 'generate-react';
|
||||
import { _handleExecute } from './editors/_handleExecute';
|
||||
import { gcChatImageAssets } from './editors/image-generate';
|
||||
|
||||
|
||||
const SPECIAL_ID_WIPE_ALL: DConversationId = 'wipe-chats';
|
||||
// what to say when a chat is new and has no title
|
||||
export const CHAT_NOVEL_TITLE = 'Chat';
|
||||
|
||||
|
||||
export interface AppChatIntent {
|
||||
initialConversationId: string | null;
|
||||
}
|
||||
|
||||
|
||||
const composerOpenSx: SxProps = {
|
||||
zIndex: 21, // just to allocate a surface, and potentially have a shadow
|
||||
backgroundColor: themeBgAppChatComposer,
|
||||
borderTop: `1px solid`,
|
||||
borderTopColor: 'divider',
|
||||
p: { xs: 1, md: 2 },
|
||||
};
|
||||
|
||||
const composerClosedSx: SxProps = {
|
||||
display: 'none',
|
||||
};
|
||||
|
||||
|
||||
export function AppChat() {
|
||||
|
||||
// state
|
||||
const [isComposerMulticast, setIsComposerMulticast] = React.useState(false);
|
||||
const [isMessageSelectionMode, setIsMessageSelectionMode] = React.useState(false);
|
||||
const [diagramConfig, setDiagramConfig] = React.useState<DiagramConfig | null>(null);
|
||||
const [tradeConfig, setTradeConfig] = React.useState<TradeConfig | null>(null);
|
||||
const [clearConversationId, setClearConversationId] = React.useState<DConversationId | null>(null);
|
||||
const [deleteConversationId, setDeleteConversationId] = React.useState<DConversationId | null>(null);
|
||||
const [deleteConversationIds, setDeleteConversationIds] = React.useState<DConversationId[] | null>(null);
|
||||
const [flattenConversationId, setFlattenConversationId] = React.useState<DConversationId | null>(null);
|
||||
const showNextTitle = React.useRef(false);
|
||||
const showNextTitleChange = React.useRef(false);
|
||||
const composerTextAreaRef = React.useRef<HTMLTextAreaElement>(null);
|
||||
const [_activeFolderId, setActiveFolderId] = React.useState<string | null>(null);
|
||||
|
||||
// external state
|
||||
const theme = useTheme();
|
||||
|
||||
const { openLlmOptions } = useOptimaLayout();
|
||||
const isMobile = useIsMobile();
|
||||
|
||||
const intent = useRouterQuery<Partial<AppChatIntent>>();
|
||||
|
||||
const showAltTitleBar = useUXLabsStore(state => DEV_MODE_SETTINGS && state.labsChatBarAlt === 'title');
|
||||
|
||||
const { openLlmOptions, openModelsSetup, openPreferencesTab } = useOptimaLayout();
|
||||
|
||||
const { chatLLM } = useChatLLM();
|
||||
|
||||
const {
|
||||
// state
|
||||
chatPanes,
|
||||
focusedConversationId,
|
||||
focusedPaneIndex,
|
||||
focusedPaneConversationId,
|
||||
// actions
|
||||
navigateHistoryInFocusedPane,
|
||||
openConversationInFocusedPane,
|
||||
openConversationInSplitPane,
|
||||
paneIndex,
|
||||
duplicatePane,
|
||||
removePane,
|
||||
setFocusedPane,
|
||||
setFocusedPaneIndex,
|
||||
} = usePanesManager();
|
||||
|
||||
const { paneUniqueConversationIds, paneHandlers, paneBeamStores } = React.useMemo(() => {
|
||||
const paneConversationIds: (DConversationId | null)[] = chatPanes.map(pane => pane.conversationId || null);
|
||||
const paneHandlers = paneConversationIds.map(cId => cId ? ConversationsManager.getHandler(cId) : null);
|
||||
const paneBeamStores = paneHandlers.map(handler => handler?.getBeamStore() ?? null);
|
||||
const paneUniqueConversationIds = Array.from(new Set(paneConversationIds.filter(Boolean))) as DConversationId[];
|
||||
return {
|
||||
paneHandlers: paneHandlers,
|
||||
paneBeamStores: paneBeamStores,
|
||||
paneUniqueConversationIds: paneUniqueConversationIds,
|
||||
};
|
||||
}, [chatPanes]);
|
||||
|
||||
const beamsOpens = useAreBeamsOpen(paneBeamStores);
|
||||
const beamOpenStoreInFocusedPane = React.useMemo(() => {
|
||||
const open = focusedPaneIndex !== null ? (beamsOpens?.[focusedPaneIndex] ?? false) : false;
|
||||
return open ? paneBeamStores?.[focusedPaneIndex!] ?? null : null;
|
||||
}, [beamsOpens, focusedPaneIndex, paneBeamStores]);
|
||||
|
||||
const {
|
||||
// focused
|
||||
title: focusedChatTitle,
|
||||
chatIdx: focusedChatNumber,
|
||||
isChatEmpty: isFocusedChatEmpty,
|
||||
areChatsEmpty,
|
||||
newConversationId,
|
||||
conversationsLength,
|
||||
_remove_systemPurposeId: focusedSystemPurposeId,
|
||||
isEmpty: isFocusedChatEmpty,
|
||||
isDeveloper: isFocusedChatDeveloper,
|
||||
conversationIdx: focusedChatNumber,
|
||||
// all
|
||||
hasConversations,
|
||||
recycleNewConversationId,
|
||||
// actions
|
||||
prependNewConversation,
|
||||
branchConversation,
|
||||
deleteConversation,
|
||||
wipeAllConversations,
|
||||
setMessages,
|
||||
} = useConversation(focusedConversationId);
|
||||
deleteConversations,
|
||||
} = useConversation(focusedPaneConversationId);
|
||||
|
||||
const { mayWork: capabilityHasT2I } = useCapabilityTextToImage();
|
||||
|
||||
const { activeFolderId, activeFolderConversationsCount } = useFolderStore(({ enableFolders, folders }) => {
|
||||
const { activeFolderId } = useFolderStore(({ enableFolders, folders }) => {
|
||||
const activeFolderId = enableFolders ? _activeFolderId : null;
|
||||
const activeFolder = activeFolderId ? folders.find(folder => folder.id === activeFolderId) : null;
|
||||
return {
|
||||
activeFolderId: activeFolder?.id ?? null,
|
||||
activeFolderConversationsCount: activeFolder ? activeFolder.conversationIds.length : conversationsLength,
|
||||
};
|
||||
});
|
||||
|
||||
|
||||
// Window actions
|
||||
|
||||
const panesConversationIDs = chatPanes.length > 0 ? chatPanes.map((pane) => pane.conversationId) : [null];
|
||||
const isSplitPane = chatPanes.length > 1;
|
||||
const isMultiPane = chatPanes.length >= 2;
|
||||
const isMultiAddable = chatPanes.length < 4;
|
||||
const isMultiConversationId = paneUniqueConversationIds.length >= 2;
|
||||
const willMulticast = isComposerMulticast && isMultiConversationId;
|
||||
const disableNewButton = isFocusedChatEmpty && !isMultiPane;
|
||||
|
||||
const setFocusedConversationId = React.useCallback((conversationId: DConversationId | null) => {
|
||||
const handleOpenConversationInFocusedPane = React.useCallback((conversationId: DConversationId | null) => {
|
||||
conversationId && openConversationInFocusedPane(conversationId);
|
||||
}, [openConversationInFocusedPane]);
|
||||
|
||||
const openSplitConversationId = React.useCallback((conversationId: DConversationId | null) => {
|
||||
const handleOpenConversationInSplitPane = React.useCallback((conversationId: DConversationId | null) => {
|
||||
conversationId && openConversationInSplitPane(conversationId);
|
||||
}, [openConversationInSplitPane]);
|
||||
|
||||
const toggleSplitPane = React.useCallback(() => {
|
||||
if (isSplitPane)
|
||||
removePane(paneIndex ?? chatPanes.length - 1);
|
||||
else
|
||||
duplicatePane(paneIndex ?? chatPanes.length - 1);
|
||||
}, [chatPanes.length, duplicatePane, isSplitPane, paneIndex, removePane]);
|
||||
|
||||
const handleNavigateHistory = React.useCallback((direction: 'back' | 'forward') => {
|
||||
const handleNavigateHistoryInFocusedPane = React.useCallback((direction: 'back' | 'forward') => {
|
||||
if (navigateHistoryInFocusedPane(direction))
|
||||
showNextTitle.current = true;
|
||||
showNextTitleChange.current = true;
|
||||
}, [navigateHistoryInFocusedPane]);
|
||||
|
||||
// [effect] Handle the initial conversation intent
|
||||
React.useEffect(() => {
|
||||
if (showNextTitle.current) {
|
||||
showNextTitle.current = false;
|
||||
intent.initialConversationId && handleOpenConversationInFocusedPane(intent.initialConversationId);
|
||||
}, [handleOpenConversationInFocusedPane, intent.initialConversationId]);
|
||||
|
||||
// [effect] Show snackbar with the focused chat title after a history navigation in focused pane
|
||||
React.useEffect(() => {
|
||||
if (showNextTitleChange.current) {
|
||||
showNextTitleChange.current = false;
|
||||
const title = (focusedChatNumber >= 0 ? `#${focusedChatNumber + 1} · ` : '') + (focusedChatTitle || 'New Chat');
|
||||
const id = addSnackbar({ key: 'focused-title', message: title, type: 'title' });
|
||||
return () => removeSnackbar(id);
|
||||
}
|
||||
}, [focusedChatNumber, focusedChatTitle]);
|
||||
|
||||
|
||||
// Execution
|
||||
|
||||
const _handleExecute = React.useCallback(async (chatModeId: ChatModeId, conversationId: DConversationId, history: DMessage[]): Promise<void> => {
|
||||
const chatLLMId = getChatLLMId();
|
||||
if (!chatModeId || !conversationId || !chatLLMId) return;
|
||||
const handleExecuteAndOutcome = React.useCallback(async (chatExecuteMode: ChatExecuteMode, conversationId: DConversationId, callerNameDebug: string) => {
|
||||
const outcome = await _handleExecute(chatExecuteMode, conversationId, callerNameDebug);
|
||||
if (outcome === 'err-no-chatllm')
|
||||
openModelsSetup();
|
||||
else if (outcome === 'err-t2i-unconfigured')
|
||||
openPreferencesTab(PreferencesTab.Draw);
|
||||
else if (outcome === 'err-no-persona')
|
||||
addSnackbar({ key: 'chat-no-persona', message: 'No persona selected.', type: 'issue' });
|
||||
else if (outcome === 'err-no-conversation')
|
||||
addSnackbar({ key: 'chat-no-conversation', message: 'No active conversation.', type: 'issue' });
|
||||
else if (outcome === 'err-no-last-message')
|
||||
addSnackbar({ key: 'chat-no-conversation', message: 'No conversation history.', type: 'issue' });
|
||||
return outcome === true;
|
||||
}, [openModelsSetup, openPreferencesTab]);
|
||||
|
||||
// "/command ...": overrides the chat mode
|
||||
const lastMessage = history.length > 0 ? history[history.length - 1] : null;
|
||||
if (lastMessage?.role === 'user') {
|
||||
const chatCommand = extractChatCommand(lastMessage.text)[0];
|
||||
if (chatCommand && chatCommand.type === 'cmd') {
|
||||
switch (chatCommand.providerId) {
|
||||
case 'ass-browse':
|
||||
setMessages(conversationId, history);
|
||||
return await runBrowseUpdatingState(conversationId, chatCommand.params!);
|
||||
const handleComposerAction = React.useCallback((conversationId: DConversationId, chatExecuteMode: ChatExecuteMode, fragments: (DMessageContentFragment | DMessageAttachmentFragment)[], metadata?: DMessageMetadata): boolean => {
|
||||
|
||||
case 'ass-t2i':
|
||||
setMessages(conversationId, history);
|
||||
return await runImageGenerationUpdatingState(conversationId, chatCommand.params!);
|
||||
// [multicast] send the message to all the panes
|
||||
const uniqueConversationIds = willMulticast
|
||||
? Array.from(new Set([conversationId, ...paneUniqueConversationIds]))
|
||||
: [conversationId];
|
||||
|
||||
case 'ass-react':
|
||||
setMessages(conversationId, history);
|
||||
return await runReActUpdatingState(conversationId, chatCommand.params!, chatLLMId);
|
||||
|
||||
case 'chat-alter':
|
||||
Object.assign(lastMessage, {
|
||||
role: chatCommand.command.startsWith('/s') ? 'system' : chatCommand.command.startsWith('/a') ? 'assistant' : 'user',
|
||||
sender: 'Bot',
|
||||
text: chatCommand.params || '',
|
||||
} satisfies Partial<DMessage>);
|
||||
return setMessages(conversationId, history);
|
||||
|
||||
case 'cmd-help':
|
||||
const chatCommandsText = findAllChatCommands()
|
||||
.map(cmd => ` - ${cmd.primary}` + (cmd.alternatives?.length ? ` (${cmd.alternatives.join(', ')})` : '') + `: ${cmd.description}`)
|
||||
.join('\n');
|
||||
const helpMessage = createDMessage('assistant', 'Available Chat Commands:\n' + chatCommandsText);
|
||||
helpMessage.originLLM = Brand.Title.Base;
|
||||
return setMessages(conversationId, [...history, helpMessage]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// synchronous long-duration tasks, which update the state as they go
|
||||
if (chatLLMId && focusedSystemPurposeId) {
|
||||
switch (chatModeId) {
|
||||
case 'generate-text':
|
||||
return await runAssistantUpdatingState(conversationId, history, chatLLMId, focusedSystemPurposeId);
|
||||
|
||||
case 'append-user':
|
||||
return setMessages(conversationId, history);
|
||||
|
||||
case 'generate-image':
|
||||
if (!lastMessage?.text)
|
||||
break;
|
||||
// also add a 'fake' user message with the '/draw' command
|
||||
setMessages(conversationId, history.map(message => message.id !== lastMessage.id ? message : {
|
||||
...message,
|
||||
text: `/draw ${lastMessage.text}`,
|
||||
}));
|
||||
return await runImageGenerationUpdatingState(conversationId, lastMessage.text);
|
||||
|
||||
case 'generate-react':
|
||||
if (!lastMessage?.text)
|
||||
break;
|
||||
setMessages(conversationId, history);
|
||||
return await runReActUpdatingState(conversationId, lastMessage.text, chatLLMId);
|
||||
}
|
||||
}
|
||||
|
||||
// ISSUE: if we're here, it means we couldn't do the job, at least sync the history
|
||||
console.log('handleExecuteConversation: issue running', chatModeId, conversationId, lastMessage);
|
||||
setMessages(conversationId, history);
|
||||
}, [focusedSystemPurposeId, setMessages]);
|
||||
|
||||
const handleComposerAction = (chatModeId: ChatModeId, conversationId: DConversationId, multiPartMessage: ComposerOutputMultiPart): boolean => {
|
||||
// validate inputs
|
||||
if (multiPartMessage.length !== 1 || multiPartMessage[0].type !== 'text-block') {
|
||||
addSnackbar({
|
||||
key: 'chat-composer-action-invalid',
|
||||
message: 'Only a single text part is supported for now.',
|
||||
type: 'issue',
|
||||
overrides: {
|
||||
autoHideDuration: 2000,
|
||||
},
|
||||
});
|
||||
return false;
|
||||
}
|
||||
const userText = multiPartMessage[0].text;
|
||||
|
||||
// find conversation
|
||||
const conversation = getConversation(conversationId);
|
||||
if (!conversation)
|
||||
// validate conversation existence
|
||||
const uniqueConverations = uniqueConversationIds.map(cId => getConversation(cId)).filter(Boolean) as DConversation[];
|
||||
if (!uniqueConverations.length)
|
||||
return false;
|
||||
|
||||
// start execution (async)
|
||||
void _handleExecute(chatModeId, conversationId, [
|
||||
...conversation.messages,
|
||||
createDMessage('user', userText),
|
||||
]);
|
||||
// we loop to handle both the normal and multicast modes
|
||||
for (const conversation of uniqueConverations) {
|
||||
|
||||
// create the user:message
|
||||
// NOTE: this can lead to multiple chat messages with data refs that are referring to the same dblobs,
|
||||
// however, we already got transferred ownership of the dblobs at this point.
|
||||
const userMessage = createDMessageFromFragments('user', duplicateDMessageFragments(fragments)); // [chat] create user:message
|
||||
if (metadata) userMessage.metadata = duplicateDMessageMetadata(metadata);
|
||||
|
||||
ConversationsManager.getHandler(conversation.id).messageAppend(userMessage); // [chat] append user message in each conversation
|
||||
|
||||
// fire/forget
|
||||
void handleExecuteAndOutcome(chatExecuteMode /* various */, conversation.id, 'chat-composer-action'); // append user message, then '*-*'
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
}, [paneUniqueConversationIds, handleExecuteAndOutcome, willMulticast]);
|
||||
|
||||
const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId, history: DMessage[]): Promise<void> => {
|
||||
await _handleExecute('generate-text', conversationId, history);
|
||||
}, [_handleExecute]);
|
||||
const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId) => {
|
||||
await handleExecuteAndOutcome('generate-content', conversationId, 'chat-execute-history'); // replace with 'history', then 'generate-text'
|
||||
}, [handleExecuteAndOutcome]);
|
||||
|
||||
const handleMessageRegenerateLast = React.useCallback(async () => {
|
||||
const focusedConversation = getConversation(focusedConversationId);
|
||||
const handleMessageRegenerateLastInFocusedPane = React.useCallback(async () => {
|
||||
const focusedConversation = getConversation(focusedPaneConversationId);
|
||||
if (focusedPaneConversationId && focusedConversation?.messages?.length) {
|
||||
const lastMessage = focusedConversation.messages[focusedConversation.messages.length - 1];
|
||||
if (lastMessage.role === 'assistant')
|
||||
ConversationsManager.getHandler(focusedPaneConversationId).historyTruncateTo(lastMessage.id, -1);
|
||||
await handleExecuteAndOutcome('generate-content', focusedConversation.id, 'chat-regenerate-last'); // truncate if assistant, then gen-text
|
||||
}
|
||||
}, [focusedPaneConversationId, handleExecuteAndOutcome]);
|
||||
|
||||
const handleMessageBeamLastInFocusedPane = React.useCallback(async () => {
|
||||
// Ctrl + Shift + B
|
||||
const focusedConversation = getConversation(focusedPaneConversationId);
|
||||
if (focusedConversation?.messages?.length) {
|
||||
const lastMessage = focusedConversation.messages[focusedConversation.messages.length - 1];
|
||||
return await _handleExecute('generate-text', focusedConversation.id, lastMessage.role === 'assistant'
|
||||
? focusedConversation.messages.slice(0, -1)
|
||||
: [...focusedConversation.messages],
|
||||
);
|
||||
if (lastMessage.role === 'assistant')
|
||||
ConversationsManager.getHandler(focusedConversation.id).beamInvoke(focusedConversation.messages.slice(0, -1), [lastMessage], lastMessage.id);
|
||||
else if (lastMessage.role === 'user')
|
||||
ConversationsManager.getHandler(focusedConversation.id).beamInvoke(focusedConversation.messages, [], null);
|
||||
}
|
||||
}, [focusedConversationId, _handleExecute]);
|
||||
}, [focusedPaneConversationId]);
|
||||
|
||||
const handleTextDiagram = React.useCallback((diagramConfig: DiagramConfig | null) => setDiagramConfig(diagramConfig), []);
|
||||
|
||||
const handleTextImagine = React.useCallback(async (conversationId: DConversationId, messageText: string): Promise<void> => {
|
||||
const handleImagineFromText = React.useCallback(async (conversationId: DConversationId, messageText: string) => {
|
||||
const conversation = getConversation(conversationId);
|
||||
if (!conversation)
|
||||
return;
|
||||
const imaginedPrompt = await imaginePromptFromText(messageText) || 'An error sign.';
|
||||
return await _handleExecute('generate-image', conversationId, [
|
||||
...conversation.messages,
|
||||
createDMessage('user', imaginedPrompt),
|
||||
]);
|
||||
}, [_handleExecute]);
|
||||
const imaginedPrompt = await imaginePromptFromText(messageText, conversationId) || 'An error sign.';
|
||||
const imaginePrompMessage = createDMessageTextContent('user', imaginedPrompt);
|
||||
ConversationsManager.getHandler(conversationId).messageAppend(imaginePrompMessage); // [chat] append user:imagine prompt
|
||||
await handleExecuteAndOutcome('generate-image', conversationId, 'chat-imagine-from-text'); // append message for 'imagine', then generate-image
|
||||
}, [handleExecuteAndOutcome]);
|
||||
|
||||
const handleTextSpeak = React.useCallback(async (text: string): Promise<void> => {
|
||||
await speakText(text);
|
||||
}, []);
|
||||
|
||||
|
||||
// Chat actions
|
||||
|
||||
const handleConversationNew = React.useCallback(() => {
|
||||
const handleConversationNewInFocusedPane = React.useCallback((forceNoRecycle?: boolean) => {
|
||||
|
||||
// activate an existing new conversation if present, or create another
|
||||
const conversationId = newConversationId
|
||||
? newConversationId
|
||||
: prependNewConversation(focusedSystemPurposeId ?? undefined);
|
||||
setFocusedConversationId(conversationId);
|
||||
// create conversation (or recycle the existing top-of-stack empty conversation)
|
||||
const conversationId = (recycleNewConversationId && !forceNoRecycle)
|
||||
? recycleNewConversationId
|
||||
: prependNewConversation(getConversationSystemPurposeId(focusedPaneConversationId) ?? undefined);
|
||||
|
||||
// switch the focused pane to the new conversation
|
||||
handleOpenConversationInFocusedPane(conversationId);
|
||||
|
||||
// if a folder is active, add the new conversation to the folder
|
||||
if (activeFolderId && conversationId)
|
||||
@@ -296,64 +302,89 @@ export function AppChat() {
|
||||
// focus the composer
|
||||
composerTextAreaRef.current?.focus();
|
||||
|
||||
}, [activeFolderId, focusedSystemPurposeId, newConversationId, prependNewConversation, setFocusedConversationId]);
|
||||
}, [activeFolderId, focusedPaneConversationId, handleOpenConversationInFocusedPane, prependNewConversation, recycleNewConversationId]);
|
||||
|
||||
const handleConversationImportDialog = React.useCallback(() => setTradeConfig({ dir: 'import' }), []);
|
||||
|
||||
const handleConversationExport = React.useCallback((conversationId: DConversationId | null) => setTradeConfig({ dir: 'export', conversationId }), []);
|
||||
const handleConversationExport = React.useCallback((conversationId: DConversationId | null, exportAll: boolean) => {
|
||||
setTradeConfig({ dir: 'export', conversationId, exportAll });
|
||||
}, []);
|
||||
|
||||
const handleConversationBranch = React.useCallback((conversationId: DConversationId, messageId: string | null): DConversationId | null => {
|
||||
showNextTitle.current = true;
|
||||
const branchedConversationId = branchConversation(conversationId, messageId);
|
||||
addSnackbar({
|
||||
key: 'branch-conversation',
|
||||
message: 'Branch started.',
|
||||
type: 'success',
|
||||
overrides: {
|
||||
autoHideDuration: 3000,
|
||||
startDecorator: <ForkRightIcon />,
|
||||
},
|
||||
});
|
||||
const branchInAltPanel = useUXLabsStore.getState().labsSplitBranching;
|
||||
if (branchInAltPanel)
|
||||
openSplitConversationId(branchedConversationId);
|
||||
const handleFileOpenConversation = React.useCallback(() => {
|
||||
openAndLoadConversations(true)
|
||||
.then((outcome) => {
|
||||
// activate the last (most recent) imported conversation
|
||||
if (outcome?.activateConversationId) {
|
||||
showNextTitleChange.current = true;
|
||||
handleOpenConversationInFocusedPane(outcome.activateConversationId);
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
addSnackbar({ key: 'chat-import-fail', message: 'Could not open the file.', type: 'issue' });
|
||||
});
|
||||
}, [handleOpenConversationInFocusedPane]);
|
||||
|
||||
const handleFileSaveConversation = React.useCallback((conversationId: DConversationId | null) => {
|
||||
const conversation = getConversation(conversationId);
|
||||
conversation && downloadConversation(conversation, 'json')
|
||||
.then(() => {
|
||||
addSnackbar({ key: 'chat-save-as-ok', message: 'File saved.', type: 'success' });
|
||||
})
|
||||
.catch((err: any) => {
|
||||
if (err?.name !== 'AbortError')
|
||||
addSnackbar({ key: 'chat-save-as-fail', message: `Could not save the file. ${err?.message || ''}`, type: 'issue' });
|
||||
});
|
||||
}, []);
|
||||
|
||||
const handleConversationBranch = React.useCallback((srcConversationId: DConversationId, messageId: string | null): DConversationId | null => {
|
||||
// clone data
|
||||
const branchedConversationId = branchConversation(srcConversationId, messageId);
|
||||
|
||||
// if a folder is active, add the new conversation to the folder
|
||||
if (activeFolderId && branchedConversationId)
|
||||
useFolderStore.getState().addConversationToFolder(activeFolderId, branchedConversationId);
|
||||
|
||||
// replace/open a new pane with this
|
||||
showNextTitleChange.current = true;
|
||||
if (!isMultiAddable)
|
||||
handleOpenConversationInFocusedPane(branchedConversationId);
|
||||
else
|
||||
setFocusedConversationId(branchedConversationId);
|
||||
return branchedConversationId;
|
||||
}, [branchConversation, openSplitConversationId, setFocusedConversationId]);
|
||||
handleOpenConversationInSplitPane(branchedConversationId);
|
||||
|
||||
const handleConversationFlatten = (conversationId: DConversationId) => setFlattenConversationId(conversationId);
|
||||
return branchedConversationId;
|
||||
}, [activeFolderId, branchConversation, handleOpenConversationInFocusedPane, handleOpenConversationInSplitPane, isMultiAddable]);
|
||||
|
||||
const handleConversationFlatten = React.useCallback((conversationId: DConversationId) => setFlattenConversationId(conversationId), []);
|
||||
|
||||
const handleConfirmedClearConversation = React.useCallback(() => {
|
||||
if (clearConversationId) {
|
||||
setMessages(clearConversationId, []);
|
||||
ConversationsManager.getHandler(clearConversationId).historyClear();
|
||||
setClearConversationId(null);
|
||||
}
|
||||
}, [clearConversationId, setMessages]);
|
||||
}, [clearConversationId]);
|
||||
|
||||
const handleConversationClear = React.useCallback((conversationId: DConversationId) => setClearConversationId(conversationId), []);
|
||||
|
||||
const handleConfirmedDeleteConversation = () => {
|
||||
if (deleteConversationId) {
|
||||
let nextConversationId: DConversationId | null;
|
||||
if (deleteConversationId === SPECIAL_ID_WIPE_ALL)
|
||||
nextConversationId = wipeAllConversations(focusedSystemPurposeId ?? undefined, activeFolderId);
|
||||
else
|
||||
nextConversationId = deleteConversation(deleteConversationId);
|
||||
setFocusedConversationId(nextConversationId);
|
||||
setDeleteConversationId(null);
|
||||
}
|
||||
};
|
||||
const handleDeleteConversations = React.useCallback((conversationIds: DConversationId[], bypassConfirmation: boolean) => {
|
||||
if (!bypassConfirmation)
|
||||
return setDeleteConversationIds(conversationIds);
|
||||
|
||||
const handleConversationsDeleteAll = React.useCallback(() => setDeleteConversationId(SPECIAL_ID_WIPE_ALL), []);
|
||||
// perform deletion, and return the next (or a new) conversation
|
||||
const nextConversationId = deleteConversations(conversationIds, /*focusedSystemPurposeId ??*/ undefined);
|
||||
|
||||
// switch the focused pane to the new conversation - NOTE: this makes the assumption that deletion had impact on the focused pane
|
||||
handleOpenConversationInFocusedPane(nextConversationId);
|
||||
|
||||
setDeleteConversationIds(null);
|
||||
|
||||
// run GC for dblobs in this conversation
|
||||
void gcChatImageAssets(); // fire/forget
|
||||
}, [deleteConversations, handleOpenConversationInFocusedPane]);
|
||||
|
||||
const handleConfirmedDeleteConversations = React.useCallback(() => {
|
||||
!!deleteConversationIds?.length && handleDeleteConversations(deleteConversationIds, true);
|
||||
}, [deleteConversationIds, handleDeleteConversations]);
|
||||
|
||||
const handleConversationDelete = React.useCallback(
|
||||
(conversationId: DConversationId, bypassConfirmation: boolean) => {
|
||||
if (bypassConfirmation) setFocusedConversationId(deleteConversation(conversationId));
|
||||
else setDeleteConversationId(conversationId);
|
||||
},
|
||||
[deleteConversation, setFocusedConversationId],
|
||||
);
|
||||
|
||||
// Shortcuts
|
||||
|
||||
@@ -363,158 +394,201 @@ export function AppChat() {
|
||||
openLlmOptions(chatLLMId);
|
||||
}, [openLlmOptions]);
|
||||
|
||||
const shortcuts = React.useMemo((): GlobalShortcutItem[] => [
|
||||
const shortcuts = React.useMemo((): GlobalShortcutDefinition[] => [
|
||||
// focused conversation
|
||||
['b', true, true, false, handleMessageBeamLastInFocusedPane],
|
||||
['r', true, true, false, handleMessageRegenerateLastInFocusedPane],
|
||||
['n', true, false, true, handleConversationNewInFocusedPane],
|
||||
['o', true, false, false, handleFileOpenConversation],
|
||||
['s', true, false, false, () => handleFileSaveConversation(focusedPaneConversationId)],
|
||||
['b', true, false, true, () => isFocusedChatEmpty || (focusedPaneConversationId && handleConversationBranch(focusedPaneConversationId, null))],
|
||||
['x', true, false, true, () => isFocusedChatEmpty || (focusedPaneConversationId && handleConversationClear(focusedPaneConversationId))],
|
||||
['d', true, false, true, () => focusedPaneConversationId && handleDeleteConversations([focusedPaneConversationId], false)],
|
||||
[ShortcutKeyName.Left, true, false, true, () => handleNavigateHistoryInFocusedPane('back')],
|
||||
[ShortcutKeyName.Right, true, false, true, () => handleNavigateHistoryInFocusedPane('forward')],
|
||||
// global
|
||||
['o', true, true, false, handleOpenChatLlmOptions],
|
||||
['r', true, true, false, handleMessageRegenerateLast],
|
||||
['n', true, false, true, handleConversationNew],
|
||||
['b', true, false, true, () => isFocusedChatEmpty || (focusedConversationId && handleConversationBranch(focusedConversationId, null))],
|
||||
['x', true, false, true, () => isFocusedChatEmpty || (focusedConversationId && handleConversationClear(focusedConversationId))],
|
||||
['d', true, false, true, () => focusedConversationId && handleConversationDelete(focusedConversationId, false)],
|
||||
[ShortcutKeyName.Left, true, false, true, () => handleNavigateHistory('back')],
|
||||
[ShortcutKeyName.Right, true, false, true, () => handleNavigateHistory('forward')],
|
||||
], [focusedConversationId, handleConversationBranch, handleConversationClear, handleConversationDelete, handleConversationNew, handleMessageRegenerateLast, handleNavigateHistory, handleOpenChatLlmOptions, isFocusedChatEmpty]);
|
||||
['+', true, true, false, useUIPreferencesStore.getState().increaseContentScaling],
|
||||
['-', true, true, false, useUIPreferencesStore.getState().decreaseContentScaling],
|
||||
], [focusedPaneConversationId, handleConversationBranch, handleConversationClear, handleConversationNewInFocusedPane, handleFileOpenConversation, handleFileSaveConversation, handleDeleteConversations, handleMessageBeamLastInFocusedPane, handleMessageRegenerateLastInFocusedPane, handleNavigateHistoryInFocusedPane, handleOpenChatLlmOptions, isFocusedChatEmpty]);
|
||||
useGlobalShortcuts(shortcuts);
|
||||
|
||||
// Pluggable ApplicationBar components
|
||||
|
||||
const centerItems = React.useMemo(() =>
|
||||
<ChatDropdowns
|
||||
conversationId={focusedConversationId}
|
||||
isSplitPanes={isSplitPane}
|
||||
onToggleSplitPanes={toggleSplitPane}
|
||||
/>,
|
||||
[focusedConversationId, isSplitPane, toggleSplitPane],
|
||||
// Pluggable Optima components
|
||||
|
||||
const barAltTitle = showAltTitleBar ? focusedChatTitle ?? 'No Chat' : null;
|
||||
|
||||
const focusedBarContent = React.useMemo(() => beamOpenStoreInFocusedPane
|
||||
? <ChatBarAltBeam beamStore={beamOpenStoreInFocusedPane} isMobile={isMobile} />
|
||||
: (barAltTitle === null)
|
||||
? <ChatBarDropdowns conversationId={focusedPaneConversationId} />
|
||||
: <ChatBarAltTitle conversationId={focusedPaneConversationId} conversationTitle={barAltTitle} />
|
||||
, [barAltTitle, beamOpenStoreInFocusedPane, focusedPaneConversationId, isMobile],
|
||||
);
|
||||
|
||||
const drawerContent = React.useMemo(() =>
|
||||
<ChatDrawerMemo
|
||||
activeConversationId={focusedConversationId}
|
||||
isMobile={isMobile}
|
||||
activeConversationId={focusedPaneConversationId}
|
||||
activeFolderId={activeFolderId}
|
||||
disableNewButton={isFocusedChatEmpty}
|
||||
onConversationActivate={setFocusedConversationId}
|
||||
onConversationDelete={handleConversationDelete}
|
||||
onConversationExportDialog={handleConversationExport}
|
||||
onConversationImportDialog={handleConversationImportDialog}
|
||||
onConversationNew={handleConversationNew}
|
||||
onConversationsDeleteAll={handleConversationsDeleteAll}
|
||||
chatPanesConversationIds={paneUniqueConversationIds}
|
||||
disableNewButton={disableNewButton}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationNew={handleConversationNewInFocusedPane}
|
||||
onConversationsDelete={handleDeleteConversations}
|
||||
onConversationsExportDialog={handleConversationExport}
|
||||
onConversationsImportDialog={handleConversationImportDialog}
|
||||
setActiveFolderId={setActiveFolderId}
|
||||
/>,
|
||||
[activeFolderId, focusedConversationId, handleConversationDelete, handleConversationExport, handleConversationImportDialog, handleConversationNew, handleConversationsDeleteAll, isFocusedChatEmpty, setFocusedConversationId],
|
||||
[activeFolderId, disableNewButton, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isMobile, paneUniqueConversationIds],
|
||||
);
|
||||
|
||||
const menuItems = React.useMemo(() =>
|
||||
<ChatMenuItems
|
||||
conversationId={focusedConversationId}
|
||||
hasConversations={!areChatsEmpty}
|
||||
isConversationEmpty={isFocusedChatEmpty}
|
||||
const focusedMenuItems = React.useMemo(() =>
|
||||
<ChatPageMenuItems
|
||||
isMobile={isMobile}
|
||||
conversationId={focusedPaneConversationId}
|
||||
disableItems={!focusedPaneConversationId || isFocusedChatEmpty}
|
||||
hasConversations={hasConversations}
|
||||
isMessageSelectionMode={isMessageSelectionMode}
|
||||
setIsMessageSelectionMode={setIsMessageSelectionMode}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationClear={handleConversationClear}
|
||||
onConversationFlatten={handleConversationFlatten}
|
||||
// onConversationNew={handleConversationNewInFocusedPane}
|
||||
setIsMessageSelectionMode={setIsMessageSelectionMode}
|
||||
/>,
|
||||
[areChatsEmpty, focusedConversationId, handleConversationBranch, handleConversationClear, isFocusedChatEmpty, isMessageSelectionMode],
|
||||
[focusedPaneConversationId, handleConversationBranch, handleConversationClear, handleConversationFlatten, hasConversations, isFocusedChatEmpty, isMessageSelectionMode, isMobile],
|
||||
);
|
||||
|
||||
usePluggableOptimaLayout(drawerContent, centerItems, menuItems, 'AppChat');
|
||||
usePluggableOptimaLayout(drawerContent, focusedBarContent, focusedMenuItems, 'AppChat');
|
||||
|
||||
|
||||
return <>
|
||||
|
||||
<PanelGroup
|
||||
direction='horizontal'
|
||||
direction={isMobile ? 'vertical' : 'horizontal'}
|
||||
id='app-chat-panels'
|
||||
>
|
||||
|
||||
{panesConversationIDs.map((_conversationId, idx, panels) =>
|
||||
<React.Fragment key={`chat-pane-${idx}-${panels.length}-${_conversationId}`}>
|
||||
{chatPanes.map((pane, idx) => {
|
||||
const _paneIsFocused = idx === focusedPaneIndex;
|
||||
const _paneConversationId = pane.conversationId;
|
||||
const _paneChatHandler = paneHandlers[idx] ?? null;
|
||||
const _paneBeamStore = paneBeamStores[idx] ?? null;
|
||||
const _paneBeamIsOpen = !!beamsOpens?.[idx] && !!_paneBeamStore;
|
||||
const _panesCount = chatPanes.length;
|
||||
const _keyAndId = `chat-pane-${pane.paneId}`;
|
||||
const _sepId = `sep-pane-${idx}`;
|
||||
return <React.Fragment key={_keyAndId}>
|
||||
|
||||
<Panel
|
||||
id={'chat-pane-' + _conversationId}
|
||||
id={_keyAndId}
|
||||
order={idx}
|
||||
collapsible
|
||||
defaultSize={panels.length > 0 ? Math.round(100 / panels.length) : undefined}
|
||||
collapsible={chatPanes.length === 2}
|
||||
defaultSize={(_panesCount === 3 && idx === 1) ? 34 : Math.round(100 / _panesCount)}
|
||||
minSize={20}
|
||||
onClick={(event) => {
|
||||
const setFocus = chatPanes.length < 2 || !event.altKey;
|
||||
setFocusedPane(setFocus ? idx : -1);
|
||||
setFocusedPaneIndex(setFocus ? idx : -1);
|
||||
}}
|
||||
onCollapse={() => {
|
||||
// NOTE: despite the delay to try to let the draggin settle, there seems to be an issue with the Pane locking the screen
|
||||
// setTimeout(() => removePane(idx), 50);
|
||||
// more than 2 will result in an assertion from the framework
|
||||
if (chatPanes.length === 2) removePane(idx);
|
||||
}}
|
||||
onCollapse={() => setTimeout(() => removePane(idx), 50)}
|
||||
style={{
|
||||
// for anchoring the scroll button in place
|
||||
position: 'relative',
|
||||
// border only for active pane (if two or more panes)
|
||||
...(panesConversationIDs.length < 2
|
||||
? {}
|
||||
: (_conversationId === focusedConversationId)
|
||||
? { border: `2px solid ${theme.palette.primary.solidBg}` }
|
||||
: { border: `2px solid ${theme.palette.background.level1}` }),
|
||||
...(isMultiPane ? {
|
||||
borderRadius: '0.375rem',
|
||||
border: `2px solid ${_paneIsFocused
|
||||
? ((willMulticast || !isMultiConversationId) ? theme.palette.primary.solidBg : theme.palette.primary.solidBg)
|
||||
: ((willMulticast || !isMultiConversationId) ? theme.palette.primary.softActiveBg : theme.palette.background.level1)}`,
|
||||
// DISABLED on 2024-03-13, it gets in the way quite a lot
|
||||
// filter: (!willMulticast && !_paneIsFocused)
|
||||
// ? (!isMultiConversationId ? 'grayscale(66.67%)' /* clone of the same */ : 'grayscale(66.67%)')
|
||||
// : undefined,
|
||||
} : {
|
||||
// NOTE: this is a workaround for the 'stuck-after-collapse-close' issue. We will collapse the 'other' pane, which
|
||||
// will get it removed (onCollapse), and somehow this pane will be stuck with a pointerEvents: 'none' style, which de-facto
|
||||
// disables further interaction with the chat. This is a workaround to re-enable the pointer events.
|
||||
// The root cause seems to be a Dragstate not being reset properly, however the pointerEvents has been set since 0.0.56 while
|
||||
// it was optional before: https://github.com/bvaughn/react-resizable-panels/issues/241
|
||||
pointerEvents: 'auto',
|
||||
}),
|
||||
}}
|
||||
>
|
||||
|
||||
<ScrollToBottom
|
||||
bootToBottom
|
||||
stickToBottom
|
||||
sx={{
|
||||
// allows the content to be scrolled (all browsers)
|
||||
overflowY: 'auto',
|
||||
// actually make sure this scrolls & fills
|
||||
height: '100%',
|
||||
}}
|
||||
stickToBottomInitial
|
||||
sx={{ display: 'flex', flexDirection: 'column' }}
|
||||
>
|
||||
|
||||
<ChatMessageList
|
||||
conversationId={_conversationId}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
chatLLMContextTokens={chatLLM?.contextTokens ?? null}
|
||||
isMessageSelectionMode={isMessageSelectionMode}
|
||||
setIsMessageSelectionMode={setIsMessageSelectionMode}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationExecuteHistory={handleConversationExecuteHistory}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={handleTextImagine}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
sx={{
|
||||
minHeight: '100%', // ensures filling of the blank space on newer chats
|
||||
}}
|
||||
/>
|
||||
{!_paneBeamIsOpen && (
|
||||
<ChatMessageList
|
||||
conversationId={_paneConversationId}
|
||||
conversationHandler={_paneChatHandler}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
chatLLMContextTokens={chatLLM?.contextTokens ?? null}
|
||||
fitScreen={isMobile || isMultiPane}
|
||||
isMobile={isMobile}
|
||||
isMessageSelectionMode={isMessageSelectionMode}
|
||||
setIsMessageSelectionMode={setIsMessageSelectionMode}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationExecuteHistory={handleConversationExecuteHistory}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={handleImagineFromText}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
sx={{
|
||||
flexGrow: 1,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Ephemerals
|
||||
conversationId={_conversationId}
|
||||
sx={{
|
||||
// TODO: Fixme post panels?
|
||||
// flexGrow: 0.1,
|
||||
flexShrink: 0.5,
|
||||
overflowY: 'auto',
|
||||
minHeight: 64,
|
||||
}}
|
||||
/>
|
||||
{_paneBeamIsOpen && (
|
||||
<ChatBeamWrapper
|
||||
beamStore={_paneBeamStore}
|
||||
isMobile={isMobile}
|
||||
inlineSx={{
|
||||
flexGrow: 1,
|
||||
// minHeight: 'calc(100vh - 69px - var(--AGI-Nav-width))',
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Visibility and actions are handled via Context */}
|
||||
<ScrollToBottomButton />
|
||||
|
||||
</ScrollToBottom>
|
||||
|
||||
</Panel>
|
||||
|
||||
{/* Panel Separators & Resizers */}
|
||||
{idx < panels.length - 1 && <GoodPanelResizeHandler />}
|
||||
{idx < _panesCount - 1 && (
|
||||
<PanelResizeHandle id={_sepId}>
|
||||
<PanelResizeInset />
|
||||
</PanelResizeHandle>
|
||||
)}
|
||||
|
||||
</React.Fragment>)}
|
||||
</React.Fragment>;
|
||||
})}
|
||||
|
||||
</PanelGroup>
|
||||
|
||||
<Composer
|
||||
isMobile={isMobile}
|
||||
chatLLM={chatLLM}
|
||||
composerTextAreaRef={composerTextAreaRef}
|
||||
conversationId={focusedConversationId}
|
||||
targetConversationId={focusedPaneConversationId}
|
||||
capabilityHasT2I={capabilityHasT2I}
|
||||
isDeveloperMode={focusedSystemPurposeId === 'Developer'}
|
||||
isMulticast={!isMultiConversationId ? null : isComposerMulticast}
|
||||
isDeveloperMode={isFocusedChatDeveloper}
|
||||
onAction={handleComposerAction}
|
||||
onTextImagine={handleTextImagine}
|
||||
sx={{
|
||||
zIndex: 21, // position: 'sticky', bottom: 0,
|
||||
backgroundColor: themeBgAppChatComposer,
|
||||
borderTop: `1px solid`,
|
||||
borderTopColor: 'divider',
|
||||
p: { xs: 1, md: 2 },
|
||||
}}
|
||||
onTextImagine={handleImagineFromText}
|
||||
setIsMulticast={setIsComposerMulticast}
|
||||
sx={beamOpenStoreInFocusedPane ? composerClosedSx : composerOpenSx}
|
||||
/>
|
||||
|
||||
{/* Diagrams */}
|
||||
@@ -533,7 +607,7 @@ export function AppChat() {
|
||||
{!!tradeConfig && (
|
||||
<TradeModal
|
||||
config={tradeConfig}
|
||||
onConversationActivate={setFocusedConversationId}
|
||||
onConversationActivate={handleOpenConversationInFocusedPane}
|
||||
onClose={() => setTradeConfig(null)}
|
||||
/>
|
||||
)}
|
||||
@@ -541,23 +615,20 @@ export function AppChat() {
|
||||
{/* [confirmation] Reset Conversation */}
|
||||
{!!clearConversationId && (
|
||||
<ConfirmationModal
|
||||
open
|
||||
onClose={() => setClearConversationId(null)}
|
||||
onPositive={handleConfirmedClearConversation}
|
||||
open onClose={() => setClearConversationId(null)} onPositive={handleConfirmedClearConversation}
|
||||
confirmationText='Are you sure you want to discard all messages?'
|
||||
positiveActionText='Clear conversation'
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* [confirmation] Delete All */}
|
||||
{!!deleteConversationId && <ConfirmationModal
|
||||
open onClose={() => setDeleteConversationId(null)} onPositive={handleConfirmedDeleteConversation}
|
||||
confirmationText={deleteConversationId === SPECIAL_ID_WIPE_ALL
|
||||
? `Are you absolutely sure you want to delete ${activeFolderId ? 'ALL conversations in this folder' : 'ALL conversations'}? This action cannot be undone.`
|
||||
: 'Are you sure you want to delete this conversation?'}
|
||||
positiveActionText={deleteConversationId === SPECIAL_ID_WIPE_ALL
|
||||
? `Yes, delete all ${activeFolderConversationsCount} conversations`
|
||||
: 'Delete conversation'}
|
||||
/>}
|
||||
{!!deleteConversationIds?.length && (
|
||||
<ConfirmationModal
|
||||
open onClose={() => setDeleteConversationIds(null)} onPositive={handleConfirmedDeleteConversations}
|
||||
confirmationText={`Are you absolutely sure you want to delete ${deleteConversationIds.length === 1 ? 'this conversation' : 'these conversations'}? This action cannot be undone.`}
|
||||
positiveActionText={deleteConversationIds.length === 1 ? 'Delete conversation' : `Yes, delete all ${deleteConversationIds.length} conversations`}
|
||||
/>
|
||||
)}
|
||||
|
||||
</>;
|
||||
}
|
||||
|
||||
@@ -1,19 +1,26 @@
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsAlter: ICommandsProvider = {
|
||||
id: 'chat-alter',
|
||||
rank: 20,
|
||||
id: 'cmd-chat-alter',
|
||||
rank: 25,
|
||||
|
||||
getCommands: () => [{
|
||||
primary: '/assistant',
|
||||
alternatives: ['/a'],
|
||||
arguments: ['text'],
|
||||
arguments: ['text...'],
|
||||
description: 'Injects assistant response',
|
||||
}, {
|
||||
primary: '/system',
|
||||
alternatives: ['/s'],
|
||||
arguments: ['text'],
|
||||
arguments: ['text...'],
|
||||
description: 'Injects system message',
|
||||
}, {
|
||||
primary: '/clear',
|
||||
arguments: ['all'],
|
||||
description: 'Clears the chat (removes all messages)',
|
||||
Icon: ClearIcon,
|
||||
}],
|
||||
|
||||
};
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsBeam: ICommandsProvider = {
|
||||
id: 'cmd-mode-beam',
|
||||
rank: 9,
|
||||
|
||||
getCommands: () => [{
|
||||
primary: '/beam',
|
||||
arguments: ['prompt'],
|
||||
description: 'Combine the smarts of models',
|
||||
Icon: ChatBeamIcon,
|
||||
}],
|
||||
|
||||
};
|
||||
@@ -3,8 +3,8 @@ import LanguageIcon from '@mui/icons-material/Language';
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsBrowse: ICommandsProvider = {
|
||||
id: 'ass-browse',
|
||||
rank: 25,
|
||||
id: 'cmd-ass-browse',
|
||||
rank: 20,
|
||||
|
||||
getCommands: () => [{
|
||||
primary: '/browse',
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import FormatPaintIcon from '@mui/icons-material/FormatPaint';
|
||||
import FormatPaintTwoToneIcon from '@mui/icons-material/FormatPaintTwoTone';
|
||||
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export function textToDrawCommand(text: string): string {
|
||||
return `/draw ${text}`;
|
||||
}
|
||||
|
||||
export const CommandsDraw: ICommandsProvider = {
|
||||
id: 'ass-t2i',
|
||||
id: 'cmd-ass-t2i',
|
||||
rank: 10,
|
||||
|
||||
getCommands: () => [{
|
||||
@@ -11,7 +15,7 @@ export const CommandsDraw: ICommandsProvider = {
|
||||
alternatives: ['/imagine', '/img'],
|
||||
arguments: ['prompt'],
|
||||
description: 'Assistant will draw the text',
|
||||
Icon: FormatPaintIcon,
|
||||
Icon: FormatPaintTwoToneIcon,
|
||||
}],
|
||||
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ import PsychologyIcon from '@mui/icons-material/Psychology';
|
||||
import type { ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
export const CommandsReact: ICommandsProvider = {
|
||||
id: 'ass-react',
|
||||
id: 'cmd-mode-react',
|
||||
rank: 15,
|
||||
|
||||
getCommands: () => [{
|
||||
|
||||
@@ -1,25 +1,27 @@
|
||||
import { ChatCommand, ICommandsProvider } from './ICommandsProvider';
|
||||
|
||||
import { CommandsAlter } from './CommandsAlter';
|
||||
import { CommandsBeam } from './CommandsBeam';
|
||||
import { CommandsBrowse } from './CommandsBrowse';
|
||||
import { CommandsDraw } from './CommandsDraw';
|
||||
import { CommandsHelp } from './CommandsHelp';
|
||||
import { CommandsReact } from './CommandsReact';
|
||||
|
||||
|
||||
export type CommandsProviderId = 'ass-browse' | 'ass-t2i' | 'ass-react' | 'chat-alter' | 'cmd-help';
|
||||
export type CommandsProviderId = 'cmd-ass-browse' | 'cmd-ass-t2i' | 'cmd-chat-alter' | 'cmd-help' | 'cmd-mode-beam' | 'cmd-mode-react';
|
||||
|
||||
type TextCommandPiece =
|
||||
| { type: 'text'; value: string; }
|
||||
| { type: 'cmd'; providerId: CommandsProviderId, command: string; params?: string, isError?: boolean };
|
||||
| { type: 'nocmd'; value: string; }
|
||||
| { type: 'cmd'; providerId: CommandsProviderId, command: string; params?: string, isErrorNoArgs?: boolean };
|
||||
|
||||
|
||||
const ChatCommandsProviders: Record<CommandsProviderId, ICommandsProvider> = {
|
||||
'ass-browse': CommandsBrowse,
|
||||
'ass-react': CommandsReact,
|
||||
'ass-t2i': CommandsDraw,
|
||||
'chat-alter': CommandsAlter,
|
||||
'cmd-ass-browse': CommandsBrowse,
|
||||
'cmd-ass-t2i': CommandsDraw,
|
||||
'cmd-chat-alter': CommandsAlter,
|
||||
'cmd-help': CommandsHelp,
|
||||
'cmd-mode-beam': CommandsBeam,
|
||||
'cmd-mode-react': CommandsReact,
|
||||
};
|
||||
|
||||
export function findAllChatCommands(): ChatCommand[] {
|
||||
@@ -29,16 +31,25 @@ export function findAllChatCommands(): ChatCommand[] {
|
||||
.flat();
|
||||
}
|
||||
|
||||
export function helpPrettyChatCommands() {
|
||||
return findAllChatCommands()
|
||||
.map(cmd => ` - ${cmd.primary}` + (cmd.alternatives?.length ? ` (${cmd.alternatives.join(', ')})` : '') + `: ${cmd.description}`)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
export function extractChatCommand(input: string): TextCommandPiece[] {
|
||||
const inputTrimmed = input.trim();
|
||||
|
||||
// quick exit: command does not start with '/'
|
||||
if (!inputTrimmed.startsWith('/'))
|
||||
return [{ type: 'text', value: input }];
|
||||
return [{ type: 'nocmd', value: input }];
|
||||
|
||||
// Find the first space to separate the command from its parameters (if any)
|
||||
const firstSpaceIndex = inputTrimmed.indexOf(' ');
|
||||
const potentialCommand = inputTrimmed.substring(0, firstSpaceIndex >= 0 ? firstSpaceIndex : inputTrimmed.length);
|
||||
const commandMatch = inputTrimmed.match(/^\/\S+/);
|
||||
const potentialCommand = commandMatch ? commandMatch[0] : inputTrimmed;
|
||||
|
||||
const textAfterCommand = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
|
||||
// Check if the potential command is an actual command
|
||||
for (const provider of Object.values(ChatCommandsProviders)) {
|
||||
@@ -46,22 +57,33 @@ export function extractChatCommand(input: string): TextCommandPiece[] {
|
||||
if (cmd.primary === potentialCommand || cmd.alternatives?.includes(potentialCommand)) {
|
||||
|
||||
// command needs arguments: take the rest of the input as parameters
|
||||
if (cmd.arguments?.length) {
|
||||
const params = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
return [{ type: 'cmd', providerId: provider.id, command: potentialCommand, params: params || undefined, isError: !params || undefined }];
|
||||
}
|
||||
if (cmd.arguments?.length) return [{
|
||||
type: 'cmd',
|
||||
providerId: provider.id,
|
||||
command: potentialCommand,
|
||||
params: textAfterCommand || undefined,
|
||||
isErrorNoArgs: !textAfterCommand,
|
||||
}];
|
||||
|
||||
// command without arguments, treat any text after as a separate text piece
|
||||
const pieces: TextCommandPiece[] = [{ type: 'cmd', providerId: provider.id, command: potentialCommand, params: undefined }];
|
||||
const textAfterCommand = firstSpaceIndex >= 0 ? inputTrimmed.substring(firstSpaceIndex + 1) : '';
|
||||
if (textAfterCommand)
|
||||
pieces.push({ type: 'text', value: textAfterCommand });
|
||||
const pieces: TextCommandPiece[] = [{
|
||||
type: 'cmd',
|
||||
providerId: provider.id,
|
||||
command: potentialCommand,
|
||||
params: undefined,
|
||||
}];
|
||||
textAfterCommand && pieces.push({
|
||||
type: 'nocmd',
|
||||
value: textAfterCommand,
|
||||
});
|
||||
return pieces;
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No command found, return the entire input as text
|
||||
return [{ type: 'text', value: input }];
|
||||
return [{
|
||||
type: 'nocmd',
|
||||
value: input,
|
||||
}];
|
||||
}
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, Modal, ModalClose } from '@mui/joy';
|
||||
|
||||
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
|
||||
import { BeamView } from '~/modules/beam/BeamView';
|
||||
|
||||
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
|
||||
|
||||
|
||||
/*const overlaySx: SxProps = {
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
|
||||
}*/
|
||||
|
||||
|
||||
export function ChatBeamWrapper(props: {
|
||||
beamStore: BeamStoreApi,
|
||||
isMobile: boolean,
|
||||
inlineSx?: SxProps,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const isMaximized = useBeamStore(props.beamStore, state => state.isMaximized);
|
||||
|
||||
const handleUnMaximize = React.useCallback(() => {
|
||||
props.beamStore.getState().setIsMaximized(false);
|
||||
}, [props.beamStore]);
|
||||
|
||||
// memo the beamview
|
||||
const beamView = React.useMemo(() => (
|
||||
<BeamView
|
||||
beamStore={props.beamStore}
|
||||
isMobile={props.isMobile}
|
||||
showExplainer
|
||||
/>
|
||||
), [props.beamStore, props.isMobile]);
|
||||
|
||||
return isMaximized ? (
|
||||
<Modal open onClose={handleUnMaximize}>
|
||||
<Box sx={{
|
||||
backgroundColor: 'background.level1',
|
||||
position: 'absolute',
|
||||
inset: 0,
|
||||
}}>
|
||||
<ScrollToBottom disableAutoStick>
|
||||
{beamView}
|
||||
</ScrollToBottom>
|
||||
<ModalClose sx={{ color: 'white', backgroundColor: 'background.surface', boxShadow: 'xs', mr: 2 }} />
|
||||
</Box>
|
||||
</Modal>
|
||||
) : (
|
||||
<Box sx={props.inlineSx}>
|
||||
{beamView}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -1,22 +1,29 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import type { SxProps } from '@mui/joy/styles/types';
|
||||
import { Box, List } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
|
||||
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
|
||||
|
||||
import type { ConversationHandler } from '~/common/chats/ConversationHandler';
|
||||
import type { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import type { DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
|
||||
import { InlineError } from '~/common/components/InlineError';
|
||||
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
import { ShortcutKeyName, useGlobalShortcut } from '~/common/components/useGlobalShortcut';
|
||||
import { createDMessage, DConversationId, DMessage, getConversation, useChatStore } from '~/common/state/store-chats';
|
||||
import { ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcuts';
|
||||
import { createDMessageTextContent, DMessageId, DMessageUserFlag, messageToggleUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { getConversation, useChatStore } from '~/common/stores/chat/store-chats';
|
||||
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
|
||||
import { useCapabilityElevenLabs } from '~/common/components/useCapabilities';
|
||||
import { useEphemerals } from '~/common/chats/EphemeralsStore';
|
||||
import { useScrollToBottom } from '~/common/scroll-to-bottom/useScrollToBottom';
|
||||
|
||||
import { ChatMessageMemo } from './message/ChatMessage';
|
||||
import { ChatMessage, ChatMessageMemo } from './message/ChatMessage';
|
||||
import { CleanerMessage, MessagesSelectionHeader } from './message/CleanerMessage';
|
||||
import { Ephemerals } from './Ephemerals';
|
||||
import { PersonaSelector } from './persona-selector/PersonaSelector';
|
||||
import { useChatShowSystemMessages } from '../store-app-chat';
|
||||
import { useScrollToBottom } from './scroll-to-bottom/useScrollToBottom';
|
||||
import { useChatAutoSuggestHTMLUI, useChatShowSystemMessages } from '../store-app-chat';
|
||||
|
||||
|
||||
/**
|
||||
@@ -24,14 +31,18 @@ import { useScrollToBottom } from './scroll-to-bottom/useScrollToBottom';
|
||||
*/
|
||||
export function ChatMessageList(props: {
|
||||
conversationId: DConversationId | null,
|
||||
conversationHandler: ConversationHandler | null,
|
||||
capabilityHasT2I: boolean,
|
||||
chatLLMContextTokens: number | null,
|
||||
isMessageSelectionMode: boolean, setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
|
||||
fitScreen: boolean,
|
||||
isMobile: boolean,
|
||||
isMessageSelectionMode: boolean,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string) => void,
|
||||
onConversationExecuteHistory: (conversationId: DConversationId, history: DMessage[]) => Promise<void>,
|
||||
onConversationExecuteHistory: (conversationId: DConversationId) => Promise<void>,
|
||||
onTextDiagram: (diagramConfig: DiagramConfig | null) => void,
|
||||
onTextImagine: (conversationId: DConversationId, selectedText: string) => Promise<void>,
|
||||
onTextSpeak: (selectedText: string) => Promise<void>,
|
||||
setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
|
||||
sx?: SxProps,
|
||||
}) {
|
||||
|
||||
@@ -43,61 +54,101 @@ export function ChatMessageList(props: {
|
||||
// external state
|
||||
const { notifyBooting } = useScrollToBottom();
|
||||
const { openPreferencesTab } = useOptimaLayout();
|
||||
const danger_experimentalHtmlWebUi = useChatAutoSuggestHTMLUI();
|
||||
const [showSystemMessages] = useChatShowSystemMessages();
|
||||
const { conversationMessages, historyTokenCount, editMessage, deleteMessage, setMessages } = useChatStore(state => {
|
||||
const optionalTranslationWarning = useBrowserTranslationWarning();
|
||||
const { conversationMessages, historyTokenCount } = useChatStore(useShallow(state => {
|
||||
const conversation = state.conversations.find(conversation => conversation.id === props.conversationId);
|
||||
return {
|
||||
conversationMessages: conversation ? conversation.messages : [],
|
||||
historyTokenCount: conversation ? conversation.tokenCount : 0,
|
||||
deleteMessage: state.deleteMessage,
|
||||
editMessage: state.editMessage,
|
||||
setMessages: state.setMessages,
|
||||
};
|
||||
}, shallow);
|
||||
}));
|
||||
const ephemerals = useEphemerals(props.conversationHandler);
|
||||
const { mayWork: isSpeakable } = useCapabilityElevenLabs();
|
||||
|
||||
// derived state
|
||||
const { conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
|
||||
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
|
||||
|
||||
|
||||
// text actions
|
||||
|
||||
const handleRunExample = React.useCallback(async (text: string) => {
|
||||
conversationId && await onConversationExecuteHistory(conversationId, [...conversationMessages, createDMessage('user', text)]);
|
||||
}, [conversationId, conversationMessages, onConversationExecuteHistory]);
|
||||
const handleRunExample = React.useCallback(async (examplePrompt: string) => {
|
||||
if (conversationId && conversationHandler) {
|
||||
conversationHandler.messageAppend(createDMessageTextContent('user', examplePrompt)); // [chat] append user:persona question
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory]);
|
||||
|
||||
|
||||
// message menu methods proxy
|
||||
|
||||
const handleConversationBranch = React.useCallback((messageId: string) => {
|
||||
const handleMessageAssistantFrom = React.useCallback(async (messageId: DMessageId, offset: number) => {
|
||||
if (conversationId && conversationHandler) {
|
||||
conversationHandler.historyTruncateTo(messageId, offset);
|
||||
await onConversationExecuteHistory(conversationId);
|
||||
}
|
||||
}, [conversationHandler, conversationId, onConversationExecuteHistory]);
|
||||
|
||||
const handleMessageBeam = React.useCallback(async (messageId: DMessageId) => {
|
||||
// Right-click menu Beam
|
||||
if (!conversationId || !props.conversationHandler) return;
|
||||
const messages = getConversation(conversationId)?.messages;
|
||||
if (messages?.length) {
|
||||
const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + 1);
|
||||
const lastMessage = truncatedHistory[truncatedHistory.length - 1];
|
||||
if (lastMessage) {
|
||||
// assistant: do an in-place beam
|
||||
if (lastMessage.role === 'assistant') {
|
||||
if (truncatedHistory.length >= 2)
|
||||
props.conversationHandler.beamInvoke(truncatedHistory.slice(0, -1), [lastMessage], lastMessage.id);
|
||||
} else {
|
||||
// user: truncate and append (but if the next message is an assistant message, import it)
|
||||
const nextMessage = messages[truncatedHistory.length];
|
||||
if (nextMessage?.role === 'assistant')
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [nextMessage], null);
|
||||
else
|
||||
props.conversationHandler.beamInvoke(truncatedHistory, [], null);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, [conversationId, props.conversationHandler]);
|
||||
|
||||
const handleMessageBranch = React.useCallback((messageId: DMessageId) => {
|
||||
conversationId && onConversationBranch(conversationId, messageId);
|
||||
}, [conversationId, onConversationBranch]);
|
||||
|
||||
const handleConversationRestartFrom = React.useCallback(async (messageId: string, offset: number) => {
|
||||
const messages = getConversation(conversationId)?.messages;
|
||||
if (messages) {
|
||||
const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + offset + 1);
|
||||
conversationId && await onConversationExecuteHistory(conversationId, truncatedHistory);
|
||||
}
|
||||
}, [conversationId, onConversationExecuteHistory]);
|
||||
const handleMessageTruncate = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.historyTruncateTo(messageId, 0);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleConversationTruncate = React.useCallback((messageId: string) => {
|
||||
const messages = getConversation(conversationId)?.messages;
|
||||
if (conversationId && messages) {
|
||||
const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + 1);
|
||||
setMessages(conversationId, truncatedHistory);
|
||||
}
|
||||
}, [conversationId, setMessages]);
|
||||
const handleMessageDelete = React.useCallback((messageId: DMessageId) => {
|
||||
props.conversationHandler?.messagesDelete([messageId]);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageDelete = React.useCallback((messageId: string) => {
|
||||
conversationId && deleteMessage(conversationId, messageId);
|
||||
}, [conversationId, deleteMessage]);
|
||||
const handleMessageAppendFragment = React.useCallback((messageId: DMessageId, fragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentAppend(messageId, fragment, false, false);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageEdit = React.useCallback((messageId: string, newText: string) => {
|
||||
conversationId && editMessage(conversationId, messageId, { text: newText }, true);
|
||||
}, [conversationId, editMessage]);
|
||||
const handleMessageDeleteFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId) => {
|
||||
props.conversationHandler?.messageFragmentDelete(messageId, fragmentId, false, true);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleTextDiagram = React.useCallback(async (messageId: string, text: string) => {
|
||||
const handleMessageReplaceFragment = React.useCallback((messageId: DMessageId, fragmentId: DMessageFragmentId, newFragment: DMessageFragment) => {
|
||||
props.conversationHandler?.messageFragmentReplace(messageId, fragmentId, newFragment, false);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleMessageToggleUserFlag = React.useCallback((messageId: DMessageId, userFlag: DMessageUserFlag) => {
|
||||
props.conversationHandler?.messageEdit(messageId, (message) => ({
|
||||
userFlags: messageToggleUserFlag(message, userFlag),
|
||||
}), false, false);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleReplyTo = React.useCallback((_messageId: DMessageId, text: string) => {
|
||||
props.conversationHandler?.getOverlayStore().getState().setReplyToText(text);
|
||||
}, [props.conversationHandler]);
|
||||
|
||||
const handleTextDiagram = React.useCallback(async (messageId: DMessageId, text: string) => {
|
||||
conversationId && onTextDiagram({ conversationId: conversationId, messageId, text });
|
||||
}, [conversationId, onTextDiagram]);
|
||||
|
||||
@@ -130,36 +181,35 @@ export function ChatMessageList(props: {
|
||||
setSelectedMessages(newSelected);
|
||||
};
|
||||
|
||||
const handleSelectMessage = (messageId: string, selected: boolean) => {
|
||||
const handleSelectMessage = (messageId: DMessageId, selected: boolean) => {
|
||||
const newSelected = new Set(selectedMessages);
|
||||
selected ? newSelected.add(messageId) : newSelected.delete(messageId);
|
||||
setSelectedMessages(newSelected);
|
||||
};
|
||||
|
||||
const handleSelectionDelete = () => {
|
||||
if (conversationId)
|
||||
for (const selectedMessage of selectedMessages)
|
||||
deleteMessage(conversationId, selectedMessage);
|
||||
const handleSelectionDelete = React.useCallback(() => {
|
||||
props.conversationHandler?.messagesDelete(Array.from(selectedMessages));
|
||||
setSelectedMessages(new Set());
|
||||
};
|
||||
}, [props.conversationHandler, selectedMessages]);
|
||||
|
||||
useGlobalShortcut(props.isMessageSelectionMode && ShortcutKeyName.Esc, false, false, false, () => {
|
||||
useGlobalShortcuts([[props.isMessageSelectionMode && ShortcutKeyName.Esc, false, false, false, () => {
|
||||
props.setIsMessageSelectionMode(false);
|
||||
});
|
||||
}]]);
|
||||
|
||||
|
||||
// text-diff functionality, find the messages to diff with
|
||||
// text-diff functionality: only diff the last complete message, and they're similar in size
|
||||
|
||||
const { diffMessage, diffText } = React.useMemo(() => {
|
||||
const [msgB, msgA] = conversationMessages.filter(m => m.role === 'assistant').reverse();
|
||||
if (msgB?.text && msgA?.text && !msgB?.typing) {
|
||||
const textA = msgA.text, textB = msgB.text;
|
||||
const lenA = textA.length, lenB = textB.length;
|
||||
if (lenA > 80 && lenB > 80 && lenA > lenB / 3 && lenB > lenA / 3)
|
||||
return { diffMessage: msgB, diffText: textA };
|
||||
}
|
||||
return { diffMessage: undefined, diffText: undefined };
|
||||
}, [conversationMessages]);
|
||||
// const { diffTargetMessage, diffPrevText } = React.useMemo(() => {
|
||||
// const [msgB, msgA] = conversationMessages.filter(m => m.role === 'assistant').reverse();
|
||||
// const textB = msgB ? singleTextOrThrow(msgB) : undefined;
|
||||
// const textA = msgA ? singleTextOrThrow(msgA) : undefined;
|
||||
// if (textB && textA && !msgB?.pendingIncomplete) {
|
||||
// const lenA = textA.length, lenB = textB.length;
|
||||
// if (lenA > 80 && lenB > 80 && lenA > lenB / 3 && lenB > lenA / 3)
|
||||
// return { diffTargetMessage: msgB, diffPrevText: textA };
|
||||
// }
|
||||
// return { diffTargetMessage: undefined, diffPrevText: undefined };
|
||||
// }, [conversationMessages]);
|
||||
|
||||
|
||||
// scroll to the very bottom of a new chat
|
||||
@@ -185,15 +235,20 @@ export function ChatMessageList(props: {
|
||||
);
|
||||
|
||||
return (
|
||||
<List sx={{
|
||||
p: 0, ...(props.sx || {}),
|
||||
// this makes sure that the the window is scrolled to the bottom (column-reverse)
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
<List role='chat-messages-list' sx={{
|
||||
p: 0,
|
||||
...(props.sx || {}),
|
||||
|
||||
// fix for the double-border on the last message (one by the composer, one to the bottom of the message)
|
||||
// marginBottom: '-1px',
|
||||
|
||||
// layout
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
}}>
|
||||
|
||||
{optionalTranslationWarning}
|
||||
|
||||
{props.isMessageSelectionMode && (
|
||||
<MessagesSelectionHeader
|
||||
hasSelected={selectedMessages.size > 0}
|
||||
@@ -204,35 +259,61 @@ export function ChatMessageList(props: {
|
||||
/>
|
||||
)}
|
||||
|
||||
{filteredMessages.map((message, idx, { length: count }) =>
|
||||
props.isMessageSelectionMode ? (
|
||||
{filteredMessages.map((message, idx, { length: count }) => {
|
||||
|
||||
<CleanerMessage
|
||||
key={'sel-' + message.id}
|
||||
message={message}
|
||||
remainingTokens={props.chatLLMContextTokens ? (props.chatLLMContextTokens - historyTokenCount) : undefined}
|
||||
selected={selectedMessages.has(message.id)} onToggleSelected={handleSelectMessage}
|
||||
/>
|
||||
// Optimization: only memo complete components, or we'd be memoizing garbage
|
||||
const ChatMessageMemoOrNot = !message.pendingIncomplete ? ChatMessageMemo : ChatMessage;
|
||||
|
||||
) : (
|
||||
return props.isMessageSelectionMode ? (
|
||||
|
||||
<ChatMessageMemo
|
||||
key={'msg-' + message.id}
|
||||
message={message}
|
||||
diffPreviousText={message === diffMessage ? diffText : undefined}
|
||||
isBottom={idx === count - 1}
|
||||
isImagining={isImagining} isSpeaking={isSpeaking}
|
||||
onConversationBranch={handleConversationBranch}
|
||||
onConversationRestartFrom={handleConversationRestartFrom}
|
||||
onConversationTruncate={handleConversationTruncate}
|
||||
onMessageDelete={handleMessageDelete}
|
||||
onMessageEdit={handleMessageEdit}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={handleTextImagine}
|
||||
onTextSpeak={handleTextSpeak}
|
||||
/>
|
||||
<CleanerMessage
|
||||
key={'sel-' + message.id}
|
||||
message={message}
|
||||
remainingTokens={props.chatLLMContextTokens ? (props.chatLLMContextTokens - historyTokenCount) : undefined}
|
||||
selected={selectedMessages.has(message.id)} onToggleSelected={handleSelectMessage}
|
||||
/>
|
||||
|
||||
),
|
||||
) : (
|
||||
|
||||
<ChatMessageMemoOrNot
|
||||
key={'msg-' + message.id}
|
||||
message={message}
|
||||
// diffPreviousText={message === diffTargetMessage ? diffPrevText : undefined}
|
||||
fitScreen={props.fitScreen}
|
||||
isMobile={props.isMobile}
|
||||
isBottom={idx === count - 1}
|
||||
isImagining={isImagining}
|
||||
isSpeaking={isSpeaking}
|
||||
showUnsafeHtml={danger_experimentalHtmlWebUi}
|
||||
onMessageAssistantFrom={handleMessageAssistantFrom}
|
||||
onMessageBeam={handleMessageBeam}
|
||||
onMessageBranch={handleMessageBranch}
|
||||
onMessageDelete={handleMessageDelete}
|
||||
onMessageFragmentAppend={handleMessageAppendFragment}
|
||||
onMessageFragmentDelete={handleMessageDeleteFragment}
|
||||
onMessageFragmentReplace={handleMessageReplaceFragment}
|
||||
onMessageToggleUserFlag={handleMessageToggleUserFlag}
|
||||
onMessageTruncate={handleMessageTruncate}
|
||||
onReplyTo={handleReplyTo}
|
||||
onTextDiagram={handleTextDiagram}
|
||||
onTextImagine={capabilityHasT2I ? handleTextImagine : undefined}
|
||||
onTextSpeak={isSpeakable ? handleTextSpeak : undefined}
|
||||
/>
|
||||
|
||||
);
|
||||
},
|
||||
)}
|
||||
|
||||
{!!ephemerals.length && (
|
||||
<Ephemerals
|
||||
ephemerals={ephemerals}
|
||||
conversationId={props.conversationId}
|
||||
sx={{
|
||||
mt: 'auto',
|
||||
overflowY: 'auto',
|
||||
minHeight: 64,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
</List>
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
|
||||
import { Box, Grid, IconButton, Sheet, styled, Typography } from '@mui/joy';
|
||||
import { SxProps } from '@mui/joy/styles/types';
|
||||
import CloseIcon from '@mui/icons-material/Close';
|
||||
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
|
||||
|
||||
import { DConversationId, DEphemeral, useChatStore } from '~/common/state/store-chats';
|
||||
import { lineHeightChatText } from '~/common/app.theme';
|
||||
import type { DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import type { DEphemeral } from '~/common/chats/EphemeralsStore';
|
||||
import { ConversationsManager } from '~/common/chats/ConversationsManager';
|
||||
import { lineHeightChatTextMd } from '~/common/app.theme';
|
||||
|
||||
|
||||
const StateLine = styled(Typography)(({ theme }) => ({
|
||||
@@ -16,7 +17,7 @@ const StateLine = styled(Typography)(({ theme }) => ({
|
||||
fontSize: theme.fontSize.xs,
|
||||
fontFamily: theme.fontFamily.code,
|
||||
marginLeft: theme.spacing(1),
|
||||
lineHeight: lineHeightChatText,
|
||||
lineHeight: lineHeightChatTextMd,
|
||||
}));
|
||||
|
||||
function isPrimitive(value: any): boolean {
|
||||
@@ -75,6 +76,11 @@ function StateRenderer(props: { state: object }) {
|
||||
|
||||
|
||||
function EphemeralItem({ conversationId, ephemeral }: { conversationId: string, ephemeral: DEphemeral }) {
|
||||
|
||||
const handleDelete = React.useCallback(() => {
|
||||
ConversationsManager.getHandler(conversationId).ephemeralsStore.delete(ephemeral.id);
|
||||
}, [conversationId, ephemeral.id]);
|
||||
|
||||
return <Box
|
||||
sx={{
|
||||
p: { xs: 1, md: 2 },
|
||||
@@ -93,7 +99,7 @@ function EphemeralItem({ conversationId, ephemeral }: { conversationId: string,
|
||||
|
||||
{/* Left pane (console) */}
|
||||
<Grid xs={12} md={ephemeral.state ? 6 : 12}>
|
||||
<Typography fontSize='smaller' sx={{ overflowWrap: 'anywhere', whiteSpace: 'break-spaces', lineHeight: lineHeightChatText }}>
|
||||
<Typography fontSize='smaller' sx={{ overflowWrap: 'anywhere', whiteSpace: 'break-spaces', lineHeight: lineHeightChatTextMd }}>
|
||||
{ephemeral.text}
|
||||
</Typography>
|
||||
</Grid>
|
||||
@@ -112,12 +118,12 @@ function EphemeralItem({ conversationId, ephemeral }: { conversationId: string,
|
||||
{/* Close button (right of title) */}
|
||||
<IconButton
|
||||
size='sm'
|
||||
onClick={() => useChatStore.getState().deleteEphemeral(conversationId, ephemeral.id)}
|
||||
onClick={handleDelete}
|
||||
sx={{
|
||||
position: 'absolute', top: 8, right: 8,
|
||||
opacity: { xs: 1, sm: 0.5 }, transition: 'opacity 0.3s',
|
||||
}}>
|
||||
<CloseIcon />
|
||||
<CloseRoundedIcon />
|
||||
</IconButton>
|
||||
|
||||
</Box>;
|
||||
@@ -130,19 +136,22 @@ function EphemeralItem({ conversationId, ephemeral }: { conversationId: string,
|
||||
// `);
|
||||
|
||||
|
||||
export function Ephemerals(props: { conversationId: DConversationId | null, sx?: SxProps }) {
|
||||
export function Ephemerals(props: { ephemerals: DEphemeral[], conversationId: DConversationId | null, sx?: SxProps }) {
|
||||
// global state
|
||||
const ephemerals = useChatStore(state => {
|
||||
const conversation = state.conversations.find(conversation => conversation.id === props.conversationId);
|
||||
return conversation ? conversation.ephemerals : [];
|
||||
}, shallow);
|
||||
// const ephemerals = useChatStore(state => {
|
||||
// const conversation = state.conversations.find(conversation => conversation.id === props.conversationId);
|
||||
// return conversation ? conversation.ephemerals : [];
|
||||
// }, shallow);
|
||||
|
||||
if (!ephemerals?.length) return null;
|
||||
const ephemerals = props.ephemerals;
|
||||
// if (!ephemerals?.length) return null;
|
||||
|
||||
return (
|
||||
<Sheet
|
||||
variant='soft' color='success' invertedColors
|
||||
sx={{
|
||||
borderTop: '1px solid',
|
||||
borderTopColor: 'divider',
|
||||
// backgroundImage: `url("data:image/svg+xml,${dashedBorderSVG.replace('currentColor', '%23A1E8A1')}")`,
|
||||
// backgroundSize: '100% 100%',
|
||||
// backgroundRepeat: 'no-repeat',
|
||||
|
||||
@@ -1,367 +0,0 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
|
||||
import { Box, IconButton, ListDivider, ListItem, ListItemButton, ListItemDecorator, Tooltip } from '@mui/joy';
|
||||
import AddIcon from '@mui/icons-material/Add';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import FileDownloadIcon from '@mui/icons-material/FileDownload';
|
||||
import FileUploadIcon from '@mui/icons-material/FileUpload';
|
||||
import FolderIcon from '@mui/icons-material/Folder';
|
||||
import FolderOpenOutlinedIcon from '@mui/icons-material/FolderOpenOutlined';
|
||||
import FolderOutlinedIcon from '@mui/icons-material/FolderOutlined';
|
||||
|
||||
import DebounceInput from '~/common/components/DebounceInput';
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { DFolder, useFolderStore } from '~/common/state/store-folders';
|
||||
import { PageDrawerHeader } from '~/common/layout/optima/components/PageDrawerHeader';
|
||||
import { PageDrawerList, PageDrawerTallItemSx } from '~/common/layout/optima/components/PageDrawerList';
|
||||
import { conversationTitle, DConversationId, useChatStore } from '~/common/state/store-chats';
|
||||
import { useOptimaDrawers } from '~/common/layout/optima/useOptimaDrawers';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
import { ChatDrawerItemMemo, ChatNavigationItemData, FolderChangeRequest } from './ChatDrawerItem';
|
||||
import { ChatFolderList } from './folder/ChatFolderList';
|
||||
import { ClearFolderText } from './folder/useFolderDropdown';
|
||||
|
||||
|
||||
// this is here to make shallow comparisons work on the next hook
|
||||
const noFolders: DFolder[] = [];
|
||||
|
||||
/*
|
||||
* Lists folders and returns the active folder
|
||||
*/
|
||||
export const useFolders = (activeFolderId: string | null) => useFolderStore(({ enableFolders, folders, toggleEnableFolders }) => {
|
||||
|
||||
// finds the active folder if any
|
||||
const activeFolder = (enableFolders && activeFolderId)
|
||||
? folders.find(folder => folder.id === activeFolderId) ?? null
|
||||
: null;
|
||||
|
||||
return {
|
||||
activeFolder,
|
||||
allFolders: enableFolders ? folders : noFolders,
|
||||
enableFolders,
|
||||
toggleEnableFolders,
|
||||
};
|
||||
}, shallow);
|
||||
|
||||
|
||||
/*
|
||||
* Optimization: return a reduced version of the DConversation object for 'Drawer Items' purposes,
|
||||
* to avoid unnecessary re-renders on each new character typed by the assistant
|
||||
*/
|
||||
export const useChatNavigationItemsData = (activeFolder: DFolder | null, allFolders: DFolder[], activeConversationId: DConversationId | null): ChatNavigationItemData[] =>
|
||||
useChatStore(({ conversations }) => {
|
||||
|
||||
const activeConversations = activeFolder
|
||||
? conversations.filter(_c => activeFolder.conversationIds.includes(_c.id))
|
||||
: conversations;
|
||||
|
||||
return activeConversations.map((_c): ChatNavigationItemData => ({
|
||||
conversationId: _c.id,
|
||||
isActive: _c.id === activeConversationId,
|
||||
isEmpty: !_c.messages.length && !_c.userTitle,
|
||||
title: conversationTitle(_c),
|
||||
folder: !allFolders.length
|
||||
? undefined // don't show folder select if folders are disabled
|
||||
: _c.id === activeConversationId // only show the folder for active conversation(s)
|
||||
? allFolders.find(folder => folder.conversationIds.includes(_c.id)) ?? null
|
||||
: null,
|
||||
messageCount: _c.messages.length,
|
||||
assistantTyping: !!_c.abortController,
|
||||
systemPurposeId: _c.systemPurposeId,
|
||||
}));
|
||||
|
||||
}, (a, b) => {
|
||||
// custom equality function to avoid unnecessary re-renders
|
||||
return a.length === b.length && a.every((_a, i) => shallow(_a, b[i]));
|
||||
});
|
||||
|
||||
|
||||
export const ChatDrawerMemo = React.memo(ChatDrawer);
|
||||
|
||||
function ChatDrawer(props: {
|
||||
activeConversationId: DConversationId | null,
|
||||
activeFolderId: string | null,
|
||||
disableNewButton: boolean,
|
||||
onConversationActivate: (conversationId: DConversationId) => void,
|
||||
onConversationDelete: (conversationId: DConversationId, bypassConfirmation: boolean) => void,
|
||||
onConversationExportDialog: (conversationId: DConversationId | null) => void,
|
||||
onConversationImportDialog: () => void,
|
||||
onConversationNew: () => void,
|
||||
onConversationsDeleteAll: () => void,
|
||||
setActiveFolderId: (folderId: string | null) => void,
|
||||
}) {
|
||||
|
||||
const { onConversationActivate, onConversationDelete, onConversationExportDialog, onConversationNew } = props;
|
||||
|
||||
// local state
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = React.useState('');
|
||||
const [folderChangeRequest, setFolderChangeRequest] = React.useState<FolderChangeRequest | null>(null);
|
||||
|
||||
// external state
|
||||
const { closeDrawer, closeDrawerOnMobile } = useOptimaDrawers();
|
||||
const { activeFolder, allFolders, enableFolders, toggleEnableFolders } = useFolders(props.activeFolderId);
|
||||
const chatNavItems = useChatNavigationItemsData(activeFolder, allFolders, props.activeConversationId);
|
||||
const showSymbols = useUIPreferencesStore(state => state.zenMode !== 'cleaner');
|
||||
|
||||
// derived state
|
||||
const selectConversationsCount = chatNavItems.length;
|
||||
const nonEmptyChats = selectConversationsCount > 1 || (selectConversationsCount === 1 && !chatNavItems[0].isEmpty);
|
||||
const singleChat = selectConversationsCount === 1;
|
||||
const softMaxReached = selectConversationsCount >= 10;
|
||||
|
||||
|
||||
const handleButtonNew = React.useCallback(() => {
|
||||
onConversationNew();
|
||||
closeDrawerOnMobile();
|
||||
}, [closeDrawerOnMobile, onConversationNew]);
|
||||
|
||||
|
||||
const handleConversationActivate = React.useCallback((conversationId: DConversationId, closeMenu: boolean) => {
|
||||
onConversationActivate(conversationId);
|
||||
if (closeMenu)
|
||||
closeDrawerOnMobile();
|
||||
}, [closeDrawerOnMobile, onConversationActivate]);
|
||||
|
||||
|
||||
const handleConversationDelete = React.useCallback((conversationId: DConversationId) => {
|
||||
!singleChat && conversationId && onConversationDelete(conversationId, true);
|
||||
}, [onConversationDelete, singleChat]);
|
||||
|
||||
|
||||
// Folder change request
|
||||
|
||||
const handleConversationFolderChange = React.useCallback((folderChangeRequest: FolderChangeRequest) => setFolderChangeRequest(folderChangeRequest), []);
|
||||
|
||||
const handleConversationFolderCancel = React.useCallback(() => setFolderChangeRequest(null), []);
|
||||
|
||||
const handleConversationFolderSet = React.useCallback((conversationId: DConversationId, nextFolderId: string | null) => {
|
||||
// Remove conversation from existing folders
|
||||
const { addConversationToFolder, folders, removeConversationFromFolder } = useFolderStore.getState();
|
||||
folders.forEach(folder => folder.conversationIds.includes(conversationId) && removeConversationFromFolder(folder.id, conversationId));
|
||||
|
||||
// Add conversation to the selected folder
|
||||
nextFolderId && addConversationToFolder(nextFolderId, conversationId);
|
||||
|
||||
// Close the menu
|
||||
setFolderChangeRequest(null);
|
||||
}, []);
|
||||
|
||||
|
||||
// Filter chatNavItems based on the search query and rank them by search frequency
|
||||
const filteredChatNavItems = React.useMemo(() => {
|
||||
if (!debouncedSearchQuery) return chatNavItems;
|
||||
return chatNavItems
|
||||
.map(item => {
|
||||
// Get the conversation by ID
|
||||
const conversation = useChatStore.getState().conversations.find(c => c.id === item.conversationId);
|
||||
// Calculate the frequency of the search term in the title and messages
|
||||
const titleFrequency = (item.title.toLowerCase().match(new RegExp(debouncedSearchQuery.toLowerCase(), 'g')) || []).length;
|
||||
const messageFrequency = conversation?.messages.reduce((count, message) => {
|
||||
return count + (message.text.toLowerCase().match(new RegExp(debouncedSearchQuery.toLowerCase(), 'g')) || []).length;
|
||||
}, 0) || 0;
|
||||
// Return the item with the searchFrequency property
|
||||
return {
|
||||
...item,
|
||||
searchFrequency: titleFrequency + messageFrequency,
|
||||
};
|
||||
})
|
||||
// Exclude items with a searchFrequency of 0
|
||||
.filter(item => item.searchFrequency > 0)
|
||||
// Sort the items by searchFrequency in descending order
|
||||
.sort((a, b) => b.searchFrequency! - a.searchFrequency!);
|
||||
}, [chatNavItems, debouncedSearchQuery]);
|
||||
|
||||
|
||||
// basis for the underline bar
|
||||
const bottomBarBasis = filteredChatNavItems.reduce((longest, _c) => Math.max(longest, _c.searchFrequency ?? _c.messageCount), 1);
|
||||
|
||||
|
||||
// grouping
|
||||
/*let sortedIds = conversationIDs;
|
||||
if (grouping === 'persona') {
|
||||
const conversations = useChatStore.getState().conversations;
|
||||
|
||||
// group conversations by persona
|
||||
const groupedConversations: { [personaId: string]: string[] } = {};
|
||||
conversations.forEach(conversation => {
|
||||
const persona = conversation.systemPurposeId;
|
||||
if (persona) {
|
||||
if (!groupedConversations[persona])
|
||||
groupedConversations[persona] = [];
|
||||
groupedConversations[persona].push(conversation.id);
|
||||
}
|
||||
});
|
||||
|
||||
// flatten grouped conversations
|
||||
sortedIds = Object.values(groupedConversations).flat();
|
||||
}*/
|
||||
|
||||
return <>
|
||||
|
||||
{/* Drawer Header */}
|
||||
<PageDrawerHeader title='Chats' onClose={closeDrawer}>
|
||||
<Tooltip title={enableFolders ? 'Hide Folders' : 'Use Folders'}>
|
||||
<IconButton onClick={toggleEnableFolders}>
|
||||
{enableFolders ? <FolderOpenOutlinedIcon /> : <FolderOutlinedIcon />}
|
||||
</IconButton>
|
||||
</Tooltip>
|
||||
</PageDrawerHeader>
|
||||
|
||||
{/* Folders List */}
|
||||
{/*<Box sx={{*/}
|
||||
{/* display: 'grid',*/}
|
||||
{/* gridTemplateRows: !enableFolders ? '0fr' : '1fr',*/}
|
||||
{/* transition: 'grid-template-rows 0.42s cubic-bezier(.17,.84,.44,1)',*/}
|
||||
{/* '& > div': {*/}
|
||||
{/* padding: enableFolders ? 2 : 0,*/}
|
||||
{/* transition: 'padding 0.42s cubic-bezier(.17,.84,.44,1)',*/}
|
||||
{/* overflow: 'hidden',*/}
|
||||
{/* },*/}
|
||||
{/*}}>*/}
|
||||
{enableFolders && (
|
||||
<ChatFolderList
|
||||
folders={allFolders}
|
||||
activeFolderId={props.activeFolderId}
|
||||
onFolderSelect={props.setActiveFolderId}
|
||||
/>
|
||||
)}
|
||||
{/*</Box>*/}
|
||||
|
||||
{/* Chats List */}
|
||||
<PageDrawerList variant='plain' noTopPadding noBottomPadding tallRows>
|
||||
|
||||
{enableFolders && <ListDivider sx={{ mb: 0 }} />}
|
||||
|
||||
{/* Search Input Field */}
|
||||
<DebounceInput
|
||||
minChars={2}
|
||||
onDebounce={setDebouncedSearchQuery}
|
||||
debounceTimeout={300}
|
||||
placeholder='Search...'
|
||||
aria-label='Search'
|
||||
sx={{ m: 2 }}
|
||||
/>
|
||||
|
||||
<ListItem sx={{ '--ListItem-minHeight': '2.75rem' }}>
|
||||
<ListItemButton disabled={props.disableNewButton} onClick={handleButtonNew} sx={PageDrawerTallItemSx}>
|
||||
<ListItemDecorator><AddIcon /></ListItemDecorator>
|
||||
<Box sx={{
|
||||
// style
|
||||
fontSize: 'sm',
|
||||
fontWeight: 'lg',
|
||||
// content
|
||||
flexGrow: 1,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
gap: 1,
|
||||
}}>
|
||||
New chat
|
||||
{/*<KeyStroke combo='Ctrl + Alt + N' sx={props.disableNewButton ? { opacity: 0.5 } : undefined} />*/}
|
||||
</Box>
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
|
||||
{/*<ListDivider sx={{ mt: 0 }} />*/}
|
||||
|
||||
<Box sx={{ flex: 1, overflowY: 'auto' }}>
|
||||
{/*<ListItem sticky sx={{ justifyContent: 'space-between', boxShadow: 'sm' }}>*/}
|
||||
{/* <Typography level='body-sm'>*/}
|
||||
{/* Conversations*/}
|
||||
{/* </Typography>*/}
|
||||
{/* <ToggleButtonGroup variant='soft' size='sm' value={grouping} onChange={(_event, newValue) => newValue && setGrouping(newValue)}>*/}
|
||||
{/* <IconButton value='off'>*/}
|
||||
{/* <AccessTimeIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/* <IconButton value='persona'>*/}
|
||||
{/* <PersonIcon />*/}
|
||||
{/* </IconButton>*/}
|
||||
{/* </ToggleButtonGroup>*/}
|
||||
{/*</ListItem>*/}
|
||||
|
||||
{filteredChatNavItems.map(item =>
|
||||
<ChatDrawerItemMemo
|
||||
key={'nav-' + item.conversationId}
|
||||
item={item}
|
||||
isLonely={singleChat}
|
||||
showSymbols={showSymbols}
|
||||
bottomBarBasis={(softMaxReached || debouncedSearchQuery) ? bottomBarBasis : 0}
|
||||
onConversationActivate={handleConversationActivate}
|
||||
onConversationDelete={handleConversationDelete}
|
||||
onConversationExport={onConversationExportDialog}
|
||||
onConversationFolderChange={handleConversationFolderChange}
|
||||
/>)}
|
||||
</Box>
|
||||
|
||||
<ListDivider sx={{ mt: 0 }} />
|
||||
|
||||
<Box sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
<ListItemButton onClick={props.onConversationImportDialog} sx={{ flex: 1 }}>
|
||||
<ListItemDecorator>
|
||||
<FileUploadIcon />
|
||||
</ListItemDecorator>
|
||||
Import
|
||||
{/*<OpenAIIcon sx={{ ml: 'auto' }} />*/}
|
||||
</ListItemButton>
|
||||
|
||||
<ListItemButton disabled={!nonEmptyChats} onClick={() => props.onConversationExportDialog(props.activeConversationId)} sx={{ flex: 1 }}>
|
||||
<ListItemDecorator>
|
||||
<FileDownloadIcon />
|
||||
</ListItemDecorator>
|
||||
Export
|
||||
</ListItemButton>
|
||||
</Box>
|
||||
|
||||
<ListItemButton disabled={!nonEmptyChats} onClick={props.onConversationsDeleteAll}>
|
||||
<ListItemDecorator>
|
||||
<DeleteOutlineIcon />
|
||||
</ListItemDecorator>
|
||||
Delete {selectConversationsCount >= 2 ? `all ${selectConversationsCount} chats` : 'chat'}
|
||||
</ListItemButton>
|
||||
|
||||
</PageDrawerList>
|
||||
|
||||
|
||||
{/* [Menu] Chat Item Folder Change */}
|
||||
{!!folderChangeRequest?.anchorEl && (
|
||||
<CloseableMenu
|
||||
open anchorEl={folderChangeRequest.anchorEl} onClose={handleConversationFolderCancel}
|
||||
placement='bottom-start'
|
||||
zIndex={1301 /* need to be on top of the Modal on Mobile */}
|
||||
sx={{ minWidth: 200 }}
|
||||
>
|
||||
|
||||
{/* Folder Assignment Buttons */}
|
||||
{allFolders.map(folder => {
|
||||
const isRequestFolder = folder === folderChangeRequest.currentFolder;
|
||||
return (
|
||||
<ListItem
|
||||
key={folder.id}
|
||||
variant={isRequestFolder ? 'soft' : 'plain'}
|
||||
onClick={() => handleConversationFolderSet(folderChangeRequest.conversationId, folder.id)}
|
||||
>
|
||||
<ListItemButton>
|
||||
<ListItemDecorator>
|
||||
<FolderIcon sx={{ color: folder.color }} />
|
||||
</ListItemDecorator>
|
||||
{folder.title}
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Remove Folder Assignment */}
|
||||
{!!folderChangeRequest.currentFolder && (
|
||||
<ListItem onClick={() => handleConversationFolderSet(folderChangeRequest.conversationId, null)}>
|
||||
<ListItemButton>
|
||||
{ClearFolderText}
|
||||
</ListItemButton>
|
||||
</ListItem>
|
||||
)}
|
||||
|
||||
</CloseableMenu>
|
||||
)}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,339 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Avatar, Box, Divider, IconButton, ListItem, ListItemButton, ListItemDecorator, Sheet, styled, Tooltip, Typography } from '@mui/joy';
|
||||
import AutoFixHighIcon from '@mui/icons-material/AutoFixHigh';
|
||||
import CloseIcon from '@mui/icons-material/Close';
|
||||
import DeleteForeverIcon from '@mui/icons-material/DeleteForever';
|
||||
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
|
||||
import EditIcon from '@mui/icons-material/Edit';
|
||||
import FileDownloadOutlinedIcon from '@mui/icons-material/FileDownloadOutlined';
|
||||
import FolderIcon from '@mui/icons-material/Folder';
|
||||
import FolderOutlinedIcon from '@mui/icons-material/FolderOutlined';
|
||||
|
||||
import { SystemPurposeId, SystemPurposes } from '../../../../data';
|
||||
|
||||
import { conversationAutoTitle } from '~/modules/aifn/autotitle/autoTitle';
|
||||
|
||||
import type { DFolder } from '~/common/state/store-folders';
|
||||
import { DConversationId, useChatStore } from '~/common/state/store-chats';
|
||||
import { InlineTextarea } from '~/common/components/InlineTextarea';
|
||||
|
||||
|
||||
// set to true to display the conversation IDs
|
||||
// const DEBUG_CONVERSATION_IDS = false;
|
||||
|
||||
|
||||
export const FadeInButton = styled(IconButton)({
|
||||
opacity: 0.5,
|
||||
transition: 'opacity 0.2s',
|
||||
'&:hover': { opacity: 1 },
|
||||
});
|
||||
|
||||
|
||||
export const ChatDrawerItemMemo = React.memo(ChatDrawerItem);
|
||||
|
||||
export interface ChatNavigationItemData {
|
||||
conversationId: DConversationId;
|
||||
isActive: boolean;
|
||||
isEmpty: boolean;
|
||||
title: string;
|
||||
folder: DFolder | null | undefined; // null: 'All', undefined: do not show folder select
|
||||
messageCount: number;
|
||||
assistantTyping: boolean;
|
||||
systemPurposeId: SystemPurposeId;
|
||||
searchFrequency?: number;
|
||||
}
|
||||
|
||||
export interface FolderChangeRequest {
|
||||
conversationId: DConversationId;
|
||||
anchorEl: HTMLButtonElement;
|
||||
currentFolder: DFolder | null;
|
||||
}
|
||||
|
||||
function ChatDrawerItem(props: {
|
||||
item: ChatNavigationItemData,
|
||||
isLonely: boolean,
|
||||
showSymbols: boolean,
|
||||
bottomBarBasis: number,
|
||||
onConversationActivate: (conversationId: DConversationId, closeMenu: boolean) => void,
|
||||
onConversationDelete: (conversationId: DConversationId) => void,
|
||||
onConversationExport: (conversationId: DConversationId) => void,
|
||||
onConversationFolderChange: (folderChangeRequest: FolderChangeRequest) => void,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [isEditingTitle, setIsEditingTitle] = React.useState(false);
|
||||
const [deleteArmed, setDeleteArmed] = React.useState(false);
|
||||
|
||||
// derived state
|
||||
const { onConversationExport, onConversationFolderChange } = props;
|
||||
const { conversationId, isActive, title, folder, messageCount, assistantTyping, systemPurposeId, searchFrequency } = props.item;
|
||||
const isNew = messageCount === 0;
|
||||
|
||||
|
||||
// [effect] auto-disarm when inactive
|
||||
const shallClose = deleteArmed && !isActive;
|
||||
React.useEffect(() => {
|
||||
if (shallClose)
|
||||
setDeleteArmed(false);
|
||||
}, [shallClose]);
|
||||
|
||||
|
||||
// Activate
|
||||
|
||||
const handleConversationActivate = () => props.onConversationActivate(conversationId, true);
|
||||
|
||||
|
||||
// export
|
||||
|
||||
const handleConversationExport = React.useCallback((event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
conversationId && onConversationExport(conversationId);
|
||||
}, [conversationId, onConversationExport]);
|
||||
|
||||
|
||||
// Folder change
|
||||
|
||||
const handleFolderChangeBegin = React.useCallback((event: React.MouseEvent<HTMLButtonElement>) => {
|
||||
event.stopPropagation();
|
||||
onConversationFolderChange({
|
||||
conversationId,
|
||||
anchorEl: event.currentTarget,
|
||||
currentFolder: folder ?? null,
|
||||
});
|
||||
}, [conversationId, folder, onConversationFolderChange]);
|
||||
|
||||
|
||||
// Title Edit
|
||||
|
||||
const handleTitleEditBegin = React.useCallback(() => setIsEditingTitle(true), []);
|
||||
|
||||
const handleTitleEditCancel = React.useCallback(() => {
|
||||
setIsEditingTitle(false);
|
||||
}, []);
|
||||
|
||||
const handleTitleEditChange = React.useCallback((text: string) => {
|
||||
setIsEditingTitle(false);
|
||||
useChatStore.getState().setUserTitle(conversationId, text.trim());
|
||||
}, [conversationId]);
|
||||
|
||||
const handleTitleEditAuto = React.useCallback(() => {
|
||||
conversationAutoTitle(conversationId, true);
|
||||
}, [conversationId]);
|
||||
|
||||
|
||||
// Delete
|
||||
|
||||
const handleDeleteButtonShow = React.useCallback(() => setDeleteArmed(true), []);
|
||||
|
||||
const handleDeleteButtonHide = React.useCallback(() => setDeleteArmed(false), []);
|
||||
|
||||
const handleConversationDelete = React.useCallback((event: React.MouseEvent) => {
|
||||
if (deleteArmed) {
|
||||
setDeleteArmed(false);
|
||||
event.stopPropagation();
|
||||
props.onConversationDelete(conversationId);
|
||||
}
|
||||
}, [conversationId, deleteArmed, props]);
|
||||
|
||||
|
||||
const textSymbol = SystemPurposes[systemPurposeId]?.symbol || '❓';
|
||||
|
||||
const progress = props.bottomBarBasis ? 100 * (searchFrequency ?? messageCount) / props.bottomBarBasis : 0;
|
||||
|
||||
|
||||
const titleRowComponent = React.useMemo(() => <>
|
||||
|
||||
{/* Symbol, if globally enabled */}
|
||||
{props.showSymbols && <ListItemDecorator>
|
||||
{assistantTyping
|
||||
? (
|
||||
<Avatar
|
||||
alt='typing' variant='plain'
|
||||
src='https://i.giphy.com/media/jJxaUysjzO9ri/giphy.webp'
|
||||
sx={{
|
||||
width: '1.5rem',
|
||||
height: '1.5rem',
|
||||
borderRadius: 'var(--joy-radius-sm)',
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<Typography>
|
||||
{isNew ? '' : textSymbol}
|
||||
</Typography>
|
||||
)}
|
||||
</ListItemDecorator>}
|
||||
|
||||
{/* Title */}
|
||||
{!isEditingTitle ? (
|
||||
<Typography
|
||||
// level={isActive ? 'title-md' : 'body-md'}
|
||||
onDoubleClick={handleTitleEditBegin}
|
||||
sx={{
|
||||
color: isActive ? 'text.primary' : 'text.secondary',
|
||||
flex: 1,
|
||||
}}
|
||||
>
|
||||
{/*{DEBUG_CONVERSATION_IDS && `${conversationId} - `}*/}
|
||||
{title.trim() ? title : 'Chat'}{assistantTyping && '...'}
|
||||
</Typography>
|
||||
) : (
|
||||
<InlineTextarea
|
||||
invertedColors
|
||||
initialText={title}
|
||||
onEdit={handleTitleEditChange}
|
||||
onCancel={handleTitleEditCancel}
|
||||
sx={{
|
||||
flexGrow: 1,
|
||||
ml: -1.5, mr: -0.5,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Display search frequency if it exists and is greater than 0 */}
|
||||
{searchFrequency && searchFrequency > 0 && (
|
||||
<Box sx={{ ml: 1 }}>
|
||||
<Typography level='body-sm'>
|
||||
{searchFrequency}
|
||||
</Typography>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
</>, [assistantTyping, handleTitleEditBegin, handleTitleEditCancel, handleTitleEditChange, isActive, isEditingTitle, isNew, props.showSymbols, searchFrequency, textSymbol, title]);
|
||||
|
||||
const progressBarFixedComponent = React.useMemo(() =>
|
||||
progress > 0 && (
|
||||
<Box sx={{
|
||||
backgroundColor: 'neutral.softBg',
|
||||
position: 'absolute', left: 0, bottom: 0, width: progress + '%', height: 4,
|
||||
}} />
|
||||
), [progress]);
|
||||
|
||||
|
||||
return isActive ? (
|
||||
|
||||
// Active Conversation
|
||||
<Sheet
|
||||
variant={isActive ? 'solid' : 'plain'}
|
||||
invertedColors={isActive}
|
||||
sx={{
|
||||
// common
|
||||
'--ListItem-minHeight': '2.75rem',
|
||||
position: 'relative', // for the progress bar
|
||||
// '--variant-borderWidth': '0.125rem',
|
||||
border: 'none', // there's a default border of 1px and invisible.. hmm
|
||||
// style
|
||||
borderRadius: 'md',
|
||||
mx: '0.25rem',
|
||||
'&:hover > button': {
|
||||
opacity: 1, // fade in buttons when hovering, but by default wash them out a bit
|
||||
},
|
||||
}}
|
||||
>
|
||||
|
||||
<ListItem sx={{ border: 'none', display: 'grid', gap: 0, px: 'calc(var(--ListItem-paddingX) - 0.25rem)' }}>
|
||||
|
||||
{/* Title row */}
|
||||
<Box sx={{ display: 'flex', gap: 'var(--ListItem-gap)', minHeight: '2.25rem', alignItems: 'center' }}>
|
||||
|
||||
{titleRowComponent}
|
||||
|
||||
</Box>
|
||||
|
||||
{/* buttons row */}
|
||||
<Box sx={{ display: 'flex', gap: 1, minHeight: '2.25rem', alignItems: 'center' }}>
|
||||
|
||||
<ListItemDecorator />
|
||||
|
||||
{/* Current Folder color, and change initiator */}
|
||||
{(folder !== undefined) && <>
|
||||
<Tooltip disableInteractive title={folder ? `Change Folder (${folder.title})` : 'Add to Folder'}>
|
||||
{folder ? (
|
||||
<IconButton size='sm' onClick={handleFolderChangeBegin}>
|
||||
<FolderIcon style={{ color: folder.color || 'inherit' }} />
|
||||
</IconButton>
|
||||
) : (
|
||||
<FadeInButton size='sm' onClick={handleFolderChangeBegin}>
|
||||
<FolderOutlinedIcon />
|
||||
</FadeInButton>
|
||||
)}
|
||||
</Tooltip>
|
||||
|
||||
<Divider orientation='vertical' sx={{ my: 1, opacity: 0.5 }} />
|
||||
</>}
|
||||
|
||||
<Tooltip disableInteractive title='Rename'>
|
||||
<FadeInButton size='sm' disabled={isEditingTitle} onClick={handleTitleEditBegin}>
|
||||
<EditIcon />
|
||||
</FadeInButton>
|
||||
</Tooltip>
|
||||
|
||||
{!isNew && <>
|
||||
<Tooltip disableInteractive title='Auto-Title'>
|
||||
<FadeInButton size='sm' disabled={isEditingTitle} onClick={handleTitleEditAuto}>
|
||||
<AutoFixHighIcon />
|
||||
</FadeInButton>
|
||||
</Tooltip>
|
||||
|
||||
<Divider orientation='vertical' sx={{ my: 1, opacity: 0.5 }} />
|
||||
|
||||
<Tooltip disableInteractive title='Export'>
|
||||
<FadeInButton size='sm' onClick={handleConversationExport}>
|
||||
<FileDownloadOutlinedIcon />
|
||||
</FadeInButton>
|
||||
</Tooltip>
|
||||
</>}
|
||||
|
||||
|
||||
{/* --> */}
|
||||
<Box sx={{ flex: 1 }} />
|
||||
|
||||
{/* Delete [armed, arming] buttons */}
|
||||
{!props.isLonely && !searchFrequency && <>
|
||||
{deleteArmed && (
|
||||
<Tooltip disableInteractive title='Confirm Deletion'>
|
||||
<FadeInButton key='btn-del' variant='solid' color='success' size='sm' onClick={handleConversationDelete} sx={{ opacity: 1 }}>
|
||||
<DeleteForeverIcon sx={{ color: 'danger.solidBg' }} />
|
||||
</FadeInButton>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
<Tooltip disableInteractive title={deleteArmed ? 'Cancel Delete' : 'Delete'}>
|
||||
<FadeInButton key='btn-arm' size='sm' onClick={deleteArmed ? handleDeleteButtonHide : handleDeleteButtonShow} sx={deleteArmed ? { opacity: 1 } : {}}>
|
||||
{deleteArmed ? <CloseIcon /> : <DeleteOutlineIcon />}
|
||||
</FadeInButton>
|
||||
</Tooltip>
|
||||
</>}
|
||||
|
||||
</Box>
|
||||
|
||||
</ListItem>
|
||||
|
||||
{/* Optional progress bar, underlay */}
|
||||
{progressBarFixedComponent}
|
||||
|
||||
</Sheet>
|
||||
|
||||
) : (
|
||||
|
||||
// Inactive Conversation - click to activate
|
||||
<ListItem sx={{ '--ListItem-minHeight': '2.75rem' }}>
|
||||
|
||||
<ListItemButton
|
||||
onClick={handleConversationActivate}
|
||||
sx={{
|
||||
border: 'none', // there's a default border of 1px and invisible.. hmm
|
||||
position: 'relative', // for the progress bar
|
||||
}}
|
||||
>
|
||||
|
||||
{titleRowComponent}
|
||||
|
||||
{/* Optional progress bar, underlay */}
|
||||
{progressBarFixedComponent}
|
||||
|
||||
</ListItemButton>
|
||||
|
||||
</ListItem>
|
||||
);
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { IconButton } from '@mui/joy';
|
||||
import VerticalSplitIcon from '@mui/icons-material/VerticalSplit';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import { useUXLabsStore } from '~/common/state/store-ux-labs';
|
||||
|
||||
import { useChatLLMDropdown } from './useLLMDropdown';
|
||||
import { usePersonaIdDropdown } from './usePersonaDropdown';
|
||||
import { useFolderDropdown } from './folder/useFolderDropdown';
|
||||
|
||||
|
||||
export function ChatDropdowns(props: {
|
||||
conversationId: DConversationId | null
|
||||
isSplitPanes: boolean
|
||||
onToggleSplitPanes: () => void
|
||||
}) {
|
||||
|
||||
// state
|
||||
const { chatLLMDropdown } = useChatLLMDropdown();
|
||||
const { personaDropdown } = usePersonaIdDropdown(props.conversationId);
|
||||
const { folderDropdown } = useFolderDropdown(props.conversationId);
|
||||
|
||||
// external state
|
||||
const labsSplitBranching = useUXLabsStore(state => state.labsSplitBranching);
|
||||
|
||||
return <>
|
||||
|
||||
{/* Persona selector */}
|
||||
{personaDropdown}
|
||||
|
||||
{/* Model selector */}
|
||||
{chatLLMDropdown}
|
||||
|
||||
{/* Folder selector */}
|
||||
{folderDropdown}
|
||||
|
||||
{/* Split Panes button */}
|
||||
{labsSplitBranching && <IconButton
|
||||
variant={props.isSplitPanes ? 'solid' : undefined}
|
||||
onClick={props.onToggleSplitPanes}
|
||||
// sx={{
|
||||
// ml: 'auto',
|
||||
// }}
|
||||
>
|
||||
<VerticalSplitIcon />
|
||||
</IconButton>}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, ListDivider, ListItemDecorator, MenuItem, Switch } from '@mui/joy';
|
||||
import CheckBoxOutlineBlankOutlinedIcon from '@mui/icons-material/CheckBoxOutlineBlankOutlined';
|
||||
import CheckBoxOutlinedIcon from '@mui/icons-material/CheckBoxOutlined';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import CompressIcon from '@mui/icons-material/Compress';
|
||||
import ForkRightIcon from '@mui/icons-material/ForkRight';
|
||||
import SettingsSuggestIcon from '@mui/icons-material/SettingsSuggest';
|
||||
|
||||
import type { DConversationId } from '~/common/state/store-chats';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useOptimaDrawers } from '~/common/layout/optima/useOptimaDrawers';
|
||||
|
||||
import { useChatShowSystemMessages } from '../../store-app-chat';
|
||||
|
||||
|
||||
export function ChatMenuItems(props: {
|
||||
conversationId: DConversationId | null,
|
||||
hasConversations: boolean,
|
||||
isConversationEmpty: boolean,
|
||||
isMessageSelectionMode: boolean,
|
||||
setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
|
||||
onConversationBranch: (conversationId: DConversationId, messageId: string | null) => void,
|
||||
onConversationClear: (conversationId: DConversationId) => void,
|
||||
onConversationFlatten: (conversationId: DConversationId) => void,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const { closePageMenu } = useOptimaDrawers();
|
||||
const [showSystemMessages, setShowSystemMessages] = useChatShowSystemMessages();
|
||||
|
||||
// derived state
|
||||
const disabled = !props.conversationId || props.isConversationEmpty;
|
||||
|
||||
|
||||
const closeMenu = (event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
closePageMenu();
|
||||
};
|
||||
|
||||
const handleConversationClear = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationClear(props.conversationId);
|
||||
};
|
||||
|
||||
const handleConversationBranch = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationBranch(props.conversationId, null);
|
||||
};
|
||||
|
||||
const handleConversationFlatten = (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
closeMenu(event);
|
||||
props.conversationId && props.onConversationFlatten(props.conversationId);
|
||||
};
|
||||
|
||||
const handleToggleMessageSelectionMode = (event: React.MouseEvent) => {
|
||||
closeMenu(event);
|
||||
props.setIsMessageSelectionMode(!props.isMessageSelectionMode);
|
||||
};
|
||||
|
||||
const handleToggleSystemMessages = () => setShowSystemMessages(!showSystemMessages);
|
||||
|
||||
|
||||
return <>
|
||||
|
||||
{/*<ListItem>*/}
|
||||
{/* <Typography level='body-sm'>*/}
|
||||
{/* Conversation*/}
|
||||
{/* </Typography>*/}
|
||||
{/*</ListItem>*/}
|
||||
|
||||
<MenuItem onClick={handleToggleSystemMessages}>
|
||||
<ListItemDecorator><SettingsSuggestIcon /></ListItemDecorator>
|
||||
System message
|
||||
<Switch checked={showSystemMessages} onChange={handleToggleSystemMessages} sx={{ ml: 'auto' }} />
|
||||
</MenuItem>
|
||||
|
||||
<ListDivider inset='startContent' />
|
||||
|
||||
<MenuItem disabled={disabled} onClick={handleConversationBranch}>
|
||||
<ListItemDecorator><ForkRightIcon /></ListItemDecorator>
|
||||
Branch
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem disabled={disabled} onClick={handleConversationFlatten}>
|
||||
<ListItemDecorator><CompressIcon color='success' /></ListItemDecorator>
|
||||
Flatten
|
||||
</MenuItem>
|
||||
|
||||
<ListDivider inset='startContent' />
|
||||
|
||||
<MenuItem disabled={disabled} onClick={handleToggleMessageSelectionMode}>
|
||||
<ListItemDecorator>{props.isMessageSelectionMode ? <CheckBoxOutlinedIcon /> : <CheckBoxOutlineBlankOutlinedIcon />}</ListItemDecorator>
|
||||
<span style={props.isMessageSelectionMode ? { fontWeight: 800 } : {}}>
|
||||
Cleanup ...
|
||||
</span>
|
||||
</MenuItem>
|
||||
|
||||
<MenuItem disabled={disabled} onClick={handleConversationClear}>
|
||||
<ListItemDecorator><ClearIcon /></ListItemDecorator>
|
||||
<Box sx={{ flexGrow: 1, display: 'flex', justifyContent: 'space-between', gap: 1 }}>
|
||||
Reset Chat
|
||||
{!disabled && <KeyStroke combo='Ctrl + Alt + X' />}
|
||||
</Box>
|
||||
</MenuItem>
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { Droppable, DroppableProps } from "react-beautiful-dnd";
|
||||
|
||||
export const StrictModeDroppable = ({ children, ...props }: DroppableProps) => {
|
||||
const [enabled, setEnabled] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const animation = requestAnimationFrame(() => setEnabled(true));
|
||||
|
||||
return () => {
|
||||
cancelAnimationFrame(animation);
|
||||
setEnabled(false);
|
||||
};
|
||||
}, []);
|
||||
|
||||
if (!enabled) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <Droppable {...props}>{children}</Droppable>;
|
||||
};
|
||||
|
||||
@@ -1,115 +0,0 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
|
||||
import { Box, ListItemButton, ListItemDecorator } from '@mui/joy';
|
||||
import BuildCircleIcon from '@mui/icons-material/BuildCircle';
|
||||
import SettingsIcon from '@mui/icons-material/Settings';
|
||||
|
||||
import { DLLM, DLLMId, DModelSourceId, useModelsStore } from '~/modules/llms/store-llms';
|
||||
|
||||
import { PageBarDropdown, DropdownItems } from '~/common/layout/optima/components/PageBarDropdown';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
|
||||
|
||||
|
||||
function AppBarLLMDropdown(props: {
|
||||
llms: DLLM[],
|
||||
chatLlmId: DLLMId | null,
|
||||
setChatLlmId: (llmId: DLLMId | null) => void,
|
||||
placeholder?: string,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const { openLlmOptions, openModelsSetup } = useOptimaLayout();
|
||||
|
||||
// build model menu items, filtering-out hidden models, and add Source separators
|
||||
const llmItems: DropdownItems = {};
|
||||
let prevSourceId: DModelSourceId | null = null;
|
||||
for (const llm of props.llms) {
|
||||
|
||||
// filter-out hidden models
|
||||
if (!(!llm.hidden || llm.id === props.chatLlmId))
|
||||
continue;
|
||||
|
||||
// add separators when changing sources
|
||||
if (!prevSourceId || llm.sId !== prevSourceId) {
|
||||
if (prevSourceId)
|
||||
llmItems[`sep-${llm.id}`] = {
|
||||
type: 'separator',
|
||||
title: llm.sId,
|
||||
};
|
||||
prevSourceId = llm.sId;
|
||||
}
|
||||
|
||||
// add the model item
|
||||
llmItems[llm.id] = {
|
||||
title: llm.label,
|
||||
// icon: llm.id.startsWith('some vendor') ? <VendorIcon /> : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
const handleChatLLMChange = (_event: any, value: DLLMId | null) => value && props.setChatLlmId(value);
|
||||
|
||||
const handleOpenLLMOptions = () => props.chatLlmId && openLlmOptions(props.chatLlmId);
|
||||
|
||||
|
||||
return (
|
||||
<PageBarDropdown
|
||||
items={llmItems}
|
||||
value={props.chatLlmId} onChange={handleChatLLMChange}
|
||||
placeholder={props.placeholder || 'Models …'}
|
||||
appendOption={<>
|
||||
|
||||
{props.chatLlmId && (
|
||||
<ListItemButton key='menu-opt' onClick={handleOpenLLMOptions}>
|
||||
<ListItemDecorator><SettingsIcon color='success' /></ListItemDecorator>
|
||||
<Box sx={{ flexGrow: 1, display: 'flex', justifyContent: 'space-between', gap: 1 }}>
|
||||
Options
|
||||
<KeyStroke combo='Ctrl + Shift + O' />
|
||||
</Box>
|
||||
</ListItemButton>
|
||||
)}
|
||||
|
||||
<ListItemButton key='menu-llms' onClick={openModelsSetup}>
|
||||
<ListItemDecorator><BuildCircleIcon color='success' /></ListItemDecorator>
|
||||
<Box sx={{ flexGrow: 1, display: 'flex', justifyContent: 'space-between', gap: 1 }}>
|
||||
Models
|
||||
<KeyStroke combo='Ctrl + Shift + M' />
|
||||
</Box>
|
||||
</ListItemButton>
|
||||
|
||||
</>}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export function useChatLLMDropdown() {
|
||||
// external state
|
||||
const { llms, chatLLMId, setChatLLMId } = useModelsStore(state => ({
|
||||
llms: state.llms,
|
||||
chatLLMId: state.chatLLMId,
|
||||
setChatLLMId: state.setChatLLMId,
|
||||
}), shallow);
|
||||
|
||||
const chatLLMDropdown = React.useMemo(
|
||||
() => <AppBarLLMDropdown llms={llms} chatLlmId={chatLLMId} setChatLlmId={setChatLLMId} />,
|
||||
[llms, chatLLMId, setChatLLMId],
|
||||
);
|
||||
|
||||
return { chatLLMId, chatLLMDropdown };
|
||||
}
|
||||
|
||||
/*export function useTempLLMDropdown(props: { initialLlmId: DLLMId | null }) {
|
||||
// local state
|
||||
const [llmId, setLlmId] = React.useState<DLLMId | null>(props.initialLlmId);
|
||||
|
||||
// external state
|
||||
const llms = useModelsStore(state => state.llms, shallow);
|
||||
|
||||
const chatLLMDropdown = React.useMemo(
|
||||
() => <AppBarLLMDropdown llms={llms} llmId={llmId} setLlmId={setLlmId} />,
|
||||
[llms, llmId, setLlmId],
|
||||
);
|
||||
|
||||
return { llmId, chatLLMDropdown };
|
||||
}*/
|
||||
@@ -1,60 +0,0 @@
|
||||
import * as React from 'react';
|
||||
import { shallow } from 'zustand/shallow';
|
||||
|
||||
import { SystemPurposeId, SystemPurposes } from '../../../../data';
|
||||
|
||||
import { DConversationId, useChatStore } from '~/common/state/store-chats';
|
||||
import { PageBarDropdown } from '~/common/layout/optima/components/PageBarDropdown';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
|
||||
function AppBarPersonaDropdown(props: {
|
||||
systemPurposeId: SystemPurposeId | null,
|
||||
setSystemPurposeId: (systemPurposeId: SystemPurposeId | null) => void,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const { zenMode } = useUIPreferencesStore(state => ({
|
||||
zenMode: state.zenMode,
|
||||
}), shallow);
|
||||
|
||||
const handleSystemPurposeChange = (_event: any, value: SystemPurposeId | null) => props.setSystemPurposeId(value);
|
||||
|
||||
|
||||
// options
|
||||
|
||||
// let appendOption: React.JSX.Element | undefined = undefined;
|
||||
|
||||
return (
|
||||
<PageBarDropdown
|
||||
items={SystemPurposes} showSymbols={zenMode !== 'cleaner'}
|
||||
value={props.systemPurposeId} onChange={handleSystemPurposeChange}
|
||||
// appendOption={appendOption}
|
||||
/>
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
export function usePersonaIdDropdown(conversationId: DConversationId | null) {
|
||||
|
||||
// external state
|
||||
const { systemPurposeId } = useChatStore(state => {
|
||||
const conversation = state.conversations.find(conversation => conversation.id === conversationId);
|
||||
return {
|
||||
systemPurposeId: conversation?.systemPurposeId ?? null,
|
||||
};
|
||||
}, shallow);
|
||||
|
||||
const personaDropdown = React.useMemo(() => systemPurposeId
|
||||
? <AppBarPersonaDropdown
|
||||
systemPurposeId={systemPurposeId}
|
||||
setSystemPurposeId={(systemPurposeId) => {
|
||||
if (conversationId && systemPurposeId)
|
||||
useChatStore.getState().setSystemPurposeId(conversationId, systemPurposeId);
|
||||
}}
|
||||
/> : null,
|
||||
[conversationId, systemPurposeId],
|
||||
);
|
||||
|
||||
return { personaDropdown };
|
||||
}
|
||||
@@ -7,61 +7,21 @@ import InfoIcon from '@mui/icons-material/Info';
|
||||
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
|
||||
|
||||
import { InlineError } from '~/common/components/InlineError';
|
||||
import { downloadVideoFrameAsPNG, renderVideoFrameAsPNGFile } from '~/common/util/videoUtils';
|
||||
import { useCameraCapture } from '~/common/components/useCameraCapture';
|
||||
|
||||
|
||||
function prettyFileName(renderedFrame: HTMLCanvasElement) {
|
||||
const prettyDate = new Date().toISOString().replace(/[:-]/g, '').replace('T', '-').replace('Z', '');
|
||||
const prettyResolution = `${renderedFrame.width}x${renderedFrame.height}`;
|
||||
return `camera-${prettyDate}-${prettyResolution}.png`;
|
||||
}
|
||||
|
||||
function renderVideoFrameToCanvas(videoElement: HTMLVideoElement): HTMLCanvasElement {
|
||||
// paint the video on a canvas, to save it
|
||||
const canvas = document.createElement('canvas');
|
||||
canvas.width = videoElement.videoWidth || 640;
|
||||
canvas.height = videoElement.videoHeight || 480;
|
||||
const ctx = canvas.getContext('2d');
|
||||
ctx?.drawImage(videoElement, 0, 0);
|
||||
return canvas;
|
||||
}
|
||||
|
||||
function renderVideoFrameToFile(videoElement: HTMLVideoElement, callback: (file: File) => void) {
|
||||
// video to canvas
|
||||
const renderedFrame = renderVideoFrameToCanvas(videoElement);
|
||||
|
||||
// canvas to blob to file to callback
|
||||
renderedFrame.toBlob((blob) => {
|
||||
if (blob) {
|
||||
const file = new File([blob], prettyFileName(renderedFrame), { type: blob.type });
|
||||
callback(file);
|
||||
}
|
||||
}, 'image/png');
|
||||
}
|
||||
|
||||
function downloadVideoFrameAsPNG(videoElement: HTMLVideoElement) {
|
||||
// video to canvas to png
|
||||
const renderedFrame = renderVideoFrameToCanvas(videoElement);
|
||||
const imageDataURL = renderedFrame.toDataURL('image/png');
|
||||
|
||||
// auto-download
|
||||
const link = document.createElement('a');
|
||||
link.download = prettyFileName(renderedFrame);
|
||||
link.href = imageDataURL;
|
||||
link.click();
|
||||
}
|
||||
|
||||
|
||||
export function CameraCaptureModal(props: {
|
||||
onCloseModal: () => void,
|
||||
onAttachImage: (file: File) => void
|
||||
// onOCR: (ocrText: string) => void }
|
||||
}) {
|
||||
// state
|
||||
// const [ocrProgress/*, setOCRProgress*/] = React.useState<number | null>(null);
|
||||
const [showInfo, setShowInfo] = React.useState(false);
|
||||
|
||||
// camera operations
|
||||
// state
|
||||
const [showInfo, setShowInfo] = React.useState(false);
|
||||
// const [ocrProgress/*, setOCRProgress*/] = React.useState<number | null>(null);
|
||||
|
||||
// external state
|
||||
const {
|
||||
videoRef,
|
||||
cameras, cameraIdx, setCameraIdx,
|
||||
@@ -70,10 +30,14 @@ export function CameraCaptureModal(props: {
|
||||
} = useCameraCapture();
|
||||
|
||||
|
||||
const stopAndClose = () => {
|
||||
// derived state
|
||||
const { onCloseModal, onAttachImage } = props;
|
||||
|
||||
|
||||
const stopAndClose = React.useCallback(() => {
|
||||
resetVideo();
|
||||
props.onCloseModal();
|
||||
};
|
||||
onCloseModal();
|
||||
}, [onCloseModal, resetVideo]);
|
||||
|
||||
/*const handleVideoOCRClicked = async () => {
|
||||
if (!videoRef.current) return;
|
||||
@@ -94,18 +58,21 @@ export function CameraCaptureModal(props: {
|
||||
props.onOCR(result.data.text);
|
||||
};*/
|
||||
|
||||
const handleVideoSnapClicked = () => {
|
||||
const handleVideoSnapClicked = React.useCallback(async () => {
|
||||
if (!videoRef.current) return;
|
||||
renderVideoFrameToFile(videoRef.current, (file) => {
|
||||
props.onAttachImage(file);
|
||||
try {
|
||||
const file = await renderVideoFrameAsPNGFile(videoRef.current, 'camera');
|
||||
onAttachImage(file);
|
||||
stopAndClose();
|
||||
});
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error capturing video frame:', error);
|
||||
}
|
||||
}, [onAttachImage, stopAndClose, videoRef]);
|
||||
|
||||
const handleVideoDownloadClicked = () => {
|
||||
const handleVideoDownloadClicked = React.useCallback(() => {
|
||||
if (!videoRef.current) return;
|
||||
downloadVideoFrameAsPNG(videoRef.current);
|
||||
};
|
||||
downloadVideoFrameAsPNG(videoRef.current, 'camera');
|
||||
}, [videoRef]);
|
||||
|
||||
|
||||
return (
|
||||
@@ -118,7 +85,7 @@ export function CameraCaptureModal(props: {
|
||||
}}>
|
||||
|
||||
{/* Top bar */}
|
||||
<Sheet variant='solid' invertedColors sx={{ zIndex: 10, display: 'flex', justifyContent: 'space-between', p: 1 }}>
|
||||
<Sheet variant='solid' invertedColors sx={{ display: 'flex', justifyContent: 'space-between', p: 1 }}>
|
||||
<Select
|
||||
variant='solid' color='neutral'
|
||||
value={cameraIdx} onChange={(_event: any, value: number | null) => setCameraIdx(value === null ? -1 : value)}
|
||||
@@ -149,7 +116,7 @@ export function CameraCaptureModal(props: {
|
||||
|
||||
{showInfo && !!info && <Typography
|
||||
sx={{
|
||||
position: 'absolute', top: 0, left: 0, right: 0, bottom: 0, zIndex: 1,
|
||||
position: 'absolute', inset: 0, zIndex: 1, /* camera info on top of video */
|
||||
background: 'rgba(0,0,0,0.5)', color: 'white',
|
||||
whiteSpace: 'pre', overflowY: 'scroll',
|
||||
}}>
|
||||
@@ -160,7 +127,7 @@ export function CameraCaptureModal(props: {
|
||||
</Box>
|
||||
|
||||
{/* Bottom controls (zoom, ocr, download) & progress */}
|
||||
<Sheet variant='soft' sx={{ display: 'flex', flexDirection: 'column', zIndex: 20, gap: 1, p: 1 }}>
|
||||
<Sheet variant='soft' sx={{ display: 'flex', flexDirection: 'column', gap: 1, p: 1 }}>
|
||||
|
||||
{!!error && <InlineError error={error} />}
|
||||
|
||||
@@ -170,7 +137,7 @@ export function CameraCaptureModal(props: {
|
||||
|
||||
<Box sx={{ display: 'flex', gap: 1, justifyContent: 'space-between' }}>
|
||||
{/* Info */}
|
||||
<IconButton size='lg' disabled={!info} variant='soft' onClick={() => setShowInfo(info => !info)} sx={{ zIndex: 30 }}>
|
||||
<IconButton size='lg' disabled={!info} variant='soft' onClick={() => setShowInfo(info => !info)}>
|
||||
<InfoIcon />
|
||||
</IconButton>
|
||||
{/*<Button disabled={ocrProgress !== null} fullWidth variant='solid' size='lg' onClick={handleVideoOCRClicked} sx={{ flex: 1, maxWidth: 260 }}>*/}
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { KeyStroke } from '~/common/components/KeyStroke';
|
||||
import { useUIPreferencesStore } from '~/common/state/store-ui';
|
||||
|
||||
import { ChatModeId } from '../../AppChat';
|
||||
|
||||
|
||||
interface ChatModeDescription {
|
||||
label: string;
|
||||
description: string | React.JSX.Element;
|
||||
shortcut?: string;
|
||||
requiresTTI?: boolean;
|
||||
}
|
||||
|
||||
const ChatModeItems: { [key in ChatModeId]: ChatModeDescription } = {
|
||||
'generate-text': {
|
||||
label: 'Chat',
|
||||
description: 'Persona replies',
|
||||
},
|
||||
'append-user': {
|
||||
label: 'Write',
|
||||
description: 'Appends a message',
|
||||
shortcut: 'Alt + Enter',
|
||||
},
|
||||
'generate-image': {
|
||||
label: 'Draw',
|
||||
description: 'AI Image Generation',
|
||||
requiresTTI: true,
|
||||
},
|
||||
'generate-react': {
|
||||
label: 'Reason + Act · α',
|
||||
description: 'Answers questions in multiple steps',
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
function fixNewLineShortcut(shortcut: string, enterIsNewLine: boolean) {
|
||||
if (shortcut === 'ENTER')
|
||||
return enterIsNewLine ? 'Shift + Enter' : 'Enter';
|
||||
return shortcut;
|
||||
}
|
||||
|
||||
export function ChatModeMenu(props: {
|
||||
anchorEl: HTMLAnchorElement | null, onClose: () => void,
|
||||
chatModeId: ChatModeId, onSetChatModeId: (chatMode: ChatModeId) => void
|
||||
capabilityHasTTI: boolean,
|
||||
}) {
|
||||
|
||||
// external state
|
||||
const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline);
|
||||
|
||||
return <CloseableMenu
|
||||
placement='top-end' sx={{ minWidth: 320 }}
|
||||
open anchorEl={props.anchorEl} onClose={props.onClose}
|
||||
>
|
||||
|
||||
{/*<MenuItem color='neutral' selected>*/}
|
||||
{/* Conversation Mode*/}
|
||||
{/*</MenuItem>*/}
|
||||
{/**/}
|
||||
{/*<ListDivider />*/}
|
||||
|
||||
{/* ChatMode items */}
|
||||
{Object.entries(ChatModeItems)
|
||||
.map(([key, data]) =>
|
||||
<MenuItem key={'chat-mode-' + key} onClick={() => props.onSetChatModeId(key as ChatModeId)}>
|
||||
<Box sx={{ flexGrow: 1, display: 'flex', flexDirection: 'row', alignItems: 'center', gap: 2 }}>
|
||||
<Radio checked={key === props.chatModeId} />
|
||||
<Box sx={{ flexGrow: 1 }}>
|
||||
<Typography>{data.label}</Typography>
|
||||
<Typography level='body-xs'>{data.description}{(data.requiresTTI && !props.capabilityHasTTI) ? 'Unconfigured' : ''}</Typography>
|
||||
</Box>
|
||||
{(key === props.chatModeId || !!data.shortcut) && (
|
||||
<KeyStroke combo={fixNewLineShortcut((key === props.chatModeId) ? 'ENTER' : data.shortcut ? data.shortcut : 'ENTER', enterIsNewline)} />
|
||||
)}
|
||||
</Box>
|
||||
</MenuItem>)}
|
||||
|
||||
</CloseableMenu>;
|
||||
}
|
||||
@@ -3,41 +3,81 @@ import * as React from 'react';
|
||||
import { Badge, Box, ColorPaletteProp, Tooltip } from '@mui/joy';
|
||||
|
||||
|
||||
function alignRight(value: number, columnSize: number = 7) {
|
||||
function alignRight(value: number, columnSize: number = 8) {
|
||||
const str = value.toLocaleString();
|
||||
return str.padStart(columnSize);
|
||||
}
|
||||
|
||||
function formatCost(cost: number) {
|
||||
return cost < 1
|
||||
? (cost * 100).toFixed(cost < 0.010 ? 2 : 1) + ' ¢'
|
||||
: '$ ' + cost.toFixed(2);
|
||||
}
|
||||
|
||||
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number): {
|
||||
color: ColorPaletteProp, message: string, remainingTokens: number
|
||||
|
||||
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number, tokenPriceIn?: number, tokenPriceOut?: number): {
|
||||
color: ColorPaletteProp,
|
||||
message: string,
|
||||
remainingTokens: number,
|
||||
costMax?: number,
|
||||
costMin?: number,
|
||||
} {
|
||||
const usedTokens = directTokens + (historyTokens || 0) + (responseMaxTokens || 0);
|
||||
const remainingTokens = tokenLimit - usedTokens;
|
||||
const usedInputTokens = directTokens + (historyTokens || 0);
|
||||
const usedMaxTokens = usedInputTokens + (responseMaxTokens || 0);
|
||||
const remainingTokens = tokenLimit - usedMaxTokens;
|
||||
const gteLimit = (remainingTokens <= 0 && tokenLimit > 0);
|
||||
|
||||
// message
|
||||
let message: string = gteLimit ? '⚠️ ' : '';
|
||||
|
||||
// costs
|
||||
let costMax: number | undefined = undefined;
|
||||
let costMin: number | undefined = undefined;
|
||||
|
||||
// no limit: show used tokens only
|
||||
if (!tokenLimit) {
|
||||
message += `Requested: ${usedTokens.toLocaleString()} tokens`;
|
||||
message += `Requested: ${usedMaxTokens.toLocaleString()} tokens`;
|
||||
}
|
||||
// has full information (d + i < l)
|
||||
else if (historyTokens || responseMaxTokens) {
|
||||
message +=
|
||||
`${Math.abs(remainingTokens).toLocaleString()} ${remainingTokens >= 0 ? 'available' : 'excess'} message tokens\n\n` +
|
||||
`▶ ${Math.abs(remainingTokens).toLocaleString()} ${remainingTokens >= 0 ? 'available' : 'excess'} message tokens\n\n` +
|
||||
` = Model max tokens: ${alignRight(tokenLimit)}\n` +
|
||||
` - This message: ${alignRight(directTokens)}\n` +
|
||||
` - History: ${alignRight(historyTokens || 0)}\n` +
|
||||
` - Max response: ${alignRight(responseMaxTokens || 0)}`;
|
||||
|
||||
// add the price, if available
|
||||
if (tokenPriceIn || tokenPriceOut) {
|
||||
costMin = tokenPriceIn ? usedInputTokens * tokenPriceIn / 1E6 : undefined;
|
||||
const costOutMax = (tokenPriceOut && responseMaxTokens) ? responseMaxTokens * tokenPriceOut / 1E6 : undefined;
|
||||
if (costMin || costOutMax) {
|
||||
message += `\n\n\n▶ Chat Turn Cost (max, approximate)\n`;
|
||||
|
||||
if (costMin) message += '\n' +
|
||||
` Input tokens: ${alignRight(usedInputTokens)}\n` +
|
||||
` Input Price $/M: ${tokenPriceIn!.toFixed(2).padStart(8)}\n` +
|
||||
` Input cost: ${('$' + costMin!.toFixed(4)).padStart(8)}\n`;
|
||||
|
||||
if (costOutMax) message += '\n' +
|
||||
` Max output tokens: ${alignRight(responseMaxTokens!)}\n` +
|
||||
` Output Price $/M: ${tokenPriceOut!.toFixed(2).padStart(8)}\n` +
|
||||
` Max output cost: ${('$' + costOutMax!.toFixed(4)).padStart(8)}\n`;
|
||||
|
||||
if (costMin) message += '\n' +
|
||||
` > Min turn cost: ${formatCost(costMin).padStart(8)}`;
|
||||
costMax = (costMin && costOutMax) ? costMin + costOutMax : undefined;
|
||||
if (costMax) message += '\n' +
|
||||
` < Max turn cost: ${formatCost(costMax).padStart(8)}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Cleaner mode: d + ? < R (total is the remaining in this case)
|
||||
else {
|
||||
message +=
|
||||
`${(tokenLimit + usedTokens).toLocaleString()} available tokens after deleting this\n\n` +
|
||||
`${(tokenLimit + usedMaxTokens).toLocaleString()} available tokens after deleting this\n\n` +
|
||||
` = Currently free: ${alignRight(tokenLimit)}\n` +
|
||||
` + This message: ${alignRight(usedTokens)}`;
|
||||
` + This message: ${alignRight(usedMaxTokens)}`;
|
||||
}
|
||||
|
||||
const color: ColorPaletteProp =
|
||||
@@ -47,23 +87,21 @@ export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, h
|
||||
? 'warning'
|
||||
: 'primary';
|
||||
|
||||
return { color, message, remainingTokens };
|
||||
return { color, message, remainingTokens, costMax, costMin };
|
||||
}
|
||||
|
||||
|
||||
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.JSX.Element }) =>
|
||||
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.ReactElement }) =>
|
||||
<Tooltip
|
||||
placement={props.placement}
|
||||
variant={props.color !== 'primary' ? 'solid' : 'soft'} color={props.color}
|
||||
title={props.message
|
||||
? <Box sx={{ p: 2, whiteSpace: 'pre' }}>
|
||||
{props.message}
|
||||
</Box>
|
||||
: null
|
||||
}
|
||||
title={props.message ? <Box sx={{ p: 2, whiteSpace: 'pre' }}>{props.message}</Box> : null}
|
||||
sx={{
|
||||
fontFamily: 'code',
|
||||
boxShadow: 'xl',
|
||||
// fontSize: '0.8125rem',
|
||||
border: '1px solid',
|
||||
borderColor: `${props.color}.outlinedColor`,
|
||||
boxShadow: 'md',
|
||||
}}
|
||||
>
|
||||
{props.children}
|
||||
@@ -76,38 +114,80 @@ export const TokenTooltip = (props: { message: string | null, color: ColorPalett
|
||||
export const TokenBadgeMemo = React.memo(TokenBadge);
|
||||
|
||||
function TokenBadge(props: {
|
||||
direct: number, history?: number, responseMax?: number, limit: number,
|
||||
showExcess?: boolean, absoluteBottomRight?: boolean, inline?: boolean,
|
||||
direct: number,
|
||||
history?: number,
|
||||
responseMax?: number,
|
||||
limit: number,
|
||||
|
||||
tokenPriceIn?: number,
|
||||
tokenPriceOut?: number,
|
||||
|
||||
enableHover?: boolean,
|
||||
showCost?: boolean
|
||||
showExcess?: boolean,
|
||||
absoluteBottomRight?: boolean,
|
||||
inline?: boolean,
|
||||
}) {
|
||||
|
||||
const { message, color, remainingTokens } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax);
|
||||
// state
|
||||
const [isHovering, setIsHovering] = React.useState(false);
|
||||
|
||||
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
|
||||
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
|
||||
? Math.abs(remainingTokens)
|
||||
: props.direct;
|
||||
const { message, color, remainingTokens, costMax, costMin } =
|
||||
tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
|
||||
|
||||
|
||||
// handlers
|
||||
const handleHoverEnter = React.useCallback(() => setIsHovering(true), []);
|
||||
|
||||
const handleHoverLeave = React.useCallback(() => setIsHovering(false), []);
|
||||
|
||||
|
||||
let badgeValue: string;
|
||||
|
||||
const showAltCosts = !!props.showCost && !!costMax && costMin !== undefined;
|
||||
if (showAltCosts) {
|
||||
badgeValue = (!props.enableHover || isHovering)
|
||||
? '< ' + formatCost(costMax)
|
||||
: '> ' + formatCost(costMin);
|
||||
} else {
|
||||
|
||||
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
|
||||
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
|
||||
? Math.abs(remainingTokens)
|
||||
: props.direct;
|
||||
|
||||
badgeValue = value.toLocaleString();
|
||||
}
|
||||
|
||||
const shallHide = !props.direct && remainingTokens >= 0 && !showAltCosts;
|
||||
if (shallHide) return null;
|
||||
|
||||
return (
|
||||
<Badge
|
||||
variant='solid' color={color} max={100000}
|
||||
invisible={!props.direct && remainingTokens >= 0}
|
||||
badgeContent={
|
||||
<TokenTooltip color={color} message={message}>
|
||||
<span>{value.toLocaleString()}</span>
|
||||
</TokenTooltip>
|
||||
}
|
||||
sx={{
|
||||
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
|
||||
cursor: 'help',
|
||||
}}
|
||||
slotProps={{
|
||||
badge: {
|
||||
sx: {
|
||||
fontFamily: 'code',
|
||||
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
|
||||
<TokenTooltip color={color} message={message} placement='top-end'>
|
||||
<Badge
|
||||
variant='soft' color={color} max={1000000}
|
||||
// invisible={shallHide}
|
||||
onMouseEnter={props.enableHover ? handleHoverEnter : undefined}
|
||||
onMouseLeave={props.enableHover ? handleHoverLeave : undefined}
|
||||
badgeContent={badgeValue}
|
||||
slotProps={{
|
||||
root: {
|
||||
sx: {
|
||||
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
|
||||
cursor: 'help',
|
||||
},
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>
|
||||
badge: {
|
||||
sx: {
|
||||
// the badge (not the tooltip)
|
||||
// boxShadow: 'sm',
|
||||
fontFamily: 'code',
|
||||
fontSize: 'xs',
|
||||
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</TokenTooltip>
|
||||
);
|
||||
}
|
||||
@@ -12,7 +12,15 @@ import { tokensPrettyMath, TokenTooltip } from './TokenBadge';
|
||||
*/
|
||||
export const TokenProgressbarMemo = React.memo(TokenProgressbar);
|
||||
|
||||
function TokenProgressbar(props: { direct: number, history: number, responseMax: number, limit: number }) {
|
||||
function TokenProgressbar(props: {
|
||||
direct: number,
|
||||
history: number,
|
||||
responseMax: number,
|
||||
limit: number,
|
||||
|
||||
tokenPriceIn?: number,
|
||||
tokenPriceOut?: number,
|
||||
}) {
|
||||
// external state
|
||||
const theme = useTheme();
|
||||
|
||||
@@ -40,7 +48,7 @@ function TokenProgressbar(props: { direct: number, history: number, responseMax:
|
||||
const overflowColor = theme.palette.danger.softColor;
|
||||
|
||||
// tooltip message/color
|
||||
const { message, color } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax);
|
||||
const { message, color } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
|
||||
|
||||
// sizes
|
||||
const containerHeight = 8;
|
||||
|
||||
@@ -21,7 +21,11 @@ export function ActilePopup(props: {
|
||||
const hasAnyIcon = props.items.some(item => !!item.Icon);
|
||||
|
||||
return (
|
||||
<CloseableMenu open anchorEl={props.anchorEl} onClose={props.onClose} noTopPadding noBottomPadding sx={{ minWidth: 320 }}>
|
||||
<CloseableMenu
|
||||
noTopPadding noBottomPadding
|
||||
open anchorEl={props.anchorEl} onClose={props.onClose}
|
||||
sx={{ minWidth: 320 }}
|
||||
>
|
||||
|
||||
{!!props.title && (
|
||||
<Sheet variant='soft' sx={{ p: 1, borderBottom: '1px solid', borderBottomColor: 'neutral.softActiveBg' }}>
|
||||
@@ -45,12 +49,12 @@ export function ActilePopup(props: {
|
||||
const labelNormal = item.label.slice(props.activePrefixLength);
|
||||
return (
|
||||
<ListItem
|
||||
key={item.id}
|
||||
key={item.key}
|
||||
variant={isActive ? 'soft' : undefined}
|
||||
color={isActive ? 'primary' : undefined}
|
||||
onClick={() => props.onItemClick(item)}
|
||||
>
|
||||
<ListItemButton>
|
||||
<ListItemButton color='primary'>
|
||||
{hasAnyIcon && (
|
||||
<ListItemDecorator>
|
||||
{item.Icon ? <item.Icon /> : null}
|
||||
@@ -60,7 +64,7 @@ export function ActilePopup(props: {
|
||||
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
|
||||
<Typography level='title-sm' color={isActive ? 'primary' : undefined}>
|
||||
<span style={{ fontWeight: 600, textDecoration: 'underline' }}>{labelBold}</span>{labelNormal}
|
||||
<span style={{ textDecoration: 'underline' }}><b>{labelBold}</b></span>{labelNormal}
|
||||
</Typography>
|
||||
{item.argument && <Typography level='body-sm'>
|
||||
{item.argument}
|
||||
|
||||
@@ -1,22 +1,15 @@
|
||||
import type { FunctionComponent } from 'react';
|
||||
|
||||
export interface ActileItem {
|
||||
id: string;
|
||||
key: string;
|
||||
label: string;
|
||||
argument?: string;
|
||||
description?: string;
|
||||
Icon?: FunctionComponent;
|
||||
}
|
||||
|
||||
type ActileProviderIds = 'actile-commands' | 'actile-attach-reference';
|
||||
|
||||
export interface ActileProvider {
|
||||
id: ActileProviderIds;
|
||||
title: string;
|
||||
searchPrefix: string;
|
||||
|
||||
checkTriggerText: (trailingText: string) => boolean;
|
||||
|
||||
fetchItems: () => Promise<ActileItem[]>;
|
||||
export interface ActileProvider<TItem extends ActileItem = ActileItem> {
|
||||
fastCheckTriggerText: (trailingText: string) => boolean;
|
||||
fetchItems: () => Promise<{ title: string, searchPrefix: string, items: TItem[] }>;
|
||||
onItemSelect: (item: ActileItem) => void;
|
||||
}
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
//import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
|
||||
|
||||
/*export const providerAttachReference: ActileProvider = {
|
||||
id: 'actile-attach-reference',
|
||||
title: 'Attach Reference',
|
||||
searchPrefix: '@',
|
||||
|
||||
checkTriggerText: (trailingText: string) =>
|
||||
trailingText.endsWith(' @'),
|
||||
|
||||
fetchItems: async () => {
|
||||
return [{
|
||||
id: 'test-1',
|
||||
label: 'Attach This',
|
||||
description: 'Attach this to the message',
|
||||
Icon: undefined,
|
||||
}];
|
||||
},
|
||||
|
||||
onItemSelect: (item: ActileItem) => {
|
||||
console.log('Selected item:', item);
|
||||
},
|
||||
};*/
|
||||
@@ -2,23 +2,25 @@ import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
import { findAllChatCommands } from '../../../commands/commands.registry';
|
||||
|
||||
|
||||
export const providerCommands = (onItemSelect: (item: ActileItem) => void): ActileProvider => ({
|
||||
id: 'actile-commands',
|
||||
title: 'Chat Commands',
|
||||
searchPrefix: '/',
|
||||
export function providerCommands(onCommandSelect: (item: ActileItem) => void): ActileProvider {
|
||||
return {
|
||||
|
||||
checkTriggerText: (trailingText: string) =>
|
||||
trailingText.trim() === '/',
|
||||
// only the literal '/' is a trigger
|
||||
fastCheckTriggerText: (trailingText: string) => trailingText === '/',
|
||||
|
||||
fetchItems: async () => {
|
||||
return findAllChatCommands().map((cmd) => ({
|
||||
id: cmd.primary,
|
||||
label: cmd.primary,
|
||||
argument: cmd.arguments?.join(' ') ?? undefined,
|
||||
description: cmd.description,
|
||||
Icon: cmd.Icon,
|
||||
}));
|
||||
},
|
||||
// no real need to be async
|
||||
fetchItems: async () => ({
|
||||
title: 'Chat Commands',
|
||||
searchPrefix: '/',
|
||||
items: findAllChatCommands().map((cmd) => ({
|
||||
key: cmd.primary,
|
||||
label: cmd.primary,
|
||||
argument: cmd.arguments?.join(' ') ?? undefined,
|
||||
description: cmd.description,
|
||||
Icon: cmd.Icon,
|
||||
} satisfies ActileItem)),
|
||||
}),
|
||||
|
||||
onItemSelect,
|
||||
});
|
||||
onItemSelect: onCommandSelect,
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,48 @@
|
||||
import { conversationTitle, DConversationId } from '~/common/stores/chat/chat.conversation';
|
||||
import { messageFragmentsReduceText, messageHasUserFlag } from '~/common/stores/chat/chat.message';
|
||||
import { useChatStore } from '~/common/stores/chat/store-chats';
|
||||
|
||||
import { ActileItem, ActileProvider } from './ActileProvider';
|
||||
|
||||
|
||||
export interface StarredMessageItem extends ActileItem {
|
||||
conversationId: DConversationId,
|
||||
messageId: string,
|
||||
}
|
||||
|
||||
export function providerStarredMessage(onMessageSeelect: (item: StarredMessageItem) => void): ActileProvider<StarredMessageItem> {
|
||||
return {
|
||||
|
||||
// only the literal '@' at start of chat, or ' @' at end of chat
|
||||
fastCheckTriggerText: (trailingText: string) => trailingText === '@' || trailingText.endsWith(' @'),
|
||||
|
||||
// finds all the starred messages in all the conversations - this could be heavy
|
||||
fetchItems: async () => {
|
||||
const { conversations } = useChatStore.getState();
|
||||
|
||||
const starredMessages: StarredMessageItem[] = [];
|
||||
conversations.forEach((conversation) => {
|
||||
conversation.messages.forEach((message) => {
|
||||
messageHasUserFlag(message, 'starred') && starredMessages.push({
|
||||
// data
|
||||
conversationId: conversation.id,
|
||||
messageId: message.id,
|
||||
// looks
|
||||
key: message.id,
|
||||
label: conversationTitle(conversation) + ' - ' + messageFragmentsReduceText(message.fragments).slice(0, 32) + '...',
|
||||
// description: message.text.slice(32, 100),
|
||||
Icon: undefined,
|
||||
} satisfies StarredMessageItem);
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
title: 'Starred Messages',
|
||||
searchPrefix: '',
|
||||
items: starredMessages,
|
||||
};
|
||||
},
|
||||
|
||||
onItemSelect: item => onMessageSeelect(item as StarredMessageItem),
|
||||
};
|
||||
}
|
||||
@@ -9,6 +9,7 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
const [popupOpen, setPopupOpen] = React.useState(false);
|
||||
const [provider, setProvider] = React.useState<ActileProvider | null>(null);
|
||||
|
||||
const [title, setTitle] = React.useState<string>('');
|
||||
const [items, setItems] = React.useState<ActileItem[]>([]);
|
||||
const [activeSearchString, setActiveSearchString] = React.useState<string>('');
|
||||
const [activeItemIndex, setActiveItemIndex] = React.useState<number>(0);
|
||||
@@ -17,7 +18,7 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
// derived state
|
||||
const activeItems = React.useMemo(() => {
|
||||
const search = activeSearchString.trim().toLowerCase();
|
||||
return items.filter(item => item.label.toLowerCase().startsWith(search));
|
||||
return items.filter(item => item.label?.toLowerCase().startsWith(search));
|
||||
}, [items, activeSearchString]);
|
||||
const activeItem = activeItemIndex >= 0 && activeItemIndex < activeItems.length ? activeItems[activeItemIndex] : null;
|
||||
|
||||
@@ -25,6 +26,7 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
const handleClose = React.useCallback(() => {
|
||||
setPopupOpen(false);
|
||||
setProvider(null);
|
||||
setTitle('');
|
||||
setItems([]);
|
||||
setActiveSearchString('');
|
||||
setActiveItemIndex(0);
|
||||
@@ -42,13 +44,19 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
|
||||
const actileInterceptTextChange = React.useCallback((trailingText: string) => {
|
||||
for (const provider of providers) {
|
||||
if (provider.checkTriggerText(trailingText)) {
|
||||
setProvider(provider);
|
||||
setPopupOpen(true);
|
||||
setActiveSearchString(provider.searchPrefix);
|
||||
if (provider.fastCheckTriggerText(trailingText)) {
|
||||
provider
|
||||
.fetchItems()
|
||||
.then(items => setItems(items))
|
||||
.then(({ title, searchPrefix, items }) => {
|
||||
// if there are no items, ignore
|
||||
if (items.length) {
|
||||
setPopupOpen(true);
|
||||
setProvider(provider);
|
||||
setTitle(title);
|
||||
setItems(items);
|
||||
setActiveSearchString(searchPrefix);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
handleClose();
|
||||
console.error('Failed to fetch popup items:', error);
|
||||
@@ -100,14 +108,14 @@ export const useActileManager = (providers: ActileProvider[], anchorRef: React.R
|
||||
<ActilePopup
|
||||
anchorEl={anchorRef.current}
|
||||
onClose={handleClose}
|
||||
title={provider?.title}
|
||||
title={title}
|
||||
items={activeItems}
|
||||
activeItemIndex={activeItemIndex}
|
||||
activePrefixLength={activeSearchString.length}
|
||||
onItemClick={handlePopupItemClicked}
|
||||
/>
|
||||
);
|
||||
}, [activeItemIndex, activeItems, activeSearchString.length, anchorRef, handleClose, handlePopupItemClicked, popupOpen, provider?.title]);
|
||||
}, [activeItemIndex, activeItems, activeSearchString.length, anchorRef, handleClose, handlePopupItemClicked, popupOpen, title]);
|
||||
|
||||
return {
|
||||
actileComponent,
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, ListDivider, ListItemDecorator, MenuItem, Radio, Typography } from '@mui/joy';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
|
||||
import KeyboardArrowLeftIcon from '@mui/icons-material/KeyboardArrowLeft';
|
||||
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
|
||||
import VerticalAlignBottomIcon from '@mui/icons-material/VerticalAlignBottom';
|
||||
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { copyToClipboard } from '~/common/util/clipboardUtils';
|
||||
|
||||
import type { LLMAttachment } from './useLLMAttachments';
|
||||
import { useAttachmentsStore } from './store-attachments';
|
||||
|
||||
|
||||
// enable for debugging
|
||||
export const DEBUG_ATTACHMENTS = true;
|
||||
|
||||
|
||||
export function AttachmentMenu(props: {
|
||||
llmAttachment: LLMAttachment,
|
||||
menuAnchor: HTMLAnchorElement,
|
||||
isPositionFirst: boolean,
|
||||
isPositionLast: boolean,
|
||||
onAttachmentInlineText: (attachmentId: string) => void,
|
||||
onClose: () => void,
|
||||
}) {
|
||||
|
||||
// derived state
|
||||
|
||||
const isPositionFixed = props.isPositionFirst && props.isPositionLast;
|
||||
|
||||
const {
|
||||
attachment,
|
||||
attachmentOutputs,
|
||||
isUnconvertible,
|
||||
isOutputMissing,
|
||||
isOutputTextInlineable,
|
||||
tokenCountApprox,
|
||||
} = props.llmAttachment;
|
||||
|
||||
const {
|
||||
id: aId,
|
||||
input: aInput,
|
||||
converters: aConverters,
|
||||
converterIdx: aConverterIdx,
|
||||
outputs: aOutputs,
|
||||
} = attachment;
|
||||
|
||||
|
||||
// operations
|
||||
|
||||
const { onClose, onAttachmentInlineText } = props;
|
||||
|
||||
const handleInlineText = React.useCallback(() => {
|
||||
onClose();
|
||||
onAttachmentInlineText(aId);
|
||||
}, [aId, onAttachmentInlineText, onClose]);
|
||||
|
||||
const handleMoveUp = React.useCallback(() => {
|
||||
useAttachmentsStore.getState().moveAttachment(aId, -1);
|
||||
}, [aId]);
|
||||
|
||||
const handleMoveDown = React.useCallback(() => {
|
||||
useAttachmentsStore.getState().moveAttachment(aId, 1);
|
||||
}, [aId]);
|
||||
|
||||
const handleRemove = React.useCallback(() => {
|
||||
onClose();
|
||||
useAttachmentsStore.getState().removeAttachment(aId);
|
||||
}, [aId, onClose]);
|
||||
|
||||
const handleSetConverterIdx = React.useCallback(async (converterIdx: number | null) => {
|
||||
return useAttachmentsStore.getState().setConverterIdx(aId, converterIdx);
|
||||
}, [aId]);
|
||||
|
||||
// const handleSummarizeText = React.useCallback(() => {
|
||||
// onAttachmentSummarizeText(aId);
|
||||
// }, [aId, onAttachmentSummarizeText]);
|
||||
|
||||
const handleCopyOutputToClipboard = React.useCallback(() => {
|
||||
if (attachmentOutputs.length >= 1) {
|
||||
const concat = attachmentOutputs.map(output => {
|
||||
if (output.type === 'text-block')
|
||||
return output.text;
|
||||
else if (output.type === 'image-part')
|
||||
return output.base64Url;
|
||||
else
|
||||
return null;
|
||||
}).join('\n\n---\n\n');
|
||||
copyToClipboard(concat.trim(), 'Converted attachment');
|
||||
}
|
||||
}, [attachmentOutputs]);
|
||||
|
||||
|
||||
return (
|
||||
<CloseableMenu
|
||||
dense placement='top' sx={{ minWidth: 200 }}
|
||||
open anchorEl={props.menuAnchor} onClose={props.onClose}
|
||||
noTopPadding noBottomPadding
|
||||
>
|
||||
|
||||
{/* Move Arrows */}
|
||||
{!isPositionFixed && <Box sx={{ display: 'flex', alignItems: 'center' }}>
|
||||
<MenuItem
|
||||
disabled={props.isPositionFirst}
|
||||
onClick={handleMoveUp}
|
||||
sx={{ flex: 1, display: 'flex', justifyContent: 'center' }}
|
||||
>
|
||||
<KeyboardArrowLeftIcon />
|
||||
</MenuItem>
|
||||
<MenuItem
|
||||
disabled={props.isPositionLast}
|
||||
onClick={handleMoveDown}
|
||||
sx={{ flex: 1, display: 'flex', justifyContent: 'center' }}
|
||||
>
|
||||
<KeyboardArrowRightIcon />
|
||||
</MenuItem>
|
||||
</Box>}
|
||||
{!isPositionFixed && <ListDivider sx={{ mt: 0 }} />}
|
||||
|
||||
{/* Render Converters as menu items */}
|
||||
{/*{!isUnconvertible && <ListItem>*/}
|
||||
{/* <Typography level='body-md'>*/}
|
||||
{/* Attach as:*/}
|
||||
{/* </Typography>*/}
|
||||
{/*</ListItem>}*/}
|
||||
{!isUnconvertible && aConverters.map((c, idx) =>
|
||||
<MenuItem
|
||||
disabled={c.disabled}
|
||||
key={'c-' + c.id}
|
||||
onClick={async () => idx !== aConverterIdx && await handleSetConverterIdx(idx)}
|
||||
>
|
||||
<ListItemDecorator>
|
||||
<Radio checked={idx === aConverterIdx} />
|
||||
</ListItemDecorator>
|
||||
{c.unsupported
|
||||
? <Box>Unsupported 🤔 <Typography level='body-xs'>{c.name}</Typography></Box>
|
||||
: c.name}
|
||||
</MenuItem>,
|
||||
)}
|
||||
{!isUnconvertible && <ListDivider />}
|
||||
|
||||
{DEBUG_ATTACHMENTS && !!aInput && (
|
||||
<MenuItem onClick={handleCopyOutputToClipboard} disabled={!isOutputTextInlineable}>
|
||||
<ListItemDecorator><ContentCopyIcon /></ListItemDecorator>
|
||||
<Box>
|
||||
{!!aInput && <Typography level='body-xs'>
|
||||
🡐 {aInput.mimeType}, {aInput.dataSize.toLocaleString()} bytes
|
||||
</Typography>}
|
||||
{/*<Typography level='body-xs'>*/}
|
||||
{/* Converters: {aConverters.map(((converter, idx) => ` ${converter.id}${(idx === aConverterIdx) ? '*' : ''}`)).join(', ')}*/}
|
||||
{/*</Typography>*/}
|
||||
<Typography level='body-xs'>
|
||||
🡒 {isOutputMissing ? 'empty' : aOutputs.map(output => `${output.type}, ${output.type === 'text-block' ? output.text.length.toLocaleString() : '(base64 image)'} bytes`).join(' · ')}
|
||||
</Typography>
|
||||
{!!tokenCountApprox && <Typography level='body-xs'>
|
||||
🡒 {tokenCountApprox.toLocaleString()} tokens
|
||||
</Typography>}
|
||||
</Box>
|
||||
</MenuItem>
|
||||
)}
|
||||
{DEBUG_ATTACHMENTS && !!aInput && <ListDivider />}
|
||||
|
||||
{/* Destructive Operations */}
|
||||
{/*<MenuItem onClick={handleCopyOutputToClipboard} disabled={!isOutputTextInlineable}>*/}
|
||||
{/* <ListItemDecorator><ContentCopyIcon /></ListItemDecorator>*/}
|
||||
{/* Copy*/}
|
||||
{/*</MenuItem>*/}
|
||||
{/*<MenuItem onClick={handleSummarizeText} disabled={!isOutputTextInlineable}>*/}
|
||||
{/* <ListItemDecorator><CompressIcon color='success' /></ListItemDecorator>*/}
|
||||
{/* Shrink*/}
|
||||
{/*</MenuItem>*/}
|
||||
<MenuItem onClick={handleInlineText} disabled={!isOutputTextInlineable}>
|
||||
<ListItemDecorator><VerticalAlignBottomIcon /></ListItemDecorator>
|
||||
Inline text
|
||||
</MenuItem>
|
||||
<MenuItem onClick={handleRemove}>
|
||||
<ListItemDecorator><ClearIcon /></ListItemDecorator>
|
||||
Remove
|
||||
</MenuItem>
|
||||
|
||||
</CloseableMenu>
|
||||
);
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
import * as React from 'react';
|
||||
|
||||
import { Box, IconButton, ListItemDecorator, MenuItem } from '@mui/joy';
|
||||
import ClearIcon from '@mui/icons-material/Clear';
|
||||
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
|
||||
import VerticalAlignBottomIcon from '@mui/icons-material/VerticalAlignBottom';
|
||||
|
||||
import { CloseableMenu } from '~/common/components/CloseableMenu';
|
||||
import { ConfirmationModal } from '~/common/components/ConfirmationModal';
|
||||
|
||||
import type { AttachmentId } from './store-attachments';
|
||||
import type { LLMAttachments } from './useLLMAttachments';
|
||||
import { AttachmentItem } from './AttachmentItem';
|
||||
import { AttachmentMenu } from './AttachmentMenu';
|
||||
|
||||
|
||||
/**
|
||||
* Renderer of attachments, with menus, etc.
|
||||
*/
|
||||
export function Attachments(props: {
|
||||
llmAttachments: LLMAttachments,
|
||||
onAttachmentInlineText: (attachmentId: AttachmentId) => void,
|
||||
onAttachmentsClear: () => void,
|
||||
onAttachmentsInlineText: () => void,
|
||||
}) {
|
||||
|
||||
// state
|
||||
const [confirmClearAttachments, setConfirmClearAttachments] = React.useState<boolean>(false);
|
||||
const [itemMenu, setItemMenu] = React.useState<{ anchor: HTMLAnchorElement, attachmentId: AttachmentId } | null>(null);
|
||||
const [overallMenuAnchor, setOverallMenuAnchor] = React.useState<HTMLAnchorElement | null>(null);
|
||||
|
||||
// derived state
|
||||
const { llmAttachments, onAttachmentsClear, onAttachmentInlineText, onAttachmentsInlineText } = props;
|
||||
|
||||
const { attachments, isOutputTextInlineable } = llmAttachments;
|
||||
|
||||
const hasAttachments = attachments.length >= 1;
|
||||
|
||||
// derived item menu state
|
||||
|
||||
const itemMenuAnchor = itemMenu?.anchor;
|
||||
const itemMenuAttachmentId = itemMenu?.attachmentId;
|
||||
const itemMenuAttachment = itemMenuAttachmentId ? attachments.find(la => la.attachment.id === itemMenu.attachmentId) : undefined;
|
||||
const itemMenuIndex = itemMenuAttachment ? attachments.indexOf(itemMenuAttachment) : -1;
|
||||
|
||||
|
||||
// item menu
|
||||
|
||||
const handleItemMenuToggle = React.useCallback((attachmentId: AttachmentId, anchor: HTMLAnchorElement) => {
|
||||
handleOverallMenuHide();
|
||||
setItemMenu(prev => prev?.attachmentId === attachmentId ? null : { anchor, attachmentId });
|
||||
}, []);
|
||||
|
||||
const handleItemMenuHide = React.useCallback(() => {
|
||||
setItemMenu(null);
|
||||
}, []);
|
||||
|
||||
|
||||
// item menu operations
|
||||
|
||||
const handleAttachmentInlineText = React.useCallback((attachmentId: string) => {
|
||||
handleItemMenuHide();
|
||||
onAttachmentInlineText(attachmentId);
|
||||
}, [handleItemMenuHide, onAttachmentInlineText]);
|
||||
|
||||
|
||||
// menu
|
||||
|
||||
const handleOverallMenuHide = () => setOverallMenuAnchor(null);
|
||||
|
||||
const handleOverallMenuToggle = (event: React.MouseEvent<HTMLAnchorElement>) =>
|
||||
setOverallMenuAnchor(anchor => anchor ? null : event.currentTarget);
|
||||
|
||||
|
||||
// overall operations
|
||||
|
||||
const handleAttachmentsInlineText = React.useCallback(() => {
|
||||
handleOverallMenuHide();
|
||||
onAttachmentsInlineText();
|
||||
}, [onAttachmentsInlineText]);
|
||||
|
||||
const handleClearAttachments = () => setConfirmClearAttachments(true);
|
||||
|
||||
const handleClearAttachmentsConfirmed = React.useCallback(() => {
|
||||
handleOverallMenuHide();
|
||||
setConfirmClearAttachments(false);
|
||||
onAttachmentsClear();
|
||||
}, [onAttachmentsClear]);
|
||||
|
||||
|
||||
// no components without attachments
|
||||
if (!hasAttachments)
|
||||
return null;
|
||||
|
||||
return <>
|
||||
|
||||
{/* Attachments bar */}
|
||||
<Box sx={{ position: 'relative' }}>
|
||||
|
||||
{/* Horizontally scrollable Attachments */}
|
||||
<Box sx={{ display: 'flex', overflowX: 'auto', gap: 1, height: '100%', pr: 5 }}>
|
||||
{attachments.map((llmAttachment) =>
|
||||
<AttachmentItem
|
||||
key={llmAttachment.attachment.id}
|
||||
llmAttachment={llmAttachment}
|
||||
menuShown={llmAttachment.attachment.id === itemMenuAttachmentId}
|
||||
onItemMenuToggle={handleItemMenuToggle}
|
||||
/>,
|
||||
)}
|
||||
</Box>
|
||||
|
||||
{/* Overall Menu button */}
|
||||
<IconButton
|
||||
onClick={handleOverallMenuToggle}
|
||||
sx={{
|
||||
// borderRadius: 'sm',
|
||||
borderRadius: 0,
|
||||
position: 'absolute', right: 0, top: 0,
|
||||
backgroundColor: 'neutral.softDisabledBg',
|
||||
}}
|
||||
>
|
||||
<ExpandLessIcon />
|
||||
</IconButton>
|
||||
|
||||
</Box>
|
||||
|
||||
|
||||
{/* Attachment Menu */}
|
||||
{!!itemMenuAnchor && !!itemMenuAttachment && (
|
||||
<AttachmentMenu
|
||||
llmAttachment={itemMenuAttachment}
|
||||
menuAnchor={itemMenuAnchor}
|
||||
isPositionFirst={itemMenuIndex === 0}
|
||||
isPositionLast={itemMenuIndex === attachments.length - 1}
|
||||
onAttachmentInlineText={handleAttachmentInlineText}
|
||||
onClose={handleItemMenuHide}
|
||||
/>
|
||||
)}
|
||||
|
||||
|
||||
{/* Overall Menu */}
|
||||
{!!overallMenuAnchor && (
|
||||
<CloseableMenu
|
||||
placement='top-start'
|
||||
open anchorEl={overallMenuAnchor} onClose={handleOverallMenuHide}
|
||||
noTopPadding noBottomPadding
|
||||
>
|
||||
<MenuItem onClick={handleAttachmentsInlineText} disabled={!isOutputTextInlineable}>
|
||||
<ListItemDecorator><VerticalAlignBottomIcon /></ListItemDecorator>
|
||||
Inline <span style={{ opacity: 0.5 }}>text attachments</span>
|
||||
</MenuItem>
|
||||
<MenuItem onClick={handleClearAttachments}>
|
||||
<ListItemDecorator><ClearIcon /></ListItemDecorator>
|
||||
Clear
|
||||
</MenuItem>
|
||||
</CloseableMenu>
|
||||
)}
|
||||
|
||||
{/* 'Clear' Confirmation */}
|
||||
{confirmClearAttachments && (
|
||||
<ConfirmationModal
|
||||
open onClose={() => setConfirmClearAttachments(false)} onPositive={handleClearAttachmentsConfirmed}
|
||||
title='Confirm Removal'
|
||||
positiveActionText='Remove All'
|
||||
confirmationText={`This action will remove all (${attachments.length}) attachments. Do you want to proceed?`}
|
||||
/>
|
||||
)}
|
||||
|
||||
</>;
|
||||
}
|
||||
@@ -1,345 +0,0 @@
|
||||
import { callBrowseFetchPage } from '~/modules/browse/browse.client';
|
||||
|
||||
import { createBase36Uid } from '~/common/util/textUtils';
|
||||
import { htmlTableToMarkdown } from '~/common/util/htmlTableToMarkdown';
|
||||
import { pdfToText } from '~/common/util/pdfUtils';
|
||||
|
||||
import type { Attachment, AttachmentConverter, AttachmentId, AttachmentInput, AttachmentSource } from './store-attachments';
|
||||
import type { ComposerOutputMultiPart } from '../composer.types';
|
||||
|
||||
|
||||
// extensions to treat as plain text
|
||||
const PLAIN_TEXT_EXTENSIONS: string[] = ['.ts', '.tsx'];
|
||||
|
||||
// mimetypes to treat as plain text
|
||||
const PLAIN_TEXT_MIMETYPES: string[] = [
|
||||
'text/plain',
|
||||
'text/html',
|
||||
'text/markdown',
|
||||
'text/csv',
|
||||
'text/css',
|
||||
'application/json',
|
||||
];
|
||||
|
||||
/**
|
||||
* Creates a new Attachment object.
|
||||
*/
|
||||
export function attachmentCreate(source: AttachmentSource, checkDuplicates: AttachmentId[]): Attachment {
|
||||
return {
|
||||
id: createBase36Uid(checkDuplicates),
|
||||
source: source,
|
||||
label: 'Loading...',
|
||||
ref: '',
|
||||
inputLoading: false,
|
||||
inputError: null,
|
||||
input: undefined,
|
||||
converters: [],
|
||||
converterIdx: null,
|
||||
outputsConverting: false,
|
||||
outputs: [],
|
||||
// metadata: {},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously loads the input for an Attachment object.
|
||||
*
|
||||
* @param {Readonly<AttachmentSource>} source - The source of the attachment.
|
||||
* @param {(changes: Partial<Attachment>) => void} edit - A function to edit the Attachment object.
|
||||
*/
|
||||
export async function attachmentLoadInputAsync(source: Readonly<AttachmentSource>, edit: (changes: Partial<Attachment>) => void) {
|
||||
edit({ inputLoading: true });
|
||||
|
||||
switch (source.media) {
|
||||
|
||||
// Download URL (page, file, ..) and attach as input
|
||||
case 'url':
|
||||
edit({ label: source.refUrl, ref: source.refUrl });
|
||||
try {
|
||||
const page = await callBrowseFetchPage(source.url);
|
||||
if (page.content) {
|
||||
edit({
|
||||
input: {
|
||||
mimeType: 'text/plain',
|
||||
data: page.content,
|
||||
dataSize: page.content.length,
|
||||
},
|
||||
});
|
||||
} else
|
||||
edit({ inputError: 'No content found at this link' });
|
||||
} catch (error: any) {
|
||||
edit({ inputError: `Issue downloading page: ${error?.message || (typeof error === 'string' ? error : JSON.stringify(error))}` });
|
||||
}
|
||||
break;
|
||||
|
||||
// Attach file as input
|
||||
case 'file':
|
||||
edit({ label: source.refPath, ref: source.refPath });
|
||||
|
||||
// fix missing/wrong mimetypes
|
||||
let mimeType = source.fileWithHandle.type;
|
||||
if (!mimeType) {
|
||||
// see note on 'attachAppendDataTransfer'; this is a fallback for drag/drop missing Mimes sometimes
|
||||
console.warn('Assuming the attachment is text/plain. From:', source.origin, ', name:', source.refPath);
|
||||
mimeType = 'text/plain';
|
||||
} else {
|
||||
// possibly fix wrongly assigned mimetypes (from the extension alone)
|
||||
if (!mimeType.startsWith('text/') && PLAIN_TEXT_EXTENSIONS.some(ext => source.refPath.endsWith(ext)))
|
||||
mimeType = 'text/plain';
|
||||
}
|
||||
|
||||
// UX: just a hint of a loading state
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
try {
|
||||
const fileArrayBuffer = await source.fileWithHandle.arrayBuffer();
|
||||
edit({
|
||||
input: {
|
||||
mimeType,
|
||||
data: fileArrayBuffer,
|
||||
dataSize: fileArrayBuffer.byteLength,
|
||||
},
|
||||
});
|
||||
} catch (error: any) {
|
||||
edit({ inputError: `Issue loading file: ${error?.message || (typeof error === 'string' ? error : JSON.stringify(error))}` });
|
||||
}
|
||||
break;
|
||||
|
||||
case 'text':
|
||||
if (source.textHtml && source.textPlain) {
|
||||
edit({
|
||||
label: 'Rich Text',
|
||||
ref: '',
|
||||
input: {
|
||||
mimeType: 'text/plain',
|
||||
data: source.textPlain,
|
||||
dataSize: source.textPlain!.length,
|
||||
altMimeType: 'text/html',
|
||||
altData: source.textHtml,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
const text = source.textHtml || source.textPlain || '';
|
||||
edit({
|
||||
label: 'Text',
|
||||
ref: '',
|
||||
input: {
|
||||
mimeType: 'text/plain',
|
||||
data: text,
|
||||
dataSize: text.length,
|
||||
},
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
edit({ inputLoading: false });
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the possible converters for an Attachment object based on its input type.
|
||||
*
|
||||
* @param {AttachmentSource['media']} sourceType - The media type of the attachment source.
|
||||
* @param {Readonly<AttachmentInput>} input - The input of the attachment.
|
||||
* @param {(changes: Partial<Attachment>) => void} edit - A function to edit the Attachment object.
|
||||
*/
|
||||
export function attachmentDefineConverters(sourceType: AttachmentSource['media'], input: Readonly<AttachmentInput>, edit: (changes: Partial<Attachment>) => void) {
|
||||
|
||||
// return all the possible converters for the input
|
||||
const converters: AttachmentConverter[] = [];
|
||||
|
||||
switch (true) {
|
||||
|
||||
// plain text types
|
||||
case PLAIN_TEXT_MIMETYPES.includes(input.mimeType):
|
||||
// handle a secondary layer of HTML 'text' origins: drop, paste, and clipboard-read
|
||||
const textOriginHtml = sourceType === 'text' && input.altMimeType === 'text/html' && !!input.altData;
|
||||
const isHtmlTable = !!input.altData?.startsWith('<table');
|
||||
|
||||
// p1: Tables
|
||||
if (textOriginHtml && isHtmlTable) {
|
||||
converters.push({
|
||||
id: 'rich-text-table',
|
||||
name: 'Markdown Table',
|
||||
});
|
||||
}
|
||||
|
||||
// p2: Text
|
||||
converters.push({
|
||||
id: 'text',
|
||||
name: 'Text',
|
||||
});
|
||||
|
||||
// p3: Html
|
||||
if (textOriginHtml) {
|
||||
converters.push({
|
||||
id: 'rich-text',
|
||||
name: 'HTML',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
// PDF
|
||||
case ['application/pdf', 'application/x-pdf', 'application/acrobat'].includes(input.mimeType):
|
||||
converters.push({ id: 'pdf-text', name: `PDF To Text` });
|
||||
converters.push({ id: 'pdf-images', name: `PDF To Images`, disabled: true });
|
||||
break;
|
||||
|
||||
// images
|
||||
case input.mimeType.startsWith('image/'):
|
||||
converters.push({ id: 'image', name: `Image (coming soon)` });
|
||||
converters.push({ id: 'image-ocr', name: 'As Text (OCR)' });
|
||||
break;
|
||||
|
||||
// catch-all
|
||||
default:
|
||||
converters.push({ id: 'unhandled', name: `${input.mimeType}`, unsupported: true });
|
||||
converters.push({ id: 'text', name: 'As Text' });
|
||||
break;
|
||||
}
|
||||
|
||||
edit({ converters });
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the input of an Attachment object based on the selected converter.
|
||||
*
|
||||
* @param {Readonly<Attachment>} attachment - The Attachment object to convert.
|
||||
* @param {number | null} converterIdx - The index of the selected conversion in the Attachment object's converters array.
|
||||
* @param {(changes: Partial<Attachment>) => void} edit - A function to edit the Attachment object.
|
||||
*/
|
||||
export async function attachmentPerformConversion(attachment: Readonly<Attachment>, converterIdx: number | null, edit: (changes: Partial<Attachment>) => void) {
|
||||
|
||||
// set converter index
|
||||
converterIdx = (converterIdx !== null && converterIdx >= 0 && converterIdx < attachment.converters.length) ? converterIdx : null;
|
||||
edit({
|
||||
converterIdx: converterIdx,
|
||||
outputs: [],
|
||||
});
|
||||
|
||||
// get converter
|
||||
const { ref, input } = attachment;
|
||||
const converter = converterIdx !== null ? attachment.converters[converterIdx] : null;
|
||||
if (!converter || !input)
|
||||
return;
|
||||
|
||||
edit({
|
||||
outputsConverting: true,
|
||||
});
|
||||
|
||||
// input datacould be a string or an ArrayBuffer
|
||||
function inputDataToString(data: string | ArrayBuffer | null | undefined): string {
|
||||
if (typeof data === 'string')
|
||||
return data;
|
||||
if (data instanceof ArrayBuffer)
|
||||
return new TextDecoder().decode(data);
|
||||
return '';
|
||||
}
|
||||
|
||||
// apply converter to the input
|
||||
const outputs: ComposerOutputMultiPart = [];
|
||||
switch (converter.id) {
|
||||
|
||||
// text as-is
|
||||
case 'text':
|
||||
outputs.push({
|
||||
type: 'text-block',
|
||||
text: inputDataToString(input.data),
|
||||
title: ref,
|
||||
collapsible: true,
|
||||
});
|
||||
break;
|
||||
|
||||
// html as-is
|
||||
case 'rich-text':
|
||||
outputs.push({
|
||||
type: 'text-block',
|
||||
text: input.altData!,
|
||||
title: ref || '\n<!DOCTYPE html>',
|
||||
collapsible: true,
|
||||
});
|
||||
break;
|
||||
|
||||
// html to markdown table
|
||||
case 'rich-text-table':
|
||||
let mdTable: string;
|
||||
try {
|
||||
mdTable = htmlTableToMarkdown(input.altData!, false);
|
||||
} catch (error) {
|
||||
// fallback to text/plain
|
||||
mdTable = inputDataToString(input.data);
|
||||
}
|
||||
outputs.push({
|
||||
type: 'text-block',
|
||||
text: mdTable,
|
||||
title: ref,
|
||||
collapsible: true,
|
||||
});
|
||||
break;
|
||||
|
||||
case 'pdf-text':
|
||||
if (!(input.data instanceof ArrayBuffer)) {
|
||||
console.log('Expected ArrayBuffer for PDF converter, got:', typeof input.data);
|
||||
break;
|
||||
}
|
||||
// duplicate the ArrayBuffer to avoid mutation
|
||||
const pdfData = new Uint8Array(input.data.slice(0));
|
||||
const pdfText = await pdfToText(pdfData);
|
||||
outputs.push({
|
||||
type: 'text-block',
|
||||
text: pdfText,
|
||||
title: ref,
|
||||
collapsible: true,
|
||||
});
|
||||
break;
|
||||
|
||||
case 'pdf-images':
|
||||
// TODO: extract all pages as individual images
|
||||
break;
|
||||
|
||||
case 'image':
|
||||
// TODO: continue here
|
||||
/*outputs.push({
|
||||
type: 'image-part',
|
||||
base64Url: `data:notImplemented.yet:)`,
|
||||
collapsible: false,
|
||||
});*/
|
||||
break;
|
||||
|
||||
case 'image-ocr':
|
||||
if (!(input.data instanceof ArrayBuffer)) {
|
||||
console.log('Expected ArrayBuffer for Image OCR converter, got:', typeof input.data);
|
||||
break;
|
||||
}
|
||||
try {
|
||||
const { recognize } = await import('tesseract.js');
|
||||
const buffer = Buffer.from(input.data);
|
||||
const result = await recognize(buffer, undefined, {
|
||||
errorHandler: e => console.error(e),
|
||||
logger: (message) => {
|
||||
if (message.status === 'recognizing text')
|
||||
console.log('OCR progress:', message.progress);
|
||||
},
|
||||
});
|
||||
outputs.push({
|
||||
type: 'text-block',
|
||||
text: result.data.text,
|
||||
title: ref,
|
||||
collapsible: true,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'unhandled':
|
||||
// force the user to explicitly select 'as text' if they want to proceed
|
||||
break;
|
||||
}
|
||||
|
||||
// update
|
||||
edit({
|
||||
outputsConverting: false,
|
||||
outputs,
|
||||
});
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
/*
|
||||
|
||||
/// REDUCER
|
||||
|
||||
import { ContentReducer } from '~/modules/aifn/summarize/ContentReducer';
|
||||
|
||||
const [reducerText, setReducerText] = React.useState('');
|
||||
const [reducerTextTokens, setReducerTextTokens] = React.useState(0);
|
||||
|
||||
{reducerText?.length >= 1 &&
|
||||
<ContentReducer
|
||||
initialText={reducerText} initialTokens={reducerTextTokens} tokenLimit={remainingTokens}
|
||||
onReducedText={handleReducedText} onClose={handleReducerClose}
|
||||
/>
|
||||
}
|
||||
const handleReducerClose = () => setReducerText('');
|
||||
|
||||
const handleReducedText = (text: string) => {
|
||||
handleReducerClose();
|
||||
setComposeText(_t => _t + text);
|
||||
};
|
||||
|
||||
const handleAttachFiles = async (files: FileList, overrideFileNames?: string[]): Promise<void> => {
|
||||
|
||||
// see how we fare on budget
|
||||
if (chatLLMId) {
|
||||
const newTextTokens = countModelTokens(newText, chatLLMId, 'reducer trigger') ?? 0;
|
||||
|
||||
// simple trigger for the reduction dialog
|
||||
if (newTextTokens > remainingTokens) {
|
||||
setReducerTextTokens(newTextTokens);
|
||||
setReducerText(newText);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// within the budget, so just append
|
||||
setComposeText(text => expandPromptTemplate(PromptTemplates.Concatenate, { text: newText })(text));
|
||||
|
||||
|
||||
|
||||
*/
|
||||