Compare commits

...

263 Commits

Author SHA1 Message Date
Enrico Ros e7a800350a Fix #840 2025-10-13 17:44:46 -07:00
Enrico Ros 0f86ad36ef Update Dockerfile for #840 2025-10-12 10:21:58 -07:00
Enrico Ros cd421b2f6e Update documentation for v1.16.11 final release and EOL 2025-10-09 14:33:22 -07:00
Enrico Ros 41b66e009a Finalize v1 README 2025-10-09 14:23:23 -07:00
Enrico Ros d96467f850 Finalize v1 legacy workflow - v1.1* only, as 'v1' 'v1-stable' 'v1.*' tags 2025-10-09 13:55:05 -07:00
Enrico Ros 258b19e2f4 Drawer: notice 2025-10-07 11:13:04 -07:00
Enrico Ros 01a87158b9 News: highlight first 2025-10-07 11:09:12 -07:00
Enrico Ros 46b768f9cf News: roll 2025-10-07 10:59:41 -07:00
Enrico Ros 4f0b6a5d09 Trade: rename exported file for convenience 2025-10-07 10:59:31 -07:00
Enrico Ros bfbcdb70fe News: show the links to 2.0 2025-10-07 10:59:09 -07:00
Enrico Ros 2c4602cf39 Add Node24 support 2025-10-07 09:46:34 -07:00
Enrico Ros 68f5d3946b Move tf pointers 2025-09-29 08:12:50 -07:00
Enrico Ros 14724a864c OpenRouter: compensate for older typescript 2025-04-05 10:30:15 -07:00
Enrico Ros 5e2b196c4d OpenRouter: models list: prevent schema changes from breaking working models. Fixes #787 2025-04-05 10:26:44 -07:00
Enrico Ros e7686f60b1 OpenRouter: models list: ignore missing fields on 'openrouter/auto'. Fixes #787 2025-04-05 10:25:35 -07:00
Enrico Ros 380f666d35 Roll Gemini descriptions. Fixes #783 2025-03-29 12:35:29 -07:00
Enrico Ros 3e277b1a35 Optional desc. #783 2025-03-29 12:35:05 -07:00
Enrico Ros 9bac46ea75 1.16.9 Release 2025-01-21 18:09:37 -08:00
Enrico Ros 2af4ee7dbe Remove v1-dev, fully absorbed into v2-dev. 2025-01-21 18:05:18 -08:00
Enrico Ros 590fc0d021 Gemini: relax parser - Fixes #700 2024-12-19 01:09:40 -08:00
Enrico Ros 746b0dad40 Update Node to 22 2024-12-19 01:08:41 -08:00
Enrico Ros b327da3ded Fix #675 (pre-v2) 2024-11-06 16:37:18 -08:00
Enrico Ros 7a818bdcd0 Update branch names 2024-10-28 20:09:53 -07:00
Enrico Ros c92ee2e22a v1: document branch names 2024-10-28 20:02:18 -07:00
Enrico Ros 632a4a565f [stable] OpenAI: update models 2024-10-25 10:13:13 -07:00
Enrico Ros d712c275a0 [stable] Anthropic: update models 2024-10-25 10:06:42 -07:00
Enrico Ros 1adff7481b Dev survey for Big-AGI 2. 2024-10-11 21:55:46 -07:00
Enrico Ros 393e19dda9 Vercel: fix timeout 2024-10-03 12:37:21 -07:00
Enrico Ros 39c5c7c9ba Call out to Big-AGI 2 2024-09-13 14:06:12 -07:00
Enrico Ros e64a5e59ef 1.16.8 Release 2024-09-13 13:50:00 -07:00
Enrico Ros 574c2cf0e3 Call out to Big-AGI 2 2024-09-13 13:49:11 -07:00
Enrico Ros 1d3321b336 OpenAI: o1 support label 2024-09-13 11:02:33 -07:00
Enrico Ros de25e5822d OpenAI: o1 relabel 2024-09-13 10:59:40 -07:00
Enrico Ros 6a904c9f37 OpenAI: 3.5 non legacy 2024-09-13 10:59:32 -07:00
Enrico Ros 30c3283572 OpenAI: add o1 2024-09-13 10:53:42 -07:00
Enrico Ros 10bba19079 OpenAI: add ChatGPT-4o-latest 2024-09-13 10:53:32 -07:00
Enrico Ros 713079f2f2 OpenAI: bits 2024-09-13 10:53:20 -07:00
Enrico Ros 6e16e989ac OpenAI: move 4o-mini 2024-09-13 10:53:09 -07:00
Enrico Ros 4e89e0b1e4 OpenAI: clean IDs 2024-09-13 10:52:19 -07:00
Enrico Ros 6067c289ab OpenAI: remove vision previews 2024-09-13 10:52:00 -07:00
Enrico Ros 32ebfea9cb OpenAI: reorder 2024-09-13 10:20:52 -07:00
Enrico Ros dec280d54d 1.16.7 Release
(cherry picked from commit 22b32d571d)
2024-08-07 02:51:59 -07:00
Enrico Ros 4823e97783 Mapping doc, for the future.
(cherry picked from commit a416cafc4e)
2024-08-07 02:51:59 -07:00
Enrico Ros 6a5685995f OpenAI: update models
(cherry picked from commit 5f5efe6133)
2024-08-07 02:51:59 -07:00
Enrico Ros 3b4d5691d7 1.16.6: Release. Fixes #604 2024-07-24 21:31:57 -07:00
Enrico Ros 45c09d021a Groq: update output tokens (max 8,000 for 3.1) 2024-07-24 21:27:20 -07:00
Enrico Ros 8ef759fe0f Groq: update Models 2024-07-24 21:27:12 -07:00
Enrico Ros c06735fdd2 1.16.5: Release 2024-07-18 16:15:53 -07:00
Enrico Ros cf4297a1af OpenAI: support 4o Mini (16384 token output) 2024-07-18 16:15:37 -07:00
Enrico Ros 5d458d68bd Warn devs. 2024-07-18 16:12:17 -07:00
Enrico Ros c3db077ae8 1.16.4: release 2024-07-15 14:13:36 -07:00
Enrico Ros 779b265b20 Anthropic: 8192 tokens 2024-07-15 14:08:02 -07:00
Enrico Ros 7d6d7e619b Anthropic: hardcode date 2024-06-20 12:42:10 -07:00
Enrico Ros 34caa16e39 1.16.3: release 2024-06-20 12:27:42 -07:00
Enrico Ros 976426dbd3 Anthropic: support Claude 3.5 Sonnet 2024-06-20 12:27:26 -07:00
Enrico Ros b4d8e39d56 Gemini: acknowledge the new capability to createCachedContent. Fixes #565 2024-06-10 23:56:02 -07:00
Enrico Ros 11c41e7381 Function call: increase debug verbosity 2024-06-07 14:18:01 -07:00
Enrico Ros 358d8a54ff Increase llms alignment before function calling. 2024-06-07 14:11:36 -07:00
Enrico Ros 3c8fedce68 Highlight issues with chatGenerateWithFunctions 2024-06-07 12:38:21 -07:00
Enrico Ros 1744b5b9d0 Throw if function calling on a model that doesn't support it 2024-06-07 12:15:25 -07:00
Enrico Ros 0c15476dd2 1.16.2: release 2024-06-06 22:10:27 -07:00
Enrico Ros 94ef76c67e Gemini: update
(cherry picked from commit 3050b546ac)
2024-06-06 21:42:47 -07:00
Enrico Ros bd5bf6f94f Gemini: update
(cherry picked from commit 1429726ba6)
2024-06-06 21:42:47 -07:00
Enrico Ros 1fbf454c3c Add Codestral - Fixes #558
(cherry picked from commit 4075581acd)
2024-06-06 21:42:47 -07:00
Enrico Ros 07b62fe5c1 Streaming uplink: index sources for unification. 2024-06-06 21:42:47 -07:00
Enrico Ros 7fbf6ee2e8 Fix Domino issue (crash) by upgrading Turndown to 7.2.0
See:
https://github.com/mixmark-io/turndown/issues/439
https://github.com/fgnass/domino/issues/146
(cherry picked from commit baad3ae1c3)
2024-06-06 21:41:04 -07:00
Enrico Ros ba66fc30c5 Fix TimeoutError issue
(cherry picked from commit 7c099cab94)
2024-06-06 21:41:04 -07:00
Enrico Ros 45b7ed3220 Mistral: update pricing
(cherry picked from commit 05aa4b547f)
2024-06-06 21:41:04 -07:00
Enrico Ros 20f1c4c0ae Mistral: update
#518

(cherry picked from commit 6afb61d25d)
2024-06-06 21:41:04 -07:00
Enrico Ros 97b6fc5e2b Already Set
(cherry picked from commit a7ce5c1ca6)
2024-06-06 21:41:04 -07:00
Enrico Ros 44d8c30187 Start opened
(cherry picked from commit 952bd2bd93)
2024-06-06 21:41:04 -07:00
Enrico Ros e3957bf08b Page download: improve
(cherry picked from commit f9d33d4888)
2024-06-06 21:41:03 -07:00
Enrico Ros acfe0aba21 Beam: bits
(cherry picked from commit 81d99f19d4)
2024-06-06 21:41:03 -07:00
Enrico Ros 6247b5411b Beam: recall importing rays
(cherry picked from commit 454a4257da)
2024-06-06 21:41:03 -07:00
Enrico Ros 5cc0b0a011 Beam: fix reactive bug
(cherry picked from commit e513b42786)
2024-06-06 21:41:03 -07:00
Enrico Ros 1fed2fb18c Beam: if auto-start, give the chance to change merge model
(cherry picked from commit b607e3c034)
2024-06-06 21:41:03 -07:00
Enrico Ros 8a0e7a4e3d Tiktoken: in the future, show tokens
(cherry picked from commit d5c3f5012b)
2024-06-06 21:41:03 -07:00
Enrico Ros 29a784c6c6 Update TikToken for perfect token computation on 'o' models.
(cherry picked from commit 21d045be59)
2024-06-06 21:41:03 -07:00
Enrico Ros 409a3ee194 DChat: remove IDB migration
(cherry picked from commit 44ab0483b6)
2024-06-06 21:41:03 -07:00
Enrico Ros 54caa3e01a Gemini: improve support (incl. interfaces, cost, visibility)
(cherry picked from commit 9eb0cc0b62)
2024-06-06 21:41:03 -07:00
Enrico Ros e1a723a39f (bits)
(cherry picked from commit 2db74867f5)
2024-06-06 21:41:03 -07:00
Enrico Ros 463ea35d7c Default to the full context window
(cherry picked from commit fd30baafb8)
2024-06-06 21:41:03 -07:00
Enrico Ros f751c91c68 Browse: improve markdown transform
(cherry picked from commit 3623eef47f)
2024-06-06 21:41:03 -07:00
Enrico Ros ad24c8771a Browse: full support for markdown transform
(cherry picked from commit 7b07bb7884)
2024-06-06 21:41:03 -07:00
Enrico Ros 6f82e2c3ed Browse: markdown transform as default
(cherry picked from commit 7946cd6614)
2024-06-06 21:41:03 -07:00
Enrico Ros f4b39071f0 Browse: support transform (skel)
(cherry picked from commit 51b6e30986)
2024-06-06 21:41:03 -07:00
Enrico Ros 621c968f3f Hold Shift to delete without confirmation: fixes #537
(cherry picked from commit 002df7b0f9)
2024-06-06 21:41:03 -07:00
Enrico Ros 564cf0fed0 1.16.1: default:hidden on the first Turbo 2024-05-13 12:04:31 -07:00
Enrico Ros dee9492d4c 1.16.0: update 2024-05-13 12:02:02 -07:00
Enrico Ros 6ae026f7c5 OpenAI: un-star Turbo 2024-05-13 11:49:10 -07:00
Enrico Ros 6bcbe286f3 OpenAI: add support for 'o' models 2024-05-13 11:47:55 -07:00
Enrico Ros 6f35f72607 Beam: auto-merge 2024-05-12 23:39:06 -07:00
Enrico Ros 3a7aa75538 Soft-wrap as a global preference. Fixes #517 2024-05-10 04:05:50 -07:00
Enrico Ros e4e7ac260a pdfjs: image generation (just in case) 2024-05-10 02:46:28 -07:00
Enrico Ros b8aaa4bb42 pdfjs: better parsing (for humans) 2024-05-10 02:19:45 -07:00
Enrico Ros 7793e2694b pdfjs: roll 2024-05-10 02:19:32 -07:00
Enrico Ros 83f2c72f29 Roll packages 2024-05-10 01:38:56 -07:00
Enrico Ros 1caeaee7f0 1.16.0: update News 2024-05-09 01:00:53 -07:00
Enrico Ros f354134234 Update README 2024-05-09 00:56:50 -07:00
Enrico Ros 66219d30e0 ReplyTo: fix bubble 2024-05-09 00:48:24 -07:00
Enrico Ros b9e3942ed8 ChatMessage: fix broken overflow 2024-05-09 00:18:29 -07:00
Enrico Ros 2354cdc1d1 ReplyTo: render in ChatMessage 2024-05-09 00:18:21 -07:00
Enrico Ros d929438df9 ReplyTo: extract 2024-05-09 00:09:17 -07:00
Enrico Ros 1acaed1de7 ReplyTo: Move Bubble 2024-05-09 00:03:22 -07:00
Enrico Ros 16195f8a55 ReplyTo: works 100 for OpenAI, ok for Anthropic, exposes Chat sequencing issues for a couple 2024-05-09 00:00:37 -07:00
Enrico Ros d7fc8c178f 1.16.0: enable cost by default 2024-05-08 15:39:03 -07:00
Enrico Ros 2894e16706 Merge branch 'release-1.16.0' 2024-05-08 15:11:10 -07:00
Enrico Ros c2340f3432 1.16.0: README 2024-05-08 15:03:32 -07:00
Enrico Ros 3b7b3106db Misc 2024-05-08 14:37:31 -07:00
Enrico Ros cff92819f9 1.16.0: News 2024-05-08 14:13:01 -07:00
Enrico Ros 2f981d852b Show message costs (option) 2024-05-08 13:11:21 -07:00
Enrico Ros 8eef74d776 1.16.0: version 2024-05-08 11:53:56 -07:00
Enrico Ros 60e46204dc Update default contextWindow to 8192
And override as per https://github.com/enricoros/big-AGI/pull/518#issuecomment-2090736347
2024-05-07 04:44:38 -07:00
Enrico Ros 6a5d783435 Show Costs on Hover. #480, #341 2024-05-07 04:33:39 -07:00
Enrico Ros 0223e076c4 LLM Options: improve 2024-05-07 03:54:28 -07:00
Enrico Ros ce80c78319 1.16.0: disable Reply-To (note: full in a different branch) 2024-05-07 02:55:14 -07:00
Enrico Ros cc0085ae61 Group vendors (disabled) 2024-05-07 02:46:41 -07:00
Enrico Ros f28e243b9d Chat: perfect execution error reporting, Fixes #523 2024-05-07 02:19:54 -07:00
Enrico Ros 2e4532593f Toggle JSON mode, Fixes #515 2024-05-07 00:58:02 -07:00
Enrico Ros 1f10905a03 Fix model temperaturs 2024-05-07 00:47:11 -07:00
Enrico Ros 88762db484 Anthropic: more precise usage link to show the token usage. Fixes #524 2024-05-06 23:48:41 -07:00
Enrico Ros 3b5ab0ac70 Beam: fix relaxed parsing. Fixes #528 2024-05-06 23:45:34 -07:00
Enrico Ros 8903c9296b OpenRouter: update parser 2024-05-06 22:56:09 -07:00
Enrico Ros 97858a3c94 docs/installation: mention optionality 2024-05-06 22:26:40 -07:00
Enrico Ros 0ec3e83518 Merge pull request #521 from dandv/patch-1
Docs: fix command to run local build
2024-05-06 22:25:51 -07:00
Enrico Ros 8c007b5bf7 Merge pull request #522 from dandv/patch-2
E: grammar in OpenAISourceSetup.tsx
2024-05-06 22:21:46 -07:00
Enrico Ros 768236b0e2 Merge pull request #525 from PrivTEC/patch-1
Correct typo in config-feature-browse.md
2024-05-06 22:20:18 -07:00
Enrico Ros 495d78b885 Perplexity: update models, with the ne online models 2024-05-06 21:20:02 -07:00
Enrico Ros 34b1e515fe Figure out unused model vendors 2024-05-06 21:04:02 -07:00
PrivTEC 79edbd3fa5 Correct typo in config-feature-browse.md
Corrected the typo from "proyy" to "proxy" in the file `config-feature-browse.md`. This change addresses a small, but significant error in the configuration documentation.
2024-05-06 03:51:04 +02:00
Dan Dascalescu f50d9994e2 E: grammar in OpenAISourceSetup.tsx 2024-05-04 22:22:34 +03:00
Dan Dascalescu 1603d3085f Docs: fix command to run local build 2024-05-04 22:16:12 +03:00
Enrico Ros ccf7036f33 Longer timeouts 2024-05-02 00:43:10 -07:00
Enrico Ros a0a1a5e3c1 Update the proxy desc 2024-05-02 00:09:17 -07:00
Enrico Ros fbf9120859 Default to llama3 2024-05-01 23:59:09 -07:00
Enrico Ros 8a770beec3 Update Ollama models 2024-05-01 23:05:30 -07:00
Enrico Ros 6b31669765 Fix diagrams in Dark mode. Fixes #520 2024-05-01 22:54:53 -07:00
Enrico Ros 26d72fc2d8 DMesage: add metadata 2024-04-25 22:17:36 -07:00
Enrico Ros 5eb56d0994 Move Diff'er. 2024-04-25 22:16:14 -07:00
Enrico Ros dbc4a922d5 Message Toolbar: good looking too. 2024-04-25 22:15:20 -07:00
Enrico Ros 141f423842 Diagrams: auto-switch 2024-04-25 22:15:00 -07:00
Enrico Ros 667f2433ab Diagrams: enter 2024-04-25 22:14:59 -07:00
Enrico Ros fd930ef548 Message Toolbar: fix disappearance 2024-04-25 22:14:49 -07:00
Enrico Ros 7eadfb1a63 E: PageDrawerHeader style 2024-04-25 22:11:28 -07:00
Enrico Ros 67cb07ac92 E: Style 2024-04-25 21:53:10 -07:00
Enrico Ros 96d28c43fc Manifest: update 2024-04-25 18:38:56 -07:00
Enrico Ros e57e3f5f0a Code: soft wrap. Closes #517 2024-04-25 11:41:34 -07:00
Enrico Ros 7b99bd71da Update overlay buttons 2024-04-25 11:36:58 -07:00
Enrico Ros 861a037321 Tweaks 2024-04-24 18:51:40 -07:00
Enrico Ros 84cbe6c434 RenderCode: title looks 2024-04-24 18:33:45 -07:00
Enrico Ros 2cbb811523 RenderCode: fix titles 2024-04-24 12:32:17 -07:00
Enrico Ros 8ef4faa10f Llms: update 'latest' 2024-04-24 12:25:34 -07:00
Enrico Ros f6a1c9bf52 Diagrams: fix centering 2024-04-24 03:42:50 -07:00
Enrico Ros 5d9f6fb4f5 Code blocks: undo the removal of ? 2024-04-24 03:31:00 -07:00
Enrico Ros 66840a8ecd Diagrams: center Mermaid and PlantUML diagrams 2024-04-24 03:30:28 -07:00
Enrico Ros a8ee6b255a Diagrams: improve hotfixes for Haiku and 3.5 2024-04-24 03:30:16 -07:00
Enrico Ros bd73d1c533 Diagrams: improve prompts 2024-04-24 03:30:05 -07:00
Enrico Ros e33c0ebc42 Fix code block separation in case of nested blocks. 2024-04-24 02:42:43 -07:00
Enrico Ros 57e4a35fee AppChat: extract chat executor (1st step) 2024-04-24 01:59:49 -07:00
Enrico Ros d490b57410 Diagrams: improve instructions 2024-04-24 01:59:08 -07:00
Enrico Ros 0416602e5f Diagrams: improve dialog 2024-04-24 01:59:01 -07:00
Enrico Ros ddc27b2eb9 BlockCode: improve looks 2024-04-24 01:36:32 -07:00
Enrico Ros 374deb147b Composer: improve ReplyTo integration 2024-04-24 00:03:30 -07:00
Enrico Ros d2eabd1ad0 Composer: correctness of activation 2024-04-24 00:02:42 -07:00
Enrico Ros efbc625cc3 Composer: onAction callback 2024-04-23 23:52:09 -07:00
Enrico Ros 91ae0b8cb0 Codeblocks: broader inclusion of filenames 2024-04-23 23:46:20 -07:00
Enrico Ros ddc5741b00 Attachments: getCollapsedAttachments 2024-04-23 23:18:39 -07:00
Enrico Ros 4729aca6b0 ReplyTo: improve bubble 2024-04-23 22:56:05 -07:00
Enrico Ros bb4fc3a70c Anthropic: relax key validation on custom deployments. Closes #511 2024-04-23 20:32:08 -07:00
Enrico Ros 5d8084b650 Llms: streaming: cleanups 2024-04-23 05:07:55 -07:00
Enrico Ros f316b892f5 Revert "Llms: fix Streaming timeouts (2)"
This reverts commit cbda1d7cd0.
2024-04-23 03:15:07 -07:00
Enrico Ros cbda1d7cd0 Llms: fix Streaming timeouts (2) 2024-04-23 02:07:20 -07:00
Enrico Ros 2f8e879976 Llms: fix Streaming timeouts 2024-04-23 01:45:27 -07:00
Enrico Ros cc0ac5ae3c React: fix llm naming 2024-04-22 23:59:30 -07:00
Enrico Ros 0185d24fb3 Beam: improve Merge disablement 2024-04-22 23:59:08 -07:00
Enrico Ros 97dbdc9c31 Beam: improve inlining (not ready yet) 2024-04-22 23:58:26 -07:00
Enrico Ros a07c66c9a3 Beam: lay down some inlining code 2024-04-22 21:49:14 -07:00
Enrico Ros 308bd25bc0 Beam: improve Tutorial 2024-04-22 21:48:00 -07:00
Enrico Ros 70066a03b6 Explainer Carousel: improvements 2024-04-22 21:44:17 -07:00
Enrico Ros a7f3872af3 Beam: update bar icons 2024-04-22 16:38:26 -07:00
Enrico Ros 22e10e675a RMB on Chat Avatar brings up the menu 2024-04-22 16:31:30 -07:00
Enrico Ros 89679e946d Beam: remove optionality (/beam, chat mode, composer button & shortcut, message beam from) 2024-04-22 16:12:09 -07:00
Enrico Ros 1d1bb9d3df Beam: explain a possible missing user message 2024-04-22 15:58:39 -07:00
Enrico Ros 8faf2b2595 Beam: move scroll button to the Gather pane 2024-04-22 15:58:18 -07:00
Enrico Ros e47ad9700e Anthropic: workaround for history[0] being assistant 2024-04-22 15:40:48 -07:00
Enrico Ros 372b19a057 Formulas: fix rendering for OpenAI-style inline '\(' and block '\[' latex. Fixes #508 2024-04-22 04:39:12 -07:00
Enrico Ros cbe156a868 Merge branch 'refs/heads/main-stable' 2024-04-22 02:57:08 -07:00
Enrico Ros 181a3881e2 Groq: update models
(cherry picked from commit 3eef03b303)
2024-04-22 02:56:47 -07:00
Enrico Ros 3eef03b303 Groq: update models 2024-04-22 02:52:19 -07:00
Enrico Ros ad56e3165c Beam: fix pixel-bound loading of presets 2024-04-22 02:27:07 -07:00
Enrico Ros b1a96b6e75 Beam: clear heuristics for llm selection 2024-04-22 02:26:48 -07:00
Enrico Ros 56419b1b4e Beam: persist the last configuration 2024-04-22 02:19:17 -07:00
Enrico Ros 372f14a9c5 Beam: auto-configure from Elo 2024-04-22 01:01:43 -07:00
Enrico Ros e1ec56a120 Beam: remove fallbackLlmId 2024-04-22 01:01:33 -07:00
Enrico Ros 5bb11249d6 Beam: remove reactive (view-based) ray conf 2024-04-22 01:01:17 -07:00
Enrico Ros 9fbcca1ff2 Llms: avoid name clash 2024-04-22 00:54:41 -07:00
Enrico Ros 323f2b2c3e Llms: cleaner 2024-04-22 00:52:56 -07:00
Enrico Ros b971d38dd5 Llms: heuristic to auto-pick the best diverse LLMs 2024-04-22 00:49:06 -07:00
Enrico Ros 278f479a3a Beam: rename terminate 2024-04-22 00:48:36 -07:00
Enrico Ros 03aea5678d Llms: misc 2024-04-22 00:17:49 -07:00
Enrico Ros b62b8ee7e6 Beam: App: fix state 2024-04-22 00:12:49 -07:00
Enrico Ros 63f55551e5 Beam: gather show all prompts 2024-04-21 23:30:41 -07:00
Enrico Ros b185fbc57d Beam: fallback llm Id 2024-04-21 23:24:52 -07:00
Enrico Ros ceb9d58e72 Beam: fix import rays 2024-04-21 23:10:47 -07:00
Enrico Ros a0bb515a4f Beam: minor bits 2024-04-21 22:28:36 -07:00
Enrico Ros 2cfac2f18b Beam: combine two menus into one 2024-04-21 22:05:08 -07:00
Enrico Ros d412f538b2 Make it more explicit we're only not rolling this one. 2024-04-21 21:30:26 -07:00
Enrico Ros 94f90ad861 Roll packages, but hold Next back. 2024-04-21 21:22:47 -07:00
Enrico Ros 4a402e7937 Roll pdfjs 2024-04-21 21:19:30 -07:00
Enrico Ros c226d6c391 Lock Next to 14.1, as 14.2 introduces the async/await messages when running/building, and we don't know what it means yet.
"The generated code contains 'async/await' because this module is using "topLevelAwait"."

See: https://github.com/vercel/next.js/issues/64792
2024-04-21 21:17:24 -07:00
Enrico Ros 67410e6c59 Revert "Roll packages." - Next v14.2.2 shows some async/await messages.
See https://github.com/vercel/next.js/issues/64792

This reverts commit 419c361147.
2024-04-21 21:12:32 -07:00
Enrico Ros 419c361147 Roll packages. 2024-04-21 20:39:56 -07:00
Enrico Ros 3769a53ffa Merge pull request #507 from mludvig/arm-build-1
Build multi-arch docker image for x64-64 and ARM64
2024-04-15 22:04:07 -07:00
Michael Ludvig ec4aaa3bfb Cleanup 2024-04-16 16:51:57 +12:00
Michael Ludvig be52680fcd Put back hashes and comments 2024-04-16 16:20:48 +12:00
Michael Ludvig 9d41ab9339 Merge branch 'enricoros:main' into arm-build-1 2024-04-16 12:36:23 +12:00
Michael Ludvig f126fc3087 Cleanup 2024-04-16 11:52:58 +12:00
Michael Ludvig 764377037c Disabled arm 32 again (not supported by Prisma) 2024-04-16 11:22:15 +12:00
Michael Ludvig 8e09eaab45 Add sha tag 2024-04-16 11:10:32 +12:00
Michael Ludvig 6523da186c Update versions, add arm32 2024-04-16 10:29:18 +12:00
Michael Ludvig 6471fd8b6f Enable action 2024-04-16 10:01:41 +12:00
Michael Ludvig 247a74881a Added buildx support 2024-04-15 11:34:42 +12:00
Enrico Ros 3ef09f0a5f Models: upgrade data structure to v2 - auto-pick 2024-04-12 05:50:46 -07:00
Enrico Ros b924d331f9 Models: upgrade data structure to v2 2024-04-12 05:36:18 -07:00
Enrico Ros 14041b6012 Beam: simplify a bit 2024-04-12 03:44:54 -07:00
Enrico Ros 2c6cc5ecec Cleanup models update logic 2024-04-12 02:44:14 -07:00
Enrico Ros ac022b1df0 Models: adding prices and benchmarks for a few models 2024-04-12 02:09:14 -07:00
Enrico Ros 0a2081de08 Better Beam Hint 2024-04-12 01:06:25 -07:00
Enrico Ros 64a8e554c7 Designer update 2024-04-12 00:46:58 -07:00
Enrico Ros 082d29fd2f Improve style 2024-04-12 00:45:00 -07:00
Enrico Ros ba5cf9d002 Composer: show the bubble 2024-04-12 00:22:55 -07:00
Enrico Ros 57a55318df Stabilize 2024-04-12 00:07:40 -07:00
Enrico Ros e70f4f7a59 ChatMessageList: this side is probably done 2024-04-11 21:10:56 -07:00
Enrico Ros 1d217fad67 Warning 2024-04-11 21:10:39 -07:00
Enrico Ros e95d46f085 ConversationHandler: prepare chat overlays 2024-04-11 21:08:04 -07:00
Enrico Ros f4577878e1 ChatMessage: Reply on 2024-04-11 20:36:32 -07:00
Enrico Ros 1bd1e5c8e3 ChatMessage: Toolbar complete 2024-04-11 20:19:30 -07:00
Enrico Ros c975dee965 ChatMessageList: remove menu items if t2i off 2024-04-11 19:22:03 -07:00
Enrico Ros 9d690f4219 ChatMessage: fix double-closure 2024-04-11 18:22:12 -07:00
Enrico Ros 29ddb3f58d ChatMessage: improve menu 2024-04-11 18:12:44 -07:00
Enrico Ros 8626bc0b1c BlocksRenderer: selection color 2024-04-11 18:12:37 -07:00
Enrico Ros c362cf6596 Propagate information on whether this can be spoken 2024-04-11 17:52:50 -07:00
Enrico Ros 97264fc5ff ChatMessage: toolbar framework 2024-04-11 17:04:44 -07:00
Enrico Ros 494c4409c1 BlocksRenderer: more v-padding for an improved mouse-up behavior 2024-04-11 16:40:47 -07:00
Enrico Ros d46e366c81 Blocks Renderer: use refs 2024-04-11 13:16:13 -07:00
Enrico Ros 6afe33ee9c decolor 2024-04-11 10:13:54 -07:00
Enrico Ros 903c9e1cc3 Improve options 2024-04-11 10:12:03 -07:00
Enrico Ros 3ef43fc3f5 Merge branch 'joriskalz-chat-with-youtube' 2024-04-11 09:58:56 -07:00
Enrico Ros b1c3be05dd Integrate YouTube transcriber (hidden by default) 2024-04-11 09:58:45 -07:00
Enrico Ros efee23b4a7 Update shadows 2024-04-11 09:49:13 -07:00
Enrico Ros 06b67a7586 Merge branch 'chat-with-youtube' of https://github.com/joriskalz/big-AGI-dev into joriskalz-chat-with-youtube 2024-04-11 09:33:56 -07:00
Joris Kalz 889a2dbf9d Remvoved unwanted new line. 2024-04-11 11:45:03 +01:00
Joris Kalz 2f80fcc888 Removed comments 2024-04-11 11:43:54 +01:00
Joris Kalz f7ee479c1d Removed comments 2024-04-11 11:36:27 +01:00
Joris Kalz 94fa0981fe Update YouTube Transcriber voiceId in data.ts 2024-04-11 11:33:55 +01:00
Joris Kalz 4c74afe438 Update YouTube Transcriber system message in data.ts 2024-04-11 11:33:42 +01:00
Joris Kalz f76cea22de Fix YouTube Transcriber activation bug in PersonaSelector component 2024-04-10 22:18:35 +01:00
Joris Kalz 3d49110808 Implement handleAddMessage function in PersonaSelector component 2024-04-10 22:14:15 +01:00
Joris Kalz 88a4579f7a Refactor PersonaSelector component to handle YouTube Transcriber tile click 2024-04-10 22:00:29 +01:00
Joris Kalz 241bde0333 Update YouTubeURLInput component to handle YouTube video transcripts 2024-04-10 21:48:20 +01:00
Joris Kalz 73c7867cd6 Add YouTube Transcriber persona and handle YouTube Transcriber tile click 2024-04-10 11:53:48 +01:00
Enrico Ros b35254f7ad Qol 2024-04-10 03:14:15 -07:00
Enrico Ros 213e78c956 Beam: save the merge model, and shrink rays when loading a smaller preset 2024-04-10 03:01:18 -07:00
176 changed files with 5817 additions and 2735 deletions
+22 -10
View File
@@ -11,11 +11,8 @@ name: Create and publish Docker images
on:
push:
branches:
- main
#- main-stable # Disabled as the v* tag is used for stable releases
tags:
- 'v*' # Trigger on version tags (e.g., v1.7.0)
- 'v1.1*' # V1 legacy tags only (v1.10.x - v1.19.x range)
env:
REGISTRY: ghcr.io
@@ -32,6 +29,12 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to the Container registry
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
with:
@@ -45,17 +48,26 @@ jobs:
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=raw,value=development,enable=${{ github.ref == 'refs/heads/main' }}
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/main-stable' }}
type=ref,event=tag # Use the tag name as a tag for tag builds
type=semver,pattern={{version}} # Generate semantic versioning tags for tag builds
# V1 legacy pointers
type=raw,value=v1
type=raw,value=v1-stable
# Exact version tags (v1.16.11 and 1.16.11)
type=ref,event=tag
type=semver,pattern={{version}}
- name: Build and push Docker image
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
with:
context: .
file: Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
labels: |
org.opencontainers.image.title=Big-AGI v1 (Legacy)
org.opencontainers.image.description=Big-AGI v1 - Legacy version. For the latest version, see GitHub and big-agi.com
org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}
org.opencontainers.image.documentation=https://big-agi.com
build-args: |
NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }}
+12 -6
View File
@@ -1,6 +1,6 @@
# Base
FROM node:18-alpine AS base
ENV NEXT_TELEMETRY_DISABLED 1
FROM node:22-alpine AS base
ENV NEXT_TELEMETRY_DISABLED=1
# Dependencies
@@ -11,8 +11,11 @@ WORKDIR /app
COPY package*.json ./
COPY src/server/prisma ./src/server/prisma
# link ssl3 for latest Alpine
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
# Install dependencies, including dev (release builds should use npm ci)
ENV NODE_ENV development
ENV NODE_ENV=development
RUN npm ci
@@ -28,8 +31,11 @@ ENV NEXT_PUBLIC_GA4_MEASUREMENT_ID=${NEXT_PUBLIC_GA4_MEASUREMENT_ID}
COPY --from=deps /app/node_modules ./node_modules
COPY . .
# link ssl3 for latest Alpine
RUN sh -c '[ ! -e /lib/libssl.so.3 ] && ln -s /usr/lib/libssl.so.3 /lib/libssl.so.3 || echo "Link already exists"'
# Build the application
ENV NODE_ENV production
ENV NODE_ENV=production
RUN npm run build
# Reduce installed packages to production-only
@@ -51,8 +57,8 @@ COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=nextjs:nodejs /app/src/server/prisma ./src/server/prisma
# Minimal ENV for production
ENV NODE_ENV production
ENV PATH $PATH:/app/node_modules/.bin
ENV NODE_ENV=production
ENV PATH=$PATH:/app/node_modules/.bin
# Run as non-root user
USER nextjs
+37 -195
View File
@@ -1,199 +1,41 @@
# BIG-AGI 🧠
# BIG-AGI Legacy
Welcome to big-AGI, the AI suite for professionals that need function, form,
simplicity, and speed. Powered by the latest models from 12 vendors and
open-source servers, `big-AGI` offers best-in-class Chats,
[Beams](https://github.com/enricoros/big-AGI/issues/470),
and [Calls](https://github.com/enricoros/big-AGI/issues/354) with AI personas,
visualizations, coding, drawing, side-by-side chatting, and more -- all wrapped in a polished UX.
Stay ahead of the curve with big-AGI. 🚀 Pros & Devs love big-AGI. 🤖
[![Official Website](https://img.shields.io/badge/BIG--AGI.com-%23096bde?style=for-the-badge&logo=vercel&label=launch)](https://big-agi.com)
Or fork & run on Vercel
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
## 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [installation](docs/installation.md) 👉 [documentation](docs/README.md)
[//]: # (big-AGI is an open book; see the **[ready-to-ship and future ideas](https://github.com/users/enricoros/projects/4/views/2)** in our open roadmap)
#### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
- Support for the newly released Gemini Pro 1.5 models
- Support for the new OpenAI 2024-04-09 Turbo models
- Ctrl+S and Ctrl+O to save/load chats on desktop
- Resilience fixes after the large success of 1.15.0
> Note: Beam-2 and new larger features are being cooked outside of `main`.
### 3,000 Commits Milestone · April 7, 2024
![big-AGI Milestone](https://github.com/enricoros/big-AGI/assets/32999/47fddbb1-9bd6-4b58-ace4-781dfcb80923)
- 🥇 Today we <b>celebrate commit 3000</b> in just over one year, and going stronger 🚀
- 📢️ Thanks everyone for your support and words of love for Big-AGI, we are committed to creating the best AI experiences for everyone.
### What's New in 1.15.0 · April 1, 2024 · Beam
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
- Managed Deployments **Auto-Configuration**: simplify the UI models setup with backend-set models. [#436](https://github.com/enricoros/big-AGI/issues/436)
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
- Enhanced the default Persona
- Fixes to Gemini models and SVGs, improvements to UI and icons
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
### What's New in 1.14.1 · March 7, 2024 · Modelmorphic
- **Anthropic** [Claude-3](https://www.anthropic.com/news/claude-3-family) model family support. [#443](https://github.com/enricoros/big-AGI/issues/443)
- New **[Perplexity](https://www.perplexity.ai/)** and **[Groq](https://groq.com/)** integration (thanks @Penagwin). [#407](https://github.com/enricoros/big-AGI/issues/407), [#427](https://github.com/enricoros/big-AGI/issues/427)
- **[LocalAI](https://localai.io/models/)** deep integration, including support for [model galleries](https://github.com/enricoros/big-AGI/issues/411)
- **Mistral** Large and Google **Gemini 1.5** support
- Performance optimizations: runs [much faster](https://twitter.com/enricoros/status/1756553038293303434?utm_source=localhost:3000&utm_medium=big-agi), saves lots of power, reduces memory usage
- Enhanced UX with auto-sizing charts, refined search and folder functionalities, perfected scaling
- And with more UI improvements, documentation, bug fixes (20 tickets), and developer enhancements
### What's New in 1.13.0 · Feb 8, 2024 · Multi + Mind
https://github.com/enricoros/big-AGI/assets/32999/01732528-730e-41dc-adc7-511385686b13
- **Side-by-Side Split Windows**: multitask with parallel conversations. [#208](https://github.com/enricoros/big-AGI/issues/208)
- **Multi-Chat Mode**: message everyone, all at once. [#388](https://github.com/enricoros/big-AGI/issues/388)
- **Export tables as CSV**: big thanks to @aj47. [#392](https://github.com/enricoros/big-AGI/pull/392)
- Adjustable text size: customize density. [#399](https://github.com/enricoros/big-AGI/issues/399)
- Dev2 Persona Technology Preview
- Better looking chats with improved spacing, fonts, and menus
- More: new video player, [LM Studio tutorial](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-lmstudio.md) (thanks @aj47), [MongoDB support](https://github.com/enricoros/big-AGI/blob/main/docs/deploy-database.md) (thanks @ranfysvalle02), and speedups
<details>
<summary>What's New in 1.12.0 · Jan 26, 2024 · AGI Hotline</summary>
https://github.com/enricoros/big-AGI/assets/32999/95ceb03c-945d-4fdd-9a9f-3317beb54f3f
- **Voice Calls**: real-time voice call your personas out of the blue or in relation to a chat [#354](https://github.com/enricoros/big-AGI/issues/354)
- Support **OpenAI 0125** Models. [#364](https://github.com/enricoros/big-AGI/issues/364)
- Rename or Auto-Rename chats. [#222](https://github.com/enricoros/big-AGI/issues/222), [#360](https://github.com/enricoros/big-AGI/issues/360)
- More control over **Link Sharing** [#356](https://github.com/enricoros/big-AGI/issues/356)
- **Accessibility** to screen readers [#358](https://github.com/enricoros/big-AGI/issues/358)
- Export chats to Markdown [#337](https://github.com/enricoros/big-AGI/issues/337)
- Paste tables from Excel [#286](https://github.com/enricoros/big-AGI/issues/286)
- Ollama model updates and context window detection fixes [#309](https://github.com/enricoros/big-AGI/issues/309)
</details>
<details>
<summary>What's New in 1.11.0 · Jan 16, 2024 · Singularity</summary>
https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cfcb110c68
- **Find chats**: search in titles and content, with frequency ranking. [#329](https://github.com/enricoros/big-AGI/issues/329)
- **Commands**: command auto-completion (type '/'). [#327](https://github.com/enricoros/big-AGI/issues/327)
- **[Together AI](https://www.together.ai/products#inference)** inference platform support (good speed and newer models). [#346](https://github.com/enricoros/big-AGI/issues/346)
- Persona Creator history, deletion, custom creation, fix llm API timeouts
- Enable adding up to five custom OpenAI-compatible endpoints
- Developer enhancements: new 'Actiles' framework
</details>
<details>
<summary>What's New in 1.10.0 · Jan 6, 2024 · The Year of AGI</summary>
- **New UI**: for both desktop and mobile, sets the stage for future scale. [#201](https://github.com/enricoros/big-AGI/issues/201)
- **Conversation Folders**: enhanced conversation organization. [#321](https://github.com/enricoros/big-AGI/issues/321)
- **[LM Studio](https://lmstudio.ai/)** support and improved token management
- Resizable panes in split-screen conversations.
- Large performance optimizations
- Developer enhancements: new UI framework, updated documentation for proxy settings on browserless/docker
</details>
For full details and former releases, check out the [changelog](docs/changelog.md).
## 👉 Key Features ✨
| ![Advanced AI](https://img.shields.io/badge/Advanced%20AI-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![100+ AI Models](https://img.shields.io/badge/100%2B%20AI%20Models-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![Flow-state UX](https://img.shields.io/badge/Flow--state%20UX-32383e?style=for-the-badge&logo=flow&logoColor=white) | ![Privacy First](https://img.shields.io/badge/Privacy%20First-32383e?style=for-the-badge&logo=privacy&logoColor=white) | ![Advanced Tools](https://img.shields.io/badge/Fun%20To%20Use-f22a85?style=for-the-badge&logo=tools&logoColor=white) |
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
![big-AGI screenshot](docs/pixels/big-AGI-compo-20240201_small.png)
You can easily configure 100s of AI models in big-AGI:
| **AI models** | _supported vendors_ |
|:--------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Opensource Servers | [LocalAI](https://localai.com) (multimodal) · [Ollama](https://ollama.com/) · [Oobabooga](https://github.com/oobabooga/text-generation-webui) |
| Local Servers | [LM Studio](https://lmstudio.ai/) |
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
| Language services | [Anthropic](https://anthropic.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) |
| Image services | [Prodia](https://prodia.com/) (SDXL) |
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
Add extra functionality with these integrations:
| **More** | _integrations_ |
|:-------------|:---------------------------------------------------------------------------------------------------------------|
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
| Sharing | [Paste.gg](https://paste.gg/) (Paste chats) |
| Tracking | [Helicone](https://www.helicone.ai) (LLM Observability) |
[//]: # (- [x] **Flow-state UX** for uncompromised productivity)
[//]: # (- [x] **AI Personas**: Tailor your AI interactions with customizable personas)
[//]: # (- [x] **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface)
[//]: # (- [x] **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads)
[//]: # (- [x] **Privacy First**: Self-host and use your own API keys for full control)
[//]: # (- [x] **Advanced Tools**: Execute code, import PDFs, and summarize documents)
[//]: # (- [x] **Seamless Integrations**: Enhance functionality with various third-party services)
[//]: # (- [x] **Open Roadmap**: Contribute to the progress of big-AGI)
<br/>
## 🚀 Installation
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
The guide covers various installation options, whether you're spinning it up on
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
through Docker.
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
to set up big-AGI quickly and easily.
[![Installation Guide](https://img.shields.io/badge/Installation%20Guide-blue?style=for-the-badge&logo=read-the-docs&logoColor=white)](docs/installation.md)
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
<br/>
# 🌟 Get Involved!
[//]: # ([![Official Discord]&#40;https://img.shields.io/discord/1098796266906980422?label=discord&logo=discord&logoColor=%23fff&style=for-the-badge&#41;]&#40;https://discord.gg/MkH4qj2Jp9&#41;)
[![Official Discord](https://discordapp.com/api/guilds/1098796266906980422/widget.png?style=banner2)](https://discord.gg/MkH4qj2Jp9)
- [ ] 📢️ [**Chat with us** on Discord](https://discord.gg/MkH4qj2Jp9)
- [ ] **Give us a star** on GitHub 👆
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
<br/>
[//]: # ([![GitHub stars]&#40;https://img.shields.io/github/stars/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/stargazers&#41;)
[//]: # ([![GitHub forks]&#40;https://img.shields.io/github/forks/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/network&#41;)
[//]: # ([![GitHub pull requests]&#40;https://img.shields.io/github/issues-pr/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/pulls&#41;)
[//]: # ([![License]&#40;https://img.shields.io/github/license/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/LICENSE&#41;)
> **⚠️ IMPORTANT: `v1` Branch - End of Life**
>
> This is the **v1** (formerly **v1-stable**) branch, which reached end-of-life in October 2025.
> - **For the latest Big-AGI**, see the [**main** branch](https://github.com/enricoros/big-AGI/tree/main) and [**big-agi.com**](https://big-agi.com)
> - **v1.16.11 is the final legacy release** - No further updates will be provided
> - Docker users: `:stable`, `:development` and `:latest` point to the `main` branch
---
2023-2024 · Enrico Ros x [big-AGI](https://big-agi.com) · License: [MIT](LICENSE) · Made with 💙
**For the latest Big-AGI:**
- [**Big-AGI Open**](https://github.com/enricoros/big-AGI/tree/main) - Open Source, for self-hosting, with bleeding edge models support
- [**Big-AGI Pro**](https://big-agi.com) - Hosted for Professionals with extra services and Cloud Sync
---
### What's New in 1.16.11 · October 2025
- Final v1 legacy release. Branch reaches end-of-life.
### What's New in 1.16.1...1.16.10 · 2024-2025 (patch releases)
- 1.16.10: OpenRouter models fixes
- 1.16.9: Docker Gemini fix (R1 models are supported in latest Big-AGI)
- 1.16.8: OpenAI ChatGPT-4o Latest (o1 models are supported in latest Big-AGI)
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
- 1.16.6: Groq support for Llama 3.1 models
- 1.16.5: GPT-4o Mini support
- 1.16.4: 8192 tokens support for Claude 3.5 Sonnet
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
- 1.16.2: Improve web downloads, as text, markdown, or HTML
- 1.16.2: Proper support for Gemini models
- 1.16.2: Added the latest Mistral model
- 1.16.2: Tokenizer support for gpt-4o
- 1.16.2: Updates to Beam
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
---
2023-2025 · Enrico Ros x [Big-AGI](https://big-agi.com) · License: [MIT](LICENSE) · Made with 💙
+6 -4
View File
@@ -1,6 +1,8 @@
# big-AGI Documentation
# big-AGI v1 Documentation (Legacy)
Find all the information you need to get started, configure, and effectively use big-AGI.
> **Note:** This is documentation for the **v1 legacy branch**. For the latest Big-AGI, see the [main branch](https://github.com/enricoros/big-AGI/tree/main) or visit [big-agi.com](https://big-agi.com).
Find all the information you need to get started, configure, and effectively use big-AGI v1.
[//]: # (## Quick Start)
@@ -33,7 +35,7 @@ Detailed guides to configure your big-AGI interface and models.
## Deployment
System integrators, administrators, whitelabelers: instead of using the public big-AGI instance on get.big-agi.com, you can deploy your own instance.
System integrators, administrators, whitelabelers: instead of using the public big-AGI instance on app.big-agi.com, you can deploy your own instance.
Step-by-step deployment and system configuration instructions.
@@ -53,7 +55,7 @@ Step-by-step deployment and system configuration instructions.
Join our community or get support:
- Visit our [GitHub repository](https://github.com/enricoros/big-AGI) for source code and issue tracking
- Check the latest updates and features on [Changelog](changelog.md) or the in-app [News](https://get.big-agi.com/news)
- Check the latest updates and features on [Changelog](changelog.md) or the in-app [News](https://big-agi.com/news)
- Connect with us and other users on [Discord](https://discord.gg/MkH4qj2Jp9) for discussions, help, and sharing your experiences with big-AGI
Thank you for choosing big-AGI. We're excited to see what you'll build.
+37 -7
View File
@@ -1,14 +1,44 @@
## Changelog
## Changelog (v1 Legacy Branch)
This is a high-level changelog. Calls out some of the high level features batched
by release.
This is a high-level changelog for the v1 legacy branch. For the latest Big-AGI, see the [main branch](https://github.com/enricoros/big-AGI).
- For the live roadmap, please see [the GitHub project](https://github.com/users/enricoros/projects/4/views/2)
### 1.16.11 · October 2025 (Final Release)
### 1.16.0 - Mar 2024
Final v1 legacy release. Branch reaches end-of-life.
- milestone: [1.16.0](https://github.com/enricoros/big-agi/milestone/16)
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
### What's New in 1.16.1...1.16.10 · 2024-2025 (patch releases)
- 1.16.9: Docker Gemini fix (R1 models are supported in Big-AGI 2)
- 1.16.8: OpenAI ChatGPT-4o Latest (o1 models are supported in Big-AGI 2)
- 1.16.7: OpenAI support for GPT-4o 2024-08-06
- 1.16.6: Groq support for Llama 3.1 models
- 1.16.5: GPT-4o Mini support
- 1.16.4: 8192 tokens support for Claude 3.5 Sonnet
- 1.16.3: Anthropic Claude 3.5 Sonnet model support
- 1.16.2: Improve web downloads, as text, markdown, or HTML
- 1.16.2: Proper support for Gemini models
- 1.16.2: Added the latest Mistral model
- 1.16.2: Tokenizer support for gpt-4o
- 1.16.2: Updates to Beam
- 1.16.1: Support for the new OpenAI GPT-4o 2024-05-13 model
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
- Major enhancements to the Auto-Diagrams tool
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
- Models update: **Anthropic**, **Groq**, **Ollama**, **OpenAI**, **OpenRouter**, **Perplexity**
- Code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, and more [#517](https://github.com/enricoros/big-AGI/issues/517), [507](https://github.com/enricoros/big-AGI/pull/507)
- Developers: update the LLMs data structures
### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
- Support for the newly released Gemini Pro 1.5 models
- Support for the new OpenAI 2024-04-09 Turbo models
- Resilience fixes after the large success of 1.15.0
### What's New in 1.15.0 · April 1, 2024 · Beam
+2 -2
View File
@@ -68,7 +68,7 @@ The chat agent won't be able to access the web sites if the browserless containe
- MAX_CONCURRENT_SESSIONS=10
```
You can then add the proyy lines to your `.env` file.
You can then add the proxy lines to your `.env` file.
```
https_proxy=http://PROXY-IP:PROXY-PORT
@@ -115,4 +115,4 @@ If you encounter any issues or have questions about configuring the browse funct
Enjoy the enhanced browsing experience within `big-AGI` and explore the web without ever leaving your chat!
Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md))
Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md))
+11 -7
View File
@@ -72,15 +72,19 @@ Then, edit the nginx configuration file `/etc/nginx/sites-enabled/default` and a
```nginx
location /ollama/ {
proxy_pass http://localhost:11434;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:11434/;
# Disable buffering for the streaming responses
# Disable buffering for the streaming responses (SSE)
proxy_set_header Connection '';
proxy_http_version 1.1;
chunked_transfer_encoding off;
proxy_buffering off;
proxy_cache off;
# Longer timeouts
proxy_read_timeout 3600;
proxy_connect_timeout 3600;
proxy_send_timeout 3600;
}
```
+5 -1
View File
@@ -1,4 +1,8 @@
# Deploying `big-AGI` with Docker
# Deploying `big-AGI` with Docker (v1 Legacy)
> **Note:** This documentation is for the **v1 legacy branch**. For the latest Big-AGI, use Docker tags `:latest`, `:stable`, or `:development` which point to the [main branch](https://github.com/enricoros/big-AGI/tree/main).
>
> To use v1 legacy specifically, use Docker tags `:v1` or `:v1-stable`.
Utilize Docker containers to deploy the big-AGI application for an efficient and automated deployment process.
Docker ensures faster development cycles, easier collaboration, and seamless environment management.
+4 -4
View File
@@ -47,10 +47,10 @@ the same steps 1 and 2 as for [local development](#local-development).
# .. repeat the steps above up to `npm install`, then:
npm run build
```
4. Start the production server:
4. Start the production server (`npx` may be optional):
```bash
next start --port 3000
```
npx next start --port 3000
```
Your big-AGI production instance is on `http://localhost:3000`.
### Advanced Customization
@@ -116,4 +116,4 @@ Join our vibrant community of developers, researchers, and AI enthusiasts. Share
- [Discord Community](https://discord.gg/MkH4qj2Jp9)
- [Twitter](https://twitter.com/yourusername)
For any questions or inquiries, please don't hesitate to [reach out to our team](mailto:hello@big-agi.com).
For any questions or inquiries, please don't hesitate to [reach out to our team](mailto:hello@big-agi.com).
+885 -295
View File
File diff suppressed because it is too large Load Diff
+30 -24
View File
@@ -1,6 +1,6 @@
{
"name": "big-agi",
"version": "1.15.1",
"version": "1.16.0",
"private": true,
"author": "Enrico Ros <enrico.ros@gmail.com>",
"repository": "https://github.com/enricoros/big-agi",
@@ -22,13 +22,14 @@
"@emotion/react": "^11.11.4",
"@emotion/server": "^11.11.0",
"@emotion/styled": "^11.11.5",
"@mui/icons-material": "^5.15.15",
"@mui/joy": "^5.0.0-beta.32",
"@next/bundle-analyzer": "^14.1.4",
"@next/third-parties": "^14.2.0-canary.60",
"@prisma/client": "^5.12.1",
"@mui/icons-material": "^5.15.17",
"@mui/joy": "^5.0.0-beta.36",
"@mui/material": "^5.15.17",
"@next/bundle-analyzer": "^14.2.3",
"@next/third-parties": "^14.2.3",
"@prisma/client": "^5.13.0",
"@sanity/diff-match-patch": "^3.1.1",
"@t3-oss/env-nextjs": "^0.9.2",
"@t3-oss/env-nextjs": "^0.10.1",
"@tanstack/react-query": "~4.36.1",
"@trpc/client": "10.44.1",
"@trpc/next": "10.44.1",
@@ -37,51 +38,56 @@
"@vercel/analytics": "^1.2.2",
"@vercel/speed-insights": "^1.0.10",
"browser-fs-access": "^0.35.0",
"cheerio": "^1.0.0-rc.12",
"eventsource-parser": "^1.1.2",
"idb-keyval": "^6.2.1",
"next": "^14.1.4",
"next": "~14.1.4",
"nprogress": "^0.2.0",
"pdfjs-dist": "4.0.379",
"pdfjs-dist": "4.2.67",
"plantuml-encoder": "^1.4.0",
"prismjs": "^1.29.0",
"react": "^18.2.0",
"react": "^18.3.1",
"react-beautiful-dnd": "^13.1.1",
"react-csv": "^2.2.2",
"react-dom": "^18.2.0",
"react-dom": "^18.3.1",
"react-katex": "^3.0.1",
"react-markdown": "^9.0.1",
"react-player": "^2.15.1",
"react-resizable-panels": "^2.0.16",
"react-player": "^2.16.0",
"react-resizable-panels": "^2.0.19",
"react-timeago": "^7.2.0",
"rehype-katex": "^7.0.0",
"remark-gfm": "^4.0.0",
"remark-math": "^6.0.0",
"sharp": "^0.33.3",
"superjson": "^2.2.1",
"tesseract.js": "^5.0.5",
"tiktoken": "^1.0.13",
"tesseract.js": "^5.1.0",
"tiktoken": "^1.0.15",
"turndown": "^7.2.0",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod": "^3.23.8",
"zustand": "^4.5.2"
},
"devDependencies": {
"@cloudflare/puppeteer": "0.0.5",
"@types/node": "^20.12.5",
"@types/node": "^20.12.11",
"@types/nprogress": "^0.2.3",
"@types/plantuml-encoder": "^1.4.2",
"@types/prismjs": "^1.26.3",
"@types/react": "^18.2.74",
"@types/prismjs": "^1.26.4",
"@types/react": "^18.3.1",
"@types/react-beautiful-dnd": "^13.1.8",
"@types/react-csv": "^1.1.10",
"@types/react-dom": "^18.2.24",
"@types/react-dom": "^18.3.0",
"@types/react-katex": "^3.0.4",
"@types/react-timeago": "^4.1.7",
"@types/turndown": "^5.0.4",
"@types/uuid": "^9.0.8",
"eslint": "^8.57.0",
"eslint-config-next": "^14.1.4",
"eslint-config-next": "^14.2.3",
"prettier": "^3.2.5",
"prisma": "^5.12.1",
"typescript": "^5.4.4"
"prisma": "^5.13.0",
"typescript": "^5.4.5"
},
"engines": {
"node": "^20.0.0 || ^18.0.0"
"node": "^24.0.0 || ^22.0.0 || ^20.0.0 || ^18.0.0"
}
}
+3 -2
View File
@@ -17,7 +17,7 @@ import { Brand } from '~/common/app.config';
import { ROUTE_APP_CHAT, ROUTE_INDEX } from '~/common/app.routes';
// apps access
import { incrementalNewsVersion } from '../../src/apps/news/news.version';
import { incrementalNewsVersion, useAppNewsStateStore } from '../../src/apps/news/news.version';
// capabilities access
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs, useCapabilityTextToImage } from '~/common/components/useCapabilities';
@@ -81,7 +81,8 @@ function AppDebug() {
const chatsCount = useChatStore.getState().conversations?.length;
const uxLabsExperiments = Object.entries(useUXLabsStore.getState()).filter(([_k, v]) => v === true).map(([k, _]) => k).join(', ');
const { folders, enableFolders } = useFolderStore.getState();
const { lastSeenNewsVersion, usageCount } = useAppStateStore.getState();
const { lastSeenNewsVersion } = useAppNewsStateStore.getState();
const { usageCount } = useAppStateStore.getState();
// derived state
+6 -3
View File
@@ -77,9 +77,12 @@ function AppShareTarget() {
setIsDownloading(true);
callBrowseFetchPage(intentURL)
.then(page => {
if (page.stopReason !== 'error')
queueComposerTextAndLaunchApp('\n\n```' + intentURL + '\n' + page.content + '\n```\n');
else
if (page.stopReason !== 'error') {
let pageContent = page.content.markdown || page.content.text || page.content.html || '';
if (pageContent)
pageContent = '\n\n```' + intentURL + '\n' + pageContent + '\n```\n';
queueComposerTextAndLaunchApp(pageContent);
} else
setErrorMessage('Could not read any data' + page.error ? ': ' + page.error : '');
})
.catch(error => setErrorMessage(error?.message || error || 'Unknown error'))
Binary file not shown.

After

Width:  |  Height:  |  Size: 248 KiB

+9 -2
View File
@@ -11,7 +11,7 @@
"utilities"
],
"display": "standalone",
"start_url": "/",
"start_url": "/?source=pwa",
"scope": "/",
"icons": [
{
@@ -51,5 +51,12 @@
"text": "text",
"url": "url"
}
}
},
"shortcuts": [
{
"name": "Call",
"url": "/call",
"description": "Call a Persona"
}
]
}
File diff suppressed because one or more lines are too long
+13 -8
View File
@@ -30,8 +30,16 @@ export function AppBeam() {
// state
const [showDebug, setShowDebug] = React.useState(false);
const conversation = React.useRef<DConversation>(initTestConversation());
const beamStoreApi = React.useRef(initTestBeamStore(conversation.current.messages)).current;
const [conversation, setConversation] = React.useState<DConversation>(() => initTestConversation());
const [beamStoreApi] = React.useState(() => createBeamVanillaStore());
// reinit the beam store if the conversation changes
React.useEffect(() => {
initTestBeamStore(conversation.messages, beamStoreApi);
}, [beamStoreApi, conversation]);
// external state
const isMobile = useIsMobile();
@@ -44,7 +52,7 @@ export function AppBeam() {
const handleClose = React.useCallback(() => {
beamStoreApi.getState().terminate();
beamStoreApi.getState().terminateKeepingSettings();
}, [beamStoreApi]);
@@ -56,10 +64,7 @@ export function AppBeam() {
</Button>
{/* 'open' */}
<Button size='sm' variant='plain' color='neutral' onClick={() => {
conversation.current = initTestConversation();
initTestBeamStore(conversation.current.messages, beamStoreApi);
}}>
<Button size='sm' variant='plain' color='neutral' onClick={() => setConversation(initTestConversation())}>
.open
</Button>
@@ -67,7 +72,7 @@ export function AppBeam() {
<Button size='sm' variant='plain' color='neutral' onClick={handleClose}>
.close
</Button>
</>, [beamStoreApi, handleClose, showDebug]), null, 'AppBeam');
</>, [handleClose, showDebug]), null, 'AppBeam');
return (
+4 -4
View File
@@ -1,5 +1,5 @@
import * as React from 'react';
import { shallow } from 'zustand/shallow';
import { useShallow } from 'zustand/react/shallow';
import { Box, Card, ListDivider, ListItemDecorator, MenuItem, Switch, Typography } from '@mui/joy';
import ArrowBackIcon from '@mui/icons-material/ArrowBack';
@@ -99,7 +99,7 @@ export function Telephone(props: {
// external state
const { chatLLMId, chatLLMDropdown } = useChatLLMDropdown();
const { chatTitle, reMessages } = useChatStore(state => {
const { chatTitle, reMessages } = useChatStore(useShallow(state => {
const conversation = props.callIntent.conversationId
? state.conversations.find(conversation => conversation.id === props.callIntent.conversationId) ?? null
: null;
@@ -107,7 +107,7 @@ export function Telephone(props: {
chatTitle: conversation ? conversationTitle(conversation) : null,
reMessages: conversation ? conversation.messages : null,
};
}, shallow);
}));
const persona = SystemPurposes[props.callIntent.personaId as SystemPurposeId] ?? undefined;
const personaCallStarters = persona?.call?.starters ?? undefined;
const personaVoiceId = overridePersonaVoice ? undefined : (persona?.voices?.elevenLabs?.voiceId ?? undefined);
@@ -225,7 +225,7 @@ export function Telephone(props: {
let finalText = '';
let error: any | null = null;
setPersonaTextInterim('💭...');
llmStreamingChatGenerate(chatLLMId, callPrompt, null, null, responseAbortController.current.signal, ({ textSoFar }) => {
llmStreamingChatGenerate(chatLLMId, callPrompt, 'call', callMessages[0].id, null, null, responseAbortController.current.signal, ({ textSoFar }) => {
const text = textSoFar?.trim();
if (text) {
finalText = text;
+92 -185
View File
@@ -1,8 +1,10 @@
import * as React from 'react';
import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
import type { SxProps } from '@mui/joy/styles/types';
import { useTheme } from '@mui/joy';
import { DEV_MODE_SETTINGS } from '../settings-modal/UxLabsSettings';
import { DiagramConfig, DiagramsModal } from '~/modules/aifn/digrams/DiagramsModal';
import { FlattenerModal } from '~/modules/aifn/flatten/FlattenerModal';
import { TradeConfig, TradeModal } from '~/modules/trade/TradeModal';
@@ -17,17 +19,17 @@ import { ConfirmationModal } from '~/common/components/ConfirmationModal';
import { ConversationsManager } from '~/common/chats/ConversationsManager';
import { GlobalShortcutItem, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
import { PreferencesTab, useOptimaLayout, usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
import { addSnackbar, removeSnackbar } from '~/common/components/useSnackbarsStore';
import { createDMessage, DConversationId, DMessage, getConversation, getConversationSystemPurposeId, useConversation } from '~/common/state/store-chats';
import { getUXLabsHighPerformance, useUXLabsStore } from '~/common/state/store-ux-labs';
import { createDMessage, DConversationId, DMessage, DMessageMetadata, getConversation, getConversationSystemPurposeId, useConversation } from '~/common/state/store-chats';
import { themeBgAppChatComposer } from '~/common/app.theme';
import { useFolderStore } from '~/common/state/store-folders';
import { useIsMobile } from '~/common/components/useMatchMedia';
import { useOptimaLayout, usePluggableOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
import { useRouterQuery } from '~/common/app.routes';
import { useUIPreferencesStore } from '~/common/state/store-ui';
import { useUXLabsStore } from '~/common/state/store-ux-labs';
import type { ComposerOutputMultiPart } from './components/composer/composer.types';
import { ChatBarAltBeam } from './components/ChatBarAltBeam';
@@ -38,14 +40,9 @@ import { ChatDrawerMemo } from './components/ChatDrawer';
import { ChatMessageList } from './components/ChatMessageList';
import { ChatPageMenuItems } from './components/ChatPageMenuItems';
import { Composer } from './components/composer/Composer';
import { getInstantAppChatPanesCount, usePanesManager } from './components/panes/usePanesManager';
import { usePanesManager } from './components/panes/usePanesManager';
import { DEV_MODE_SETTINGS } from '../settings-modal/UxLabsSettings';
import { extractChatCommand, findAllChatCommands } from './commands/commands.registry';
import { runAssistantUpdatingState } from './editors/chat-stream';
import { runBrowseGetPageUpdatingState } from './editors/browse-load';
import { runImageGenerationUpdatingState } from './editors/image-generate';
import { runReActUpdatingState } from './editors/react-tangent';
import { _handleExecute } from './editors/_handleExecute';
// what to say when a chat is new and has no title
@@ -68,6 +65,19 @@ export interface AppChatIntent {
}
const composerOpenSx: SxProps = {
zIndex: 21, // just to allocate a surface, and potentially have a shadow
backgroundColor: themeBgAppChatComposer,
borderTop: `1px solid`,
borderTopColor: 'divider',
p: { xs: 1, md: 2 },
};
const composerClosedSx: SxProps = {
display: 'none',
};
export function AppChat() {
// state
@@ -91,7 +101,7 @@ export function AppChat() {
const showAltTitleBar = useUXLabsStore(state => DEV_MODE_SETTINGS && state.labsChatBarAlt === 'title');
const { openLlmOptions } = useOptimaLayout();
const { openLlmOptions, openModelsSetup, openPreferencesTab } = useOptimaLayout();
const { chatLLM } = useChatLLM();
@@ -187,116 +197,20 @@ export function AppChat() {
// Execution
const _handleExecute = React.useCallback(async (chatModeId: ChatModeId, conversationId: DConversationId, history: DMessage[]): Promise<void> => {
const chatLLMId = getChatLLMId();
if (!chatModeId || !conversationId || !chatLLMId) return;
const handleExecuteAndOutcome = React.useCallback(async (chatModeId: ChatModeId, conversationId: DConversationId, history: DMessage[]) => {
const outcome = await _handleExecute(chatModeId, conversationId, history);
if (outcome === 'err-no-chatllm')
openModelsSetup();
else if (outcome === 'err-t2i-unconfigured')
openPreferencesTab(PreferencesTab.Draw);
else if (outcome === 'err-no-persona')
addSnackbar({ key: 'chat-no-persona', message: 'No persona selected.', type: 'issue' });
else if (outcome === 'err-no-conversation')
addSnackbar({ key: 'chat-no-conversation', message: 'No active conversation.', type: 'issue' });
return outcome === true;
}, [openModelsSetup, openPreferencesTab]);
// Update the system message from the active persona to the history
// NOTE: this does NOT call setMessages anymore (optimization). make sure to:
// 1. all the callers need to pass a new array
// 2. all the exit points need to call setMessages
const cHandler = ConversationsManager.getHandler(conversationId);
cHandler.inlineUpdatePurposeInHistory(history, chatLLMId);
// Valid /commands are intercepted here, and override chat modes, generally for mechanics or sidebars
const lastMessage = history.length > 0 ? history[history.length - 1] : null;
if (lastMessage?.role === 'user') {
const chatCommand = extractChatCommand(lastMessage.text)[0];
if (chatCommand && chatCommand.type === 'cmd') {
switch (chatCommand.providerId) {
case 'ass-browse':
cHandler.messagesReplace(history); // show command
return await runBrowseGetPageUpdatingState(cHandler, chatCommand.params);
case 'ass-t2i':
cHandler.messagesReplace(history); // show command
return await runImageGenerationUpdatingState(cHandler, chatCommand.params);
case 'ass-react':
cHandler.messagesReplace(history); // show command
return await runReActUpdatingState(cHandler, chatCommand.params, chatLLMId);
case 'chat-alter':
// /clear
if (chatCommand.command === '/clear') {
if (chatCommand.params === 'all')
return cHandler.messagesReplace([]);
cHandler.messagesReplace(history);
cHandler.messageAppendAssistant('Issue: this command requires the \'all\' parameter to confirm the operation.', undefined, 'issue', false);
return;
}
// /assistant, /system
Object.assign(lastMessage, {
role: chatCommand.command.startsWith('/s') ? 'system' : chatCommand.command.startsWith('/a') ? 'assistant' : 'user',
sender: 'Bot',
text: chatCommand.params || '',
} satisfies Partial<DMessage>);
return cHandler.messagesReplace(history);
case 'cmd-help':
const chatCommandsText = findAllChatCommands()
.map(cmd => ` - ${cmd.primary}` + (cmd.alternatives?.length ? ` (${cmd.alternatives.join(', ')})` : '') + `: ${cmd.description}`)
.join('\n');
cHandler.messagesReplace(history);
cHandler.messageAppendAssistant('Available Chat Commands:\n' + chatCommandsText, undefined, 'help', false);
return;
case 'mode-beam':
if (chatCommand.isError)
return cHandler.messagesReplace(history);
// remove '/beam ', as we want to be a user chat message
Object.assign(lastMessage, { text: chatCommand.params || '' });
cHandler.messagesReplace(history);
return ConversationsManager.getHandler(conversationId).beamInvoke(history, [], null);
default:
return cHandler.messagesReplace([...history, createDMessage('assistant', 'This command is not supported.')]);
}
}
}
// get the system purpose (note: we don't react to it, or it would invalidate half UI components..)
if (!getConversationSystemPurposeId(conversationId)) {
cHandler.messagesReplace(history);
cHandler.messageAppendAssistant('Issue: no Persona selected.', undefined, 'issue', false);
return;
}
// synchronous long-duration tasks, which update the state as they go
switch (chatModeId) {
case 'generate-text':
cHandler.messagesReplace(history);
return await runAssistantUpdatingState(conversationId, history, chatLLMId, getUXLabsHighPerformance() ? 0 : getInstantAppChatPanesCount());
case 'generate-text-beam':
cHandler.messagesReplace(history);
return cHandler.beamInvoke(history, [], null);
case 'append-user':
return cHandler.messagesReplace(history);
case 'generate-image':
if (!lastMessage?.text) break;
// also add a 'fake' user message with the '/draw' command
cHandler.messagesReplace(history.map(message => (message.id !== lastMessage.id) ? message : {
...message,
text: `/draw ${lastMessage.text}`,
}));
return await runImageGenerationUpdatingState(cHandler, lastMessage.text);
case 'generate-react':
if (!lastMessage?.text) break;
cHandler.messagesReplace(history);
return await runReActUpdatingState(cHandler, lastMessage.text, chatLLMId);
}
// ISSUE: if we're here, it means we couldn't do the job, at least sync the history
console.log('Chat execute: issue running', chatModeId, conversationId, lastMessage);
cHandler.messagesReplace(history);
}, []);
const handleComposerAction = React.useCallback((chatModeId: ChatModeId, conversationId: DConversationId, multiPartMessage: ComposerOutputMultiPart): boolean => {
const handleComposerAction = React.useCallback((conversationId: DConversationId, chatModeId: ChatModeId, multiPartMessage: ComposerOutputMultiPart, metadata?: DMessageMetadata): boolean => {
// validate inputs
if (multiPartMessage.length !== 1 || multiPartMessage[0].type !== 'text-block') {
addSnackbar({
@@ -312,35 +226,38 @@ export function AppChat() {
const userText = multiPartMessage[0].text;
// multicast: send the message to all the panes
const uniqueIds = new Set([conversationId]);
const uniqueConversationIds = new Set([conversationId]);
if (willMulticast)
chatPanes.forEach(pane => pane.conversationId && uniqueIds.add(pane.conversationId));
chatPanes.forEach(pane => pane.conversationId && uniqueConversationIds.add(pane.conversationId));
// we loop to handle both the normal and multicast modes
let enqueued = false;
for (const _cId of uniqueIds) {
const _conversation = getConversation(_cId);
if (_conversation) {
// start execution fire/forget
void _handleExecute(chatModeId, _cId, [..._conversation.messages, createDMessage('user', userText)]);
enqueued = true;
}
}
return enqueued;
}, [chatPanes, willMulticast, _handleExecute]);
let enqueuedAny = false;
for (const _cId of uniqueConversationIds) {
const history = getConversation(_cId)?.messages;
if (!history) continue;
const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId, history: DMessage[]): Promise<void> => {
await _handleExecute('generate-text', conversationId, history);
}, [_handleExecute]);
const newUserMessage = createDMessage('user', userText);
if (metadata) newUserMessage.metadata = metadata;
// fire/forget
void handleExecuteAndOutcome(chatModeId, _cId, [...history, newUserMessage]);
enqueuedAny = true;
}
return enqueuedAny;
}, [chatPanes, handleExecuteAndOutcome, willMulticast]);
const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId, history: DMessage[]) => {
await handleExecuteAndOutcome('generate-text', conversationId, history);
}, [handleExecuteAndOutcome]);
const handleMessageRegenerateLastInFocusedPane = React.useCallback(async () => {
const focusedConversation = getConversation(focusedPaneConversationId);
if (focusedConversation?.messages?.length) {
const lastMessage = focusedConversation.messages[focusedConversation.messages.length - 1];
const history = lastMessage.role === 'assistant' ? focusedConversation.messages.slice(0, -1) : [...focusedConversation.messages];
return await _handleExecute('generate-text', focusedConversation.id, history);
await handleExecuteAndOutcome('generate-text', focusedConversation.id, history);
}
}, [_handleExecute, focusedPaneConversationId]);
}, [focusedPaneConversationId, handleExecuteAndOutcome]);
const handleMessageBeamLastInFocusedPane = React.useCallback(async () => {
// Ctrl + Shift + B
@@ -356,16 +273,16 @@ export function AppChat() {
const handleTextDiagram = React.useCallback((diagramConfig: DiagramConfig | null) => setDiagramConfig(diagramConfig), []);
const handleTextImagine = React.useCallback(async (conversationId: DConversationId, messageText: string): Promise<void> => {
const handleTextImagine = React.useCallback(async (conversationId: DConversationId, messageText: string) => {
const conversation = getConversation(conversationId);
if (!conversation)
return;
const imaginedPrompt = await imaginePromptFromText(messageText) || 'An error sign.';
return await _handleExecute('generate-image', conversationId, [
const imaginedPrompt = await imaginePromptFromText(messageText, conversationId) || 'An error sign.';
await handleExecuteAndOutcome('generate-image', conversationId, [
...conversation.messages,
createDMessage('user', imaginedPrompt),
]);
}, [_handleExecute]);
}, [handleExecuteAndOutcome]);
const handleTextSpeak = React.useCallback(async (text: string): Promise<void> => {
await speakText(text);
@@ -560,8 +477,8 @@ export function AppChat() {
const _paneIsFocused = idx === focusedPaneIndex;
const _paneConversationId = pane.conversationId;
const _paneChatHandler = chatHandlers[idx] ?? null;
const _paneChatBeamStore = beamsStores[idx] ?? null;
const _paneChatBeamIsOpen = !!beamsOpens?.[idx];
const _paneBeamStore = beamsStores[idx] ?? null;
const _paneBeamIsOpen = !!beamsOpens?.[idx] && !!_paneBeamStore;
const _panesCount = chatPanes.length;
const _keyAndId = `chat-pane-${pane.paneId}`;
const _sepId = `sep-pane-${idx}`;
@@ -609,47 +526,45 @@ export function AppChat() {
<ScrollToBottom
bootToBottom
stickToBottomInitial
sx={_paneChatBeamIsOpen ? { display: 'none' } : undefined}
sx={{ display: 'flex', flexDirection: 'column' }}
>
<ChatMessageList
conversationId={_paneConversationId}
conversationHandler={_paneChatHandler}
capabilityHasT2I={capabilityHasT2I}
chatLLMContextTokens={chatLLM?.contextTokens ?? null}
fitScreen={isMobile || isMultiPane}
isMessageSelectionMode={isMessageSelectionMode}
setIsMessageSelectionMode={setIsMessageSelectionMode}
onConversationBranch={handleConversationBranch}
onConversationExecuteHistory={handleConversationExecuteHistory}
onTextDiagram={handleTextDiagram}
onTextImagine={handleTextImagine}
onTextSpeak={handleTextSpeak}
sx={{
minHeight: '100%', // ensures filling of the blank space on newer chats
}}
/>
{!_paneBeamIsOpen && (
<ChatMessageList
conversationId={_paneConversationId}
conversationHandler={_paneChatHandler}
capabilityHasT2I={capabilityHasT2I}
chatLLMContextTokens={chatLLM?.contextTokens ?? null}
fitScreen={isMobile || isMultiPane}
isMessageSelectionMode={isMessageSelectionMode}
setIsMessageSelectionMode={setIsMessageSelectionMode}
onConversationBranch={handleConversationBranch}
onConversationExecuteHistory={handleConversationExecuteHistory}
onTextDiagram={handleTextDiagram}
onTextImagine={handleTextImagine}
onTextSpeak={handleTextSpeak}
sx={{
flexGrow: 1,
}}
/>
)}
{/*<Ephemerals*/}
{/* conversationId={_paneConversationId}*/}
{/* sx={{*/}
{/* // TODO: Fixme post panels?*/}
{/* // flexGrow: 0.1,*/}
{/* flexShrink: 0.5,*/}
{/* overflowY: 'auto',*/}
{/* minHeight: 64,*/}
{/* }}*/}
{/*/>*/}
{_paneBeamIsOpen && (
<ChatBeamWrapper
beamStore={_paneBeamStore}
isMobile={isMobile}
inlineSx={{
flexGrow: 1,
// minHeight: 'calc(100vh - 69px - var(--AGI-Nav-width))',
}}
/>
)}
{/* Visibility and actions are handled via Context */}
<ScrollToBottomButton />
</ScrollToBottom>
{(_paneChatBeamIsOpen && !!_paneChatBeamStore) && (
<ChatBeamWrapper beamStore={_paneChatBeamStore} isMobile={isMobile} />
)}
</Panel>
{/* Panel Separators & Resizers */}
@@ -675,15 +590,7 @@ export function AppChat() {
onAction={handleComposerAction}
onTextImagine={handleTextImagine}
setIsMulticast={setIsComposerMulticast}
sx={beamOpenStoreInFocusedPane ? {
display: 'none',
} : {
zIndex: 21, // just to allocate a surface, and potentially have a shadow
backgroundColor: themeBgAppChatComposer,
borderTop: `1px solid`,
borderTopColor: 'divider',
p: { xs: 1, md: 2 },
}}
sx={beamOpenStoreInFocusedPane ? composerClosedSx : composerOpenSx}
/>
{/* Diagrams */}
+2 -3
View File
@@ -1,5 +1,4 @@
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
import { useUXLabsStore } from '~/common/state/store-ux-labs';
import type { ICommandsProvider } from './ICommandsProvider';
@@ -7,11 +6,11 @@ export const CommandsBeam: ICommandsProvider = {
id: 'mode-beam',
rank: 9,
getCommands: () => useUXLabsStore.getState().labsBeam ? [{
getCommands: () => [{
primary: '/beam',
arguments: ['prompt'],
description: 'Combine the smarts of models',
Icon: ChatBeamIcon,
}] : [],
}],
};
+20 -16
View File
@@ -31,7 +31,7 @@ export function ChatBarAltBeam(props: {
requiresConfirmation: store.isScattering || store.isGatheringAny || store.raysReady > 0,
// actions
setIsMaximized: store.setIsMaximized,
terminateBeam: store.terminate,
terminateBeam: store.terminateKeepingSettings,
})));
@@ -63,16 +63,7 @@ export function ChatBarAltBeam(props: {
return (
<Box sx={{ display: 'flex', gap: { xs: 1, md: 3 }, alignItems: 'center' }}>
{/* [desktop] maximize button, or a disabled spacer */}
{props.isMobile ? null : (
<GoodTooltip title='Maximize'>
<IconButton size='sm' onClick={handleMaximizeBeam}>
<FullscreenRoundedIcon />
</IconButton>
</GoodTooltip>
)}
<Box sx={{ display: 'flex', gap: { xs: 1, md: 2 }, alignItems: 'center' }}>
{/* Title & Status */}
<Typography level='title-md'>
@@ -89,11 +80,24 @@ export function ChatBarAltBeam(props: {
</Typography>
{/* Right Close Icon */}
<GoodTooltip usePlain title={<Box sx={{ p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>Close Beam Mode <KeyStroke combo='Esc' /></Box>}>
<IconButton aria-label='Close' size='sm' onClick={handleCloseBeam}>
<CloseRoundedIcon />
</IconButton>
</GoodTooltip>
<Box sx={{ display: 'flex' }}>
{/* [desktop] maximize button, or a disabled spacer */}
{!props.isMobile && (
<GoodTooltip usePlain title={<Box sx={{ p: 1 }}>Maximize</Box>}>
<IconButton size='sm' onClick={handleMaximizeBeam}>
<FullscreenRoundedIcon />
</IconButton>
</GoodTooltip>
)}
<GoodTooltip usePlain title={<Box sx={{ p: 1, display: 'flex', flexDirection: 'column', gap: 1 }}>Back to Chat <KeyStroke combo='Esc' /></Box>}>
<IconButton aria-label='Close' size='sm' onClick={handleCloseBeam}>
<CloseRoundedIcon />
</IconButton>
</GoodTooltip>
</Box>
{/* Confirmation Modal */}
+14 -7
View File
@@ -1,16 +1,25 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, Modal, ModalClose } from '@mui/joy';
import { BeamStoreApi, useBeamStore } from '~/modules/beam/store-beam.hooks';
import { BeamView } from '~/modules/beam/BeamView';
import { themeZIndexBeamView } from '~/common/app.theme';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
/*const overlaySx: SxProps = {
position: 'absolute',
inset: 0,
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
}*/
export function ChatBeamWrapper(props: {
beamStore: BeamStoreApi,
isMobile: boolean,
inlineSx?: SxProps,
}) {
// state
@@ -36,16 +45,14 @@ export function ChatBeamWrapper(props: {
position: 'absolute',
inset: 0,
}}>
{beamView}
<ScrollToBottom disableAutoStick>
{beamView}
</ScrollToBottom>
<ModalClose sx={{ color: 'white', backgroundColor: 'background.surface', boxShadow: 'xs', mr: 2 }} />
</Box>
</Modal>
) : (
<Box sx={{
position: 'absolute',
inset: 0,
zIndex: themeZIndexBeamView, // stay on top of Message > Chips (:1), and Overlays (:2) - note: Desktop Drawer (:26)
}}>
<Box sx={props.inlineSx}>
{beamView}
</Box>
);
+61 -8
View File
@@ -1,7 +1,7 @@
import * as React from 'react';
import { useShallow } from 'zustand/react/shallow';
import { Box, Button, Dropdown, IconButton, ListDivider, ListItem, ListItemButton, ListItemDecorator, Menu, MenuButton, MenuItem, Tooltip, Typography } from '@mui/joy';
import { Box, Button, Card, CardContent, Dropdown, IconButton, ListDivider, ListItem, ListItemButton, ListItemDecorator, Menu, MenuButton, MenuItem, Tooltip, Typography } from '@mui/joy';
import AddIcon from '@mui/icons-material/Add';
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
import ClearIcon from '@mui/icons-material/Clear';
@@ -10,6 +10,7 @@ import FileDownloadOutlinedIcon from '@mui/icons-material/FileDownloadOutlined';
import FileUploadOutlinedIcon from '@mui/icons-material/FileUploadOutlined';
import FolderIcon from '@mui/icons-material/Folder';
import MoreVertIcon from '@mui/icons-material/MoreVert';
import RocketLaunchRoundedIcon from '@mui/icons-material/RocketLaunchRounded';
import StarOutlineRoundedIcon from '@mui/icons-material/StarOutlineRounded';
import type { DConversationId } from '~/common/state/store-chats';
@@ -18,8 +19,10 @@ import { DFolder, useFolderStore } from '~/common/state/store-folders';
import { DebounceInputMemo } from '~/common/components/DebounceInput';
import { FoldersToggleOff } from '~/common/components/icons/FoldersToggleOff';
import { FoldersToggleOn } from '~/common/components/icons/FoldersToggleOn';
import { Link } from '~/common/components/Link';
import { PageDrawerHeader } from '~/common/layout/optima/components/PageDrawerHeader';
import { PageDrawerList } from '~/common/layout/optima/components/PageDrawerList';
import { ROUTE_APP_NEWS } from '~/common/app.routes';
import { capitalizeFirstLetter } from '~/common/util/textUtils';
import { themeScalingMap, themeZIndexOverMobileDrawer } from '~/common/app.theme';
import { useOptimaDrawers } from '~/common/layout/optima/useOptimaDrawers';
@@ -78,6 +81,13 @@ function ChatDrawer(props: {
const [searchSorting, setSearchSorting] = React.useState<ChatSearchSorting>('frequency');
const [debouncedSearchQuery, setDebouncedSearchQuery] = React.useState('');
const [folderChangeRequest, setFolderChangeRequest] = React.useState<FolderChangeRequest | null>(null);
const [bigAgi2CalloutDismissed, setBigAgi2CalloutDismissed] = React.useState(() => {
try {
return localStorage.getItem('dismissedBA2ChatDrawerNotice') === 'true';
} catch {
return false;
}
});
// external state
const { closeDrawer, closeDrawerOnMobile } = useOptimaDrawers();
@@ -125,6 +135,15 @@ function ChatDrawer(props: {
props.activeConversationId && onConversationsExportDialog(props.activeConversationId, true);
}, [onConversationsExportDialog, props.activeConversationId]);
const handleDismissBigAgi2Callout = React.useCallback(() => {
setBigAgi2CalloutDismissed(true);
try {
localStorage.setItem('dismissedBA2ChatDrawerNotice', 'true');
} catch {
// ignore
}
}, []);
// Folder change request
@@ -277,7 +296,6 @@ function ChatDrawer(props: {
<Button
// variant='outlined'
variant={disableNewButton ? undefined : 'soft'}
color='primary'
disabled={disableNewButton}
onClick={handleButtonNew}
sx={{
@@ -285,16 +303,12 @@ function ChatDrawer(props: {
justifyContent: 'flex-start',
padding: '0px 0.75rem',
// text size
fontSize: 'sm',
fontWeight: 'lg',
// style
// backgroundColor: 'background.popup',
border: '1px solid',
borderColor: 'neutral.outlinedBorder',
borderRadius: 'sm',
'--ListItemDecorator-size': 'calc(2.5rem - 1px)', // compensate for the border
// backgroundColor: 'background.popup',
// boxShadow: (disableNewButton || props.isMobile) ? 'none' : 'xs',
// transition: 'box-shadow 0.2s',
}}
@@ -315,7 +329,7 @@ function ChatDrawer(props: {
bottomBarBasis={filteredChatsBarBasis}
onConversationActivate={handleConversationActivate}
onConversationBranch={onConversationBranch}
onConversationDelete={handleConversationDeleteNoConfirmation}
onConversationDeleteNoConfirmation={handleConversationDeleteNoConfirmation}
onConversationExport={onConversationsExportDialog}
onConversationFolderChange={handleConversationFolderChange}
/>
@@ -345,6 +359,45 @@ function ChatDrawer(props: {
)}
</Box>
{/* Big-AGI 2.0 Callout */}
{!bigAgi2CalloutDismissed && (
<Box sx={{ p: 2 }}>
<Card variant='solid' color='primary' invertedColors>
<CardContent sx={{ gap: 1, position: 'relative' }}>
<IconButton
size='sm'
onClick={handleDismissBigAgi2Callout}
sx={{
position: 'absolute',
top: -4,
right: -8,
}}
>
<ClearIcon />
</IconButton>
<Typography level='title-sm'>
Big-AGI 2.0 is Live!
</Typography>
<Typography level='body-xs' sx={{ mb: 1 }}>
Experience Beam 2, Personas, and Cloud Sync.
</Typography>
<Button
fullWidth
size='sm'
variant='solid'
color='neutral'
endDecorator={<RocketLaunchRoundedIcon />}
component={Link}
href={ROUTE_APP_NEWS}
noLinkStyle
>
Learn More
</Button>
</CardContent>
</Card>
</Box>
)}
<ListDivider sx={{ my: 0 }} />
{/* Bottom commands */}
+14 -5
View File
@@ -42,7 +42,7 @@ export const ChatDrawerItemMemo = React.memo(ChatDrawerItem, (prev, next) =>
prev.bottomBarBasis === next.bottomBarBasis &&
prev.onConversationActivate === next.onConversationActivate &&
prev.onConversationBranch === next.onConversationBranch &&
prev.onConversationDelete === next.onConversationDelete &&
prev.onConversationDeleteNoConfirmation === next.onConversationDeleteNoConfirmation &&
prev.onConversationExport === next.onConversationExport &&
prev.onConversationFolderChange === next.onConversationFolderChange,
);
@@ -76,7 +76,7 @@ function ChatDrawerItem(props: {
bottomBarBasis: number,
onConversationActivate: (conversationId: DConversationId, closeMenu: boolean) => void,
onConversationBranch: (conversationId: DConversationId, messageId: string | null) => void,
onConversationDelete: (conversationId: DConversationId) => void,
onConversationDeleteNoConfirmation: (conversationId: DConversationId) => void,
onConversationExport: (conversationId: DConversationId, exportAll: boolean) => void,
onConversationFolderChange: (folderChangeRequest: FolderChangeRequest) => void,
}) {
@@ -155,7 +155,16 @@ function ChatDrawerItem(props: {
// Delete
const handleDeleteButtonShow = React.useCallback(() => setDeleteArmed(true), []);
const { onConversationDeleteNoConfirmation } = props;
const handleDeleteButtonShow = React.useCallback((event: React.MouseEvent) => {
// special case: if 'Shift' is pressed, delete immediately
if (event.shiftKey) {
event.stopPropagation();
onConversationDeleteNoConfirmation(conversationId);
return;
}
setDeleteArmed(true);
}, [conversationId, onConversationDeleteNoConfirmation]);
const handleDeleteButtonHide = React.useCallback(() => setDeleteArmed(false), []);
@@ -163,9 +172,9 @@ function ChatDrawerItem(props: {
if (deleteArmed) {
setDeleteArmed(false);
event.stopPropagation();
props.onConversationDelete(conversationId);
onConversationDeleteNoConfirmation(conversationId);
}
}, [conversationId, deleteArmed, props]);
}, [conversationId, deleteArmed, onConversationDeleteNoConfirmation]);
const textSymbol = SystemPurposes[systemPurposeId]?.symbol || '❓';
+14 -6
View File
@@ -136,6 +136,10 @@ export function ChatMessageList(props: {
}), false);
}, [conversationId, editMessage]);
const handleReplyTo = React.useCallback((_messageId: string, text: string) => {
props.conversationHandler?.getOverlayStore().getState().setReplyToText(text);
}, [props.conversationHandler]);
const handleTextDiagram = React.useCallback(async (messageId: string, text: string) => {
conversationId && onTextDiagram({ conversationId: conversationId, messageId, text });
}, [conversationId, onTextDiagram]);
@@ -225,12 +229,15 @@ export function ChatMessageList(props: {
return (
<List sx={{
p: 0, ...(props.sx || {}),
// this makes sure that the the window is scrolled to the bottom (column-reverse)
display: 'flex',
flexDirection: 'column',
p: 0,
...(props.sx || {}),
// fix for the double-border on the last message (one by the composer, one to the bottom of the message)
// marginBottom: '-1px',
// layout
display: 'flex',
flexDirection: 'column',
}}>
{optionalTranslationWarning}
@@ -276,9 +283,10 @@ export function ChatMessageList(props: {
onMessageEdit={handleMessageEdit}
onMessageToggleUserFlag={handleMessageToggleUserFlag}
onMessageTruncate={handleMessageTruncate}
// onReplyTo={handleReplyTo}
onTextDiagram={handleTextDiagram}
onTextImagine={handleTextImagine}
onTextSpeak={handleTextSpeak}
onTextImagine={capabilityHasT2I ? handleTextImagine : undefined}
onTextSpeak={isSpeakable ? handleTextSpeak : undefined}
/>
);
@@ -7,7 +7,6 @@ import { KeyStroke, platformAwareKeystrokes } from '~/common/components/KeyStrok
import { useUIPreferencesStore } from '~/common/state/store-ui';
import { ChatModeId } from '../../AppChat';
import { useUXLabsStore } from '~/common/state/store-ux-labs';
interface ChatModeDescription {
@@ -63,7 +62,6 @@ export function ChatModeMenu(props: {
}) {
// external state
const labsBeam = useUXLabsStore(state => state.labsBeam);
const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline);
return (
@@ -81,7 +79,6 @@ export function ChatModeMenu(props: {
{/* ChatMode items */}
{Object.entries(ChatModeItems)
.filter(([key, _data]) => key !== 'generate-text-beam' || labsBeam)
.filter(([_key, data]) => !data.hideOnDesktop || props.isMobile)
.map(([key, data]) =>
<MenuItem key={'chat-mode-' + key} onClick={() => props.onSetChatModeId(key as ChatModeId)}>
+69 -37
View File
@@ -1,5 +1,5 @@
import * as React from 'react';
import { shallow } from 'zustand/shallow';
import { useShallow } from 'zustand/react/shallow';
import { fileOpen, FileWithHandle } from 'browser-fs-access';
import { Box, Button, ButtonGroup, Card, Dropdown, Grid, IconButton, Menu, MenuButton, MenuItem, Textarea, Tooltip, Typography } from '@mui/joy';
@@ -23,10 +23,11 @@ import type { LLMOptionsOpenAI } from '~/modules/llms/vendors/openai/openai.vend
import { useBrowseCapability } from '~/modules/browse/store-module-browsing';
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
import { ConversationsManager } from '~/common/chats/ConversationsManager';
import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout';
import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition';
import { animationEnterBelow } from '~/common/util/animUtils';
import { conversationTitle, DConversationId, getConversation, useChatStore } from '~/common/state/store-chats';
import { conversationTitle, DConversationId, DMessageMetadata, getConversation, useChatStore } from '~/common/state/store-chats';
import { countModelTokens } from '~/common/util/token-counter';
import { isMacUser } from '~/common/util/pwaUtils';
import { launchAppCall } from '~/common/app.routes';
@@ -36,6 +37,7 @@ import { playSoundUrl } from '~/common/util/audioUtils';
import { supportsClipboardRead } from '~/common/util/clipboardUtils';
import { supportsScreenCapture } from '~/common/util/screenCaptureUtils';
import { useAppStateStore } from '~/common/state/store-appstate';
import { useChatOverlayStore } from '~/common/chats/store-chat-overlay-vanilla';
import { useDebouncer } from '~/common/components/useDebouncer';
import { useGlobalShortcut } from '~/common/components/useGlobalShortcut';
import { useUICounter, useUIPreferencesStore } from '~/common/state/store-ui';
@@ -48,7 +50,7 @@ import { useActileManager } from './actile/useActileManager';
import type { AttachmentId } from './attachments/store-attachments';
import { Attachments } from './attachments/Attachments';
import { getTextBlockText, useLLMAttachments } from './attachments/useLLMAttachments';
import { getSingleTextBlockText, useLLMAttachments } from './attachments/useLLMAttachments';
import { useAttachments } from './attachments/useAttachments';
import type { ComposerOutputMultiPart } from './composer.types';
@@ -63,6 +65,7 @@ import { ButtonMicMemo } from './buttons/ButtonMic';
import { ButtonMultiChatMemo } from './buttons/ButtonMultiChat';
import { ButtonOptionsDraw } from './buttons/ButtonOptionsDraw';
import { ChatModeMenu } from './ChatModeMenu';
import { ReplyToBubble } from '../message/ReplyToBubble';
import { TokenBadgeMemo } from './TokenBadge';
import { TokenProgressbarMemo } from './TokenProgressbar';
import { useComposerStartupText } from './store-composer';
@@ -98,7 +101,7 @@ export function Composer(props: {
capabilityHasT2I: boolean;
isMulticast: boolean | null;
isDeveloperMode: boolean;
onAction: (chatModeId: ChatModeId, conversationId: DConversationId, multiPartMessage: ComposerOutputMultiPart) => boolean;
onAction: (conversationId: DConversationId, chatModeId: ChatModeId, multiPartMessage: ComposerOutputMultiPart, metadata?: DMessageMetadata) => boolean;
onTextImagine: (conversationId: DConversationId, text: string) => void;
setIsMulticast: (on: boolean) => void;
sx?: SxProps;
@@ -114,11 +117,11 @@ export function Composer(props: {
// external state
const { openPreferencesTab /*, setIsFocusedMode*/ } = useOptimaLayout();
const { labsAttachScreenCapture, labsBeam, labsCameraDesktop } = useUXLabsStore(state => ({
const { labsAttachScreenCapture, labsCameraDesktop, labsShowCost } = useUXLabsStore(useShallow(state => ({
labsAttachScreenCapture: state.labsAttachScreenCapture,
labsBeam: state.labsBeam,
labsCameraDesktop: state.labsCameraDesktop,
}), shallow);
labsShowCost: state.labsShowCost,
})));
const timeToShowTips = useAppStateStore(state => state.usageCount > 2);
const { novel: explainShiftEnter, touch: touchShiftEnter } = useUICounter('composer-shift-enter');
const { novel: explainAltEnter, touch: touchAltEnter } = useUICounter('composer-alt-enter');
@@ -126,7 +129,7 @@ export function Composer(props: {
const [startupText, setStartupText] = useComposerStartupText();
const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline);
const chatMicTimeoutMs = useChatMicTimeoutMsValue();
const { assistantAbortible, systemPurposeId, tokenCount: _historyTokenCount, stopTyping } = useChatStore(state => {
const { assistantAbortible, systemPurposeId, tokenCount: _historyTokenCount, stopTyping } = useChatStore(useShallow(state => {
const conversation = state.conversations.find(_c => _c.id === props.conversationId);
return {
assistantAbortible: conversation ? !!conversation.abortController : false,
@@ -134,11 +137,18 @@ export function Composer(props: {
tokenCount: conversation ? conversation.tokenCount : 0,
stopTyping: state.stopTyping,
};
}, shallow);
}));
const { inComposer: browsingInComposer } = useBrowseCapability();
const { attachAppendClipboardItems, attachAppendDataTransfer, attachAppendEgoMessage, attachAppendFile, attachments: _attachments, clearAttachments, removeAttachment } =
useAttachments(browsingInComposer && !composeText.startsWith('/'));
// external overlay state (extra conversationId-dependent state)
const conversationHandler = props.conversationId ? ConversationsManager.getHandler(props.conversationId) : null;
const conversationOverlayStore = conversationHandler?.getOverlayStore() ?? null;
const { replyToGenerateText } = useChatOverlayStore(conversationOverlayStore, useShallow(store => ({
replyToGenerateText: chatModeId === 'generate-text' ? store.replyToText?.trim() || null : null,
})));
// derived state
@@ -163,6 +173,8 @@ export function Composer(props: {
const tokensHistory = _historyTokenCount;
const tokensReponseMax = (props.chatLLM?.options as LLMOptionsOpenAI /* FIXME: BIG ASSUMPTION */)?.llmResponseTokens || 0;
const tokenLimit = props.chatLLM?.contextTokens || 0;
const tokenPriceIn = props.chatLLM?.pricing?.chatIn;
const tokenPriceOut = props.chatLLM?.pricing?.chatOut;
// Effect: load initial text if queued up (e.g. by /link/share_targe)
@@ -174,6 +186,18 @@ export function Composer(props: {
}, [setComposeText, setStartupText, startupText]);
// Overlay actions
const handleReplyToCleared = React.useCallback(() => {
conversationOverlayStore?.getState().setReplyToText(null);
}, [conversationOverlayStore]);
React.useEffect(() => {
if (replyToGenerateText)
setTimeout(() => props.composerTextAreaRef.current?.focus(), 1 /* prevent focus theft */);
}, [replyToGenerateText, props.composerTextAreaRef]);
// Primary button
const { conversationId, onAction } = props;
@@ -182,28 +206,32 @@ export function Composer(props: {
if (!conversationId)
return false;
// get attachments
const multiPartMessage = llmAttachments.getAttachmentsOutputs(composerText || null);
// get the multipart output including all attachments
const multiPartMessage = llmAttachments.collapseWithAttachments(composerText || null);
if (!multiPartMessage.length)
return false;
// metadata
const metadata = replyToGenerateText ? { inReplyToText: replyToGenerateText } : undefined;
// send the message
const enqueued = onAction(_chatModeId, conversationId, multiPartMessage);
const enqueued = onAction(conversationId, _chatModeId, multiPartMessage, metadata);
if (enqueued) {
clearAttachments();
handleReplyToCleared();
setComposeText('');
}
return enqueued;
}, [clearAttachments, conversationId, llmAttachments, onAction, setComposeText]);
}, [clearAttachments, conversationId, handleReplyToCleared, llmAttachments, onAction, replyToGenerateText, setComposeText]);
const handleSendClicked = React.useCallback(() => {
handleSendAction(chatModeId, composeText);
}, [chatModeId, composeText, handleSendAction]);
const handleSendTextBeamClicked = React.useCallback(() => {
labsBeam && handleSendAction('generate-text-beam', composeText);
}, [composeText, handleSendAction, labsBeam]);
handleSendAction('generate-text-beam', composeText);
}, [composeText, handleSendAction]);
const handleStopClicked = React.useCallback(() => {
!!props.conversationId && stopTyping(props.conversationId);
@@ -304,15 +332,15 @@ export function Composer(props: {
// Alt (Windows) or Option (Mac) + Enter: append the message instead of sending it
if (e.altKey) {
touchAltEnter();
handleSendAction('append-user', composeText);
if (handleSendAction('append-user', composeText))
touchAltEnter();
return e.preventDefault();
}
// Ctrl (Windows) or Command (Mac) + Enter: send for beaming
if (labsBeam && ((isMacUser && e.metaKey && !e.ctrlKey) || (!isMacUser && e.ctrlKey && !e.metaKey))) {
touchCtrlEnter();
handleSendAction('generate-text-beam', composeText);
if ((isMacUser && e.metaKey && !e.ctrlKey) || (!isMacUser && e.ctrlKey && !e.metaKey)) {
if (handleSendAction('generate-text-beam', composeText))
touchCtrlEnter();
return e.preventDefault();
}
@@ -326,7 +354,7 @@ export function Composer(props: {
}
}
}, [actileInterceptKeydown, assistantAbortible, chatModeId, composeText, enterIsNewline, handleSendAction, labsBeam, touchAltEnter, touchCtrlEnter, touchShiftEnter]);
}, [actileInterceptKeydown, assistantAbortible, chatModeId, composeText, enterIsNewline, handleSendAction, touchAltEnter, touchCtrlEnter, touchShiftEnter]);
// Focus mode
@@ -427,8 +455,8 @@ export function Composer(props: {
const handleAttachmentInlineText = React.useCallback((attachmentId: AttachmentId) => {
setComposeText(currentText => {
const attachmentOutputs = llmAttachments.getAttachmentOutputs(currentText, attachmentId);
const inlinedText = getTextBlockText(attachmentOutputs) || '';
const inlinedMultiPart = llmAttachments.collapseWithAttachment(currentText, attachmentId);
const inlinedText = getSingleTextBlockText(inlinedMultiPart) || '';
removeAttachment(attachmentId);
return inlinedText;
});
@@ -436,8 +464,8 @@ export function Composer(props: {
const handleAttachmentsInlineText = React.useCallback(() => {
setComposeText(currentText => {
const attachmentsOutputs = llmAttachments.getAttachmentsOutputs(currentText);
const inlinedText = getTextBlockText(attachmentsOutputs) || '';
const inlinedMultiPart = llmAttachments.collapseWithAttachments(currentText);
const inlinedText = getSingleTextBlockText(inlinedMultiPart) || '';
clearAttachments();
return inlinedText;
});
@@ -495,7 +523,8 @@ export function Composer(props: {
const isReAct = chatModeId === 'generate-react';
const isDraw = chatModeId === 'generate-image';
const showChatExtras = isText;
const showChatReplyTo = !!replyToGenerateText;
const showChatExtras = isText && !showChatReplyTo;
const buttonVariant: VariantProp = (isAppend || (isMobile && isTextBeam)) ? 'outlined' : 'solid';
@@ -525,15 +554,16 @@ export function Composer(props: {
isDraw ? 'Describe an idea or a drawing...'
: isReAct ? 'Multi-step reasoning question...'
: isTextBeam ? 'Beam: combine the smarts of models...'
: props.isDeveloperMode ? 'Chat with me' + (isDesktop ? ' · drop source' : '') + ' · attach code...'
: props.capabilityHasT2I ? 'Chat · /beam · /draw · drop files...'
: 'Chat · /react · drop files...';
: showChatReplyTo ? 'Chat about this'
: props.isDeveloperMode ? 'Chat with me' + (isDesktop ? ' · drop source' : '') + ' · attach code...'
: props.capabilityHasT2I ? 'Chat · /beam · /draw · drop files...'
: 'Chat · /react · drop files...';
if (isDesktop && timeToShowTips) {
if (explainShiftEnter)
textPlaceholder += !enterIsNewline ? '\n\n💡 Shift + Enter to add a new line' : '\n\n💡 Shift + Enter to send';
else if (explainAltEnter)
textPlaceholder += platformAwareKeystrokes('\n\n💡 Tip: Alt + Enter to just append the message');
else if (labsBeam && explainCtrlEnter)
else if (explainCtrlEnter)
textPlaceholder += platformAwareKeystrokes('\n\n💡 Tip: Ctrl + Enter to beam');
}
@@ -618,7 +648,7 @@ export function Composer(props: {
variant='outlined'
color={isDraw ? 'warning' : isReAct ? 'success' : undefined}
autoFocus
minRows={isMobile ? 4 : 5}
minRows={isMobile ? 4 : showChatReplyTo ? 4 : 5}
maxRows={isMobile ? 8 : 10}
placeholder={textPlaceholder}
value={composeText}
@@ -629,6 +659,7 @@ export function Composer(props: {
onPasteCapture={handleAttachCtrlV}
// onFocusCapture={handleFocusModeOn}
// onBlurCapture={handleFocusModeOff}
endDecorator={showChatReplyTo && <ReplyToBubble replyToText={replyToGenerateText} onClear={handleReplyToCleared} className='reply-to-bubble' />}
slotProps={{
textarea: {
enterKeyHint: enterIsNewline ? 'enter' : 'send',
@@ -641,16 +672,16 @@ export function Composer(props: {
}}
sx={{
backgroundColor: 'background.level1',
'&:focus-within': { backgroundColor: 'background.popup' },
'&:focus-within': { backgroundColor: 'background.popup', '.reply-to-bubble': { backgroundColor: 'background.popup' } },
lineHeight: lineHeightTextareaMd,
}} />
{tokenLimit > 0 && (tokensComposer > 0 || (tokensHistory + tokensReponseMax) > 0) && (
<TokenProgressbarMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} />
{!showChatReplyTo && tokenLimit > 0 && (tokensComposer > 0 || (tokensHistory + tokensReponseMax) > 0) && (
<TokenProgressbarMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} tokenPriceIn={tokenPriceIn} tokenPriceOut={tokenPriceOut} />
)}
{!!tokenLimit && (
<TokenBadgeMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} showExcess absoluteBottomRight />
{!showChatReplyTo && tokenLimit > 0 && (
<TokenBadgeMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} tokenPriceIn={tokenPriceIn} tokenPriceOut={tokenPriceOut} showCost={labsShowCost} showExcess absoluteBottomRight />
)}
</Box>
@@ -811,9 +842,10 @@ export function Composer(props: {
</ButtonGroup>
{/* [desktop] secondary-top buttons */}
{labsBeam && isDesktop && showChatExtras && !assistantAbortible && (
{isDesktop && showChatExtras && !assistantAbortible && (
<ButtonBeamMemo
disabled={!props.conversationId || !chatLLMId || !llmAttachments.isOutputAttacheable}
hasContent={!!composeText}
onClick={handleSendTextBeamClicked}
/>
)}
+110 -45
View File
@@ -3,41 +3,81 @@ import * as React from 'react';
import { Badge, Box, ColorPaletteProp, Tooltip } from '@mui/joy';
function alignRight(value: number, columnSize: number = 7) {
function alignRight(value: number, columnSize: number = 8) {
const str = value.toLocaleString();
return str.padStart(columnSize);
}
function formatCost(cost: number) {
return cost < 1
? (cost * 100).toFixed(cost < 0.010 ? 2 : 1) + ' ¢'
: '$ ' + cost.toFixed(2);
}
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number): {
color: ColorPaletteProp, message: string, remainingTokens: number
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number, tokenPriceIn?: number, tokenPriceOut?: number): {
color: ColorPaletteProp,
message: string,
remainingTokens: number,
costMax?: number,
costMin?: number,
} {
const usedTokens = directTokens + (historyTokens || 0) + (responseMaxTokens || 0);
const remainingTokens = tokenLimit - usedTokens;
const usedInputTokens = directTokens + (historyTokens || 0);
const usedMaxTokens = usedInputTokens + (responseMaxTokens || 0);
const remainingTokens = tokenLimit - usedMaxTokens;
const gteLimit = (remainingTokens <= 0 && tokenLimit > 0);
// message
let message: string = gteLimit ? '⚠️ ' : '';
// costs
let costMax: number | undefined = undefined;
let costMin: number | undefined = undefined;
// no limit: show used tokens only
if (!tokenLimit) {
message += `Requested: ${usedTokens.toLocaleString()} tokens`;
message += `Requested: ${usedMaxTokens.toLocaleString()} tokens`;
}
// has full information (d + i < l)
else if (historyTokens || responseMaxTokens) {
message +=
`${Math.abs(remainingTokens).toLocaleString()} ${remainingTokens >= 0 ? 'available' : 'excess'} message tokens\n\n` +
`${Math.abs(remainingTokens).toLocaleString()} ${remainingTokens >= 0 ? 'available' : 'excess'} message tokens\n\n` +
` = Model max tokens: ${alignRight(tokenLimit)}\n` +
` - This message: ${alignRight(directTokens)}\n` +
` - History: ${alignRight(historyTokens || 0)}\n` +
` - Max response: ${alignRight(responseMaxTokens || 0)}`;
// add the price, if available
if (tokenPriceIn || tokenPriceOut) {
costMin = tokenPriceIn ? usedInputTokens * tokenPriceIn / 1E6 : undefined;
const costOutMax = (tokenPriceOut && responseMaxTokens) ? responseMaxTokens * tokenPriceOut / 1E6 : undefined;
if (costMin || costOutMax) {
message += `\n\n\n▶ Chat Turn Cost (max, approximate)\n`;
if (costMin) message += '\n' +
` Input tokens: ${alignRight(usedInputTokens)}\n` +
` Input Price $/M: ${tokenPriceIn!.toFixed(2).padStart(8)}\n` +
` Input cost: ${('$' + costMin!.toFixed(4)).padStart(8)}\n`;
if (costOutMax) message += '\n' +
` Max output tokens: ${alignRight(responseMaxTokens!)}\n` +
` Output Price $/M: ${tokenPriceOut!.toFixed(2).padStart(8)}\n` +
` Max output cost: ${('$' + costOutMax!.toFixed(4)).padStart(8)}\n`;
if (costMin) message += '\n' +
` > Min turn cost: ${formatCost(costMin).padStart(8)}`;
costMax = (costMin && costOutMax) ? costMin + costOutMax : undefined;
if (costMax) message += '\n' +
` < Max turn cost: ${formatCost(costMax).padStart(8)}`;
}
}
}
// Cleaner mode: d + ? < R (total is the remaining in this case)
else {
message +=
`${(tokenLimit + usedTokens).toLocaleString()} available tokens after deleting this\n\n` +
`${(tokenLimit + usedMaxTokens).toLocaleString()} available tokens after deleting this\n\n` +
` = Currently free: ${alignRight(tokenLimit)}\n` +
` + This message: ${alignRight(usedTokens)}`;
` + This message: ${alignRight(usedMaxTokens)}`;
}
const color: ColorPaletteProp =
@@ -47,23 +87,21 @@ export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, h
? 'warning'
: 'primary';
return { color, message, remainingTokens };
return { color, message, remainingTokens, costMax, costMin };
}
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.JSX.Element }) =>
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.ReactElement }) =>
<Tooltip
placement={props.placement}
variant={props.color !== 'primary' ? 'solid' : 'soft'} color={props.color}
title={props.message
? <Box sx={{ p: 2, whiteSpace: 'pre' }}>
{props.message}
</Box>
: null
}
title={props.message ? <Box sx={{ p: 2, whiteSpace: 'pre' }}>{props.message}</Box> : null}
sx={{
fontFamily: 'code',
boxShadow: 'xl',
// fontSize: '0.8125rem',
border: '1px solid',
borderColor: `${props.color}.outlinedColor`,
boxShadow: 'md',
}}
>
{props.children}
@@ -76,38 +114,65 @@ export const TokenTooltip = (props: { message: string | null, color: ColorPalett
export const TokenBadgeMemo = React.memo(TokenBadge);
function TokenBadge(props: {
direct: number, history?: number, responseMax?: number, limit: number,
showExcess?: boolean, absoluteBottomRight?: boolean, inline?: boolean,
direct: number,
history?: number,
responseMax?: number,
limit: number,
tokenPriceIn?: number,
tokenPriceOut?: number,
showCost?: boolean
showExcess?: boolean,
absoluteBottomRight?: boolean,
inline?: boolean,
}) {
const { message, color, remainingTokens } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax);
const { message, color, remainingTokens, costMax, costMin } =
tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
? Math.abs(remainingTokens)
: props.direct;
let badgeValue: string;
const showAltCosts = !!props.showCost && !!costMax && costMin !== undefined;
if (showAltCosts) {
badgeValue = '< ' + formatCost(costMax);
} else {
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
? Math.abs(remainingTokens)
: props.direct;
badgeValue = value.toLocaleString();
}
const shallHide = !props.direct && remainingTokens >= 0 && !showAltCosts;
if (shallHide) return null;
return (
<Badge
variant='solid' color={color} max={100000}
invisible={!props.direct && remainingTokens >= 0}
badgeContent={
<TokenTooltip color={color} message={message}>
<span>{value.toLocaleString()}</span>
</TokenTooltip>
}
sx={{
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
cursor: 'help',
}}
slotProps={{
badge: {
sx: {
fontFamily: 'code',
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
<TokenTooltip color={color} message={message} placement='top-end'>
<Badge
variant='soft' color={color} max={1000000}
// invisible={shallHide}
badgeContent={badgeValue}
slotProps={{
root: {
sx: {
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
cursor: 'help',
},
},
},
}}
/>
badge: {
sx: {
// the badge (not the tooltip)
// boxShadow: 'sm',
fontFamily: 'code',
fontSize: 'xs',
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
},
},
}}
/>
</TokenTooltip>
);
}
@@ -12,7 +12,15 @@ import { tokensPrettyMath, TokenTooltip } from './TokenBadge';
*/
export const TokenProgressbarMemo = React.memo(TokenProgressbar);
function TokenProgressbar(props: { direct: number, history: number, responseMax: number, limit: number }) {
function TokenProgressbar(props: {
direct: number,
history: number,
responseMax: number,
limit: number,
tokenPriceIn?: number,
tokenPriceOut?: number,
}) {
// external state
const theme = useTheme();
@@ -40,7 +48,7 @@ function TokenProgressbar(props: { direct: number, history: number, responseMax:
const overflowColor = theme.palette.danger.softColor;
// tooltip message/color
const { message, color } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax);
const { message, color } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
// sizes
const containerHeight = 8;
@@ -153,7 +153,11 @@ export function AttachmentMenu(props: {
{/* Converters: {aConverters.map(((converter, idx) => ` ${converter.id}${(idx === aConverterIdx) ? '*' : ''}`)).join(', ')}*/}
{/*</Typography>*/}
<Typography level='body-xs'>
🡒 {isOutputMissing ? 'empty' : aOutputs.map(output => `${output.type}, ${output.type === 'text-block' ? output.text.length.toLocaleString() : '(base64 image)'} bytes`).join(' · ')}
🡒 {isOutputMissing ? 'empty' : aOutputs.map(output => `${output.type}, ${output.type === 'text-block'
? output.text.length.toLocaleString()
: output.type === 'image-part'
? output.base64Url.length.toLocaleString()
: '(other)'} bytes`).join(' · ')}
</Typography>
{!!tokenCountApprox && <Typography level='body-xs'>
🡒 {tokenCountApprox.toLocaleString()} tokens
@@ -153,7 +153,7 @@ export function Attachments(props: {
</MenuItem>
<MenuItem onClick={handleClearAttachments}>
<ListItemDecorator><ClearIcon /></ListItemDecorator>
Clear
Clear{attachments.length > 5 ? <span style={{ opacity: 0.5 }}> {attachments.length} attachments</span> : null}
</MenuItem>
</CloseableMenu>
)}
@@ -2,7 +2,7 @@ import { callBrowseFetchPage } from '~/modules/browse/browse.client';
import { createBase36Uid } from '~/common/util/textUtils';
import { htmlTableToMarkdown } from '~/common/util/htmlTableToMarkdown';
import { pdfToText } from '~/common/util/pdfUtils';
import { pdfToImageDataURLs, pdfToText } from '~/common/util/pdfUtils';
import type { Attachment, AttachmentConverter, AttachmentId, AttachmentInput, AttachmentSource } from './store-attachments';
import type { ComposerOutputMultiPart } from '../composer.types';
@@ -58,16 +58,12 @@ export async function attachmentLoadInputAsync(source: Readonly<AttachmentSource
edit({ label: source.refUrl, ref: source.refUrl });
try {
const page = await callBrowseFetchPage(source.url);
if (page.content) {
edit({
input: {
mimeType: 'text/plain',
data: page.content,
dataSize: page.content.length,
},
});
} else
edit({ inputError: 'No content found at this link' });
edit(
page.content.markdown ? { input: { mimeType: 'text/markdown', data: page.content.markdown, dataSize: page.content.markdown.length } }
: page.content.text ? { input: { mimeType: 'text/plain', data: page.content.text, dataSize: page.content.text.length } }
: page.content.html ? { input: { mimeType: 'text/html', data: page.content.html, dataSize: page.content.html.length } }
: { inputError: 'No content found at this link' },
);
} catch (error: any) {
edit({ inputError: `Issue downloading page: ${error?.message || (typeof error === 'string' ? error : JSON.stringify(error))}` });
}
@@ -297,7 +293,7 @@ export async function attachmentPerformConversion(attachment: Readonly<Attachmen
case 'pdf-text':
if (!(input.data instanceof ArrayBuffer)) {
console.log('Expected ArrayBuffer for PDF converter, got:', typeof input.data);
console.log('Expected ArrayBuffer for PDF text converter, got:', typeof input.data);
break;
}
// duplicate the ArrayBuffer to avoid mutation
@@ -312,7 +308,29 @@ export async function attachmentPerformConversion(attachment: Readonly<Attachmen
break;
case 'pdf-images':
// TODO: extract all pages as individual images
if (!(input.data instanceof ArrayBuffer)) {
console.log('Expected ArrayBuffer for PDF images converter, got:', typeof input.data);
break;
}
// duplicate the ArrayBuffer to avoid mutation
const pdfData2 = new Uint8Array(input.data.slice(0));
try {
const imageDataURLs = await pdfToImageDataURLs(pdfData2);
imageDataURLs.forEach((pdfImg, index) => {
outputs.push({
type: 'image-part',
base64Url: pdfImg.base64Url,
metadata: {
title: `Page ${index + 1}`,
width: pdfImg.width,
height: pdfImg.height,
},
collapsible: false,
});
});
} catch (error) {
console.error('Error converting PDF to images:', error);
}
break;
case 'image':
@@ -10,8 +10,8 @@ import type { ComposerOutputMultiPart, ComposerOutputPartType } from '../compose
export interface LLMAttachments {
attachments: LLMAttachment[];
getAttachmentOutputs: (initialTextBlockText: string | null, attachmentId: AttachmentId) => ComposerOutputMultiPart;
getAttachmentsOutputs: (initialTextBlockText: string | null) => ComposerOutputMultiPart;
collapseWithAttachment: (initialTextBlockText: string | null, attachmentId: AttachmentId) => ComposerOutputMultiPart;
collapseWithAttachments: (initialTextBlockText: string | null) => ComposerOutputMultiPart;
isOutputAttacheable: boolean;
isOutputTextInlineable: boolean;
tokenCountApprox: number;
@@ -37,13 +37,13 @@ export function useLLMAttachments(attachments: Attachment[], chatLLMId: DLLMId |
const llmAttachments = attachments.map(attachment => toLLMAttachment(attachment, supportedOutputPartTypes, chatLLMId));
const getAttachmentOutputs = (initialTextBlockText: string | null, attachmentId: AttachmentId): ComposerOutputMultiPart => {
const collapseWithAttachment = (initialTextBlockText: string | null, attachmentId: AttachmentId): ComposerOutputMultiPart => {
// get outputs of a specific attachment
const outputs = attachments.find(a => a.id === attachmentId)?.outputs || [];
return attachmentCollapseOutputs(initialTextBlockText, outputs);
};
const getAttachmentsOutputs = (initialTextBlockText: string | null): ComposerOutputMultiPart => {
const collapseWithAttachments = (initialTextBlockText: string | null): ComposerOutputMultiPart => {
// accumulate all outputs of all attachments
const allOutputs = llmAttachments.reduce((acc, a) => acc.concat(a.attachment.outputs), [] as ComposerOutputMultiPart);
return attachmentCollapseOutputs(initialTextBlockText, allOutputs);
@@ -51,8 +51,8 @@ export function useLLMAttachments(attachments: Attachment[], chatLLMId: DLLMId |
return {
attachments: llmAttachments,
getAttachmentOutputs,
getAttachmentsOutputs,
collapseWithAttachment,
collapseWithAttachments,
isOutputAttacheable: llmAttachments.every(a => a.isOutputAttachable),
isOutputTextInlineable: llmAttachments.every(a => a.isOutputTextInlineable),
tokenCountApprox: llmAttachments.reduce((acc, a) => acc + (a.tokenCountApprox || 0), 0),
@@ -60,7 +60,7 @@ export function useLLMAttachments(attachments: Attachment[], chatLLMId: DLLMId |
}, [attachments, chatLLMId]);
}
export function getTextBlockText(outputs: ComposerOutputMultiPart): string | null {
export function getSingleTextBlockText(outputs: ComposerOutputMultiPart): string | null {
const textOutputs = outputs.filter(part => part.type === 'text-block');
return (textOutputs.length === 1 && textOutputs[0].type === 'text-block') ? textOutputs[0].text : null;
}
@@ -11,10 +11,14 @@ import { animationEnterBelow } from '~/common/util/animUtils';
const desktopLegend =
<Box sx={{ px: 1, py: 0.75, lineHeight: '1.5rem' }}>
Combine the answers from multiple models<br />
{/*{platformAwareKeystrokes('Ctrl + Enter')}*/}
<KeyStroke combo='Ctrl + Enter' sx={{ mt: 0.5, mb: 0.25 }} />
</Box>;
const desktopLegendNoContent =
<Box sx={{ px: 1, py: 0.75, lineHeight: '1.5rem' }}>
Enter the text to Beam, then press this
</Box>;
const mobileSx: SxProps = {
mr: { xs: 1, md: 2 },
};
@@ -31,13 +35,13 @@ const desktopSx: SxProps = {
export const ButtonBeamMemo = React.memo(ButtonBeam);
function ButtonBeam(props: { isMobile?: boolean, disabled?: boolean, onClick: () => void }) {
function ButtonBeam(props: { isMobile?: boolean, disabled?: boolean, hasContent?: boolean, onClick: () => void }) {
return props.isMobile ? (
<IconButton variant='soft' color='primary' disabled={props.disabled} onClick={props.onClick} sx={mobileSx}>
<ChatBeamIcon />
</IconButton>
) : (
<Tooltip disableInteractive variant='solid' arrow placement='right' title={desktopLegend}>
<Tooltip disableInteractive variant='solid' arrow placement='right' title={props.hasContent ? desktopLegend : desktopLegendNoContent}>
<Button variant='soft' color='primary' disabled={props.disabled} onClick={props.onClick} endDecorator={<ChatBeamIcon />} sx={desktopSx}>
Beam
</Button>
@@ -9,6 +9,13 @@ export type ComposerOutputPart = {
// TODO: not implemented yet
type: 'image-part',
base64Url: string,
metadata: {
title?: string,
generatedBy?: string,
altText?: string,
width?: number,
height?: number,
},
collapsible: false,
};
@@ -36,8 +36,9 @@ export function FolderListItem(props: {
// Menu
const handleMenuOpen = (event: React.MouseEvent<HTMLAnchorElement>) => {
setMenuAnchorEl(event.currentTarget);
const handleMenuToggle = (event: React.MouseEvent<HTMLAnchorElement>) => {
event.preventDefault(); // added for the Right mouse click (to prevent the menu)
setMenuAnchorEl(anchor => anchor ? null : event.currentTarget);
setDeleteArmed(false); // Reset delete armed state
};
@@ -188,9 +189,11 @@ export function FolderListItem(props: {
{/* Icon to show the Popup menu */}
<IconButton
size='sm'
variant='outlined'
className='menu-icon'
onClick={handleMenuOpen}
onClick={handleMenuToggle}
onContextMenu={handleMenuToggle}
sx={{
visibility: 'hidden',
my: '-0.25rem', /* absorb the button padding */
+303 -128
View File
@@ -1,19 +1,21 @@
import * as React from 'react';
import { shallow } from 'zustand/shallow';
import { useShallow } from 'zustand/react/shallow';
import type { SxProps } from '@mui/joy/styles/types';
import { Avatar, Box, CircularProgress, IconButton, ListDivider, ListItem, ListItemDecorator, MenuItem, Switch, Tooltip, Typography } from '@mui/joy';
import AccountTreeTwoToneIcon from '@mui/icons-material/AccountTreeTwoTone';
import { Avatar, Box, ButtonGroup, CircularProgress, IconButton, ListDivider, ListItem, ListItemDecorator, MenuItem, Switch, Tooltip, Typography } from '@mui/joy';
import { ClickAwayListener, Popper } from '@mui/base';
import AccountTreeOutlinedIcon from '@mui/icons-material/AccountTreeOutlined';
import ClearIcon from '@mui/icons-material/Clear';
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
import DifferenceIcon from '@mui/icons-material/Difference';
import EditRoundedIcon from '@mui/icons-material/EditRounded';
import Face6Icon from '@mui/icons-material/Face6';
import ForkRightIcon from '@mui/icons-material/ForkRight';
import FormatPaintTwoToneIcon from '@mui/icons-material/FormatPaintTwoTone';
import FormatPaintOutlinedIcon from '@mui/icons-material/FormatPaintOutlined';
import MoreVertIcon from '@mui/icons-material/MoreVert';
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
import RecordVoiceOverOutlinedIcon from '@mui/icons-material/RecordVoiceOverOutlined';
import ReplayIcon from '@mui/icons-material/Replay';
import ReplyRoundedIcon from '@mui/icons-material/ReplyRounded';
import SettingsSuggestIcon from '@mui/icons-material/SettingsSuggest';
import SmartToyOutlinedIcon from '@mui/icons-material/SmartToyOutlined';
import StarOutlineRoundedIcon from '@mui/icons-material/StarOutlineRounded';
@@ -32,29 +34,31 @@ import { DMessage, DMessageUserFlag, messageHasUserFlag } from '~/common/state/s
import { InlineTextarea } from '~/common/components/InlineTextarea';
import { KeyStroke } from '~/common/components/KeyStroke';
import { Link } from '~/common/components/Link';
import { adjustContentScaling, themeScalingMap } from '~/common/app.theme';
import { adjustContentScaling, themeScalingMap, themeZIndexPageBar } from '~/common/app.theme';
import { animationColorRainbow } from '~/common/util/animUtils';
import { copyToClipboard } from '~/common/util/clipboardUtils';
import { prettyBaseModel } from '~/common/util/modelUtils';
import { useUIPreferencesStore } from '~/common/state/store-ui';
import { useUXLabsStore } from '~/common/state/store-ux-labs';
import { ReplyToBubble } from './ReplyToBubble';
import { useChatShowTextDiff } from '../../store-app-chat';
// Enable the menu on text selection
const ENABLE_SELECTION_RIGHT_CLICK_MENU: boolean = true;
const ENABLE_SELECTION_RIGHT_CLICK_MENU = false;
const ENABLE_SELECTION_TOOLBAR = true;
const SELECTION_TOOLBAR_MIN_LENGTH = 3;
// Enable the hover button to copy the whole message. The Copy button is also available in Blocks, or in the Avatar Menu.
const ENABLE_COPY_MESSAGE_OVERLAY: boolean = false;
export function messageBackground(messageRole: DMessage['role'] | string, wasEdited: boolean, unknownAssistantIssue: boolean): string {
export function messageBackground(messageRole: DMessage['role'] | string, wasEdited: boolean, isAssistantIssue: boolean): string {
switch (messageRole) {
case 'user':
return 'primary.plainHoverBg'; // was .background.level1
case 'assistant':
return unknownAssistantIssue ? 'danger.softBg' : 'background.surface';
return isAssistantIssue ? 'danger.softBg' : 'background.surface';
case 'system':
return wasEdited ? 'warning.softHoverBg' : 'neutral.softBg';
default:
@@ -114,7 +118,7 @@ export function makeAvatar(messageAvatar: string | null, messageRole: DMessage['
// icon: text-to-image
if (isTextToImage)
return <FormatPaintTwoToneIcon sx={{
return <FormatPaintOutlinedIcon sx={{
...avatarIconSx,
animation: `${animationColorRainbow} 1s linear 2.66`,
}} />;
@@ -228,6 +232,7 @@ export function ChatMessage(props: {
onMessageEdit?: (messageId: string, text: string) => void,
onMessageToggleUserFlag?: (messageId: string, flag: DMessageUserFlag) => void,
onMessageTruncate?: (messageId: string) => void,
onReplyTo?: (messageId: string, selectedText: string) => void,
onTextDiagram?: (messageId: string, text: string) => Promise<void>
onTextImagine?: (text: string) => Promise<void>
onTextSpeak?: (text: string) => Promise<void>
@@ -235,33 +240,34 @@ export function ChatMessage(props: {
}) {
// state
const blocksRendererRef = React.useRef<HTMLDivElement>(null);
const [isHovering, setIsHovering] = React.useState(false);
const [opsMenuAnchor, setOpsMenuAnchor] = React.useState<HTMLElement | null>(null);
const [selMenuAnchor, setSelMenuAnchor] = React.useState<HTMLElement | null>(null);
const [selMenuText, setSelMenuText] = React.useState<string | null>(null);
const [selToolbarAnchor, setSelToolbarAnchor] = React.useState<HTMLElement | null>(null);
const [selText, setSelText] = React.useState<string | null>(null);
const [isEditing, setIsEditing] = React.useState(false);
// external state
const labsBeam = useUXLabsStore(state => state.labsBeam);
const { showAvatar, contentScaling, doubleClickToEdit, renderMarkdown } = useUIPreferencesStore(state => ({
const { showAvatar, contentScaling, doubleClickToEdit, renderMarkdown } = useUIPreferencesStore(useShallow(state => ({
showAvatar: props.showAvatar !== undefined ? props.showAvatar : state.zenMode !== 'cleaner',
contentScaling: adjustContentScaling(state.contentScaling, props.adjustContentScaling),
doubleClickToEdit: state.doubleClickToEdit,
renderMarkdown: state.renderMarkdown,
}), shallow);
})));
const [showDiff, setShowDiff] = useChatShowTextDiff();
const textDiffs = useSanityTextDiffs(props.message.text, props.diffPreviousText, showDiff);
// derived state
const {
id: messageId,
text: messageText,
sender: messageSender,
avatar: messageAvatar,
typing: messageTyping,
role: messageRole,
purposeId: messagePurposeId,
originLLM: messageOriginLLM,
metadata: messageMetadata,
created: messageCreated,
updated: messageUpdated,
} = props.message;
@@ -272,10 +278,31 @@ export function ChatMessage(props: {
const fromSystem = messageRole === 'system';
const wasEdited = !!messageUpdated;
const textSel = selMenuText ? selMenuText : messageText;
// #840 - downgrade of V2 to V1
let messageText = props.message.text;
const isDowngradeV2toV1 = (!!(props.message as any)?.fragments) && messageText === undefined;
if (isDowngradeV2toV1) {
// try to salvage something: manually reduce the fragments to text
const fragments: unknown = (props.message as any)?.fragments;
if (fragments && Array.isArray(fragments) && fragments.length) {
messageText = '';
for (const frag of fragments) {
if (frag && typeof frag === 'object' && 'ft' in frag && frag.ft === 'content' && 'part' in frag && typeof frag.part === 'object' && 'pt' in frag.part && frag.part.pt === 'text' && 'text' in frag.part && typeof frag.part.text === 'string') {
if (messageText)
messageText += '\n\n';
messageText += frag.part.text;
}
}
}
if (!messageText)
messageText = '[Cannot downgrade to Big-AGI 1.x]';
}
const textSel = selText ? selText : messageText;
// WARNING: if you get an issue here, you're downgrading from the new Big-AGI 2 data format to 1.x.
const isSpecialT2I = textSel.startsWith('https://images.prodia.xyz/') || textSel.startsWith('/draw ') || textSel.startsWith('/imagine ') || textSel.startsWith('/img ');
const couldDiagram = textSel?.length >= 100 && !isSpecialT2I;
const couldImagine = textSel?.length >= 2 && !isSpecialT2I;
const couldDiagram = textSel.length >= 100 && !isSpecialT2I;
const couldImagine = textSel.length >= 3 && !isSpecialT2I;
const couldSpeak = couldImagine;
@@ -290,21 +317,27 @@ export function ChatMessage(props: {
const { onMessageToggleUserFlag } = props;
const closeOpsMenu = () => setOpsMenuAnchor(null);
const handleOpsMenuToggle = React.useCallback((event: React.MouseEvent<HTMLElement>) => {
event.preventDefault(); // added for the Right mouse click (to prevent the menu)
setOpsMenuAnchor(anchor => anchor ? null : event.currentTarget);
}, []);
const handleCloseOpsMenu = React.useCallback(() => setOpsMenuAnchor(null), []);
const handleOpsCopy = (e: React.MouseEvent) => {
copyToClipboard(textSel, 'Text');
e.preventDefault();
closeOpsMenu();
handleCloseOpsMenu();
closeSelectionMenu();
closeToolbar();
};
const handleOpsEdit = React.useCallback((e: React.MouseEvent) => {
if (messageTyping && !isEditing) return; // don't allow editing while typing
setIsEditing(!isEditing);
e.preventDefault();
closeOpsMenu();
}, [isEditing, messageTyping]);
handleCloseOpsMenu();
}, [handleCloseOpsMenu, isEditing, messageTyping]);
const handleOpsToggleStarred = React.useCallback(() => {
onMessageToggleUserFlag?.(messageId, 'starred');
@@ -312,21 +345,21 @@ export function ChatMessage(props: {
const handleOpsAssistantFrom = async (e: React.MouseEvent) => {
e.preventDefault();
closeOpsMenu();
handleCloseOpsMenu();
await props.onMessageAssistantFrom?.(messageId, fromAssistant ? -1 : 0);
};
const handleOpsBeamFrom = async (e: React.MouseEvent) => {
e.stopPropagation();
closeOpsMenu();
labsBeam && await props.onMessageBeam?.(messageId);
handleCloseOpsMenu();
await props.onMessageBeam?.(messageId);
};
const handleOpsBranch = (e: React.MouseEvent) => {
e.preventDefault();
e.stopPropagation(); // to try to not steal the focus from the banched conversation
props.onMessageBranch?.(messageId);
closeOpsMenu();
handleCloseOpsMenu();
};
const handleOpsToggleShowDiff = () => setShowDiff(!showDiff);
@@ -335,8 +368,9 @@ export function ChatMessage(props: {
e.preventDefault();
if (props.onTextDiagram) {
await props.onTextDiagram(messageId, textSel);
closeOpsMenu();
handleCloseOpsMenu();
closeSelectionMenu();
closeToolbar();
}
};
@@ -344,8 +378,19 @@ export function ChatMessage(props: {
e.preventDefault();
if (props.onTextImagine) {
await props.onTextImagine(textSel);
closeOpsMenu();
handleCloseOpsMenu();
closeSelectionMenu();
closeToolbar();
}
};
const handleOpsReplyTo = (e: React.MouseEvent) => {
e.preventDefault();
if (props.onReplyTo && textSel.trim().length >= SELECTION_TOOLBAR_MIN_LENGTH) {
props.onReplyTo(messageId, textSel.trim());
handleCloseOpsMenu();
closeSelectionMenu();
closeToolbar();
}
};
@@ -353,14 +398,15 @@ export function ChatMessage(props: {
e.preventDefault();
if (props.onTextSpeak) {
await props.onTextSpeak(textSel);
closeOpsMenu();
handleCloseOpsMenu();
closeSelectionMenu();
closeToolbar();
}
};
const handleOpsTruncate = (_e: React.MouseEvent) => {
props.onMessageTruncate?.(messageId);
closeOpsMenu();
handleCloseOpsMenu();
};
const handleOpsDelete = (_e: React.MouseEvent) => {
@@ -395,17 +441,17 @@ export function ChatMessage(props: {
document.body.appendChild(anchorEl);
setSelMenuAnchor(anchorEl);
setSelMenuText(selectedText);
setSelText(selectedText);
}, [removeSelectionAnchor]);
const closeSelectionMenu = React.useCallback(() => {
// window.getSelection()?.removeAllRanges?.();
removeSelectionAnchor();
setSelMenuAnchor(null);
setSelMenuText(null);
setSelText(null);
}, [removeSelectionAnchor]);
const handleMouseUp = React.useCallback((event: MouseEvent) => {
const handleContextMenu = React.useCallback((event: MouseEvent) => {
const selection = window.getSelection();
if (selection && selection.rangeCount > 0) {
const range = selection.getRangeAt(0);
@@ -416,16 +462,74 @@ export function ChatMessage(props: {
}, [openSelectionMenu]);
// Selection Toolbar
const closeToolbar = React.useCallback((anchorEl?: HTMLElement) => {
window.getSelection()?.removeAllRanges?.();
try {
const anchor = anchorEl || selToolbarAnchor;
anchor && document.body.removeChild(anchor);
} catch (e) {
// ignore...
}
setSelToolbarAnchor(null);
setSelText(null);
}, [selToolbarAnchor]);
const handleOpenToolbar = React.useCallback((_event: MouseEvent) => {
// check for selection
const selection = window.getSelection();
if (!selection || selection.rangeCount <= 0) return;
// check for a long-enough selection
const selectionText = selection.toString().trim();
if (selectionText.length < SELECTION_TOOLBAR_MIN_LENGTH) return;
// check for the selection being inside the blocks renderer (core of the message)
const selectionRange = selection.getRangeAt(0);
const blocksElement = blocksRendererRef.current;
if (!blocksElement || !blocksElement.contains(selectionRange.commonAncestorContainer)) return;
const rangeRects = selectionRange.getClientRects();
if (rangeRects.length <= 0) return;
const firstRect = rangeRects[0];
const anchorEl = document.createElement('div');
anchorEl.style.position = 'fixed';
anchorEl.style.left = `${firstRect.left + window.scrollX}px`;
anchorEl.style.top = `${firstRect.top + window.scrollY}px`;
document.body.appendChild(anchorEl);
anchorEl.setAttribute('role', 'dialog');
// auto-close logic on unselect
const closeOnUnselect = () => {
const selection = window.getSelection();
if (!selection || selection.toString().trim() === '') {
closeToolbar(anchorEl);
document.removeEventListener('selectionchange', closeOnUnselect);
}
};
document.addEventListener('selectionchange', closeOnUnselect);
setSelToolbarAnchor(anchorEl);
setSelText(selectionText);
}, [closeToolbar]);
// Blocks renderer
const handleBlocksContextMenu = React.useCallback((event: React.MouseEvent) => {
handleMouseUp(event.nativeEvent);
}, [handleMouseUp]);
handleContextMenu(event.nativeEvent);
}, [handleContextMenu]);
const handleBlocksDoubleClick = React.useCallback((event: React.MouseEvent) => {
doubleClickToEdit && props.onMessageEdit && handleOpsEdit(event);
}, [doubleClickToEdit, handleOpsEdit, props.onMessageEdit]);
const handleBlocksMouseUp = React.useCallback((event: React.MouseEvent) => {
handleOpenToolbar(event.nativeEvent);
}, [handleOpenToolbar]);
// prettier upstream errors
const { isAssistantError, errorMessage } = React.useMemo(
@@ -446,6 +550,7 @@ export function ChatMessage(props: {
return (
<ListItem
role='chat-message'
onMouseUp={(ENABLE_SELECTION_TOOLBAR && !fromSystem && !isAssistantError) ? handleBlocksMouseUp : undefined}
sx={{
// style
backgroundColor: backgroundColor,
@@ -468,92 +573,97 @@ export function ChatMessage(props: {
}),
// style: make room for a top decorator if set
...(!!props.topDecorator && {
pt: '2.5rem',
}),
'&:hover > button': { opacity: 1 },
// layout
display: 'flex',
flexDirection: !fromAssistant ? 'row-reverse' : 'row',
alignItems: 'flex-start',
gap: { xs: 0, md: 1 },
display: 'block', // this is Needed, otherwise there will be a horizontal overflow
...props.sx,
}}
>
{/* (Optional) underlayed top decorator */}
{props.topDecorator && (
<Box sx={{ position: 'absolute', left: 0, right: 0, top: 0, textAlign: 'center' }}>
{props.topDecorator}
</Box>
)}
{props.topDecorator}
{/* Avatar (Persona) */}
{showAvatar && (
<Box sx={personaSx}>
{/* Message Row: Avatar, Blocks (1 text -> blocksRenderer) */}
<Box sx={{
display: 'flex',
flexDirection: !fromAssistant ? 'row-reverse' : 'row',
alignItems: 'flex-start',
gap: { xs: 0, md: 1 },
}}>
{/* Persona Avatar or Menu Button */}
<Box
onClick={event => setOpsMenuAnchor(event.currentTarget)}
onMouseEnter={() => setIsHovering(true)}
onMouseLeave={() => setIsHovering(false)}
sx={{ display: 'flex' }}
>
{(isHovering || opsMenuAnchor) ? (
<IconButton variant={opsMenuAnchor ? 'solid' : 'soft'} color={(fromAssistant || fromSystem) ? 'neutral' : 'primary'} sx={avatarIconSx}>
<MoreVertIcon />
</IconButton>
) : (
avatarEl
{/* Avatar (Persona) */}
{showAvatar && (
<Box sx={personaSx}>
{/* Persona Avatar or Menu Button */}
<Box
onClick={handleOpsMenuToggle}
onContextMenu={handleOpsMenuToggle}
onMouseEnter={() => setIsHovering(true)}
onMouseLeave={() => setIsHovering(false)}
sx={{ display: 'flex' }}
>
{(isHovering || opsMenuAnchor) ? (
<IconButton variant={opsMenuAnchor ? 'solid' : 'soft'} color={(fromAssistant || fromSystem) ? 'neutral' : 'primary'} sx={avatarIconSx}>
<MoreVertIcon />
</IconButton>
) : (
avatarEl
)}
</Box>
{/* Assistant model name */}
{fromAssistant && (
<Tooltip arrow title={messageTyping ? null : (messageOriginLLM || 'unk-model')} variant='solid'>
<Typography level='body-xs' sx={{
overflowWrap: 'anywhere',
...(messageTyping ? { animation: `${animationColorRainbow} 5s linear infinite` } : {}),
}}>
{prettyBaseModel(messageOriginLLM)}
</Typography>
</Tooltip>
)}
</Box>
{/* Assistant model name */}
{fromAssistant && (
<Tooltip arrow title={messageTyping ? null : (messageOriginLLM || 'unk-model')} variant='solid'>
<Typography level='body-xs' sx={{
overflowWrap: 'anywhere',
...(messageTyping ? { animation: `${animationColorRainbow} 5s linear infinite` } : {}),
}}>
{prettyBaseModel(messageOriginLLM)}
</Typography>
</Tooltip>
)}
</Box>
)}
)}
{/* Edit / Blocks */}
{isEditing ? (
{/* Edit / Blocks */}
{isEditing ? (
<InlineTextarea
initialText={messageText} onEdit={handleTextEdited}
sx={editBlocksSx}
/>
<InlineTextarea
initialText={messageText} onEdit={handleTextEdited}
sx={editBlocksSx}
/>
) : (
) : (
<BlocksRenderer
text={messageText}
fromRole={messageRole}
contentScaling={contentScaling}
errorMessage={errorMessage}
fitScreen={props.fitScreen}
isBottom={props.isBottom}
renderTextAsMarkdown={renderMarkdown}
renderTextDiff={textDiffs || undefined}
showDate={props.showBlocksDate === true ? messageUpdated || messageCreated || undefined : undefined}
showUnsafeHtml={props.showUnsafeHtml}
wasUserEdited={wasEdited}
onContextMenu={(props.onMessageEdit && ENABLE_SELECTION_RIGHT_CLICK_MENU) ? handleBlocksContextMenu : undefined}
onDoubleClick={(props.onMessageEdit && doubleClickToEdit) ? handleBlocksDoubleClick : undefined}
optiAllowMemo={messageTyping}
/>
<BlocksRenderer
ref={blocksRendererRef}
text={messageText}
fromRole={messageRole}
contentScaling={contentScaling}
errorMessage={errorMessage}
fitScreen={props.fitScreen}
isBottom={props.isBottom}
renderTextAsMarkdown={renderMarkdown}
renderTextDiff={textDiffs || undefined}
showDate={props.showBlocksDate === true ? messageUpdated || messageCreated || undefined : undefined}
showUnsafeHtml={props.showUnsafeHtml}
wasUserEdited={wasEdited}
onContextMenu={(props.onMessageEdit && ENABLE_SELECTION_RIGHT_CLICK_MENU) ? handleBlocksContextMenu : undefined}
onDoubleClick={(props.onMessageEdit && doubleClickToEdit) ? handleBlocksDoubleClick : undefined}
optiAllowMemo={messageTyping}
/>
)}
)}
</Box>
{/* Reply-To Bubble */}
{!!messageMetadata?.inReplyToText && <ReplyToBubble inlineMessage replyToText={messageMetadata.inReplyToText} className='reply-to-bubble' />}
{/* Overlay copy icon */}
@@ -575,7 +685,7 @@ export function ChatMessage(props: {
{!!opsMenuAnchor && (
<CloseableMenu
dense placement='bottom-end'
open anchorEl={opsMenuAnchor} onClose={closeOpsMenu}
open anchorEl={opsMenuAnchor} onClose={handleCloseOpsMenu}
sx={{ minWidth: 280 }}
>
@@ -637,6 +747,26 @@ export function ChatMessage(props: {
<span style={{ opacity: 0.5 }}>after this</span>
</MenuItem>
)}
{/* Diagram / Draw / Speak */}
{!!props.onTextDiagram && <ListDivider />}
{!!props.onTextDiagram && (
<MenuItem onClick={handleOpsDiagram} disabled={!couldDiagram}>
<ListItemDecorator><AccountTreeOutlinedIcon /></ListItemDecorator>
Auto-Diagram ...
</MenuItem>
)}
{!!props.onTextImagine && (
<MenuItem onClick={handleOpsImagine} disabled={!couldImagine || props.isImagining}>
<ListItemDecorator>{props.isImagining ? <CircularProgress size='sm' /> : <FormatPaintOutlinedIcon />}</ListItemDecorator>
Auto-Draw
</MenuItem>
)}
{!!props.onTextSpeak && (
<MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
Speak
</MenuItem>
)}
{/* Diff Viewer */}
{!!props.diffPreviousText && <ListDivider />}
{!!props.diffPreviousText && (
@@ -646,26 +776,6 @@ export function ChatMessage(props: {
<Switch checked={showDiff} onChange={handleOpsToggleShowDiff} sx={{ ml: 'auto' }} />
</MenuItem>
)}
{/* Diagram / Draw / Speak */}
{!!props.onTextDiagram && <ListDivider />}
{!!props.onTextDiagram && (
<MenuItem onClick={handleOpsDiagram} disabled={!couldDiagram}>
<ListItemDecorator><AccountTreeTwoToneIcon /></ListItemDecorator>
Auto-Diagram ...
</MenuItem>
)}
{!!props.onTextImagine && (
<MenuItem onClick={handleOpsImagine} disabled={!couldImagine || props.isImagining}>
<ListItemDecorator>{props.isImagining ? <CircularProgress size='sm' /> : <FormatPaintTwoToneIcon />}</ListItemDecorator>
Auto-Draw
</MenuItem>
)}
{!!props.onTextSpeak && (
<MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverTwoToneIcon />}</ListItemDecorator>
Speak
</MenuItem>
)}
{/* Beam/Restart */}
{(!!props.onMessageAssistantFrom || !!props.onMessageBeam) && <ListDivider />}
{!!props.onMessageAssistantFrom && (
@@ -678,7 +788,7 @@ export function ChatMessage(props: {
: <Box sx={{ flexGrow: 1, display: 'flex', justifyContent: 'space-between', gap: 1 }}>Retry<KeyStroke combo='Ctrl + Shift + R' /></Box>}
</MenuItem>
)}
{!!props.onMessageBeam && labsBeam && (
{!!props.onMessageBeam && (
<MenuItem disabled={fromSystem} onClick={handleOpsBeamFrom}>
<ListItemDecorator>
<ChatBeamIcon color={fromSystem ? undefined : 'primary'} />
@@ -693,6 +803,71 @@ export function ChatMessage(props: {
</CloseableMenu>
)}
{/* Selection Toolbar */}
{ENABLE_SELECTION_TOOLBAR && !!selToolbarAnchor && (
<Popper placement='top-start' open anchorEl={selToolbarAnchor} slotProps={{
root: { style: { zIndex: themeZIndexPageBar + 1 } },
}}>
<ClickAwayListener onClickAway={() => closeToolbar()}>
<ButtonGroup
variant='plain'
sx={{
'--ButtonGroup-separatorColor': 'none !important',
'--ButtonGroup-separatorSize': 0,
borderRadius: '0',
backgroundColor: 'background.popup',
border: '1px solid',
borderColor: 'primary.outlinedBorder',
boxShadow: '0px 4px 12px -4px rgb(var(--joy-palette-neutral-darkChannel) / 50%)',
mb: 1,
ml: -1,
alignItems: 'center',
'& > button': {
'--Icon-fontSize': '1rem',
minHeight: '2.5rem',
minWidth: '2.75rem',
},
}}
>
{!!props.onReplyTo && fromAssistant && <Tooltip disableInteractive arrow placement='top' title='Reply'>
<IconButton color='primary' onClick={handleOpsReplyTo}>
<ReplyRoundedIcon sx={{ fontSize: 'xl' }} />
</IconButton>
</Tooltip>}
{/*{!!props.onMessageBeam && fromAssistant && <Tooltip disableInteractive arrow placement='top' title='Beam'>*/}
{/* <IconButton color='primary'>*/}
{/* <ChatBeamIcon sx={{ fontSize: 'xl' }} />*/}
{/* </IconButton>*/}
{/*</Tooltip>}*/}
{!!props.onReplyTo && fromAssistant && <MoreVertIcon sx={{ color: 'neutral.outlinedBorder', fontSize: 'md' }} />}
<Tooltip disableInteractive arrow placement='top' title='Copy'>
<IconButton onClick={handleOpsCopy}>
<ContentCopyIcon />
</IconButton>
</Tooltip>
{(!!props.onTextDiagram || !!props.onTextSpeak) && <MoreVertIcon sx={{ color: 'neutral.outlinedBorder', fontSize: 'md' }} />}
{!!props.onTextDiagram && <Tooltip disableInteractive arrow placement='top' title={couldDiagram ? 'Auto-Diagram...' : 'Too short to Auto-Diagram'}>
<IconButton onClick={couldDiagram ? handleOpsDiagram : undefined}>
<AccountTreeOutlinedIcon sx={{ color: couldDiagram ? 'primary' : 'neutral.plainDisabledColor' }} />
</IconButton>
</Tooltip>}
{/*{!!props.onTextImagine && <Tooltip disableInteractive arrow placement='top' title='Auto-Draw'>*/}
{/* <IconButton onClick={handleOpsImagine} disabled={!couldImagine || props.isImagining}>*/}
{/* {!props.isImagining ? <FormatPaintOutlinedIcon /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}*/}
{/* </IconButton>*/}
{/*</Tooltip>}*/}
{!!props.onTextSpeak && <Tooltip disableInteractive arrow placement='top' title='Speak'>
<IconButton onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
{!props.isSpeaking ? <RecordVoiceOverOutlinedIcon /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}
</IconButton>
</Tooltip>}
</ButtonGroup>
</ClickAwayListener>
</Popper>
)}
{/* Selection (Contextual) Menu */}
{!!selMenuAnchor && (
<CloseableMenu
@@ -706,15 +881,15 @@ export function ChatMessage(props: {
</MenuItem>
{!!props.onTextDiagram && <ListDivider />}
{!!props.onTextDiagram && <MenuItem onClick={handleOpsDiagram} disabled={!couldDiagram || props.isImagining}>
<ListItemDecorator><AccountTreeTwoToneIcon /></ListItemDecorator>
<ListItemDecorator><AccountTreeOutlinedIcon /></ListItemDecorator>
Auto-Diagram ...
</MenuItem>}
{!!props.onTextImagine && <MenuItem onClick={handleOpsImagine} disabled={!couldImagine || props.isImagining}>
<ListItemDecorator>{props.isImagining ? <CircularProgress size='sm' /> : <FormatPaintTwoToneIcon />}</ListItemDecorator>
<ListItemDecorator>{props.isImagining ? <CircularProgress size='sm' /> : <FormatPaintOutlinedIcon />}</ListItemDecorator>
Auto-Draw
</MenuItem>}
{!!props.onTextSpeak && <MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverTwoToneIcon />}</ListItemDecorator>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
Speak
</MenuItem>}
</CloseableMenu>
@@ -0,0 +1,85 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, IconButton, Tooltip, Typography } from '@mui/joy';
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
import ReplyRoundedIcon from '@mui/icons-material/ReplyRounded';
// configuration
const INLINE_COLOR = 'primary';
const bubbleComposerSx: SxProps = {
// contained
width: '100%',
zIndex: 2, // stays on top of the 'tokens' bubble in the composer
// style
backgroundColor: 'background.surface',
border: '1px solid',
borderColor: 'neutral.outlinedBorder',
borderRadius: 'sm',
boxShadow: 'xs',
padding: '0.5rem 0.25rem 0.5rem 0.5rem',
// layout
display: 'flex',
alignItems: 'start',
};
const inlineMessageSx: SxProps = {
...bubbleComposerSx,
// redefine
// border: 'none',
mt: 1,
borderColor: `${INLINE_COLOR}.outlinedColor`,
borderRadius: 'sm',
boxShadow: 'xs',
width: undefined,
padding: '0.375rem 0.25rem 0.375rem 0.5rem',
// self-layout (parent: 'block', as 'grid' was not working and the user would scroll the app on the x-axis on mobile)
// ml: 'auto',
float: 'inline-end',
mr: { xs: 7.75, md: 10.5 }, // personaSx.minWidth + gap (md: 1) + 1.5 (text margin)
};
export function ReplyToBubble(props: {
replyToText: string | null,
inlineMessage?: boolean
onClear?: () => void,
className?: string,
}) {
return (
<Box className={props.className} sx={!props.inlineMessage ? bubbleComposerSx : inlineMessageSx}>
<Tooltip disableInteractive arrow title='Referring to this assistant text' placement='top'>
<ReplyRoundedIcon sx={{
color: props.inlineMessage ? `${INLINE_COLOR}.outlinedColor` : 'primary.solidBg',
fontSize: 'xl',
mt: 0.125,
}} />
</Tooltip>
<Typography level='body-sm' sx={{
flex: 1,
ml: 1,
mr: 0.5,
overflow: 'auto',
maxHeight: '5.75rem',
lineHeight: 'xl',
color: /*props.inlineMessage ? 'text.tertiary' :*/ 'text.secondary',
whiteSpace: 'break-spaces', // 'balance'
}}>
{props.replyToText}
</Typography>
{!!props.onClear && (
<IconButton size='sm' onClick={props.onClear} sx={{ my: -0.5, background: 'none' }}>
<CloseRoundedIcon />
</IconButton>
)}
</Box>
);
}
@@ -1,5 +1,6 @@
import * as React from 'react';
import { shallow } from 'zustand/shallow';
import { v4 as uuidv4 } from 'uuid';
import type { SxProps } from '@mui/joy/styles/types';
import { Alert, Avatar, Box, Button, Card, CardContent, Checkbox, IconButton, Input, List, ListItem, ListItemButton, Textarea, Tooltip, Typography } from '@mui/joy';
@@ -10,17 +11,19 @@ import EditNoteIcon from '@mui/icons-material/EditNote';
import SearchIcon from '@mui/icons-material/Search';
import TelegramIcon from '@mui/icons-material/Telegram';
import { SystemPurposeData, SystemPurposeId, SystemPurposes } from '../../../../data';
import { bareBonesPromptMixer } from '~/modules/persona/pmix/pmix';
import { useChatLLM } from '~/modules/llms/store-llms';
import { DConversationId, useChatStore } from '~/common/state/store-chats';
import { DConversationId, DMessage, useChatStore } from '~/common/state/store-chats';
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
import { lineHeightTextareaMd } from '~/common/app.theme';
import { navigateToPersonas } from '~/common/app.routes';
import { useChipBoolean } from '~/common/components/useChipBoolean';
import { useUIPreferencesStore } from '~/common/state/store-ui';
import { SystemPurposeData, SystemPurposeId, SystemPurposes } from '../../../../data';
import { YouTubeURLInput } from './YouTubeURLInput';
import { usePurposeStore } from './store-purposes';
@@ -116,6 +119,8 @@ export function PersonaSelector(props: { conversationId: DConversationId, runExa
const [searchQuery, setSearchQuery] = React.useState('');
const [filteredIDs, setFilteredIDs] = React.useState<SystemPurposeId[] | null>(null);
const [editMode, setEditMode] = React.useState(false);
const [isYouTubeTranscriberActive, setIsYouTubeTranscriberActive] = React.useState(false);
// external state
const showFinder = useUIPreferencesStore(state => state.showPersonaFinder);
@@ -153,11 +158,52 @@ export function PersonaSelector(props: { conversationId: DConversationId, runExa
// Handlers
// Modify the handlePurposeChanged function to check for the YouTube Transcriber
const handlePurposeChanged = React.useCallback((purposeId: SystemPurposeId | null) => {
if (purposeId && setSystemPurposeId)
setSystemPurposeId(props.conversationId, purposeId);
if (purposeId) {
if (purposeId === 'YouTubeTranscriber') {
// If the YouTube Transcriber tile is clicked, set the state accordingly
setIsYouTubeTranscriberActive(true);
} else {
setIsYouTubeTranscriberActive(false);
}
if (setSystemPurposeId) {
setSystemPurposeId(props.conversationId, purposeId);
}
}
}, [props.conversationId, setSystemPurposeId]);
React.useEffect(() => {
const isTranscriberActive = systemPurposeId === 'YouTubeTranscriber';
setIsYouTubeTranscriberActive(isTranscriberActive);
}, [systemPurposeId]);
// Implement handleAddMessage function
const handleAddMessage = (messageText: string) => {
// Retrieve the appendMessage action from the useChatStore
const { appendMessage } = useChatStore.getState();
const conversationId = props.conversationId;
// Create a new message object
const newMessage: DMessage = {
id: uuidv4(),
text: messageText,
sender: 'Bot',
avatar: null,
typing: false,
role: 'assistant' as 'assistant',
tokenCount: 0,
created: Date.now(),
updated: null,
};
// Append the new message to the conversation
appendMessage(conversationId, newMessage);
};
const handleCustomSystemMessageChange = React.useCallback((v: React.ChangeEvent<HTMLTextAreaElement>): void => {
// TODO: persist this change? Right now it's reset every time.
// maybe we shall have a "save" button just save on a state to persist between sessions
@@ -418,6 +464,17 @@ export function PersonaSelector(props: { conversationId: DConversationId, runExa
/>
)}
{/* [row -1] YouTube URL */}
{isYouTubeTranscriberActive && (
<YouTubeURLInput
onSubmit={(url) => handleAddMessage(url)}
isFetching={false}
sx={{
gridColumn: '1 / -1',
}}
/>
)}
</Box>
</Box>
@@ -0,0 +1,74 @@
import * as React from 'react';
import { Box, Button, Input } from '@mui/joy';
import YouTubeIcon from '@mui/icons-material/YouTube';
import type { SxProps } from '@mui/joy/styles/types';
import { useYouTubeTranscript, YTVideoTranscript } from '~/modules/youtube/useYouTubeTranscript';
interface YouTubeURLInputProps {
onSubmit: (transcript: string) => void;
isFetching: boolean;
sx?: SxProps;
}
export const YouTubeURLInput: React.FC<YouTubeURLInputProps> = ({ onSubmit, isFetching, sx }) => {
const [url, setUrl] = React.useState('');
const [submitFlag, setSubmitFlag] = React.useState(false);
// Function to extract video ID from URL
function extractVideoID(videoURL: string): string | null {
const regExp = /^(?:https?:\/\/)?(?:www\.)?(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/)([^#&?]*).*/;
const match = videoURL.match(regExp);
return (match && match[1]?.length == 11) ? match[1] : null;
}
const videoID = extractVideoID(url);
// Callback function to handle new transcript
const handleNewTranscript = (newTranscript: YTVideoTranscript) => {
onSubmit(newTranscript.transcript); // Pass the transcript text to the onSubmit handler
setSubmitFlag(false); // Reset submit flag after handling
};
const { transcript, isFetching: isTranscriptFetching, isError, error } = useYouTubeTranscript(videoID && submitFlag ? videoID : null, handleNewTranscript);
const handleChange = (event: React.ChangeEvent<HTMLInputElement>) => {
setUrl(event.target.value);
};
const handleSubmit = (event: React.FormEvent<HTMLFormElement>) => {
event.preventDefault(); // Prevent form from causing a page reload
setSubmitFlag(true); // Set flag to indicate a submit action
};
return (
<Box sx={{ mb: 1, ...sx }}>
<form onSubmit={handleSubmit}>
<Input
required
type='url'
fullWidth
disabled={isFetching || isTranscriptFetching}
variant='outlined'
placeholder='Enter YouTube Video URL'
value={url}
onChange={handleChange}
startDecorator={<YouTubeIcon sx={{ color: '#f00' }} />}
sx={{ mb: 1.5, backgroundColor: 'background.popup' }}
/>
<Button
type='submit'
variant='solid'
disabled={isFetching || isTranscriptFetching || !url}
loading={isFetching || isTranscriptFetching}
sx={{ minWidth: 140 }}
>
Get Transcript
</Button>
{isError && <div>Error fetching transcript. Please try again.</div>}
</form>
</Box>
);
};
@@ -18,7 +18,7 @@ export const usePurposeStore = create<PurposeStore>()(
(set) => ({
// default state
hiddenPurposeIDs: ['Developer', 'Designer'],
hiddenPurposeIDs: ['Developer', 'Designer', 'YouTubeTranscriber'],
toggleHiddenPurposeId: (purposeId: string) => {
set(state => {
@@ -37,14 +37,19 @@ export const usePurposeStore = create<PurposeStore>()(
/* versioning:
* 1: hide 'Developer' as 'DeveloperPreview' is best
* 2: add a hidden 'YouTubeTranscriber' purpose
*/
version: 1,
version: 2,
migrate: (state: any, fromVersion: number): PurposeStore => {
// 0 -> 1: rename 'enterToSend' to 'enterIsNewline' (flip the meaning)
if (state && fromVersion === 0)
if (!state.hiddenPurposeIDs.includes('Developer'))
state.hiddenPurposeIDs.push('Developer');
// 1 -> 2: add a hidden 'YouTubeTranscriber' purpose
if (state && fromVersion === 1)
if (!state.hiddenPurposeIDs.includes('YouTubeTranscriber'))
state.hiddenPurposeIDs.push('YouTubeTranscriber');
return state;
},
}),
+151
View File
@@ -0,0 +1,151 @@
import { getChatLLMId } from '~/modules/llms/store-llms';
import { updateHistoryForReplyTo } from '~/modules/aifn/replyto/replyTo';
import { ConversationsManager } from '~/common/chats/ConversationsManager';
import { createDMessage, DConversationId, DMessage, getConversationSystemPurposeId } from '~/common/state/store-chats';
import { getUXLabsHighPerformance } from '~/common/state/store-ux-labs';
import { extractChatCommand, findAllChatCommands } from '../commands/commands.registry';
import { getInstantAppChatPanesCount } from '../components/panes/usePanesManager';
import { runAssistantUpdatingState } from './chat-stream';
import { runBrowseGetPageUpdatingState } from './browse-load';
import { runImageGenerationUpdatingState } from './image-generate';
import { runReActUpdatingState } from './react-tangent';
import type { ChatModeId } from '../AppChat';
/**
 * Executes one chat turn for a conversation: validates preconditions, intercepts
 * `/commands` (which override the chat mode), then dispatches to the long-running
 * task matching the selected `chatModeId`.
 *
 * NOTE: `inlineUpdatePurposeInHistory` mutates `history` without syncing state, so:
 *       1. all callers must pass a NEW array, and
 *       2. every exit point must sync it back via `cHandler.messagesReplace(...)`.
 *
 * @param chatModeId     which execution mode to run ('generate-text', 'generate-image', ...)
 * @param conversationId target conversation; an 'err-*' string is returned if falsy
 * @param history        the full message history to execute (mutated in place, see NOTE)
 * @returns a boolean-ish outcome from the executed task, or an 'err-*' string naming
 *          the precondition that failed ('err-no-conversation', 'err-no-chatllm', ...)
 */
export async function _handleExecute(chatModeId: ChatModeId, conversationId: DConversationId, history: DMessage[]) {

  // Handle missing conversation
  if (!conversationId)
    return 'err-no-conversation';

  const chatLLMId = getChatLLMId();

  // Update the system message from the active persona to the history
  // NOTE: this does NOT call setMessages anymore (optimization). make sure to:
  //       1. all the callers need to pass a new array
  //       2. all the exit points need to call setMessages
  const cHandler = ConversationsManager.getHandler(conversationId);
  cHandler.inlineUpdatePurposeInHistory(history, chatLLMId || undefined);

  // FIXME: shouldn't do this for all the code paths. The advantage for having it here (vs Composer output only) is re-executing history
  // TODO: move this to the server side after transferring metadata?
  updateHistoryForReplyTo(history);

  // Handle unconfigured (no LLM and/or no chat mode selected)
  if (!chatLLMId || !chatModeId) {
    // set the history (e.g. the updated system prompt and the user prompt) at least, see #523
    cHandler.messagesReplace(history);
    return !chatLLMId ? 'err-no-chatllm' : 'err-no-chatmode';
  }

  // Valid /commands are intercepted here, and override chat modes, generally for mechanics or sidebars
  const lastMessage = history.length > 0 ? history[history.length - 1] : null;
  if (lastMessage?.role === 'user') {
    const chatCommand = extractChatCommand(lastMessage.text)[0];
    if (chatCommand && chatCommand.type === 'cmd') {
      switch (chatCommand.providerId) {

        case 'ass-browse':
          cHandler.messagesReplace(history); // show command
          return await runBrowseGetPageUpdatingState(cHandler, chatCommand.params);

        case 'ass-t2i':
          cHandler.messagesReplace(history); // show command
          return await runImageGenerationUpdatingState(cHandler, chatCommand.params);

        case 'ass-react':
          cHandler.messagesReplace(history); // show command
          return await runReActUpdatingState(cHandler, chatCommand.params, chatLLMId);

        case 'chat-alter':
          // /clear: requires the 'all' parameter as an explicit confirmation
          if (chatCommand.command === '/clear') {
            if (chatCommand.params === 'all') {
              cHandler.messagesReplace([]);
            } else {
              cHandler.messagesReplace(history);
              cHandler.messageAppendAssistant('Issue: this command requires the \'all\' parameter to confirm the operation.', undefined, 'issue', false);
            }
            return true;
          }
          // /assistant, /system: rewrite the last message in-place with the new role and text
          Object.assign(lastMessage, {
            role: chatCommand.command.startsWith('/s') ? 'system' : chatCommand.command.startsWith('/a') ? 'assistant' : 'user',
            sender: 'Bot',
            text: chatCommand.params || '',
          } satisfies Partial<DMessage>);
          cHandler.messagesReplace(history);
          return true;

        case 'cmd-help': {
          // braces scope the lexical declaration to this case (avoids no-case-declarations)
          const chatCommandsText = findAllChatCommands()
            .map(cmd => ` - ${cmd.primary}` + (cmd.alternatives?.length ? ` (${cmd.alternatives.join(', ')})` : '') + `: ${cmd.description}`)
            .join('\n');
          cHandler.messagesReplace(history);
          cHandler.messageAppendAssistant('Available Chat Commands:\n' + chatCommandsText, undefined, 'help', false);
          return true;
        }

        case 'mode-beam':
          if (chatCommand.isError) {
            cHandler.messagesReplace(history);
            return false;
          }
          // remove '/beam ', as we want to be a user chat message
          Object.assign(lastMessage, { text: chatCommand.params || '' });
          cHandler.messagesReplace(history);
          // reuse cHandler: it was already resolved for this conversationId above
          cHandler.beamInvoke(history, [], null);
          return true;

        default:
          cHandler.messagesReplace([...history, createDMessage('assistant', 'This command is not supported.')]);
          return false;
      }
    }
  }

  // get the system purpose (note: we don't react to it, or it would invalidate half UI components..)
  if (!getConversationSystemPurposeId(conversationId)) {
    cHandler.messagesReplace(history);
    cHandler.messageAppendAssistant('Issue: no Persona selected.', undefined, 'issue', false);
    return 'err-no-persona';
  }

  // synchronous long-duration tasks, which update the state as they go
  switch (chatModeId) {
    case 'generate-text':
      cHandler.messagesReplace(history);
      return await runAssistantUpdatingState(conversationId, history, chatLLMId, getUXLabsHighPerformance() ? 0 : getInstantAppChatPanesCount());

    case 'generate-text-beam':
      cHandler.messagesReplace(history);
      cHandler.beamInvoke(history, [], null);
      return true;

    case 'append-user':
      cHandler.messagesReplace(history);
      return true;

    case 'generate-image':
      if (!lastMessage?.text) break;
      // also add a 'fake' user message with the '/draw' command
      cHandler.messagesReplace(history.map(message => (message.id !== lastMessage.id) ? message : {
        ...message,
        text: `/draw ${lastMessage.text}`,
      }));
      return await runImageGenerationUpdatingState(cHandler, lastMessage.text);

    case 'generate-react':
      if (!lastMessage?.text) break;
      cHandler.messagesReplace(history);
      return await runReActUpdatingState(cHandler, lastMessage.text, chatLLMId);
  }

  // ISSUE: if we're here, it means we couldn't do the job, at least sync the history
  console.log('Chat execute: issue running', chatModeId, conversationId, lastMessage);
  cHandler.messagesReplace(history);
  return false;
}
+5 -2
View File
@@ -6,7 +6,7 @@ import type { ConversationHandler } from '~/common/chats/ConversationHandler';
export const runBrowseGetPageUpdatingState = async (cHandler: ConversationHandler, url?: string) => {
if (!url) {
cHandler.messageAppendAssistant('Issue: no URL provided.', undefined, 'issue', false);
return;
return false;
}
// noinspection HttpUrlsUsage
@@ -15,9 +15,12 @@ export const runBrowseGetPageUpdatingState = async (cHandler: ConversationHandle
try {
const page = await callBrowseFetchPage(url);
cHandler.messageEdit(assistantMessageId, { text: page.content || 'Issue: page load did not produce an answer: no text found', typing: false }, true);
const pageContent = page.content.markdown || page.content.text || page.content.html || 'Issue: page load did not produce an answer: no text found';
cHandler.messageEdit(assistantMessageId, { text: pageContent, typing: false }, true);
return true;
} catch (error: any) {
console.error(error);
cHandler.messageEdit(assistantMessageId, { text: 'Issue: browse did not produce an answer (error: ' + (error?.message || error?.toString() || 'unknown') + ').', typing: false }, true);
return false;
}
};
+10 -3
View File
@@ -2,7 +2,7 @@ import type { DLLMId } from '~/modules/llms/store-llms';
import type { StreamingClientUpdate } from '~/modules/llms/vendors/unifiedStreamingClient';
import { autoSuggestions } from '~/modules/aifn/autosuggestions/autoSuggestions';
import { conversationAutoTitle } from '~/modules/aifn/autotitle/autoTitle';
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
import { llmStreamingChatGenerate, VChatContextRef, VChatMessageIn, VChatStreamContextName } from '~/modules/llms/llm.client';
import { speakText } from '~/modules/elevenlabs/elevenlabs.client';
import type { DMessage } from '~/common/state/store-chats';
@@ -31,9 +31,11 @@ export async function runAssistantUpdatingState(conversationId: string, history:
cHandler.setAbortController(abortController);
// stream the assistant's messages
await streamAssistantMessage(
const messageStatus = await streamAssistantMessage(
assistantLlmId,
history.map((m): VChatMessageIn => ({ role: m.role, content: m.text })),
'conversation',
conversationId,
parallelViewCount,
autoSpeak,
(update) => cHandler.messageEdit(assistantMessageId, update, false),
@@ -41,6 +43,7 @@ export async function runAssistantUpdatingState(conversationId: string, history:
);
// clear to send, again
// FIXME: race condition?
cHandler.setAbortController(null);
if (autoTitleChat) {
@@ -50,6 +53,8 @@ export async function runAssistantUpdatingState(conversationId: string, history:
if (autoSuggestDiagrams || autoSuggestQuestions)
autoSuggestions(conversationId, assistantMessageId, autoSuggestDiagrams, autoSuggestQuestions);
return messageStatus.outcome === 'success';
}
type StreamMessageOutcome = 'success' | 'aborted' | 'errored';
@@ -58,6 +63,8 @@ type StreamMessageStatus = { outcome: StreamMessageOutcome, errorMessage?: strin
export async function streamAssistantMessage(
llmId: DLLMId,
messagesHistory: VChatMessageIn[],
contextName: VChatStreamContextName,
contextRef: VChatContextRef,
throttleUnits: number, // 0: disable, 1: default throttle (12Hz), 2+ reduce the message frequency with the square root
autoSpeak: ChatAutoSpeakType,
editMessage: (update: Partial<DMessage>) => void,
@@ -89,7 +96,7 @@ export async function streamAssistantMessage(
const incrementalAnswer: Partial<DMessage> = { text: '' };
try {
await llmStreamingChatGenerate(llmId, messagesHistory, null, null, abortSignal, (update: StreamingClientUpdate) => {
await llmStreamingChatGenerate(llmId, messagesHistory, contextName, contextRef, null, null, abortSignal, (update: StreamingClientUpdate) => {
const textSoFar = update.textSoFar;
// grow the incremental message
+4 -2
View File
@@ -10,7 +10,7 @@ import type { TextToImageProvider } from '~/common/components/useCapabilities';
export async function runImageGenerationUpdatingState(cHandler: ConversationHandler, imageText?: string) {
if (!imageText) {
cHandler.messageAppendAssistant('Issue: no image description provided.', undefined, 'issue', false);
return;
return false;
}
// Acquire the active TextToImageProvider
@@ -19,7 +19,7 @@ export async function runImageGenerationUpdatingState(cHandler: ConversationHand
t2iProvider = getActiveTextToImageProviderOrThrow();
} catch (error: any) {
cHandler.messageAppendAssistant(`[Issue] Sorry, I can't generate images right now. ${error?.message || error?.toString() || 'Unknown error'}.`, undefined, 'issue', false);
return;
return 'err-t2i-unconfigured';
}
// if the imageText ends with " xN" or " [N]" (where N is a number), then we'll generate N images
@@ -36,8 +36,10 @@ export async function runImageGenerationUpdatingState(cHandler: ConversationHand
try {
const imageUrls = await t2iGenerateImageOrThrow(t2iProvider, imageText, repeat);
cHandler.messageEdit(assistantMessageId, { text: imageUrls.join('\n'), typing: false }, true);
return true;
} catch (error: any) {
const errorMessage = error?.message || error?.toString() || 'Unknown error';
cHandler.messageEdit(assistantMessageId, { text: `[Issue] Sorry, I couldn't create an image for you. ${errorMessage}`, typing: false }, false);
return false;
}
}
+4 -2
View File
@@ -15,11 +15,11 @@ const EPHEMERAL_DELETION_DELAY = 5 * 1000;
export async function runReActUpdatingState(cHandler: ConversationHandler, question: string | undefined, assistantLlmId: DLLMId) {
if (!question) {
cHandler.messageAppendAssistant('Issue: no question provided.', undefined, 'issue', false);
return;
return false;
}
// create a blank and 'typing' message for the assistant - to be filled when we're done
const assistantModelLabel = 'react-' + assistantLlmId.slice(4, 7); // HACK: this is used to change the Avatar animation
const assistantModelLabel = 'react-' + assistantLlmId; //.slice(4, 7); // HACK: this is used to change the Avatar animation
const assistantMessageId = cHandler.messageAppendAssistant(STREAM_TEXT_INDICATOR, undefined, assistantModelLabel, true);
const { enableReactTool: enableBrowse } = useBrowseStore.getState();
@@ -42,9 +42,11 @@ export async function runReActUpdatingState(cHandler: ConversationHandler, quest
cHandler.messageEdit(assistantMessageId, { text: reactResult, typing: false }, false);
setTimeout(() => eHandler.delete(), EPHEMERAL_DELETION_DELAY);
return true;
} catch (error: any) {
console.error(error);
logToEphemeral(ephemeralText + `\nIssue: ${error || 'unknown'}`);
cHandler.messageEdit(assistantMessageId, { text: 'Issue: ReAct did not produce an answer.', typing: false }, false);
return false;
}
}
+82 -29
View File
@@ -2,9 +2,11 @@ import * as React from 'react';
import NextImage from 'next/image';
import TimeAgo from 'react-timeago';
import { AspectRatio, Box, Button, Card, CardContent, CardOverflow, Container, Grid, IconButton, Typography } from '@mui/joy';
import { AspectRatio, Box, Button, Card, CardContent, CardOverflow, Container, Grid, Typography } from '@mui/joy';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import LaunchIcon from '@mui/icons-material/Launch';
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
import ArrowOutwardRoundedIcon from '@mui/icons-material/ArrowOutwardRounded';
import { Brand } from '~/common/app.config';
import { Link } from '~/common/components/Link';
@@ -14,10 +16,14 @@ import { capitalizeFirstLetter } from '~/common/util/textUtils';
import { NewsItems } from './news.data';
import { beamNewsCallout } from './beam.data';
import { bigAgi2NewsCallout, bigAgi2Url } from './bigAgi2.data';
import { downloadAllConversationsJson } from '~/modules/trade/trade.client';
// number of news items to show by default, before the expander
const DEFAULT_NEWS_COUNT = 4;
const NEWS_INITIAL_COUNT = 3;
const NEWS_LOAD_STEP = 2;
export const newsRoadmapCallout =
@@ -54,12 +60,17 @@ export const newsRoadmapCallout =
export function AppNews() {
// state
const [lastNewsIdx, setLastNewsIdx] = React.useState<number>(DEFAULT_NEWS_COUNT - 1);
const [lastNewsIdx, setLastNewsIdx] = React.useState<number>(NEWS_INITIAL_COUNT - 1);
// news selection
const news = NewsItems.filter((_, idx) => idx <= lastNewsIdx);
const firstNews = news[0] ?? null;
// show expander
const canExpand = news.length < NewsItems.length;
const currentVer = '1.6.9'; // firstNews?.versionCode;
return (
<Box sx={{
@@ -74,26 +85,41 @@ export function AppNews() {
display: 'flex', flexDirection: 'column', alignItems: 'center',
}}>
<Typography level='h1' sx={{ fontSize: '2.9rem', mb: 4 }}>
Welcome to {Brand.Title.Base} <Box component='span' sx={{ animation: `${animationColorBlues} 10s infinite`, zIndex: 1 /* perf-opt */ }}>{firstNews?.versionCode}</Box>!
<Typography level='h1' sx={{ fontSize: '2.7rem', mb: 4 }}>
Welcome to {Brand.Title.Base} <Box component='span' sx={{ animation: `${animationColorBlues} 10s infinite`, zIndex: 1 /* perf-opt */ }}>{currentVer}</Box>!
</Typography>
<Typography sx={{ mb: 2 }} level='title-sm'>
{capitalizeFirstLetter(Brand.Title.Base)} has been updated to version {firstNews?.versionCode}
<Typography level='title-sm' sx={{ mb: 2, textAlign: 'center', lineHeight: 'lg' }} >
{capitalizeFirstLetter(Brand.Title.Base)} has been updated to version {currentVer}.<br/>
<b>And a whole-new 2.0 is waiting!</b>
</Typography>
<Box sx={{ mb: 5 }}>
<Box sx={{ mb: 5, display: 'flex', gap: 2, flexWrap: 'wrap', justifyContent: 'center' }}>
<Button
variant='solid' color='primary' size='lg'
variant='solid' color='neutral' size='lg'
component={Link} href={ROUTE_INDEX} noLinkStyle
endDecorator='✨'
endDecorator={<ArrowForwardRoundedIcon />}
// endDecorator='✨'
sx={{
boxShadow: '0 8px 24px -4px rgb(var(--joy-palette-primary-mainChannel) / 20%)',
// boxShadow: '0 8px 24px -4px rgb(var(--joy-palette-primary-mainChannel) / 20%)',
minWidth: 180,
}}
>
Continue
</Button>
<Button
variant='solid' color='primary' size='lg'
component={Link} href={bigAgi2Url} noLinkStyle
endDecorator={<><ArrowOutwardRoundedIcon /></>}
// endDecorator='✨'
sx={{
boxShadow: '0 8px 24px -4px rgb(var(--joy-palette-primary-mainChannel) / 20%)',
minWidth: 180,
transform: 'translateY(-1px)',
}}
>
Big-AGI 2
</Button>
</Box>
{/*<Typography level='title-sm' sx={{ mb: 1, placeSelf: 'start', ml: 1 }}>*/}
@@ -102,21 +128,26 @@ export function AppNews() {
<Container disableGutters maxWidth='sm'>
{news?.map((ni, idx) => {
// const firstCard = idx === 0;
const hasCardAfter = news.length < NewsItems.length;
const showExpander = hasCardAfter && (idx === news.length - 1);
const firstCard = idx === 0;
const addPadding = false; //!firstCard; // || showExpander;
return <React.Fragment key={idx}>
{/* Inject the Beam item here*/}
{/* Inject the Big-AGI 2.0 item here*/}
{idx === 0 && (
<Box sx={{ mb: 3 }}>
{bigAgi2NewsCallout}
</Box>
)}
{/* Inject the Beam item here*/}
{idx === 2 && (
<Box sx={{ mb: 3 }}>
{beamNewsCallout}
</Box>
)}
{/* News Item */}
<Card key={'news-' + idx} sx={{ mb: 3, minHeight: 32, gap: 1 }}>
<Card color={firstCard ? 'primary' : undefined} key={'news-' + idx} sx={{ mb: 3, minHeight: 32, gap: 1 }}>
<CardContent sx={{ position: 'relative', pr: addPadding ? 4 : 0 }}>
<Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between' }}>
<Typography level='title-sm' component='div'>
@@ -150,19 +181,27 @@ export function AppNews() {
</ul>
)}
{showExpander && (
<IconButton
variant='solid'
onClick={() => setLastNewsIdx(idx + 1)}
sx={{
position: 'absolute', right: 0, bottom: 0, mr: -1, mb: -1,
// backgroundColor: 'background.surface',
borderRadius: '50%',
}}
>
<ExpandMoreIcon />
</IconButton>
)}
{firstCard && <Box sx={{ mt: 2 }}>
<Card variant='soft' color='primary' invertedColors>
<CardContent>
<Typography level='title-sm' sx={{ lineHeight: 'lg' }}>
<b>Migrate your chats:</b> you can download all your conversations at any time by clicking
on &quot;Export&quot; &gt; &quot;Download All&quot; or the button below. Then open Big-AGI 2 and
import the conversation by clicking &quot;Organize&quot; &gt; &quot;Import&quot;.
</Typography>
<Button
size='sm'
variant='soft'
color='primary'
sx={{ mt: 1 }}
onClick={downloadAllConversationsJson}
>
Download All Conversations
</Button>
</CardContent>
</Card>
</Box>}
</CardContent>
{!!ni.versionCoverImage && (
@@ -181,6 +220,7 @@ export function AppNews() {
</AspectRatio>
</CardOverflow>
)}
</Card>
{/* Inject the roadmap item here*/}
@@ -192,6 +232,19 @@ export function AppNews() {
</React.Fragment>;
})}
{canExpand && (
<Button
fullWidth
variant='soft'
color='neutral'
onClick={() => setLastNewsIdx(index => index + NEWS_LOAD_STEP)}
endDecorator={<ExpandMoreIcon />}
>
Previous News
</Button>
)}
</Container>
{/*<Typography sx={{ textAlign: 'center' }}>*/}
+1 -2
View File
@@ -2,7 +2,6 @@ import * as React from 'react';
import { Button, Card, CardContent, Grid, Typography } from '@mui/joy';
import LaunchIcon from '@mui/icons-material/Launch';
import ThumbUpRoundedIcon from '@mui/icons-material/ThumbUpRounded';
import { Link } from '~/common/components/Link';
@@ -14,7 +13,7 @@ export const beamNewsCallout =
<Card variant='solid' invertedColors>
<CardContent sx={{ gap: 2 }}>
<Typography level='title-lg'>
Beam - just launched in 1.15
Beam - launched in 1.15
</Typography>
<Typography level='body-sm'>
Beam is a world-first, multi-model AI chat modality that accelerates the discovery of superior solutions by leveraging the collective strengths of diverse LLMs.
+52
View File
@@ -0,0 +1,52 @@
import * as React from 'react';
import { Button, Card, CardContent, Grid, Typography } from '@mui/joy';
import AccessTimeIcon from '@mui/icons-material/AccessTime';
import LaunchIcon from '@mui/icons-material/Launch';
import RocketLaunchRounded from '@mui/icons-material/RocketLaunchRounded';
import SupportAgentIcon from '@mui/icons-material/SupportAgent';
import { Link } from '~/common/components/Link';
import { clientUtmSource } from '~/common/util/pwaUtils';
export const bigAgi2Url = 'https://app.big-agi.com' + clientUtmSource('upgrade');
const bigAgiSupport = 'https://form.typeform.com/to/nLf8gFmx?utm_source=big-agi-1&utm_medium=app&utm_campaign=support';
export const bigAgi2NewsCallout =
<Card variant='solid' color='primary' invertedColors>
<CardContent sx={{ gap: 2 }}>
<Typography level='title-lg'>
Big-AGI 2.0 - Now Live
</Typography>
<Typography level='title-sm' sx={{ lineHeight: 'xl' }}>
Experience the <b>next generation of Big-AGI</b> with <b>Beam 2</b>, <b>Personas</b>, and <b>Cloud Sync</b> to never lose data.
</Typography>
<Grid container spacing={1}>
<Grid xs={12} sm={7}>
<Button
size='lg'
fullWidth variant='solid' color='neutral' endDecorator={<RocketLaunchRounded />}
component={Link} href={bigAgi2Url} noLinkStyle target='_blank'
>
Big-AGI 2.0
</Button>
</Grid>
<Grid xs={12} sm={5} sx={{ display: 'flex', flexAlign: 'center', justifyContent: 'center' }}>
<Button
fullWidth variant='soft' color='primary' endDecorator={<SupportAgentIcon />}
component={Link} href={bigAgiSupport} noLinkStyle target='_blank'
// disabled
>
Support
</Button>
</Grid>
</Grid>
</CardContent>
</Card>;
+62 -16
View File
@@ -17,8 +17,12 @@ import { Link } from '~/common/components/Link';
import { clientUtmSource } from '~/common/util/pwaUtils';
import { platformAwareKeystrokes } from '~/common/components/KeyStroke';
import { beamBlogUrl } from './beam.data';
// Cover Images
// A landscape image of a capybara made entirely of clear, translucent crystal, wearing oversized black sunglasses, sitting at a sleek, minimalist desk. The desk is bathed in a soft, ethereal light emanating from within the capybara, symbolizing clarity and transparency. The capybara is typing on a futuristic, holographic keyboard, with floating code snippets and diagrams surrounding it, illustrating an improved developer experience and Auto-Diagrams feature. The background is a clean, white space with subtle, geometric patterns. Close-up photography style with a bokeh effect.
import coverV116 from '../../../public/images/covers/release-cover-v1.16.0.png';
// (not exactly) Imagine a futuristic, holographically bounded space. Inside this space, four capybaras stand. Three of them are in various stages of materialization, their forms made up of thousands of tiny, vibrant particles of electric blues, purples, and greens. These particles represent the merging of different intelligent inputs, symbolizing the concept of 'Beaming'. Positioned slightly towards the center and ahead of the others, the fourth capybara is fully materialized and composed of shimmering golden cotton candy, representing the optimal solution the 'Beam' feature seeks to achieve. The golden capybara gazes forward confidently, embodying a target achieved. Illuminated grid lines softly glow on the floor and walls of the setting, amplifying the futuristic aspect. In front of the golden capybara, floating, holographic interfaces depict complex networks of points and lines symbolizing the solution space 'Beaming' explores. The capybara interacts with these interfaces, implying the user's ability to control and navigate towards the best outcomes.
import coverV115 from '../../../public/images/covers/release-cover-v1.15.0.png';
// An image of a capybara sculpted entirely from iridescent blue cotton candy, gazing into a holographic galaxy of floating AI model icons (representing various AI models like Perplexity, Groq, etc.). The capybara is wearing a lightweight, futuristic headset, and its paws are gesturing as if orchestrating the movement of the models in the galaxy. The backdrop is minimalist, with occasional bursts of neon light beams, creating a sense of depth and wonder. Close-up photography, bokeh effect, with a dark but vibrant background to make the colors pop.
@@ -27,7 +31,6 @@ import coverV114 from '../../../public/images/covers/release-cover-v1.14.0.png';
import coverV113 from '../../../public/images/covers/release-cover-v1.13.0.png';
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is calling on a 3D origami old-school pink telephone and the camera is zooming on the telephone. Close up photography, bokeh, white background.
import coverV112 from '../../../public/images/covers/release-cover-v1.12.0.png';
import { beamBlogUrl } from './beam.data';
interface NewsItem {
@@ -48,16 +51,59 @@ interface NewsItem {
// news and feature surfaces
export const NewsItems: NewsItem[] = [
/*{
versionCode: '1.16.0',
{
versionCode: '2.0',
versionName: 'Big-AGI',
versionDate: new Date('2025-10-06T00:00:00Z'),
// text: <>Big-AGI 2.0 is now available at <B href='https://app.big-agi.com'>app.big-agi.com</B></>,
items: [
{ text: <>Big-AGI 2.0 is ready with top-notch AI models support and more productive and faster than ever, including:</> },
{ text: <><B href='https://app.big-agi.com' wow>Beam 2</B>: multi-modal, program-based, follow-ups, save presets</>, icon: ChatBeamIcon },
{ text: <><B wow>Personas</B>: craft your perfect AI assistants with data support</> },
{ text: <><B wow>Cloud Sync</B>: never lose your data</> },
{ text: <>Built for the future, madly optimized</> },
],
},
/*{
versionCode: '1.17.0',
items: [
Screen Capture (when removed from labs)
Auto-Merge
Draw
...
Screen Capture (when removed from labs)
]
}*/
{
versionCode: '1.15.1',
versionCode: '1.16.9',
versionName: 'Crystal Clear',
versionDate: new Date('2024-06-07T05:00:00Z'),
// versionDate: new Date('2024-05-13T19:00:00Z'),
// versionDate: new Date('2024-05-09T00:00:00Z'),
versionCoverImage: coverV116,
items: [
{ text: <><B href={beamBlogUrl} wow>Beam</B> core and UX improvements based on user feedback</>, issue: 470, icon: ChatBeamIcon },
{ text: <>Chat <B>Cost estimation</B> with supported models* 💰</> },
{ text: <>Major <B>Auto-Diagrams</B> enhancements</> },
{ text: <>Save/load chat files with Ctrl+S / O</>, issue: 466 },
{ text: <><B issue={500}>YouTube Transcriber</B> persona: chat with videos</>, issue: 500 },
{ text: <>Improved <B issue={508}>formula render</B>, dark-mode diagrams</>, issue: 508 },
{ text: <>More: <B issue={517}>code soft-wrap</B>, selection toolbar, <B issue={507}>3x faster</B> on Apple silicon</>, issue: 507 },
{ text: <>Updated <B>Anthropic</B>*, <B>Groq</B>, <B>Ollama</B>, <B>OpenAI</B>*, <B>OpenRouter</B>*, and <B>Perplexity</B></> },
{ text: <>Developers: update LLMs data structures</>, dev: true },
{ text: <>1.16.1: Support for <B>OpenAI</B> <B href='https://openai.com/index/hello-gpt-4o/'>GPT-4o</B></> },
{ text: <>1.16.2: Proper <B>Gemini</B> support, <B>HTML/Markdown</B> downloads, and latest <B>Mistral</B></> },
{ text: <>1.16.3: Support for <B href='https://www.anthropic.com/news/claude-3-5-sonnet'>Claude 3.5 Sonnet</B> (refresh your <B>Anthropic</B> models)</> },
{ text: <>1.16.4: <B>8192 tokens</B> support for Claude 3.5 Sonnet</> },
{ text: <>1.16.5: OpenAI <B>GPT-4o Mini</B> support</> },
{ text: <>1.16.6: Groq <B>Llama 3.1</B> support</> },
{ text: <>1.16.7: Gpt-4o <B>2024-08-06</B></> },
{ text: <>1.16.8: <B>ChatGPT-4o</B> latest</> },
{ text: <>1.16.9: <B>Gemini</B> fixes</> },
{ text: <>OpenAI <B>o1</B>, DeepSeek R1, and newer models require Big-AGI 2. <B href='https://form.typeform.com/to/ZSADpr5u?utm_source=gh-stable&utm_medium=news&utm_campaign=ea2'>Sign up here</B></> },
],
},
{
versionCode: '1.15',
versionName: 'Beam',
versionDate: new Date('2024-04-10T08:00:00Z'),
versionCoverImage: coverV115,
@@ -73,7 +119,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.14.1',
versionCode: '1.14',
versionName: 'Modelmorphic',
versionCoverImage: coverV114,
versionDate: new Date('2024-03-07T08:00:00Z'),
@@ -92,7 +138,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.13.0',
versionCode: '1.13',
versionName: 'Multi + Mind',
versionMoji: '🧠🔀',
versionDate: new Date('2024-02-08T07:47:00Z'),
@@ -108,7 +154,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.12.0',
versionCode: '1.12',
versionName: 'AGI Hotline',
versionMoji: '✨🗣️',
versionDate: new Date('2024-01-26T12:30:00Z'),
@@ -127,7 +173,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.11.0',
versionCode: '1.11',
versionName: 'Singularity',
versionMoji: '🌌🌠',
versionDate: new Date('2024-01-16T06:30:00Z'),
@@ -141,7 +187,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.10.0',
versionCode: '1.10',
versionName: 'The Year of AGI',
// versionMoji: '🎊✨',
versionDate: new Date('2024-01-06T08:00:00Z'),
@@ -155,7 +201,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.9.0',
versionCode: '1.9',
versionName: 'Creative Horizons',
// versionMoji: '🎨🌌',
versionDate: new Date('2023-12-28T22:30:00Z'),
@@ -170,7 +216,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.8.0',
versionCode: '1.8',
versionName: 'To The Moon And Back',
// versionMoji: '🚀🌕🔙❤️',
versionDate: new Date('2023-12-20T09:30:00Z'),
@@ -187,7 +233,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.7.0',
versionCode: '1.7',
versionName: 'Attachment Theory',
// versionDate: new Date('2023-12-11T06:00:00Z'), // 1.7.3
versionDate: new Date('2023-12-10T12:00:00Z'), // 1.7.0
@@ -203,7 +249,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.6.0',
versionCode: '1.6',
versionName: 'Surf\'s Up',
versionDate: new Date('2023-11-28T21:00:00Z'),
items: [
@@ -218,7 +264,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.5.0',
versionCode: '1.5',
versionName: 'Loaded!',
versionDate: new Date('2023-11-19T21:00:00Z'),
items: [
@@ -234,7 +280,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.4.0',
versionCode: '1.4',
items: [
{ text: <><B>Share and clone</B> conversations, with public links</> },
{ text: <><B code='/docs/config-azure-openai.md'>Azure</B> models, incl. gpt-4-32k</> },
+24 -5
View File
@@ -1,21 +1,40 @@
// NOTE: this is a separate file to help with bundle tracing, as it's included by the ProviderBootstrapLogic (i.e. by All pages)
// update this variable every time you want to broadcast a new version to clients
import { create } from 'zustand';
import { persist } from 'zustand/middleware';
import { useAppStateStore } from '~/common/state/store-appstate';
export const incrementalNewsVersion: number = 15;
// update this variable every time you want to broadcast a new version to clients
export const incrementalNewsVersion: number = 16.9; // not notifying for 1.16.9
interface NewsState {
lastSeenNewsVersion: number;
}
export const useAppNewsStateStore = create<NewsState>()(
persist(
(set) => ({
lastSeenNewsVersion: 0,
}),
{
name: 'app-news',
},
),
);
export function shallRedirectToNews() {
const { usageCount, lastSeenNewsVersion } = useAppStateStore.getState();
const { lastSeenNewsVersion } = useAppNewsStateStore.getState();
const { usageCount } = useAppStateStore.getState();
const isNewsOutdated = (lastSeenNewsVersion || 0) < incrementalNewsVersion;
return isNewsOutdated && usageCount > 2;
}
export function markNewsAsSeen() {
const { setLastSeenNewsVersion } = useAppStateStore.getState();
setLastSeenNewsVersion(incrementalNewsVersion);
useAppNewsStateStore.setState({ lastSeenNewsVersion: incrementalNewsVersion });
}
+7 -3
View File
@@ -1,4 +1,5 @@
import * as React from 'react';
import { v4 as uuidv4 } from 'uuid';
import { Alert, Box, Button, Card, CardContent, CircularProgress, Divider, FormLabel, Grid, IconButton, LinearProgress, Tab, tabClasses, TabList, TabPanel, Tabs, Typography } from '@mui/joy';
import AddIcon from '@mui/icons-material/Add';
@@ -102,8 +103,11 @@ export function Creator(props: { display: boolean }) {
strings: editedInstructions, stringEditors: instructionEditors,
} = useFormEditTextArray(Prompts, PromptTitles);
const creationChainSteps = React.useMemo(() => {
return createChain(editedInstructions, PromptTitles);
const { steps: creationChainSteps, id: chainId } = React.useMemo(() => {
return {
steps: createChain(editedInstructions, PromptTitles),
id: uuidv4(),
};
}, [editedInstructions]);
const llmLabel = personaLlm?.label || undefined;
@@ -122,7 +126,7 @@ export function Creator(props: { display: boolean }) {
chainError,
userCancelChain,
restartChain,
} = useLLMChain(creationChainSteps, personaLlm?.id, chainInputText ?? undefined, savePersona);
} = useLLMChain(creationChainSteps, personaLlm?.id, chainInputText ?? undefined, savePersona, 'persona-extract', chainId);
// Reset the relevant state when the selected tab changes
+20 -9
View File
@@ -1,6 +1,6 @@
import * as React from 'react';
import { Box, Button, IconButton, ListItemButton, ListItemDecorator, Sheet, Tooltip, Typography } from '@mui/joy';
import { Box, Button, IconButton, ListItemDecorator, Sheet, Tooltip } from '@mui/joy';
import CheckBoxIcon from '@mui/icons-material/CheckBox';
import CheckBoxOutlineBlankIcon from '@mui/icons-material/CheckBoxOutlineBlank';
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
@@ -136,17 +136,28 @@ export function CreatorDrawer(props: {
</Sheet>
) : (
// Create Button
<ListItemButton
<Button
variant={props.selectedSimplePersonaId ? 'plain' : 'soft'}
onClick={handleSimplePersonaUnselect}
sx={{
m: 2,
// ...PageDrawerTallItemSx,
justifyContent: 'flex-start',
padding: '0px 0.75rem',
// style
border: '1px solid',
borderColor: 'neutral.outlinedBorder',
borderRadius: 'sm',
'--ListItemDecorator-size': 'calc(2.5rem - 1px)', // compensate for the border
}}
>
<ListItemDecorator>
<Diversity2Icon />
</ListItemDecorator>
<Typography level='title-sm' sx={!props.selectedSimplePersonaId ? { fontWeight: 'lg' } : undefined}>
Create
</Typography>
</ListItemButton>
<ListItemDecorator><Diversity2Icon /></ListItemDecorator>
{/*<Typography level='title-sm' sx={!props.selectedSimplePersonaId ? { fontWeight: 'lg' } : undefined}>*/}
Create
{/*</Typography>*/}
</Button>
)}
{/* Personas [] */}
+1 -1
View File
@@ -200,7 +200,7 @@ export function SettingsModal(props: {
<TabPanel value={PreferencesTab.Tools} variant='outlined' sx={{ p: 'var(--Tabs-gap)', borderRadius: 'md' }}>
<Topics>
<Topic icon={<SearchIcon />} title='Browsing' startCollapsed>
<Topic icon={<SearchIcon />} title='Browsing'>
<BrowseSettings />
</Topic>
<Topic icon={<SearchIcon />} title='Google Search API' startCollapsed>
+2 -2
View File
@@ -19,8 +19,8 @@ const shortcutsMd = platformAwareKeystrokes(`
| Ctrl + Shift + V | Attach clipboard (better than Ctrl + V) |
| Ctrl + M | Microphone (voice typing) |
| **Chats** | |
| Ctrl + O | Open Chat ... |
| Ctrl + S | Save Chat ... |
| Ctrl + O | Open Chat File ... |
| Ctrl + S | Save Chat File ... |
| Ctrl + Alt + N | **New** chat |
| Ctrl + Alt + X | **Reset** chat |
| Ctrl + Alt + D | **Delete** chat |
+22 -13
View File
@@ -2,11 +2,11 @@ import * as React from 'react';
import { FormControl, Typography } from '@mui/joy';
import AddAPhotoIcon from '@mui/icons-material/AddAPhoto';
import LocalAtmOutlinedIcon from '@mui/icons-material/LocalAtmOutlined';
import ScreenshotMonitorIcon from '@mui/icons-material/ScreenshotMonitor';
import SpeedIcon from '@mui/icons-material/Speed';
import TitleIcon from '@mui/icons-material/Title';
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
import { FormSwitchControl } from '~/common/components/forms/FormSwitchControl';
import { Link } from '~/common/components/Link';
@@ -24,48 +24,57 @@ export function UxLabsSettings() {
const isMobile = useIsMobile();
const {
labsAttachScreenCapture, setLabsAttachScreenCapture,
labsBeam, setLabsBeam,
labsCameraDesktop, setLabsCameraDesktop,
labsChatBarAlt, setLabsChatBarAlt,
labsHighPerformance, setLabsHighPerformance,
labsShowCost, setLabsShowCost,
} = useUXLabsStore();
return <>
<FormSwitchControl
title={<><ChatBeamIcon color={labsBeam ? 'primary' : undefined} sx={{ mr: 0.25 }} />Chat Beam</>} description={'v1.15 · ' + (labsBeam ? 'Active' : 'Off')}
checked={labsBeam} onChange={setLabsBeam}
/>
{/* 'v1.15 · ' + .. */}
<FormSwitchControl
title={<><SpeedIcon color={labsHighPerformance ? 'primary' : undefined} sx={{ mr: 0.25 }} />Performance</>} description={'v1.14 · ' + (labsHighPerformance ? 'Unlocked' : 'Default')}
title={<><SpeedIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} />Performance</>} description={labsHighPerformance ? 'Unlocked' : 'Default'}
checked={labsHighPerformance} onChange={setLabsHighPerformance}
/>
{DEV_MODE_SETTINGS && <FormSwitchControl
title={<><TitleIcon color={labsChatBarAlt ? 'primary' : undefined} sx={{ mr: 0.25 }} />Chat Title</>} description={'v1.14 · ' + (labsChatBarAlt === 'title' ? 'Show Title' : 'Show Models')}
title={<><TitleIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} />Chat Title</>} description={labsChatBarAlt === 'title' ? 'Show Title' : 'Show Models'}
checked={labsChatBarAlt === 'title'} onChange={(on) => setLabsChatBarAlt(on ? 'title' : false)}
/>}
{!isMobile && <FormSwitchControl
title={<><ScreenshotMonitorIcon color={labsAttachScreenCapture ? 'primary' : undefined} sx={{ mr: 0.25 }} /> Screen Capture</>} description={'v1.13 · ' + (labsAttachScreenCapture ? 'Enabled' : 'Disabled')}
title={<><ScreenshotMonitorIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} /> Screen Capture</>} description={labsAttachScreenCapture ? 'Enabled' : 'Disabled'}
checked={labsAttachScreenCapture} onChange={setLabsAttachScreenCapture}
/>}
{!isMobile && <FormSwitchControl
title={<><AddAPhotoIcon color={labsCameraDesktop ? 'primary' : undefined} sx={{ mr: 0.25 }} /> Webcam</>} description={/*'v1.8 · ' +*/ (labsCameraDesktop ? 'Enabled' : 'Disabled')}
title={<><AddAPhotoIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} /> Webcam Capture</>} description={/*'v1.8 · ' +*/ (labsCameraDesktop ? 'Enabled' : 'Disabled')}
checked={labsCameraDesktop} onChange={setLabsCameraDesktop}
/>}
<FormSwitchControl
title={<><LocalAtmOutlinedIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} />Cost of messages</>} description={labsShowCost ? 'Show when available' : 'Disabled'}
checked={labsShowCost} onChange={setLabsShowCost}
/>
{/*
Other Graduated (removed or backlog):
- <Link href='https://github.com/enricoros/big-AGI/issues/359' target='_blank'>Draw App</Link>
- Text Tools: dinamically shown where applicable (e.g. Diff)
- Chat Mode: follow-ups; moved to Chat Advanced UI
*/}
<FormControl orientation='horizontal' sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
<FormLabelStart title='Graduated' description='Ex-labs' />
<Typography level='body-xs'>
<Link href='https://github.com/enricoros/big-AGI/issues/208' target='_blank'>Split Chats</Link>
{' · '}<Link href='https://github.com/enricoros/big-AGI/issues/359' target='_blank'>Draw App</Link>
<Link href='https://big-agi.com/blog/beam-multi-model-ai-reasoning' target='_blank'>Beam</Link>
{' · '}<Link href='https://github.com/enricoros/big-AGI/issues/208' target='_blank'>Split Chats</Link>
{' · '}<Link href='https://github.com/enricoros/big-AGI/issues/354' target='_blank'>Call AGI</Link>
{' · '}<Link href='https://github.com/enricoros/big-AGI/issues/282' target='_blank'>Persona Creator</Link>
{' · '}<Link href='https://github.com/enricoros/big-agi/issues/192' target='_blank'>Auto Diagrams</Link>
{' · '}Imagine · Relative chat size · Text Tools · LLM Overheat
{' · '}Imagine · Chat Search · Text Tools · LLM Overheat
</Typography>
</FormControl>
+4
View File
@@ -169,6 +169,7 @@ export function adjustContentScaling(scaling: ContentScaling, offset?: number) {
interface ContentScalingOptions {
// BlocksRenderer
blockCodeFontSize: string;
blockCodeMarginY: number;
blockFontSize: string;
blockImageGap: number;
blockLineHeight: string | number;
@@ -182,6 +183,7 @@ interface ContentScalingOptions {
export const themeScalingMap: Record<ContentScaling, ContentScalingOptions> = {
xs: {
blockCodeFontSize: '0.75rem',
blockCodeMarginY: 0.5,
blockFontSize: 'xs',
blockImageGap: 1,
blockLineHeight: 1.666667,
@@ -191,6 +193,7 @@ export const themeScalingMap: Record<ContentScaling, ContentScalingOptions> = {
},
sm: {
blockCodeFontSize: '0.75rem',
blockCodeMarginY: 1,
blockFontSize: 'sm',
blockImageGap: 1.5,
blockLineHeight: 1.714286,
@@ -200,6 +203,7 @@ export const themeScalingMap: Record<ContentScaling, ContentScalingOptions> = {
},
md: {
blockCodeFontSize: '0.875rem',
blockCodeMarginY: 1.5,
blockFontSize: 'md',
blockImageGap: 2,
blockLineHeight: 1.75,
+11 -4
View File
@@ -8,6 +8,7 @@ import { ChatActions, createDMessage, DConversationId, DMessage, getConversation
import { createBeamVanillaStore } from '~/modules/beam/store-beam-vanilla';
import { EphemeralHandler, EphemeralsStore } from './EphemeralsStore';
import { createChatOverlayVanillaStore } from './store-chat-overlay-vanilla';
/**
@@ -21,6 +22,7 @@ export class ConversationHandler {
private readonly conversationId: DConversationId;
private readonly beamStore = createBeamVanillaStore();
private readonly overlayStore = createChatOverlayVanillaStore();
readonly ephemeralsStore: EphemeralsStore = new EphemeralsStore();
@@ -84,7 +86,7 @@ export class ConversationHandler {
// if zeroing the messages, also terminate an active beam
if (!messages.length)
this.beamStore.getState().terminate();
this.beamStore.getState().terminateKeepingSettings();
}
@@ -100,7 +102,7 @@ export class ConversationHandler {
* @param destReplaceMessageId If set, the output will replace the message with this id, otherwise it will append to the history
*/
beamInvoke(viewHistory: Readonly<DMessage[]>, importMessages: DMessage[], destReplaceMessageId: DMessage['id'] | null): void {
const { open: beamOpen, importRays: beamImportRays, terminate: beamTerminate } = this.beamStore.getState();
const { open: beamOpen, importRays: beamImportRays, terminateKeepingSettings } = this.beamStore.getState();
const onBeamSuccess = (messageText: string, llmId: DLLMId) => {
// set output when going back to the chat
@@ -116,11 +118,11 @@ export class ConversationHandler {
}
// close beam
this.beamStore.getState().terminate();
terminateKeepingSettings();
};
beamOpen(viewHistory, useModelsStore.getState().chatLLMId, onBeamSuccess);
importMessages.length && beamImportRays(importMessages);
importMessages.length && beamImportRays(importMessages, useModelsStore.getState().chatLLMId);
}
@@ -130,4 +132,9 @@ export class ConversationHandler {
return new EphemeralHandler(title, initialText, this.ephemeralsStore);
}
// Overlay Store
getOverlayStore = () => this.overlayStore;
}
@@ -0,0 +1,54 @@
import { StoreApi, useStore } from 'zustand';
import { createStore, StateCreator } from 'zustand/vanilla';
/// Composer Slice: per-chat composer overlay state ///
interface ComposerOverlayState {
// if set, this is the 'reply to' mode text
replyToText: string | null;
}
const initComposerOverlayStateSlice = (): ComposerOverlayState => ({
replyToText: null,
});
interface ComposerOverlayStore extends ComposerOverlayState {
setReplyToText: (text: string | null) => void;
}
const createComposerOverlayStoreSlice: StateCreator<ComposerOverlayStore, [], [], ComposerOverlayStore> = (_set, _get) => ({
// init state
...initComposerOverlayStateSlice(),
// actions
setReplyToText: (text: string | null) => _set({ replyToText: text }),
});
/// Chat Overlay Store: per-chat overlay state ///
// Note: at this time there are numerous overlay stores, including beam (vanilla), ephemerals (EventTarget), and this one.
export type OverlayStore = ComposerOverlayStore;
export type OverlayStoreApi = Readonly<StoreApi<OverlayStore>>;
export const createChatOverlayVanillaStore = () => createStore<OverlayStore>()((...a) => ({
...createComposerOverlayStoreSlice(...a),
}));
const fallbackOverlayStore = createChatOverlayVanillaStore();
export const useChatOverlayStore = <T, >(vanillaStore: OverlayStoreApi | null, selector: (store: OverlayStore) => T): T =>
useStore(vanillaStore || fallbackOverlayStore, selector);
+15
View File
@@ -0,0 +1,15 @@
import * as React from 'react';
import { Typography } from '@mui/joy';
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
export function AlreadySet(props: { required?: boolean }) {
return (
<Typography level='body-sm' startDecorator={props.required ? undefined : <CheckRoundedIcon color='success' />}>
{/*Installed Already*/}
{props.required ? 'required' : 'Already set on server'}
</Typography>
);
}
+82 -55
View File
@@ -3,18 +3,24 @@ import { sendGAEvent } from '@next/third-parties/google';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, Button, Step, stepClasses, StepIndicator, stepIndicatorClasses, Stepper, Typography } from '@mui/joy';
import ArrowBackRoundedIcon from '@mui/icons-material/ArrowBackRounded';
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
import KeyboardArrowDownRoundedIcon from '@mui/icons-material/KeyboardArrowDownRounded';
import { ChatMessageMemo } from '../../apps/chat/components/message/ChatMessage';
import { BlocksRenderer } from '~/modules/blocks/BlocksRenderer';
import { AgiSquircleIcon } from '~/common/components/icons/AgiSquircleIcon';
import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon';
import { GlobalShortcutItem, ShortcutKeyName, useGlobalShortcuts } from '~/common/components/useGlobalShortcut';
import { createDMessage } from '~/common/state/store-chats';
import { hasGoogleAnalytics } from '~/common/components/GoogleAnalytics';
import { useIsMobile } from '~/common/components/useMatchMedia';
import { animationTextShadowLimey } from '~/common/util/animUtils';
// configuration
const colorButtons = 'neutral' as const;
const colorStepper = 'neutral' as const;
// Steps - the top stepper
@@ -27,13 +33,13 @@ interface ExplainerStep {
const stepSequenceSx: SxProps = {
// width: '100%',
[`& .${stepClasses.completed}::after`]: {
bgcolor: 'primary.500',
bgcolor: `${colorStepper}.500`,
},
[`& .${stepClasses.active} .${stepIndicatorClasses.root}`]: {
borderColor: 'primary.500',
borderColor: `${colorStepper}.500`,
},
[`& .${stepClasses.root}:has(+ .${stepClasses.active})::after`]: {
color: 'primary.500',
color: `${colorStepper}.500`,
backgroundColor: 'transparent',
backgroundImage: 'radial-gradient(currentColor 2px, transparent 2px)',
backgroundSize: '7px 7px',
@@ -41,6 +47,18 @@ const stepSequenceSx: SxProps = {
},
};
const buttonBaseSx: SxProps = {
justifyContent: 'space-between',
minHeight: '2.5rem',
minWidth: 120,
};
const buttonNextSx: SxProps = {
...buttonBaseSx,
boxShadow: `0 8px 24px -4px rgb(var(--joy-palette-${colorButtons}-mainChannel) / 20%)`,
minWidth: 180,
};
function AllStepsStepper(props: {
steps: ExplainerStep[],
@@ -59,10 +77,14 @@ function AllStepsStepper(props: {
orientation='vertical'
completed={completed}
active={active}
onClick={() => props.onStepClicked(stepIndex)}
indicator={
<StepIndicator variant={(completed || active) ? 'solid' : 'outlined'} color='primary'>
{completed ? <CheckRoundedIcon /> : active ? <KeyboardArrowDownRoundedIcon /> : undefined}
<StepIndicator
variant={(completed || active) ? 'solid' : 'outlined'}
color={colorStepper}
onClick={() => props.onStepClicked(stepIndex)}
sx={{ cursor: 'pointer' }}
>
{completed ? <CheckRoundedIcon sx={{ fontSize: 'md' }} /> : active ? <KeyboardArrowDownRoundedIcon sx={{ fontSize: 'lg' }} /> : undefined}
</StepIndicator>
}
>
@@ -95,6 +117,7 @@ export function ExplainerCarousel(props: {
explainerId: string,
steps: ExplainerPage[],
footer?: React.ReactNode,
noStepper?: boolean,
onFinished: () => any,
}) {
@@ -106,15 +129,13 @@ export function ExplainerCarousel(props: {
// derived state
const { onFinished } = props;
const isFirstPage = stepIndex === 0;
const isLastPage = stepIndex === props.steps.length - 1;
const activeStep = props.steps[stepIndex] ?? null;
// handlers
const mdText = activeStep?.mdContent ?? null;
const mdMessage = React.useMemo(() => {
return mdText ? createDMessage('assistant', mdText) : null;
}, [mdText]);
const handlePrevPage = React.useCallback(() => {
setStepIndex(step => step > 0 ? step - 1 : step);
@@ -161,7 +182,7 @@ export function ExplainerCarousel(props: {
// content
display: 'flex',
flexDirection: 'column',
justifyContent: 'space-around',
justifyContent: 'space-evenly',
gap: 2,
}}>
@@ -171,85 +192,91 @@ export function ExplainerCarousel(props: {
level='h1'
component='h1'
sx={{
fontSize: isMobile ? '2rem' : '2.75rem',
fontSize: isMobile ? '2rem' : '2.5rem',
fontWeight: 'md',
textAlign: 'center',
whiteSpace: 'balance',
}}>
{activeStep?.titlePrefix}{' '}
{!!activeStep?.titleSquircle && <AgiSquircleIcon inverted sx={{ color: 'white', fontSize: isMobile ? '1.55rem' : '2.04rem', borderRadius: 'md' }} />}
{!!activeStep?.titleSquircle && '-'}
{!!activeStep?.titleSpark && <Box component='span' sx={{ fontWeight: 'lg', /*animation: `${animationTextShadowLimey} 15s linear infinite`*/ color: 'primary.softColor' }}>
{!!activeStep?.titleSpark && <Box component='span' sx={{
fontWeight: 'lg',
color: 'neutral.softColor',
animation: `${animationTextShadowLimey} 5s infinite`,
/*, animation: `${animationTextShadowLimey} 15s linear infinite`*/
}}>
{activeStep.titleSpark}
</Box>}{activeStep?.titleSuffix}
</Typography>
{/* All Steps */}
<Box>
<AllStepsStepper
steps={props.steps}
activeIndex={stepIndex}
isMobile={isMobile}
onStepClicked={setStepIndex}
/>
</Box>
{/* Page Message */}
{!!mdMessage && (
<ChatMessageMemo
message={mdMessage}
fitScreen={isMobile}
showAvatar={false}
adjustContentScaling={isMobile ? 0 : undefined}
sx={{
minHeight: '19rem', // 256px
py: 2,
border: 'none',
bordreRadius: 0,
borderRadius: 'xl',
// boxShadow: '0 8px 24px -4px rgb(var(--joy-palette-primary-darkChannel) / 0.12)',
<Box sx={{ display: 'flex', flexDirection: 'column', alignItems: 'center', gap: 1 }}>
{/* Main Card with the markdown body */}
{!!mdText && (
<Box sx={{
minHeight: '24rem',
backgroundColor: 'background.popup',
borderRadius: 'lg',
boxShadow: '0 60px 32px -60px rgb(var(--joy-palette-primary-darkChannel) / 0.14)',
mb: 2,
px: { xs: 1, md: 2 },
py: 2,
// customize the embedded GitHub Markdown for transparent images
['.markdown-body img']: {
'--color-canvas-default': 'transparent!important',
},
}}
/>
)}
}}>
<BlocksRenderer
text={mdText}
fromRole='assistant'
contentScaling='md'
fitScreen={isMobile}
renderTextAsMarkdown
/>
</Box>
)}
{/* Buttons */}
<Box sx={{ display: 'flex', flexDirection: 'column', alignItems: 'center', gap: 1 }}>
{/* Advance Button */}
<Button
variant='solid'
size='lg'
endDecorator={isLastPage ? <ChatBeamIcon /> : <ArrowForwardRoundedIcon />}
color={colorButtons}
onClick={handleNextPage}
sx={{
boxShadow: '0 8px 24px -4px rgb(var(--joy-palette-primary-mainChannel) / 20%)',
minWidth: 180,
}}
endDecorator={isLastPage ? <ChatBeamIcon /> : <ArrowForwardRoundedIcon />}
sx={buttonNextSx}
>
{isLastPage ? 'Start' : 'Next'}
</Button>
{/* Back Button */}
<Button
variant='outlined'
color='neutral'
variant='plain'
color={colorButtons}
disabled={isFirstPage}
onClick={handlePrevPage}
sx={{
minWidth: 140,
}}
startDecorator={<ArrowBackRoundedIcon />}
sx={buttonBaseSx}
>
Previous
</Button>
</Box>
{/* All Steps */}
{props.noStepper ? null : (
<AllStepsStepper
steps={props.steps}
activeIndex={stepIndex}
isMobile={isMobile}
onStepClicked={setStepIndex}
/>
)}
{/* Final words of wisdom (also perfect for centering the other components) */}
{props.footer}
+2
View File
@@ -11,6 +11,7 @@ export const GoodTooltip = (props: {
title: React.ReactNode,
placement?: 'top' | 'bottom' | 'top-start',
isError?: boolean, isWarning?: boolean,
arrow?: boolean,
usePlain?: boolean,
children: React.JSX.Element,
sx?: SxProps
@@ -19,6 +20,7 @@ export const GoodTooltip = (props: {
title={props.title}
placement={props.placement}
disableInteractive
arrow={props.arrow}
variant={(props.isError || props.isWarning) ? 'soft' : props.usePlain ? 'plain' : undefined}
color={props.isError ? 'danger' : props.isWarning ? 'warning' : undefined}
sx={{
@@ -39,6 +39,7 @@ const FormLabelStartBase = (props: {
{!!props.description && (
<FormHelperText
sx={{
fontSize: 'xs',
display: 'block',
}}
>
+2 -2
View File
@@ -8,7 +8,7 @@ import { FormRadioOption } from './FormRadioControl';
/**
* Warning: this must be a constant to avoid re-rendering the radio group
*/
export function useFormRadio<T extends string>(initialValue: T, options: FormRadioOption<T>[], label?: string, hidden?: boolean): [T | null, React.JSX.Element | null] {
export function useFormRadio<T extends string>(initialValue: T, options: FormRadioOption<T>[], label?: string, hidden?: boolean): [T | null, React.JSX.Element | null, React.Dispatch<React.SetStateAction<T | null>>] {
// state
const [value, setValue] = React.useState<T | null>(initialValue);
@@ -33,5 +33,5 @@ export function useFormRadio<T extends string>(initialValue: T, options: FormRad
[handleChange, hidden, label, options, value],
);
return [value, component];
return [value, component, setValue];
}
+1 -1
View File
@@ -39,7 +39,7 @@ const DesktopDrawerTranslatingSheet = styled(Sheet)(({ theme }) => ({
// borderBottomRightRadius: 'var(--AGI-Optima-Radius)',
// contain: 'strict',
// boxShadow: theme.shadow.md, // too thin and complex; also tried 40px blurs
boxShadow: `1px 2px 6px 0 rgba(${theme.palette.neutral.darkChannel} / 0.12)`,
boxShadow: `0px 0px 6px 0 rgba(${theme.palette.neutral.darkChannel} / 0.12)`,
// content layout
display: 'flex',
+2 -2
View File
@@ -4,7 +4,7 @@ import Router from 'next/router';
import type { SxProps } from '@mui/joy/styles/types';
import { Divider, Dropdown, ListItemDecorator, Menu, MenuButton, MenuItem, Tooltip } from '@mui/joy';
import MenuIcon from '@mui/icons-material/Menu';
import MoreHorizRoundedIcon from '@mui/icons-material/MoreHorizRounded';
import MoreHorizIcon from '@mui/icons-material/MoreHoriz';
import { useModelsStore } from '~/modules/llms/store-llms';
@@ -93,7 +93,7 @@ export function DesktopNav(props: { component: React.ElementType, currentApp?: N
<Dropdown key='n-app-overflow'>
<Tooltip disableInteractive enterDelay={600} title='More Apps'>
<MenuButton slots={{ root: DesktopNavIcon }} slotProps={{ root: { className: navItemClasses.typeApp } }}>
<MoreHorizRoundedIcon />
<MoreHorizIcon />
</MenuButton>
</Tooltip>
<Menu variant='solid' invertedColors placement='right-start'>
+11 -1
View File
@@ -108,6 +108,11 @@ export function PageBar(props: { component: React.ElementType, currentApp?: NavI
return <CommonPageMenuItems onClose={closePageMenu} />;
}, [closePageMenu]);
const handlePageContextMenu = React.useCallback((event: React.MouseEvent) => {
event.preventDefault(); // added for the Right mouse click (to prevent the menu)
openPageMenu();
}, [openPageMenu]);
// [Desktop] hide the app bar if the current app doesn't use it
const desktopHide = !!props.currentApp?.hideBar && !props.isMobile;
if (desktopHide)
@@ -165,7 +170,12 @@ export function PageBar(props: { component: React.ElementType, currentApp?: NavI
{/* Page Menu Anchor */}
<InvertedBarCornerItem>
<IconButton disabled={!pageMenuAnchor /*|| (!appMenuItems && !props.isMobile)*/} onClick={openPageMenu} ref={pageMenuAnchor}>
<IconButton
ref={pageMenuAnchor}
disabled={!pageMenuAnchor /*|| (!appMenuItems && !props.isMobile)*/}
onClick={openPageMenu}
onContextMenu={handlePageContextMenu}
>
<MoreVertIcon />
</IconButton>
</InvertedBarCornerItem>
+2 -2
View File
@@ -57,8 +57,8 @@ export function PageWrapper(props: { component: React.ElementType, currentApp?:
sx={{
boxShadow: {
xs: 'none',
md: amplitude === 'narrow' ? 'md' : 'none',
xl: amplitude !== 'full' ? 'lg' : 'none',
md: amplitude === 'narrow' ? '0px 0px 4px 0 rgba(50 56 62 / 0.12)' : 'none',
xl: amplitude !== 'full' ? '0px 0px 4px 0 rgba(50 56 62 / 0.12)' : 'none',
},
}}
>
@@ -1,7 +1,7 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { IconButton, Sheet, Typography } from '@mui/joy';
import { Box, IconButton, Typography } from '@mui/joy';
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
@@ -11,23 +11,24 @@ export const PageDrawerHeader = (props: {
sx?: SxProps,
children?: React.ReactNode,
}) =>
<Sheet
variant='outlined'
<Box
// variant='soft'
// invertedColors
sx={{
minHeight: 'var(--AGI-Nav-width)',
// content
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
px: 1,
// style
borderTop: 'none',
borderLeft: 'none',
borderRight: 'none',
backgroundColor: 'background.popup',
// borderLeft: 'none',
// borderRight: 'none',
// borderTop: 'none',
// borderTopRightRadius: 'var(--AGI-Optima-Radius)',
// layout
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
}}
>
@@ -41,4 +42,4 @@ export const PageDrawerHeader = (props: {
<CloseRoundedIcon />
</IconButton>
</Sheet>;
</Box>;
+4 -8
View File
@@ -2,9 +2,9 @@ import { createStore } from 'zustand/vanilla';
import { persist } from 'zustand/middleware';
import { DModelSource, useModelsStore } from '~/modules/llms/store-llms';
import { createModelSourceForVendor, findAccessForSourceOrThrow, findAllVendors } from '~/modules/llms/vendors/vendors.registry';
import { createModelSourceForVendor, findAllVendors } from '~/modules/llms/vendors/vendors.registry';
import { getBackendCapabilities } from '~/modules/backend/store-backend-capabilities';
import { updateModelsForSource } from '~/modules/llms/vendors/useLlmUpdateModels';
import { llmsUpdateModelsForSourceOrThrow } from '~/modules/llms/llm.client';
interface AutoConfStore {
@@ -65,12 +65,8 @@ const autoConfVanillaStore = createStore<AutoConfStore>()(persist((_set, _get) =
source = useModelsStore.getState().sources.find(_s => _s.id === source.id)!;
}
// get the access, assuming there's no client config and the server will do all
const transportAcess = findAccessForSourceOrThrow(source.id);
// fetch models
const data = await vendor.rpcUpdateModelsOrThrow(transportAcess);
return updateModelsForSource(data, source, true);
// auto-configure this source
await llmsUpdateModelsForSourceOrThrow(source.id, true);
})
.catch(error => {
// catches errors and logs them, but does not stop the chain
@@ -4,6 +4,8 @@ import type { SxProps } from '@mui/joy/styles/types';
import { IconButton } from '@mui/joy';
import KeyboardDoubleArrowDownIcon from '@mui/icons-material/KeyboardDoubleArrowDown';
import { themeZIndexBeamView } from '~/common/app.theme';
import { useScrollToBottom } from './useScrollToBottom';
@@ -11,6 +13,9 @@ const inlineButtonSx: SxProps = {
// style it
// NOTE: just an IconButton when inline
// for usage inside BeamGatherPane, to not enlarge the row
my: -0.25,
// fade it in when hovering
// transition: 'all 0.15s',
// '&:hover': {
@@ -27,7 +32,7 @@ const absoluteButtonSx: SxProps = {
borderColor: 'neutral.500',
borderRadius: '50%',
boxShadow: 'sm',
zIndex: 3, // stay on top of the Chat Message buttons (e.g. copy)
zIndex: themeZIndexBeamView + 1, // stay on top of the Chat Message buttons (e.g. copy)
// place this on the bottom-right corner (FAB-like)
position: 'absolute',
@@ -57,6 +62,7 @@ export function ScrollToBottomButton(props: { inline?: boolean }) {
aria-label='Scroll To Bottom'
variant='plain'
onClick={handleStickToBottom}
size={props.inline ? 'sm' : undefined}
sx={props.inline ? inlineButtonSx : absoluteButtonSx}
>
<KeyboardDoubleArrowDownIcon sx={{ fontSize: 'xl' }} />
+1 -12
View File
@@ -6,24 +6,13 @@ import { persist } from 'zustand/middleware';
interface AppStateData {
usageCount: number;
lastSeenNewsVersion: number;
// suppressedItems: Record<string, boolean>;
}
interface AppStateActions {
setLastSeenNewsVersion: (version: number) => void;
}
export const useAppStateStore = create<AppStateData & AppStateActions>()(
export const useAppStateStore = create<AppStateData>()(
persist(
(set) => ({
usageCount: 0,
lastSeenNewsVersion: 0,
// suppressedItems: {},
setLastSeenNewsVersion: (version: number) => set({ lastSeenNewsVersion: version }),
}),
{
+31 -33
View File
@@ -5,7 +5,7 @@ import { v4 as uuidv4 } from 'uuid';
import { DLLMId, getChatLLMId } from '~/modules/llms/store-llms';
import { IDB_MIGRATION_INITIAL, idbStateStorage } from '../util/idbUtils';
import { idbStateStorage } from '../util/idbUtils';
import { countModelTokens } from '../util/token-counter';
import { defaultSystemPurposeId, SystemPurposeId } from '../../data';
@@ -65,7 +65,8 @@ export interface DMessage {
purposeId?: SystemPurposeId; // only assistant/system
originLLM?: string; // only assistant - model that generated this message, goes beyond known models
userFlags?: DMessageUserFlag[]; // user-set per-message flags
metadata?: DMessageMetadata; // metadata, mainly at creation and for UI
userFlags?: DMessageUserFlag[]; // (UI) user-set per-message flags
tokenCount: number; // cache for token count, using the current Conversation model (0 = not yet calculated)
@@ -76,6 +77,10 @@ export interface DMessage {
export type DMessageUserFlag =
| 'starred'; // user starred this
export interface DMessageMetadata {
inReplyToText?: string; // text this was in reply to
}
export function createDMessage(role: DMessage['role'], text: string): DMessage {
return {
id: uuidv4(),
@@ -130,6 +135,7 @@ export interface ChatActions {
appendMessage: (conversationId: string, message: DMessage) => void;
deleteMessage: (conversationId: string, messageId: string) => void;
editMessage: (conversationId: string, messageId: string, update: Partial<DMessage> | ((message: DMessage) => Partial<DMessage>), touchUpdated: boolean) => void;
updateMetadata: (conversationId: string, messageId: string, metadataDelta: Partial<DMessageMetadata>, touchUpdated?: boolean) => void;
setSystemPurposeId: (conversationId: string, systemPurposeId: SystemPurposeId) => void;
setAutoTitle: (conversationId: string, autoTitle: string) => void;
setUserTitle: (conversationId: string, userTitle: string) => void;
@@ -345,10 +351,31 @@ export const useChatStore = create<ConversationsStore>()(devtools(
return {
messages,
tokenCount: messages.reduce((sum, message) => sum + 4 + message.tokenCount || 0, 3),
...(touchUpdated && { updated: Date.now() }),
updated: touchUpdated ? Date.now() : conversation.updated,
};
}),
updateMetadata: (conversationId: string, messageId: string, metadataDelta: Partial<DMessageMetadata>, touchUpdated: boolean = true) => {
_get()._editConversation(conversationId, conversation => {
const messages = conversation.messages.map(message =>
message.id !== messageId ? message
: {
...message,
metadata: {
...message.metadata,
...metadataDelta,
},
updated: touchUpdated ? Date.now() : message.updated,
},
);
return {
messages,
updated: touchUpdated ? Date.now() : conversation.updated,
};
});
},
setSystemPurposeId: (conversationId: string, systemPurposeId: SystemPurposeId) =>
_get()._editConversation(conversationId,
{
@@ -380,10 +407,7 @@ export const useChatStore = create<ConversationsStore>()(devtools(
storage: createJSONStorage(() => idbStateStorage),
// Migrations
migrate: (persistedState: unknown, fromVersion: number): ConversationsStore => {
// -1 -> 3: migration loading from localStorage to IndexedDB
if (fromVersion === IDB_MIGRATION_INITIAL)
return _migrateLocalStorageData() as any;
migrate: (persistedState: unknown, _fromVersion: number): ConversationsStore => {
// other: just proceed
return persistedState as any;
@@ -438,32 +462,6 @@ function getNextBranchTitle(currentTitle: string): string {
return `(1) ${currentTitle}`;
}
/**
* Returns the chats stored in the localStorage, and rename the key for
* backup/data loss prevention purposes
*/
function _migrateLocalStorageData(): ChatState | {} {
const key = 'app-chats';
const value = localStorage.getItem(key);
if (!value) return {};
try {
// parse the localStorage state
const localStorageState = JSON.parse(value)?.state;
// backup and delete the localStorage key
const backupKey = `${key}-v2`;
localStorage.setItem(backupKey, value);
localStorage.removeItem(key);
// match the state from localstorage
return {
conversations: localStorageState?.conversations ?? [],
};
} catch (error) {
console.error('LocalStorage migration error', error);
return {};
}
}
/**
* Convenience function to count the tokens in a DMessage object
+6
View File
@@ -32,6 +32,9 @@ interface UIPreferencesStore {
renderMarkdown: boolean;
setRenderMarkdown: (renderMarkdown: boolean) => void;
renderCodeSoftWrap: boolean;
setRenderCodeSoftWrap: (renderCodeSoftWrap: boolean) => void;
// showPersonaExamples: boolean;
// setShowPersonaExamples: (showPersonaExamples: boolean) => void;
@@ -75,6 +78,9 @@ export const useUIPreferencesStore = create<UIPreferencesStore>()(
renderMarkdown: true,
setRenderMarkdown: (renderMarkdown: boolean) => set({ renderMarkdown }),
renderCodeSoftWrap: false,
setRenderCodeSoftWrap: (renderCodeSoftWrap: boolean) => set({ renderCodeSoftWrap }),
// showPersonaExamples: false,
// setShowPersonaExamples: (showPersonaExamples: boolean) => set({ showPersonaExamples }),
+9 -12
View File
@@ -4,20 +4,14 @@ import { persist } from 'zustand/middleware';
// UX Labs Experiments
/**
* Graduated:
* - see `UxLabsSettings.tsx`, and also:
* - Text Tools: dinamically shown where applicable
* - Chat Mode: follow-ups; moved to Chat Advanced UI
*/
// UxLabsSettings.tsx contains the graduated settings, but the following are not stated:
// - Text Tools: dinamically shown where applicable
// - Chat Mode: Follow-Ups; moved to Chat Advanced UI
interface UXLabsStore {
labsAttachScreenCapture: boolean;
setLabsAttachScreenCapture: (labsAttachScreenCapture: boolean) => void;
labsBeam: boolean;
setLabsBeam: (labsBeam: boolean) => void;
labsCameraDesktop: boolean;
setLabsCameraDesktop: (labsCameraDesktop: boolean) => void;
@@ -27,6 +21,9 @@ interface UXLabsStore {
labsHighPerformance: boolean;
setLabsHighPerformance: (labsHighPerformance: boolean) => void;
labsShowCost: boolean;
setLabsShowCost: (labsShowCost: boolean) => void;
}
export const useUXLabsStore = create<UXLabsStore>()(
@@ -36,9 +33,6 @@ export const useUXLabsStore = create<UXLabsStore>()(
labsAttachScreenCapture: false,
setLabsAttachScreenCapture: (labsAttachScreenCapture: boolean) => set({ labsAttachScreenCapture }),
labsBeam: true,
setLabsBeam: (labsBeam: boolean) => set({ labsBeam }),
labsCameraDesktop: false,
setLabsCameraDesktop: (labsCameraDesktop: boolean) => set({ labsCameraDesktop }),
@@ -48,6 +42,9 @@ export const useUXLabsStore = create<UXLabsStore>()(
labsHighPerformance: false,
setLabsHighPerformance: (labsHighPerformance: boolean) => set({ labsHighPerformance }),
labsShowCost: true, // release 1.16.0 with this enabled by default
setLabsShowCost: (labsShowCost: boolean) => set({ labsShowCost }),
}),
{
name: 'app-ux-labs',
+6 -6
View File
@@ -250,20 +250,20 @@ export const animationShadowLimey = keyframes`
box-shadow: 2px 2px 12px -6px rgb(255, 153, 0);
}`;
/*export const animationTextShadowLimey = keyframes`
export const animationTextShadowLimey = keyframes`
100%, 0% {
text-shadow: 2px 2px 0 white, 4px 4px 0 rgb(183, 255, 0);
text-shadow: 2px 2px 0 rgba(183, 255, 0, 0.5);
}
25% {
text-shadow: 2px 2px 0 white, 4px 4px 0 rgb(255, 251, 0);
text-shadow: 2px 2px 0 rgba(255, 251, 0, 0.5);
}
50% {
text-shadow: 2px 2px 0 white, 4px 4px 0 rgba(0, 255, 81);
text-shadow: 2px 2px 0 rgba(0, 255, 81, 0.5);
}
75% {
text-shadow: 2px 2px 0 white, 4px 4px 0 rgb(255, 153, 0);
text-shadow: 2px 2px 0 rgba(255, 153, 0, 0.5);
}`;
*/
// export const animationShadowBlueDarker = keyframes`
// 0%, 100% {
// box-shadow: 3px 3px 0 rgb(135, 206, 235), /* Sky Blue */ 6px 6px 0 rgb(70, 130, 180), /* Steel Blue */ 9px 9px 0 rgb(0, 128, 128); /* Teal */
-15
View File
@@ -1,10 +1,6 @@
import type { StateStorage } from 'zustand/middleware';
import { del as idbDel, get as idbGet, set as idbSet } from 'idb-keyval';
// used by the state storage middleware to detect data migration from the old state storage (localStorage)
// NOTE: remove past 2024-03-19 (6 months past release of this utility conversion)
export const IDB_MIGRATION_INITIAL = -1;
// set to true to enable debugging
const DEBUG_SCHEDULER = false;
@@ -130,17 +126,6 @@ export const idbStateStorage: StateStorage = {
if (DEBUG_SCHEDULER)
console.warn(' (read bytes:', value?.length?.toLocaleString(), ')');
/* IMPORTANT!
* We modify the default behavior of `getItem` to return a {version: -1} object if a key is not found.
* This is to trigger the migration across state storage implementations, as Zustand would not call the
* 'migrate' function otherwise.
* See 'https://github.com/enricoros/big-agi/pull/158' for more details
*/
if (value === undefined) {
return JSON.stringify({
version: IDB_MIGRATION_INITIAL,
});
}
return value || null;
},
setItem: (name: string, value: string): void => {
+5 -2
View File
@@ -8,8 +8,11 @@ export function prettyBaseModel(model: string | undefined): string {
if (!model) return '';
if (model.includes('gpt-4-vision-preview')) return 'GPT-4 Vision';
if (model.includes('gpt-4-1106-preview')) return 'GPT-4 Turbo';
if (model.includes('gpt-4-32k')) return 'gpt-4-32k';
if (model.includes('gpt-4')) return 'gpt-4';
if (model.includes('gpt-4-32k')) return 'GPT-4-32k';
if (model.includes('gpt-4o-mini')) return 'GPT-4o Mini';
if (model.includes('gpt-4o')) return 'GPT-4o';
if (model.includes('gpt-4-turbo')) return 'GPT-4 Turbo';
if (model.includes('gpt-4')) return 'GPT-4';
if (model.includes('gpt-3.5-turbo-instruct')) return '3.5 Turbo Instruct';
if (model.includes('gpt-3.5-turbo-1106')) return '3.5 Turbo 16k';
if (model.includes('gpt-3.5-turbo-16k')) return '3.5 Turbo 16k';
+74 -8
View File
@@ -10,12 +10,7 @@
* @param pdfBuffer The content of a PDF file
*/
export async function pdfToText(pdfBuffer: ArrayBuffer): Promise<string> {
// Dynamically import the 'pdfjs-dist' library [nextjs]
const { getDocument, GlobalWorkerOptions } = await import('pdfjs-dist');
// Set the worker script path
GlobalWorkerOptions.workerSrc = '/workers/pdf.worker.min.mjs';
const { getDocument } = await dynamicImportPdfJs();
const pdf = await getDocument(pdfBuffer).promise;
const textPages: string[] = []; // Initialize an array to hold text from all pages
@@ -25,10 +20,81 @@ export async function pdfToText(pdfBuffer: ArrayBuffer): Promise<string> {
const strings = content.items
.filter(isTextItem) // Use the type guard to filter out items with the 'str' property
.map((item) => (item as { str: string }).str); // Use type assertion to ensure that the item has the 'str' property
textPages.push(strings.join(' ') + '\n'); // Add the joined strings to the array
// textPages.push(strings.join(' ')); // Add the joined strings to the array
// New way: join the strings to form a page text. treat empty lines as newlines, otherwise join with a space (or not if the line is just 1 space)
textPages.push(strings.reduce((acc, str) => {
// empty line -> newline
if (str === '')
return acc + '\n';
// single space
if (str === ' ')
return acc + str;
// trick: de-hyphenation of consecutive lines
if (/\w-$/.test(acc) && /^\w/.test(str))
return acc.slice(0, -1) + str;
// add a space if the last char is not a space or return (regex)
if (/\S$/.test(acc))
return acc + ' ' + str;
// otherwise just concatenate
return acc + str;
}, ''));
}
return textPages.join('\n\n'); // Join all the page texts at the end
}
type PdfPageImage = { base64Url: string, scale: number, width: number, height: number };
/**
* Renders all pages of a PDF to images
*
* @param pdfBuffer The content of a PDF file
* @param scale The scale factor for the image resolution (default 1.5 for moderate quality)
*/
export async function pdfToImageDataURLs(pdfBuffer: ArrayBuffer, scale = 1.5): Promise<PdfPageImage[]> {
const { getDocument } = await dynamicImportPdfJs();
const pdf = await getDocument({ data: pdfBuffer }).promise;
const images: PdfPageImage[] = [];
for (let i = 1; i <= pdf.numPages; i++) {
const page = await pdf.getPage(i);
const viewport = page.getViewport({ scale });
const canvas = document.createElement('canvas');
const context = canvas.getContext('2d');
canvas.height = viewport.height;
canvas.width = viewport.width;
await page.render({
canvasContext: context!,
viewport,
}).promise;
images.push({
base64Url: canvas.toDataURL('image/jpeg'),
scale,
width: viewport.width,
height: viewport.height,
});
}
return textPages.join(''); // Join all the page texts at the end
return images;
}
// Dynamically import the 'pdfjs-dist' library
async function dynamicImportPdfJs() {
// Dynamically import the 'pdfjs-dist' library [nextjs]
const { getDocument, GlobalWorkerOptions } = await import('pdfjs-dist');
// Set the worker script path
GlobalWorkerOptions.workerSrc = '/workers/pdf.worker.min.mjs';
return { getDocument };
}
// Type guard to check if an item has a 'str' property
+26 -13
View File
@@ -6,14 +6,20 @@ import { DLLMId, findLLMOrThrow } from '~/modules/llms/store-llms';
// Do not set this to true in production, it's very verbose
const DEBUG_TOKEN_COUNT = false;
// Globals
// const tokenEncodings: string[] = ['gpt2', 'r50k_base', 'p50k_base', 'p50k_edit', 'cl100k_base', 'o200k_base'] satisfies TiktokenEncoding[];
// global symbols to dynamically load the Tiktoken library
// Global symbols to dynamically load the Tiktoken library
let get_encoding: ((encoding: TiktokenEncoding) => Tiktoken) | null = null;
let encoding_for_model: ((model: TiktokenModel) => Tiktoken) | null = null;
let preloadPromise: Promise<void> | null = null;
let informTheUser = false;
export function preloadTiktokenLibrary() {
/**
* Preloads the Tiktoken library if not already loaded.
* @returns {Promise<void>} A promise that resolves when the library is loaded.
*/
export function preloadTiktokenLibrary(): Promise<void> {
if (!preloadPromise) {
preloadPromise = import('tiktoken')
.then(tiktoken => {
@@ -33,16 +39,21 @@ export function preloadTiktokenLibrary() {
/**
* Wrapper around the Tiktoken library, to keep tokenizers for all models in a cache
*
* We also preload the tokenizer for the default model, so that the first time a user types
* a message, it doesn't stall loading the tokenizer.
* Wrapper around the Tiktoken library to keep tokenizers for all models in a cache.
* Also, preloads the tokenizer for the default model to avoid initial stall.
*/
export const countModelTokens: (text: string, llmId: DLLMId, debugFrom: string) => number | null = (() => {
// return () => 0;
const tokenEncoders: { [modelId: string]: Tiktoken } = {};
let encodingCL100K: Tiktoken | null = null;
let encodingDefault: Tiktoken | null = null;
/**
* Counts the tokens in the given text for the specified model.
* @param {string} text - The text to tokenize.
* @param {DLLMId} llmId - The ID of the LLM.
* @param {string} debugFrom - Debug information.
* @returns {number | null} The token count or null if not ready.
*/
function _tokenCount(text: string, llmId: DLLMId, debugFrom: string): number | null {
// The library shall have been preloaded - if not, attempt to start its loading and return null to indicate we're not ready to count
@@ -55,21 +66,23 @@ export const countModelTokens: (text: string, llmId: DLLMId, debugFrom: string)
return null;
}
const { options: { llmRef: openaiModel } } = findLLMOrThrow(llmId);
const openaiModel = findLLMOrThrow(llmId)?.options?.llmRef;
if (!openaiModel) throw new Error(`LLM ${llmId} has no LLM reference id`);
if (!(openaiModel in tokenEncoders)) {
try {
tokenEncoders[openaiModel] = encoding_for_model(openaiModel as TiktokenModel);
} catch (e) {
// make sure we recycle the default encoding across all models
if (!encodingCL100K)
encodingCL100K = get_encoding('cl100k_base');
tokenEncoders[openaiModel] = encodingCL100K;
// fallback to the default encoding across all models (not just OpenAI - this will be used everywhere..)
if (!encodingDefault)
encodingDefault = get_encoding('cl100k_base');
tokenEncoders[openaiModel] = encodingDefault;
}
}
let count: number = 0;
// Note: the try/catch shouldn't be necessary, but there could be corner cases where the tiktoken library throws
// https://github.com/enricoros/big-agi/issues/182
let count = 0;
try {
count = tokenEncoders[openaiModel]?.encode(text, 'all', [])?.length || 0;
} catch (e) {
+15 -2
View File
@@ -1,6 +1,6 @@
import * as React from 'react';
export type SystemPurposeId = 'Catalyst' | 'Custom' | 'Designer' | 'Developer' | 'DeveloperPreview' | 'Executive' | 'Generic' | 'Scientist';
export type SystemPurposeId = 'Catalyst' | 'Custom' | 'Designer' | 'Developer' | 'DeveloperPreview' | 'Executive' | 'Generic' | 'Scientist' | 'YouTubeTranscriber';
export const defaultSystemPurposeId: SystemPurposeId = 'Generic';
@@ -96,7 +96,10 @@ Current date: {{LocaleNow}}
Designer: {
title: 'Designer',
description: 'Helps you design',
systemMessage: 'You are an AI visual design assistant. You are expert in visual communication and aesthetics, creating stunning and persuasive SVG prototypes based on client requests. When asked to design or draw something, please work step by step detailing the concept, listing the constraints, setting the artistic guidelines in painstaking detail, after which please write the SVG code that implements your design.',
systemMessage: `
You are an AI visual design assistant. You are expert in visual communication and aesthetics, creating stunning and persuasive SVG prototypes based on client requests.
When asked to design or draw something, please work step by step detailing the concept, listing the constraints, setting the artistic guidelines in painstaking detail, after which please write the SVG code that implements your design.
{{RenderSVG}}`.trim(),
symbol: '🖌️',
examples: ['minimalist logo for a tech startup', 'infographic on climate change', 'suggest color schemes for a website'],
call: { starters: ['Hey! What\'s the vision?', 'Designer on call. What\'s the project?', 'Ready for design talk.', 'Hey.'] },
@@ -110,4 +113,14 @@ Current date: {{LocaleNow}}
call: { starters: ['What\'s the task?', 'What can I do?', 'Ready for your task.', 'Yes?'] },
voices: { elevenLabs: { voiceId: 'flq6f7yk4E4fJM5XTYuZ' } },
},
YouTubeTranscriber: {
title: 'YouTube Transcriber',
description: 'Enter a YouTube URL to get the transcript and chat about the content.',
systemMessage: 'You are an expert in understanding video transcripts and answering questions about video content.',
symbol: '📺',
examples: ['Analyze the sentiment of this video', 'Summarize the key points of the lecture'],
call: { starters: ['Enter a YouTube URL to begin.', 'Ready to transcribe YouTube content.', 'Paste the YouTube link here.'] },
voices: { elevenLabs: { voiceId: 'z9fAnlkpzviPz146aGWa' } },
},
};
@@ -1,4 +1,4 @@
import { llmChatGenerateOrThrow, VChatFunctionIn } from '~/modules/llms/llm.client';
import { llmChatGenerateOrThrow, VChatFunctionIn, VChatMessageIn } from '~/modules/llms/llm.client';
import { useModelsStore } from '~/modules/llms/store-llms';
import { useChatStore } from '~/common/state/store-chats';
@@ -83,13 +83,18 @@ export function autoSuggestions(conversationId: string, assistantMessageId: stri
// Follow-up: Auto-Diagrams
if (suggestDiagrams) {
void llmChatGenerateOrThrow(funcLLMId, [
{ role: 'system', content: systemMessage.text },
{ role: 'user', content: userMessage.text },
{ role: 'assistant', content: assistantMessageText },
], [suggestPlantUMLFn], 'draw_plantuml_diagram',
const instructions: VChatMessageIn[] = [
{ role: 'system', content: systemMessage.text },
{ role: 'user', content: userMessage.text },
{ role: 'assistant', content: assistantMessageText },
];
llmChatGenerateOrThrow(
funcLLMId,
instructions,
'chat-followup-diagram', conversationId,
[suggestPlantUMLFn], 'draw_plantuml_diagram',
).then(chatResponse => {
// cheap way to check if the function was supported
if (!('function_arguments' in chatResponse))
return;
@@ -110,7 +115,8 @@ export function autoSuggestions(conversationId: string, assistantMessageId: stri
}
}
}).catch(err => {
console.error('autoSuggestions::diagram:', err);
// Likely the model did not support function calling
// console.log('autoSuggestions: diagram error:', err);
});
}
+16 -14
View File
@@ -1,5 +1,5 @@
import { getFastLLMId } from '~/modules/llms/store-llms';
import { llmChatGenerateOrThrow } from '~/modules/llms/llm.client';
import { llmChatGenerateOrThrow, VChatMessageIn } from '~/modules/llms/llm.client';
import { useChatStore } from '~/common/state/store-chats';
@@ -34,21 +34,23 @@ export async function conversationAutoTitle(conversationId: string, forceReplace
try {
// LLM chat-generate call
const instructions: VChatMessageIn[] = [
{ role: 'system', content: `You are an AI conversation titles assistant who specializes in creating expressive yet few-words chat titles.` },
{
role: 'user', content:
'Analyze the given short conversation (every line is truncated) and extract a concise chat title that ' +
'summarizes the conversation in as little as a couple of words.\n' +
'Only respond with the lowercase short title and nothing else.\n' +
'\n' +
'```\n' +
historyLines.join('\n') +
'```\n',
},
];
const chatResponse = await llmChatGenerateOrThrow(
fastLLMId,
[
{ role: 'system', content: `You are an AI conversation titles assistant who specializes in creating expressive yet few-words chat titles.` },
{
role: 'user', content:
'Analyze the given short conversation (every line is truncated) and extract a concise chat title that ' +
'summarizes the conversation in as little as a couple of words.\n' +
'Only respond with the lowercase short title and nothing else.\n' +
'\n' +
'```\n' +
historyLines.join('\n') +
'```\n',
},
],
instructions,
'chat-ai-title', conversationId,
null, null,
);
+139 -78
View File
@@ -2,6 +2,7 @@ import * as React from 'react';
import { Box, Button, ButtonGroup, CircularProgress, Divider, FormControl, FormLabel, Grid, IconButton, Input } from '@mui/joy';
import AccountTreeTwoToneIcon from '@mui/icons-material/AccountTreeTwoTone';
import AutoFixHighIcon from '@mui/icons-material/AutoFixHigh';
import ExpandLessIcon from '@mui/icons-material/ExpandLess';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import ReplayIcon from '@mui/icons-material/Replay';
@@ -13,6 +14,7 @@ import { llmStreamingChatGenerate } from '~/modules/llms/llm.client';
import { GoodModal } from '~/common/components/GoodModal';
import { InlineError } from '~/common/components/InlineError';
import { adjustContentScaling } from '~/common/app.theme';
import { createDMessage, useChatStore } from '~/common/state/store-chats';
import { useFormRadio } from '~/common/components/forms/useFormRadio';
import { useFormRadioLlmType } from '~/common/components/forms/useFormRadioLlmType';
@@ -22,6 +24,10 @@ import { useUIPreferencesStore } from '~/common/state/store-ui';
import { bigDiagramPrompt, DiagramLanguage, diagramLanguages, DiagramType, diagramTypes } from './diagrams.data';
// configuration
const DIAGRAM_ACTOR_PREFIX = 'diagram';
// Used by the callers to setup the diagam session
export interface DiagramConfig {
conversationId: string;
@@ -37,7 +43,10 @@ function hotFixDiagramCode(llmCode: string): string {
llmCode = '```\n' + llmCode + '\n```';
// fix generation mistakes
return llmCode
.replaceAll('@endmindmap\n@enduml', '@endmindmap')
.replaceAll('@startumd', '@startuml') // haiku
.replaceAll('@endutml', '@enduml') // haiku
.replaceAll('@endmindmap\n@enduml', '@endmindmap') // gpt-3.5
.replaceAll('@endmindmap\n@end', '@endmindmap') // gpt-3.5
.replaceAll('```\n```', '```');
}
@@ -47,8 +56,8 @@ export function DiagramsModal(props: { config: DiagramConfig, onClose: () => voi
// state
const [showOptions, setShowOptions] = React.useState(true);
const [diagramCode, setDiagramCode] = React.useState<string | null>(null);
const [diagramType, diagramComponent] = useFormRadio<DiagramType>('auto', diagramTypes, 'Visualize');
const [diagramLanguage, languageComponent] = useFormRadio<DiagramLanguage>('plantuml', diagramLanguages, 'Style');
const [diagramType, diagramComponent] = useFormRadio<DiagramType>('mind', diagramTypes, 'Diagram');
const [diagramLanguage, languageComponent, setDiagramLanguage] = useFormRadio<DiagramLanguage>('mermaid', diagramLanguages, 'Syntax');
const [customInstruction, setCustomInstruction] = React.useState<string>('');
const [errorMessage, setErrorMessage] = React.useState<string | null>(null);
const [abortController, setAbortController] = React.useState<AbortController | null>(null);
@@ -56,10 +65,11 @@ export function DiagramsModal(props: { config: DiagramConfig, onClose: () => voi
// external state
const isMobile = useIsMobile();
const contentScaling = useUIPreferencesStore(state => state.contentScaling);
const [diagramLlm, llmComponent] = useFormRadioLlmType('Generator');
const [diagramLlm, llmComponent] = useFormRadioLlmType('Generator', 'chat');
// derived state
const { conversationId, text: subject } = props.config;
const { conversationId, messageId, text: subject } = props.config;
const diagramLlmId = diagramLlm?.id;
/**
@@ -88,7 +98,7 @@ export function DiagramsModal(props: { config: DiagramConfig, onClose: () => voi
const diagramPrompt = bigDiagramPrompt(diagramType, diagramLanguage, systemMessage.text, subject, customInstruction);
try {
await llmStreamingChatGenerate(diagramLlm.id, diagramPrompt, null, null, stepAbortController.signal,
await llmStreamingChatGenerate(diagramLlm.id, diagramPrompt, 'ai-diagram', messageId, null, null, stepAbortController.signal,
({ textSoFar }) => textSoFar && setDiagramCode(diagramCode = textSoFar),
);
} catch (error: any) {
@@ -99,7 +109,7 @@ export function DiagramsModal(props: { config: DiagramConfig, onClose: () => voi
setAbortController(null);
}
}, [abortController, conversationId, diagramLanguage, diagramLlm, diagramType, subject, customInstruction]);
}, [abortController, conversationId, customInstruction, diagramLanguage, diagramLlm, diagramType, messageId, subject]);
// [Effect] Auto-abort on unmount
@@ -113,95 +123,146 @@ export function DiagramsModal(props: { config: DiagramConfig, onClose: () => voi
}, [abortController]);
const handleInsertAndClose = () => {
// custom instruction
const handleCustomInstructionKeyDown = React.useCallback((event: React.KeyboardEvent<HTMLInputElement>) => {
if (event.key === 'Enter') {
event.preventDefault();
void handleGenerateNew();
}
}, [handleGenerateNew]);
const handleCustomInstructionChange = React.useCallback((event: React.ChangeEvent<HTMLInputElement>) => {
setCustomInstruction(event.target.value);
}, []);
// done
const handleAppendMessageAndClose = React.useCallback(() => {
if (!diagramCode)
return setErrorMessage('Nothing to add to the conversation.');
const diagramMessage = createDMessage('assistant', diagramCode);
// diagramMessage.purposeId = conversation.systemPurposeId;
diagramMessage.originLLM = 'diagram';
diagramMessage.originLLM = DIAGRAM_ACTOR_PREFIX + (diagramLlmId ? `-${diagramLlmId}` : '');
useChatStore.getState().appendMessage(conversationId, diagramMessage);
props.onClose();
};
}, [conversationId, diagramCode, diagramLlmId, props]);
return <GoodModal
title='Generate Diagram' noTitleBar
open onClose={props.onClose}
sx={{ maxWidth: { xs: '100vw', md: '95vw' } }}
startButton={
<Button variant='soft' color='success' disabled={!diagramCode || !!abortController} endDecorator={<TelegramIcon />} onClick={handleInsertAndClose}>
Add To Chat
</Button>
}
>
// [effect] Auto-switch language to match diagram type
React.useEffect(() => {
setDiagramLanguage(diagramType === 'mind' ? 'mermaid' : 'plantuml');
}, [diagramType, setDiagramLanguage]);
{showOptions && (
<Grid container spacing={2}>
<Grid xs={12} md={6}>
{diagramComponent}
</Grid>
{languageComponent && (
return (
<GoodModal
titleStartDecorator={<AutoFixHighIcon sx={{ fontSize: 'md', mr: 1 }} />}
title={<>
Auto-Diagram
<IconButton
aria-label={showOptions ? 'Hide Options' : 'Show Options'}
size='sm'
onClick={() => setShowOptions(options => !options)}
sx={{ ml: 1, my: -0.5 }}
>
{showOptions ? <ExpandMoreIcon /> : <ExpandLessIcon />}
</IconButton>
</>}
hideBottomClose
open onClose={props.onClose}
sx={{ maxWidth: { xs: '100vw', md: '95vw', lg: '88vw' } }}
>
{showOptions && (
<Grid container spacing={2}>
<Grid xs={12} md={6}>
{languageComponent}
{diagramComponent}
</Grid>
{languageComponent && (
<Grid xs={12} md={6}>
{languageComponent}
</Grid>
)}
<Grid xs={12} md={6}>
{llmComponent}
</Grid>
<Grid xs={12} md={6}>
<FormControl>
<FormLabel>Customize</FormLabel>
<Input
title='Custom Instruction'
placeholder='e.g. visualize as state'
value={customInstruction}
onKeyDown={handleCustomInstructionKeyDown}
onChange={handleCustomInstructionChange}
endDecorator={(abortController && customInstruction) ? <CircularProgress size='sm' /> : undefined}
/>
</FormControl>
</Grid>
)}
<Grid xs={12} xl={6}>
{llmComponent}
</Grid>
<Grid xs={12} md={6}>
<FormControl>
<FormLabel>Custom Instruction</FormLabel>
<Input title='Custom Instruction' placeholder='e.g. visualize as state' value={customInstruction} onChange={(e) => setCustomInstruction(e.target.value)} />
</FormControl>
</Grid>
</Grid>
)}
)}
<ButtonGroup color='primary' sx={{ flexGrow: 1 }}>
<Button
fullWidth
variant={abortController ? 'soft' : 'solid'} color='primary'
disabled={!diagramLlm}
onClick={abortController ? () => abortController.abort() : handleGenerateNew}
endDecorator={abortController ? <StopOutlinedIcon /> : diagramCode ? <ReplayIcon /> : <AccountTreeTwoToneIcon />}
sx={{ minWidth: 200 }}
>
{abortController ? 'Stop' : diagramCode ? 'Regenerate' : 'Generate'}
</Button>
<IconButton onClick={() => setShowOptions(options => !options)}>
{showOptions ? <ExpandLessIcon /> : <ExpandMoreIcon />}
</IconButton>
</ButtonGroup>
{errorMessage && <InlineError error={errorMessage} />}
{errorMessage && <InlineError error={errorMessage} />}
{!showOptions && !!abortController && <Box sx={{ display: 'flex', justifyContent: 'center' }}>
<CircularProgress size='lg' />
</Box>}
{!showOptions && !!abortController && <Box sx={{ display: 'flex', justifyContent: 'center' }}>
<CircularProgress size='lg' />
</Box>}
{!!diagramCode && (!abortController || showOptions) && (
<Box sx={{
backgroundColor: 'background.level2',
marginX: 'calc(-1 * var(--Card-padding))',
minHeight: 96,
p: { xs: 1, md: 2 },
overflow: 'hidden',
}}>
<BlocksRenderer
text={diagramCode}
fromRole='assistant'
fitScreen={isMobile}
contentScaling={adjustContentScaling(contentScaling, -1)}
renderTextAsMarkdown={false}
specialDiagramMode
// onMessageEdit={(text) => setMessage({ ...message, text })}
/>
</Box>
)}
{!diagramCode && <Divider />}
{/* End */}
<Box sx={{ mt: 'auto', display: 'flex', flexWrap: 'wrap', justifyContent: 'space-between' }}>
{/* Add Message to Chat (once complete) */}
<Button variant='soft' color='success' disabled={!diagramCode || !!abortController} endDecorator={<TelegramIcon />} onClick={handleAppendMessageAndClose}>
Add To Chat
</Button>
{/* Button Group to toggle controls visibility - NOT enabled at the moment */}
<ButtonGroup variant='solid' color='primary' sx={{ ml: 'auto' }}>
{/*<IconButton*/}
{/* aria-label={showOptions ? 'Hide Options' : 'Show Options'}*/}
{/* onClick={() => setShowOptions(options => !options)}*/}
{/*>*/}
{/* {showOptions ? <ExpandLessIcon /> : <ExpandMoreIcon />}*/}
{/*</IconButton>*/}
<Button
variant={abortController ? 'soft' : 'solid'} color='primary'
disabled={!diagramLlm}
onClick={abortController ? () => abortController.abort() : handleGenerateNew}
endDecorator={abortController ? <StopOutlinedIcon /> : diagramCode ? <ReplayIcon /> : <AccountTreeTwoToneIcon />}
sx={{ minWidth: isMobile ? 160 : 220 }}
>
{abortController ? 'Stop' : diagramCode ? 'Regenerate' : 'Generate'}
</Button>
</ButtonGroup>
{!!diagramCode && (!abortController || showOptions) && (
<Box sx={{
backgroundColor: 'background.level2',
marginX: 'calc(-1 * var(--Card-padding))',
minHeight: 96,
p: { xs: 1, md: 2 },
overflow: 'hidden',
}}>
<BlocksRenderer
text={diagramCode}
fromRole='assistant'
fitScreen={isMobile}
contentScaling={contentScaling}
renderTextAsMarkdown={false}
specialDiagramMode
// onMessageEdit={(text) => setMessage({ ...message, text })}
/>
</Box>
)}
{!diagramCode && <Divider />}
</GoodModal>;
</GoodModal>
);
}
+25 -23
View File
@@ -7,7 +7,7 @@ export type DiagramLanguage = 'mermaid' | 'plantuml';
// NOTE: keep these global, or it will trigger re-renders
export const diagramTypes: FormRadioOption<DiagramType>[] = [
{ label: 'Auto-diagram', value: 'auto' },
{ label: 'Automatic', value: 'auto' },
{ label: 'Mindmap', value: 'mind' },
];
@@ -16,7 +16,8 @@ export const diagramLanguages: FormRadioOption<DiagramLanguage>[] = [
{ label: 'Mermaid (mindmaps)', value: 'mermaid' },
];
const mermaidMindmapExample = `
const mermaidMindmapExample = `For example:
\`\`\`mermaid
mindmap
root((mindmap))
Origins
@@ -32,42 +33,43 @@ mindmap
Tools
Pen and paper
Mermaid
`.trim();
function mermaidDiagramPrompt(diagramType: DiagramType): { sys: string, usr: string } {
let promptDetails = diagramType === 'auto'
? 'You create a valid Mermaid diagram markdown (```mermaid\\n...), ready to be rendered into a diagram. Ensure the code contains no external references, and all names are properly enclosed in double quotes and escaped if necessary. Choose the most suitable diagram type from the following supported types: flowchart, sequence, class, state, erd, gantt, pie, git.'
: 'You create a valid Mermaid mindmap markdown (```mermaid\\n...), ready to be rendered into a mind map. Ensure the code contains no external references, and all names are properly enclosed in double quotes and escaped if necessary. For example:\n' + mermaidMindmapExample + '\n';
return {
sys: `You are an AI that generates correct Mermaid code based on provided text. ${promptDetails}`,
usr: `Generate the Mermaid code for a ${diagramType === 'auto' ? 'suitable diagram' : 'mind map'} that represents the preceding assistant message.`,
};
}
\`\`\`
`;
function plantumlDiagramPrompt(diagramType: DiagramType): { sys: string, usr: string } {
switch (diagramType) {
case 'auto':
return {
sys: 'You are an AI that writes PlantUML code based on provided text. You create a valid PlantUML string, enclosed by "```\n@startuml" and "@enduml\n```", ready to be rendered into a diagram or mindmap, ensuring the code contains no external references and all names are properly escaped without spaces. You choose the most suitable diagram typesequence, class, use case, activity, component, state, object, deployment, wireframe, mindmap, gantt, or flowchart.',
usr: 'Generate the PlantUML code for the diagram type that best represents the preceding assistant message.',
sys: 'Generate a valid PlantUML diagram markdown (```plantuml\\n@startuml\\n...@enduml\\n```), ready for rendering. No external references allowed and all strings must be escaped correctly (each in a single line). Choose the most suitable PlantUML diagram type: sequence, class, use case, activity, component, state, object, deployment, wireframe, mindmap, gantt, or flowchart.',
usr: 'Generate the PlantUML code for a suitable diagram that best captures the essence of the preceding message.',
};
case 'mind':
return {
sys: 'You are an AI that writes PlantUML code based on provided text. You create a valid PlantUML string, enclosed by "```\n@startmindmap" and "@endmindmap\n```", ready to be rendered into a mind map, ensuring the code contains no external references and all names are properly escaped without spaces.',
usr: 'Generate the PlantUML code for a mind map based on the preceding assistant message.',
sys: 'Generate a valid PlantUML mindmap markdown (```plantuml\\n@startmindmap\\n...@endmindmap\\n\`\`\`), ready for rendering. No external references allowed. Use one or more asterisks to indent and separate with spaces.',
usr: 'Generate a PlantUML mindmap that effectively summarizes the key points from the preceding message.',
};
}
}
function mermaidDiagramPrompt(diagramType: DiagramType): { sys: string, usr: string } {
let promptDetails = diagramType === 'auto'
? 'Generate a valid Mermaid diagram markdown (```mermaid\\n...```), ready for rendering. The code should have no external references and all names must be in double quotes and properly escaped. Select the most appropriate Mermaid diagram type: flowchart, sequence, class, state, erd, gantt, pie, or git.'
: 'Generate a valid Mermaid mindmap markdown (```mermaid\\n...```), ready for rendering. The code should have no external references and all names must be in double quotes and properly escaped. ' + mermaidMindmapExample;
return {
sys: `Your task is to generate accurate and well-structured Mermaid code from the given text. ${promptDetails}`,
usr: `Generate the Mermaid code for a ${diagramType === 'auto' ? 'suitable diagram' : 'mind map'} that ${diagramType === 'auto' ? 'best captures the essence' : 'effectively summarizes the key points'} of the preceding message.`,
};
}
const sysSuffixPM = 'The next three messages will outline: 1. your personality, 2. the data you\'ll work with, and 3. a clear restatement of the instructions.';
const usrSuffixCoT = 'Please think step by step, then generate valid diagram code in a markdown block as instructed, and stop your response.';
export function bigDiagramPrompt(diagramType: DiagramType, diagramLanguage: DiagramLanguage, chatSystemPrompt: string, subject: string, customInstruction: string): VChatMessageIn[] {
const { sys, usr } = diagramLanguage === 'mermaid' ? mermaidDiagramPrompt(diagramType) : plantumlDiagramPrompt(diagramType);
if (customInstruction) {
customInstruction = 'Also consider the following instructions: ' + customInstruction;
}
return [
{ role: 'system', content: sys },
{ role: 'system', content: chatSystemPrompt },
{ role: 'system', content: sys + '\n' + sysSuffixPM },
{ role: 'user', content: chatSystemPrompt },
{ role: 'assistant', content: subject },
{ role: 'user', content: `${usr} ${customInstruction}` },
{ role: 'user', content: (!customInstruction?.trim() ? usr : `${usr} Also consider the following instructions: ${customInstruction.trim()}`) + '\n' + usrSuffixCoT },
];
}
+1 -1
View File
@@ -117,7 +117,7 @@ export function FlattenerModal(props: {
await startStreaming(llm.id, [
{ role: 'system', content: flattenProfile.systemPrompt },
{ role: 'user', content: encodeConversationAsUserMessage(flattenProfile.userPrompt, messages) },
]);
], 'ai-flattener', messages[0].id);
}, [llm, props.conversationId, startStreaming]);
@@ -1,5 +1,5 @@
import { getFastLLMId } from '~/modules/llms/store-llms';
import { llmChatGenerateOrThrow } from '~/modules/llms/llm.client';
import { llmChatGenerateOrThrow, VChatMessageIn } from '~/modules/llms/llm.client';
const simpleImagineSystemPrompt =
@@ -10,14 +10,15 @@ Provide output as a lowercase prompt and nothing else.`;
/**
* Creates a caption for a drawing or photo given some description - used to elevate the quality of the imaging
*/
export async function imaginePromptFromText(messageText: string): Promise<string | null> {
export async function imaginePromptFromText(messageText: string, contextRef: string): Promise<string | null> {
const fastLLMId = getFastLLMId();
if (!fastLLMId) return null;
try {
const chatResponse = await llmChatGenerateOrThrow(fastLLMId, [
const instructions: VChatMessageIn[] = [
{ role: 'system', content: simpleImagineSystemPrompt },
{ role: 'user', content: 'Write a prompt, based on the following input.\n\n```\n' + messageText.slice(0, 1000) + '\n```\n' },
], null, null);
];
const chatResponse = await llmChatGenerateOrThrow(fastLLMId, instructions, 'draw-expand-prompt', contextRef, null, null);
return chatResponse.content?.trim() ?? null;
} catch (error: any) {
console.error('imaginePromptFromText: fetch request error:', error);
+3 -2
View File
@@ -132,7 +132,7 @@ export class Agent {
S.messages.push({ role: 'user', content: prompt });
let content: string;
try {
content = (await llmChatGenerateOrThrow(llmId, S.messages, null, null, 500)).content;
content = (await llmChatGenerateOrThrow(llmId, S.messages, 'chat-react-turn', null, null, null, 500)).content;
} catch (error: any) {
content = `Error in llmChatGenerateOrThrow: ${error}`;
}
@@ -194,7 +194,8 @@ async function search(query: string): Promise<string> {
async function browse(url: string): Promise<string> {
try {
const page = await callBrowseFetchPage(url);
return JSON.stringify(page.content ? { text: page.content } : { error: 'Issue reading the page' });
const pageContent = page.content.markdown || page.content.text || page.content.html || '';
return JSON.stringify(pageContent ? { text: pageContent } : { error: 'Issue reading the page' });
} catch (error) {
console.error('Error browsing:', (error as Error).message);
return 'An error occurred while browsing to the URL. Missing WSS Key?';
+22
View File
@@ -0,0 +1,22 @@
import { createDMessage, DMessage } from '~/common/state/store-chats';
const replyToSystemPrompt = `The user is referring to this in particular:
{{ReplyToText}}`;
/**
 * Adds a system message to the history, explaining the context of the reply.
 *
 * If the last message is a user message carrying `metadata.inReplyToText`, a
 * system message quoting the referred-to text is appended, so the model knows
 * which part of the conversation the user is replying to.
 *
 * FIXME: HACK - this is a temporary solution to pass the metadata to the execution
 *
 * Only works with OpenAI and a couple more right now. Fix it by making it vendor-agnostic
 */
export function updateHistoryForReplyTo(history: DMessage[]) {
  // guard: `history?.length < 1` would not catch a null/undefined history
  // (undefined < 1 === false), which would then crash on the indexing below
  if (!history?.length)
    return;
  const lastMessage = history[history.length - 1];
  if (lastMessage.role === 'user' && lastMessage.metadata?.inReplyToText)
    history.push(createDMessage('system', replyToSystemPrompt.replace('{{ReplyToText}}', lastMessage.metadata.inReplyToText)));
}
+4 -3
View File
@@ -1,5 +1,5 @@
import { DLLMId, findLLMOrThrow } from '~/modules/llms/store-llms';
import { llmChatGenerateOrThrow } from '~/modules/llms/llm.client';
import { llmChatGenerateOrThrow, VChatMessageIn } from '~/modules/llms/llm.client';
// prompt to be tried when doing recursive summarization.
@@ -80,10 +80,11 @@ async function cleanUpContent(chunk: string, llmId: DLLMId, _ignored_was_targetW
const autoResponseTokensSize = contextTokens ? Math.floor(contextTokens * outputTokenShare) : null;
try {
const chatResponse = await llmChatGenerateOrThrow(llmId, [
const instructions: VChatMessageIn[] = [
{ role: 'system', content: cleanupPrompt },
{ role: 'user', content: chunk },
], null, null, autoResponseTokensSize ?? undefined);
];
const chatResponse = await llmChatGenerateOrThrow(llmId, instructions, 'chat-ai-summarize', null, null, null, autoResponseTokensSize ?? undefined);
return chatResponse?.content ?? '';
} catch (error: any) {
return '';
+4 -4
View File
@@ -1,7 +1,7 @@
import * as React from 'react';
import { DLLMId, findLLMOrThrow } from '~/modules/llms/store-llms';
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
import { llmStreamingChatGenerate, VChatContextRef, VChatMessageIn, VChatStreamContextName } from '~/modules/llms/llm.client';
// set to true to log to the console
@@ -20,7 +20,7 @@ export interface LLMChainStep {
/**
* React hook to manage a chain of LLM transformations.
*/
export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, chainInput: string | undefined, onSuccess?: (output: string, input: string) => void) {
export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, chainInput: string | undefined, onSuccess: (output: string, input: string) => void, contextName: VChatStreamContextName, contextRef: VChatContextRef) {
// state
const [chain, setChain] = React.useState<ChainState | null>(null);
@@ -114,7 +114,7 @@ export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, ch
setChainStepInterimText(null);
// LLM call (streaming, cancelable)
llmStreamingChatGenerate(llmId, llmChatInput, null, null, stepAbortController.signal,
llmStreamingChatGenerate(llmId, llmChatInput, contextName, contextRef, null, null, stepAbortController.signal,
({ textSoFar }) => {
textSoFar && setChainStepInterimText(interimText = textSoFar);
})
@@ -141,7 +141,7 @@ export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, ch
stepAbortController.abort('step aborted');
_chainAbortController.signal.removeEventListener('abort', globalToStepListener);
};
}, [chain, llmId, onSuccess]);
}, [chain, contextRef, contextName, llmId, onSuccess]);
return {
+3 -3
View File
@@ -1,7 +1,7 @@
import * as React from 'react';
import type { DLLMId } from '~/modules/llms/store-llms';
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';
import { llmStreamingChatGenerate, VChatContextRef, VChatMessageIn, VChatStreamContextName } from '~/modules/llms/llm.client';
export function useStreamChatText() {
@@ -13,7 +13,7 @@ export function useStreamChatText() {
const abortControllerRef = React.useRef<AbortController | null>(null);
const startStreaming = React.useCallback(async (llmId: DLLMId, prompt: VChatMessageIn[]) => {
const startStreaming = React.useCallback(async (llmId: DLLMId, prompt: VChatMessageIn[], contextName: VChatStreamContextName, contextRef: VChatContextRef) => {
setStreamError(null);
setPartialText(null);
setText(null);
@@ -24,7 +24,7 @@ export function useStreamChatText() {
try {
let lastText = '';
await llmStreamingChatGenerate(llmId, prompt, null, null, abortControllerRef.current.signal, ({ textSoFar }) => {
await llmStreamingChatGenerate(llmId, prompt, contextName, contextRef, null, null, abortControllerRef.current.signal, ({ textSoFar }) => {
if (textSoFar) {
lastText = textSoFar;
setPartialText(lastText);
+33 -40
View File
@@ -1,7 +1,7 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, Typography } from '@mui/joy';
import { Box } from '@mui/joy';
import { ExplainerCarousel, ExplainerPage } from '~/common/components/ExplainerCarousel';
import { animationEnterScaleUp } from '~/common/util/animUtils';
@@ -11,59 +11,52 @@ const beamSteps: ExplainerPage[] = [
{
stepDigits: '',
stepName: 'Welcome',
titlePrefix: 'Welcome to',
// titleSquircle: true,
titleSpark: 'Beam',
// titlePrefix: 'Welcome to Beam.', // Better answers, faster.
titlePrefix: 'Welcome to ', titleSpark: 'Beam',
// titleSpark: 'B E A M',
// titleSuffix: ' azing',
// titleSquircle: true,
mdContent: `
**Hello, we just launched Beam for you.**
Beam is a new Big-AGI chat modality that allows you to engage multiple AI models in parallel.
**Beam** is a chat modality in Big-AGI to engage multiple AI models, [together](https://big-agi.com/blog/beam-multi-model-ai-reasoning).
It's like having a brainstorm session with several smart people,
only they are AI models. And as with people,
each AI model has its own unique perspective.
And Beam lets you make the best of them.
each adding their own unique perspective.
Beam lets you make the best of them all.
![big-AGI BEAM Rays](https://raw.githubusercontent.com/enricoros/big-AGI/main/public/images/explainers/explainer-beam-scatter-1200px-alpha.png)
Let&apos;s get you to **better chat answers, faster**.
`,
`, // Let&apos;s get you to better chat answers, faster.
},
{
stepDigits: '01',
stepName: 'Beam',
titleSpark: 'Beaming',
titleSuffix: ': Exploration',
titlePrefix: 'Explore with ', titleSpark: 'Beam', titleSuffix: '.',
// titleSpark: 'Beaming', titleSuffix: ': Exploration',
mdContent: `
**Beaming is the exploration phase, it's where you get the AI models to generate ideas.**
**Beaming is the exploration phase**, where AI models generate ideas.
To Beam, pick the AI models you want to use (you can also load/save combos), and start them all at once or one by one.
Keep the responses you like and delete the ones that aren't helpful.
Simply pick the AI models you want to use (you can load/save combos) and start them.
You can then select a single response to continue the chat,
or keep the responses you like and do a Merge.
**Important**: 💰 Beware of the token usage of Beaming and Merging.
Being multiple and high-intensity operations,
they can consume more tokens than regular chats.
It is better to _use them in early/shorter chats_.
**Important:** _Best used in earlier / shorter chats_. 💰 Beware of the token usage of Beaming and Merging;
being parallel and lengthy operations, they will use more tokens than regular chats.
Use a mix of different AI models to get a diverse set of ideas and perspectives.
**Once you see a response you love, send it back to the chat**, otherwise move to the Merge step.
`,
Use a mix of different AI models to get a diverse set of ideas and perspectives.
`, // and delete the ones that aren't helpful
},
{
stepDigits: '02',
stepName: 'Merge',
titleSpark: 'Merging',
titleSuffix: ': Convergence', // Synthesis, Convergence
titlePrefix: 'Combine with ', titleSpark: 'Merge', titleSuffix: '.',
// titleSpark: 'Merging', titleSuffix: ': Synthesis', // Synthesis, Convergence
mdContent: `
**Merging is the consolidation phase**, where AI combines the best parts of the responses into a great, coherent answer.
Merging is **combining the best parts of each response** into a great, coherent answer.
You can choose from various merge options, including **Fusion**, **Checklist**, **Compare**, and **Custom**.
Experiment with different options to find the one that works best for your chat.
![big-AGI BEAM Rays](https://raw.githubusercontent.com/enricoros/big-AGI/main/public/images/explainers/explainer-beam-gather-1600px-alpha.png)
You can choose from various merge options, including Fusion, Checklist, Compare, and Custom.
Feel free to experiment with different options to find the one that works best for you.
`, // > Merge until you have a single, high-quality response. Or choose the final response manually, skipping merge.
},
// {
@@ -96,7 +89,7 @@ const beamExplainerSx: SxProps = {
height: '100%',
// style
padding: { xs: '1rem', md: '1.5rem' },
padding: 3, // { xs: 3, md: 3 },
animation: `${animationEnterScaleUp} 0.2s cubic-bezier(.17,.84,.44,1)`,
// layout
@@ -118,14 +111,14 @@ export function BeamExplainer(props: {
<ExplainerCarousel
explainerId='beam-onboard'
steps={beamSteps}
footer={
<Typography level='body-xs' sx={{ textAlign: 'center', maxWidth: '400px', mx: 'auto' }}>
{/*Unlock beaming, combine AI wisdom, achieve clarity.*/}
{/*Discover, Design and Dream.*/}
{/*The journey from exploration to refinement is iterative.*/}
{/*Each cycle sharpens your ideas, bringing you closer to innovation.*/}
</Typography>
}
// footer={
// <Typography level='body-xs' sx={{ textAlign: 'center', maxWidth: '400px', mx: 'auto' }}>
// {/*Unlock beaming, combine AI wisdom, achieve clarity.*/}
// {/*Discover, Design and Dream.*/}
// {/*The journey from exploration to refinement is iterative.*/}
// {/*Each cycle sharpens your ideas, bringing you closer to innovation.*/}
// </Typography>
// }
onFinished={props.onWizardComplete}
/>
+115 -96
View File
@@ -4,7 +4,6 @@ import { useShallow } from 'zustand/react/shallow';
import { Alert, Box, CircularProgress } from '@mui/joy';
import { ConfirmationModal } from '~/common/components/ConfirmationModal';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
import { animationEnterScaleUp } from '~/common/util/animUtils';
import { useUICounter } from '~/common/state/store-ui';
@@ -15,38 +14,44 @@ import { BeamRayGrid } from './scatter/BeamRayGrid';
import { BeamScatterInput } from './scatter/BeamScatterInput';
import { BeamScatterPane } from './scatter/BeamScatterPane';
import { BeamStoreApi, useBeamStore } from './store-beam.hooks';
import { SCATTER_RAY_DEF } from './beam.config';
import { useModuleBeamStore } from './store-module-beam';
export function BeamView(props: {
beamStore: BeamStoreApi,
isMobile: boolean,
showExplainer?: boolean,
// sx?: SxProps,
}) {
// state
const [hasAutoMerged, setHasAutoMerged] = React.useState(false);
const [warnIsScattering, setWarnIsScattering] = React.useState(false);
// external state
const { novel: explainerUnseen, touch: explainerCompleted, forget: explainerShow } = useUICounter('beam-wizard');
const gatherAutoStartAfterScatter = useModuleBeamStore(state => state.gatherAutoStartAfterScatter);
const {
/* root */ editInputHistoryMessage,
/* scatter */ setRayCount, startScatteringAll, stopScatteringAll,
} = props.beamStore.getState();
const {
/* root */ inputHistory, inputIssues, inputReady,
/* scatter */ isScattering, raysReady,
/* scatter */ hadImportedRays, isScattering, raysReady,
/* gather (composite) */ canGather,
} = useBeamStore(props.beamStore, useShallow(state => ({
// input
inputHistory: state.inputHistory,
inputIssues: state.inputIssues,
inputReady: state.inputReady,
// scatter
hadImportedRays: state.hadImportedRays,
isScattering: state.isScattering,
raysReady: state.raysReady,
// gather (composite)
canGather: state.raysReady >= 2 && state.currentFactoryId !== null && state.currentGatherLlmId !== null,
})));
// the following are independent because of useShallow, which would break in the above call
const rayIds = useBeamStore(props.beamStore, useShallow(state => state.rays.map(ray => ray.rayId)));
const fusionIds = useBeamStore(props.beamStore, useShallow(state => state.fusions.map(fusion => fusion.fusionId)));
@@ -60,6 +65,11 @@ export function BeamView(props: {
const handleRayIncreaseCount = React.useCallback(() => setRayCount(raysCount + 1), [setRayCount, raysCount]);
const handleScatterStart = React.useCallback(() => {
setHasAutoMerged(false);
startScatteringAll();
}, [startScatteringAll]);
const handleCreateFusion = React.useCallback(() => {
// if scatter is busy, ask for confirmation
@@ -71,137 +81,146 @@ export function BeamView(props: {
}, [isScattering, props.beamStore]);
const handleStopScatterConfirmation = React.useCallback(() => {
const handleStartMergeConfirmation = React.useCallback(() => {
setWarnIsScattering(false);
stopScatteringAll();
handleCreateFusion();
}, [handleCreateFusion, stopScatteringAll]);
const handleStopScatterDenial = React.useCallback(() => setWarnIsScattering(false), []);
const handleStartMergeDenial = React.useCallback(() => setWarnIsScattering(false), []);
// (this is great ux) scatter freed up while we were asking the question, proceed
// auto-merge
const shallAutoMerge = gatherAutoStartAfterScatter && canGather && !isScattering && !hasAutoMerged;
React.useEffect(() => {
if (warnIsScattering && !isScattering)
handleStopScatterConfirmation();
}, [handleStopScatterConfirmation, isScattering, warnIsScattering]);
if (shallAutoMerge) {
setHasAutoMerged(true);
handleStartMergeConfirmation();
}
}, [handleStartMergeConfirmation, shallAutoMerge]);
// (great ux) scatter finished while the "start merge" (warning) dialog is up: dismiss dialog and proceed
// here we assume that 'warnIsScattering' shows the intention of the user to proceed with a merge asap
const shallResumeMerge = warnIsScattering && !isScattering && !gatherAutoStartAfterScatter;
React.useEffect(() => {
if (shallResumeMerge)
handleStartMergeConfirmation();
}, [handleStartMergeConfirmation, shallResumeMerge]);
// running
// [effect] pre-populate a default number of rays
const bootup = raysCount < SCATTER_RAY_DEF;
React.useEffect(() => {
bootup && handleRaySetCount(SCATTER_RAY_DEF);
}, [bootup, handleRaySetCount]);
// const bootup = raysCount < SCATTER_RAY_DEF;
// React.useEffect(() => {
// bootup && handleRaySetCount(SCATTER_RAY_DEF);
// }, [bootup, handleRaySetCount]);
// Explainer, if unseen
if (props.showExplainer && explainerUnseen)
return <BeamExplainer onWizardComplete={explainerCompleted} />;
return (
<ScrollToBottom disableAutoStick>
return <>
{/* Main V-Layout */}
<Box sx={{
// scroller fill
minHeight: '100%',
<Box sx={{
// scroller fill
minHeight: '100%',
// ...props.sx,
// enter animation
animation: `${animationEnterScaleUp} 0.2s cubic-bezier(.17,.84,.44,1)`,
// enter animation
animation: `${animationEnterScaleUp} 0.2s cubic-bezier(.17,.84,.44,1)`,
// config
'--Pad': { xs: '1rem', md: '1.5rem' },
'--Pad_2': 'calc(var(--Pad) / 2)',
// config
'--Pad': { xs: '1rem', md: '1.5rem' },
'--Pad_2': 'calc(var(--Pad) / 2)',
// layout
display: 'flex',
flexDirection: 'column',
gap: 'var(--Pad)',
}}>
// layout
display: 'flex',
flexDirection: 'column',
gap: 'var(--Pad)',
}}>
{/* Config Issues */}
{!!inputIssues && <Alert>{inputIssues}</Alert>}
{/* Config Issues */}
{!!inputIssues && <Alert>{inputIssues}</Alert>}
{/* User Message */}
<BeamScatterInput
isMobile={props.isMobile}
history={inputHistory}
editHistory={editInputHistoryMessage}
/>
{/* User Message */}
<BeamScatterInput
isMobile={props.isMobile}
history={inputHistory}
editHistory={editInputHistoryMessage}
/>
{/* Scatter Controls */}
<BeamScatterPane
beamStore={props.beamStore}
isMobile={props.isMobile}
rayCount={raysCount}
setRayCount={handleRaySetCount}
startEnabled={inputReady}
startBusy={isScattering}
onStart={startScatteringAll}
onStop={stopScatteringAll}
onExplainerShow={explainerShow}
/>
{/* Scatter Controls */}
<BeamScatterPane
beamStore={props.beamStore}
isMobile={props.isMobile}
rayCount={raysCount}
setRayCount={handleRaySetCount}
startEnabled={inputReady}
startBusy={isScattering}
onStart={handleScatterStart}
onStop={stopScatteringAll}
onExplainerShow={explainerShow}
/>
{/* Rays Grid */}
<BeamRayGrid
beamStore={props.beamStore}
isMobile={props.isMobile}
rayIds={rayIds}
onIncreaseRayCount={handleRayIncreaseCount}
// linkedLlmId={currentGatherLlmId}
/>
{/* Rays Grid */}
<BeamRayGrid
beamStore={props.beamStore}
isMobile={props.isMobile}
rayIds={rayIds}
hadImportedRays={hadImportedRays}
onIncreaseRayCount={handleRayIncreaseCount}
// linkedLlmId={currentGatherLlmId}
/>
{/* Gapper between Rays and Merge, without compromising the auto margin of the Ray Grid */}
<Box />
{/* Gapper between Rays and Merge, without compromising the auto margin of the Ray Grid */}
<Box />
{/* Gather Controls */}
<BeamGatherPane
beamStore={props.beamStore}
canGather={canGather}
isMobile={props.isMobile}
onAddFusion={handleCreateFusion}
raysReady={raysReady}
/>
{/* Gather Controls */}
<BeamGatherPane
beamStore={props.beamStore}
canGather={canGather}
isMobile={props.isMobile}
// onAddFusion={handleCreateFusion}
raysReady={raysReady}
/>
{/* Fusion Grid */}
<BeamFusionGrid
beamStore={props.beamStore}
canGather={canGather}
fusionIds={fusionIds}
isMobile={props.isMobile}
onAddFusion={handleCreateFusion}
raysCount={raysCount}
/>
{/* Fusion Grid */}
<BeamFusionGrid
beamStore={props.beamStore}
canGather={canGather}
fusionIds={fusionIds}
isMobile={props.isMobile}
onAddFusion={handleCreateFusion}
raysCount={raysCount}
/>
</Box>
</Box>
{/* Confirm Stop Scattering */}
{warnIsScattering && (
<ConfirmationModal
open
onClose={handleStopScatterDenial}
onPositive={handleStopScatterConfirmation}
// lowStakes
noTitleBar
confirmationText='Some responses are still being generated. Do you want to stop and proceed with merging the available responses now?'
positiveActionText='Proceed with Merge'
negativeActionText='Wait for All Responses'
negativeActionStartDecorator={
<CircularProgress color='neutral' sx={{ '--CircularProgress-size': '24px', '--CircularProgress-trackThickness': '1px' }} />
}
/>
)}
{/* Confirm Stop Scattering */}
{warnIsScattering && (
<ConfirmationModal
open
onClose={handleStartMergeDenial}
onPositive={handleStartMergeConfirmation}
// lowStakes
noTitleBar
confirmationText='Some responses are still being generated. Do you want to stop and proceed with merging the available responses now?'
positiveActionText='Proceed with Merge'
negativeActionText='Wait for All Responses'
negativeActionStartDecorator={
<CircularProgress color='neutral' sx={{ '--CircularProgress-size': '24px', '--CircularProgress-trackThickness': '1px' }} />
}
/>
)}
</ScrollToBottom>
);
</>;
}
+3 -2
View File
@@ -145,8 +145,9 @@ export function BeamFusionGrid(props: {
</Typography>
</Box> : (
<Typography level='body-sm'>
You need two or more replies for a {currentFactory?.shortLabel?.toLocaleLowerCase() ?? ''} merge.
<Typography level='body-sm' sx={{ opacity: 0.8 }}>
{/*You need two or more replies for a {currentFactory?.shortLabel?.toLocaleLowerCase() ?? ''} merge.*/}
Waiting for multiple responses.
</Typography>
)}
</BeamCard>
+23 -22
View File
@@ -6,15 +6,14 @@ import { Box, Button, ButtonGroup, FormControl, Typography } from '@mui/joy';
import AutoAwesomeIcon from '@mui/icons-material/AutoAwesome';
import AutoAwesomeOutlinedIcon from '@mui/icons-material/AutoAwesomeOutlined';
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
import { animationColorBeamGather } from '~/common/util/animUtils';
import { useLLMSelect } from '~/common/components/forms/useLLMSelect';
import { BeamGatherDropdown } from './BeamGatherPaneDropdown';
import { BeamStoreApi, useBeamStore } from '../store-beam.hooks';
import { FFactoryId, FUSION_FACTORIES } from './instructions/beam.gather.factories';
import { GATHER_COLOR } from '../beam.config';
import { beamPaneSx } from '../BeamCard';
import { useModuleBeamStore } from '../store-module-beam';
const gatherPaneClasses = {
@@ -59,7 +58,7 @@ export function BeamGatherPane(props: {
beamStore: BeamStoreApi,
canGather: boolean,
isMobile: boolean,
onAddFusion: () => void,
// onAddFusion: () => void,
raysReady: number,
}) {
@@ -67,7 +66,7 @@ export function BeamGatherPane(props: {
// external state
// const { setStickToBottom } = useScrollToBottom();
const {
currentFactoryId, currentGatherLlmId, isGatheringAny,
currentFactoryId, currentGatherLlmId, isGatheringAny, hasFusions,
setCurrentFactoryId, setCurrentGatherLlmId,
} = useBeamStore(props.beamStore, useShallow(state => ({
// state
@@ -75,13 +74,15 @@ export function BeamGatherPane(props: {
currentFactoryId: state.currentFactoryId,
currentGatherLlmId: state.currentGatherLlmId,
isGatheringAny: state.isGatheringAny,
hasFusions: state.fusions.length > 0,
// actions
setCurrentFactoryId: state.setCurrentFactoryId,
setCurrentGatherLlmId: state.setCurrentGatherLlmId,
})));
const [_, gatherLlmComponent, gatherLlmIcon] = useLLMSelect(
currentGatherLlmId, setCurrentGatherLlmId, props.isMobile ? '' : 'Merge Model', true,
const gatherAutoStartAfterScatter = useModuleBeamStore(state => state.gatherAutoStartAfterScatter);
const [_, gatherLlmComponent/*, gatherLlmIcon*/] = useLLMSelect(
currentGatherLlmId, setCurrentGatherLlmId, props.isMobile ? '' : 'Merge Model', true, !props.canGather && !gatherAutoStartAfterScatter,
);
// derived state
@@ -96,7 +97,7 @@ export function BeamGatherPane(props: {
}, [currentFactoryId, setCurrentFactoryId]);
const MainLlmIcon = gatherLlmIcon || (isGatheringAny ? AutoAwesomeIcon : AutoAwesomeOutlinedIcon);
const MainLlmIcon = /*gatherLlmIcon ||*/ (isGatheringAny ? AutoAwesomeIcon : AutoAwesomeOutlinedIcon);
return (
<Box
@@ -105,21 +106,20 @@ export function BeamGatherPane(props: {
>
{/* Title */}
<Box sx={{ display: 'flex', alignItems: 'center', gap: 0.25, minWidth: 184 }}>
<div>
<Typography
level='h4' component='h2'
endDecorator={<BeamGatherDropdown />}
// sx={{ my: 0.25 }}
>
<MainLlmIcon sx={{ fontSize: '1rem', animation: isGatheringAny ? `${animationColorBeamGather} 2s linear infinite` : undefined }} />&nbsp;Merge
</Typography>
<Typography level='body-sm' sx={{ whiteSpace: 'nowrap' }}>
{/* may merge or not (hasInputs) N replies.. put this in pretty messages */}
{props.canGather ? `Combine the ${props.raysReady} replies` : 'Two replies or more'}
</Typography>
</div>
<ScrollToBottomButton inline />
<Box>
<Typography
level='h4' component='h3'
// endDecorator={<ScrollToBottomButton inline />}
// sx={{ my: 0.25 }}
sx={(props.canGather || hasFusions || isGatheringAny) ? undefined : { color: 'primary.solidDisabledColor', ['& > svg']: { color: 'primary.solidDisabledColor' } }}
>
<MainLlmIcon sx={{ fontSize: '1rem', mr: 0.625, animation: isGatheringAny ? `${animationColorBeamGather} 2s linear infinite` : undefined }} />
Merge
</Typography>
<Typography level='body-sm' sx={{ whiteSpace: 'nowrap' }}>
{/* may merge or not (hasInputs) N replies.. put this in pretty messages */}
{props.canGather ? `Combine the ${props.raysReady} replies` : /*'Fuse all replies'*/ ''}
</Typography>
</Box>
{/* Method */}
@@ -128,6 +128,7 @@ export function BeamGatherPane(props: {
<ButtonGroup
variant='outlined'
size='md'
disabled={!props.canGather}
// sx={{ boxShadow: isNoFactorySelected ? 'xs' : undefined }}
>
{FUSION_FACTORIES.map(factory => {
@@ -1,42 +0,0 @@
import * as React from 'react';
import { useShallow } from 'zustand/react/shallow';
import { Dropdown, IconButton, ListItem, ListItemDecorator, Menu, MenuButton, MenuItem, Typography } from '@mui/joy';
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
import MoreHorizRoundedIcon from '@mui/icons-material/MoreHorizRounded';
import { useModuleBeamStore } from '../store-module-beam';
export function BeamGatherDropdown() {
// external (persisted) state
const {
gatherShowPrompts,
toggleGatherShowPrompts,
} = useModuleBeamStore(useShallow(state => ({
gatherShowPrompts: state.gatherShowPrompts,
toggleGatherShowPrompts: state.toggleGatherShowPrompts,
})));
return (
<Dropdown>
<MenuButton
aria-label='Merge Options'
slots={{ root: IconButton }}
slotProps={{ root: { size: 'sm', sx: { my: -0.5 /* to not disrupt the layouting */ } } }}
>
<MoreHorizRoundedIcon />
</MenuButton>
<Menu placement='right-end' sx={{ minWidth: 250, zIndex: 'var(--joy-zIndex-modal)' /* on top of its own modal in FS */ }}>
<ListItem>
<Typography level='body-sm'>Advanced</Typography>
</ListItem>
<MenuItem onClick={toggleGatherShowPrompts}>
<ListItemDecorator>{gatherShowPrompts && <CheckRoundedIcon />}</ListItemDecorator>
Show All Prompts
</MenuItem>
</Menu>
</Dropdown>
);
}
+5 -4
View File
@@ -170,23 +170,24 @@ export function Fusion(props: {
<GoodTooltip title='Use this message'>
<IconButton
size='sm'
// variant='solid'
// variant='plain'
color={GATHER_COLOR}
disabled={isFusing}
onClick={handleFusionUse}
// endDecorator={<TelegramIcon />}
sx={{
// ...BEAM_BTN_SX,
// fontSize: 'xs',
fontSize: 'xs',
// '--Icon-fontSize': 'var(--joy-fontSize-xl)',
// backgroundColor: 'background.popup',
// border: '1px solid',
// borderColor: `${GATHER_COLOR}.outlinedBorder`,
// boxShadow: `0 4px 16px -4px rgb(var(--joy-palette-${GATHER_COLOR}-mainChannel) / 20%)`,
animation: `${animationEnterBelow} 0.1s ease-out`,
// whiteSpace: 'nowrap',
whiteSpace: 'nowrap',
}}
>
{/*Ok*/}
{/*Use*/}
<TelegramIcon />
</IconButton>
</GoodTooltip>
+1 -1
View File
@@ -30,7 +30,7 @@ function FusionControls(props: {
{/* LLM Icon */}
{!!props.llmVendorIcon && (
<GoodTooltip title={props.llmLabel}>
<GoodTooltip placement='top' arrow title={props.llmLabel}>
<Box sx={{ display: 'flex' }}>
<props.llmVendorIcon sx={{ fontSize: 'lg', my: 'auto' }} />
</Box>
@@ -88,7 +88,7 @@ function EditableInstruction(props: {
}) {
// external state
const gatherShowPrompts = useModuleBeamStore(state => state.gatherShowPrompts);
const gatherShowAllPrompts = useModuleBeamStore(state => state.gatherShowAllPrompts);
// derived state
const { instruction, instructionIndex, onInstructionEdit } = props;
@@ -101,7 +101,7 @@ function EditableInstruction(props: {
return (instruction.type === 'chat-generate') ? (
<>
{gatherShowPrompts && (
{gatherShowAllPrompts && (
<EditableChatInstructionPrompt
isEditable={props.isEditable}
itemKey='systemPrompt'

Some files were not shown because too many files have changed in this diff Show More