Compare commits

...

378 Commits

Author SHA1 Message Date
Enrico Ros e5f674509c 2.0.2 News 2025-11-30 16:54:56 -08:00
Enrico Ros 197a4ae5c0 2.0.2 Package 2025-11-30 16:53:25 -08:00
Enrico Ros 64d2dcf39c AudioLivePlayer: tryfix for the persistent android notification 2025-11-30 15:05:17 -08:00
Enrico Ros caf54c736b Speex: do not stop the playback too early 2025-11-30 14:31:43 -08:00
Enrico Ros 423c2cce28 speakText: port to Speex 2025-11-30 12:51:55 -08:00
Enrico Ros a1af51efcb Call: port to Speex 2025-11-30 06:55:51 -08:00
Enrico Ros ffc1bf9c58 Remove src/modules/elevenlabs 2025-11-30 06:55:51 -08:00
Enrico Ros a54bfdb342 Settings: port to Speex 2025-11-30 06:55:51 -08:00
Enrico Ros 03861d2dbd Speex: map instead of array 2025-11-30 06:38:14 -08:00
Enrico Ros 8c080da6bf Speex: Autoconfig WebSpeech best 2025-11-30 06:38:14 -08:00
Enrico Ros a8c98056b6 Speex: Config UI Done 2025-11-30 06:38:14 -08:00
Enrico Ros 78e663f955 Speex: important fixes 2025-11-30 06:38:14 -08:00
Enrico Ros 70546a5039 Speex: Almost Done 2025-11-30 06:38:14 -08:00
Enrico Ros 30f78b33cb Speex: disable Azure 2025-11-30 06:38:14 -08:00
Enrico Ros 712e8c1f16 Speex: UI update: Selects and Persona Voice changer 2025-11-30 06:38:14 -08:00
Enrico Ros 933dfdfb53 Speex: improve types 2025-11-30 06:38:14 -08:00
Enrico Ros 9ce86b029f Speex: UI settings modal 2025-11-30 06:38:14 -08:00
Enrico Ros 13580cc69d Speex: UI config improvements 2025-11-30 06:38:14 -08:00
Enrico Ros a7dee0002d Speex: debug instrumentation 2025-11-30 06:38:14 -08:00
Enrico Ros c84b2df3fa Speex: fix elevenlabs 2025-11-30 06:38:14 -08:00
Enrico Ros d9471a8684 Speex: fix types 2025-11-30 06:38:14 -08:00
Enrico Ros ef630c2272 Speex: improve UI and errors 2025-11-30 06:38:14 -08:00
Enrico Ros e188c71652 Speex: RPC: shared downstreaming 2025-11-30 06:38:14 -08:00
Enrico Ros 910260c2c8 Speex: UI: credentials edit and add new 2025-11-30 06:38:14 -08:00
Enrico Ros 22752abc38 Speex: relax engine validation 2025-11-30 06:38:14 -08:00
Enrico Ros 92bc3a5d64 Speex: DVoice -> wire_Voice 2025-11-30 06:38:14 -08:00
Enrico Ros 1383752cc1 Speex: reduce logging 2025-11-30 06:38:13 -08:00
Enrico Ros 66af16fb81 Speex: manual refactor 2025-11-30 06:38:13 -08:00
Enrico Ros fc019d7b46 Speex: client cleanups 2025-11-30 06:38:13 -08:00
Enrico Ros ac4f0fcb12 Speex: LocalAI: Preview 2025-11-30 06:38:13 -08:00
Enrico Ros a6c2bc663d Speex: arrange files 2025-11-30 06:38:13 -08:00
Enrico Ros e62ffa02e9 Speex: LocalAI vendor 2025-11-30 06:38:13 -08:00
Enrico Ros a003600839 Speex: some UI 2025-11-30 06:38:13 -08:00
Enrico Ros ea73feb06d Speex: remove elevenlabs, with key migration 2025-11-30 06:38:13 -08:00
Enrico Ros 3bdf69e1b7 Speex: ui: begin 2025-11-30 06:38:13 -08:00
Enrico Ros 590fe78bd1 Speex: client cleanup 2025-11-30 06:38:13 -08:00
Enrico Ros 76187ba0e7 Speex: rpc backend 2025-11-30 06:38:13 -08:00
Enrico Ros 5eba375f4d Speex: add webspeech (with detection) and synthesize-openai 2025-11-30 06:38:13 -08:00
Enrico Ros 8fa6a8251f Speex: vendors, engine store, client, router, skel-synthesize 2025-11-30 06:38:13 -08:00
Enrico Ros 75fa046f30 Speex: centralize capability 2025-11-30 06:38:13 -08:00
Enrico Ros 08a8cd1430 Speex: Types & Client 2025-11-30 06:38:13 -08:00
Enrico Ros 3afbb78a39 Icons: port to PhVoice 2025-11-30 06:38:12 -08:00
Enrico Ros fca6ccd816 Badge: transparent BG to not overlap text. Fixes #889 2025-11-29 14:52:13 -08:00
Enrico Ros 8d351822c1 Niy 2025-11-29 13:25:36 -08:00
Enrico Ros 7d274a31fe AIX: CGR: use shared objectUtils 2025-11-29 12:40:04 -08:00
Enrico Ros e36dde0d25 objectUtils: estimate JSON size, deep clone with string limit, find largestStringPaths 2025-11-29 12:17:28 -08:00
Enrico Ros 51cc6e5ae5 CSF: only show the option for server-side (not client-side) disconnect 2025-11-29 11:12:30 -08:00
Enrico Ros 28d911c617 ElevenLabsIcon: add icon 2025-11-28 05:49:33 -08:00
Enrico Ros b1e9fe58fb objectUtils: add stripUndefined 2025-11-28 04:23:11 -08:00
Enrico Ros 16ba014ade GoodBadge: for 'new' 2025-11-28 04:23:11 -08:00
Enrico Ros e9d5a20c1a FormTextField: support inputSx 2025-11-28 04:23:11 -08:00
Enrico Ros 6e0036f9c4 FormSecretField: crystal clear keys input 2025-11-28 04:23:11 -08:00
Enrico Ros d7e189aa1c FormSliderControl: allow sliderSx 2025-11-28 04:23:11 -08:00
Enrico Ros ea2b444fb2 FormChipControl: alignEnd 2025-11-28 04:23:11 -08:00
Enrico Ros cd1efaf26e FormChipControl: support descriptions 2025-11-28 04:23:11 -08:00
Enrico Ros e47f0e5d43 LanguageSelect: improve select 2025-11-28 04:23:11 -08:00
Enrico Ros 5284d37984 AudioLivePlayer: ignore a closure error 2025-11-28 04:23:11 -08:00
Enrico Ros 1bf6fa0e4d Browse service: improve error reporting 2025-11-27 19:12:08 -08:00
Enrico Ros fc294c82f1 Pdfjs: lock to 5.4.54
more recent 5.4 have trouble with await import('pdfjs-dist'), throwing.
2025-11-27 18:33:20 -08:00
Enrico Ros 7b1dc49dda Roll pdfjs 2025-11-27 18:19:51 -08:00
Enrico Ros d15ddeea24 Roll react-player 2025-11-27 18:15:19 -08:00
Enrico Ros eaac213859 Ph: add Voice 2025-11-27 18:07:54 -08:00
Enrico Ros 02c1460351 Roll posthog 2025-11-27 18:04:06 -08:00
Enrico Ros 2fff35b7d9 Roll superjson 2025-11-27 18:03:37 -08:00
Enrico Ros c5b9072bde LLMs: LocalAI publish interface 2025-11-26 19:01:44 -08:00
Enrico Ros 8a570e912a CSF: docs 2025-11-26 07:37:56 -08:00
Enrico Ros 1dcc40afb8 CSF: Propagate everywhere 2025-11-26 07:37:09 -08:00
Enrico Ros c2092f8035 BlockPartError: vendor name 2025-11-26 06:50:11 -08:00
Enrico Ros 886c4b411e Revert "Test Edge on node"
This reverts commit 8888fd40cd.
2025-11-26 06:13:28 -08:00
Enrico Ros 8888fd40cd Test Edge on node 2025-11-26 04:56:26 -08:00
Enrico Ros 31cd01bccf BlockPartError: CSF enabled 2025-11-26 04:42:50 -08:00
Enrico Ros c59b221004 BlockPartError: allow retrying disconnected errors too 2025-11-26 04:27:52 -08:00
Enrico Ros cb3cc3e74c PostHog: disable the info level 2025-11-26 04:05:03 -08:00
Enrico Ros 9e90015fcc PostHog: disable the info level 2025-11-26 03:56:55 -08:00
Enrico Ros 95e0517056 60s - disable any maxDuration 2025-11-26 03:56:25 -08:00
Enrico Ros 2b2f47915f AIX: OpenAI: Fix CSF! 2025-11-26 03:11:12 -08:00
Enrico Ros 9acd178ce1 AudioPlayer: safe end of stream 2025-11-26 03:11:08 -08:00
Enrico Ros f381f80184 AIX: Anthropic: add strict to tool defs on wiretypes 2025-11-24 16:44:13 -08:00
Enrico Ros c83be61343 AIX: Anthropic: newlines for text broken by tool calls 2025-11-24 16:05:44 -08:00
Enrico Ros f6e49d31ec PWA-Desktop detect. Fixes #887 2025-11-24 15:48:50 -08:00
Enrico Ros cc0429a362 Update readme 2025-11-24 15:14:49 -08:00
Enrico Ros b35901d94c 2.0.1 Roll AIX 2025-11-24 15:06:39 -08:00
Enrico Ros c0df1a23f4 2.0.1 Update news 2025-11-24 15:05:40 -08:00
Enrico Ros 495619af2c LLMs: Interfaces fix 2025-11-24 15:00:09 -08:00
Enrico Ros 72dfadf106 AIX: Anthropic: auto-header for programmatic tool calling (calling from code, not just llm) 2025-11-24 14:58:34 -08:00
Enrico Ros 5825909e45 AIX: Anthropic: programmatic tool calling support 2025-11-24 14:42:20 -08:00
Enrico Ros d3f6d87ee0 AIX: remove legacy unconstrained 'json mode' 2025-11-24 14:29:36 -08:00
Enrico Ros c4f4c5ddad AIX: cross-vendor json output | strict tool invocation 2025-11-24 14:23:25 -08:00
Enrico Ros 2921d7ca27 Anthropic: Structured Outputs | Strict Tools 2025-11-24 13:59:20 -08:00
Enrico Ros 2021cbc988 Anthropic: MessageDeltaUsage 2025-11-24 13:58:57 -08:00
Enrico Ros e9e29861b2 Anthropic: cleanup models 2025-11-24 13:21:25 -08:00
Enrico Ros 8e6da36059 LLM interface types - relax for bw comp 2025-11-24 13:21:13 -08:00
Enrico Ros 5e1469e12e Anthropic: Tool Search Tool 2025-11-24 13:20:58 -08:00
Enrico Ros bd7465f8b1 Roll packages 2025-11-24 12:34:52 -08:00
Enrico Ros 570397a616 Anthropic: add Effort parameter 2025-11-24 12:34:39 -08:00
Enrico Ros b3b5f1daef Anthropic: raise default thinking to 16384 2025-11-24 12:13:50 -08:00
Enrico Ros 25ec3ae47c Anthropic: add Opus 4.5 2025-11-24 12:09:41 -08:00
Enrico Ros 5ba5e3da58 2.0.1 Roll AIX, news 2025-11-24 04:11:39 -08:00
Enrico Ros 9296c14ca0 2.0.1 News 2025-11-24 04:11:39 -08:00
Enrico Ros 310b5d3422 2.0.1 Package 2025-11-24 03:57:17 -08:00
Enrico Ros 1c5967112e Rolled posthog as there's still no local min 2025-11-24 03:19:08 -08:00
Enrico Ros 49a3d8ee71 Roll deep 2025-11-24 03:14:58 -08:00
Enrico Ros cf8b61e8d9 Packages: locked dexie 2025-11-24 03:11:26 -08:00
Enrico Ros 967ae5723e Roll posthog - next.config.ts fix 2025-11-24 02:39:28 -08:00
Enrico Ros 03421acf2f Roll posthog - security fix (details below)
https://helixguard.ai/blog/malicious-sha1hulud-2025-11-24
2025-11-24 02:39:15 -08:00
Enrico Ros d43896cc5a Package: cmd to fix sharp on win32/x64 2025-11-24 02:33:50 -08:00
Enrico Ros b283124a2f Roll packages 2025-11-24 02:05:37 -08:00
Enrico Ros 8c39be01f8 Roll packages 2025-11-24 02:04:23 -08:00
Enrico Ros fb2bd4ccd8 Error Hints: nit 2025-11-23 23:34:36 -08:00
Enrico Ros 5b826ffc45 Error Hints: control AI Service advanced setup 2025-11-23 23:26:56 -08:00
Enrico Ros 0b2ab365d3 Error Hints: Render Reconnect 2025-11-23 23:26:56 -08:00
Enrico Ros 93fc54992c Error Hints: AIX Client and Reassembler -> Fragment 2025-11-23 23:26:56 -08:00
Enrico Ros 60b7326deb Error Hints: Fragments 2025-11-23 23:26:56 -08:00
Enrico Ros d6e6139244 AIX: Gemini: change log 2025-11-23 23:26:56 -08:00
Enrico Ros 0892911ddc Next config: conditional strict mode 2025-11-23 23:26:56 -08:00
Enrico Ros 30267ac50c LLMs: Nano Banana message names 2025-11-23 23:16:43 -08:00
Enrico Ros ffef0ef31d PWA detect. Fixes #887 2025-11-23 23:15:56 -08:00
Enrico Ros fc047087ce CSF: direct connect actions hook 2025-11-23 23:15:03 -08:00
Enrico Ros 81d4966535 CSF: improve vendors 2025-11-23 20:02:06 -08:00
Enrico Ros 004d63fda1 FormRadioControl: gap 1 2025-11-23 16:23:35 -08:00
Enrico Ros 23e2dbb354 tRPC fetchers: increase error message to 240 2025-11-23 16:19:25 -08:00
Enrico Ros 28e9899b97 Settings: looks 2025-11-23 16:19:22 -08:00
Enrico Ros 7441d41550 FormRadioControl: descriptions 2025-11-23 16:19:11 -08:00
Enrico Ros 99e2d5597a LLMs: CSF: OpenAI 2025-11-23 02:56:08 -08:00
Enrico Ros 74321a44ca LLMs: Client-side .listModels 2025-11-23 02:46:28 -08:00
Enrico Ros 7b664affb7 AIX: Client-side .chatGenerate 2025-11-23 02:46:28 -08:00
Enrico Ros c411835f3b LLMs: listModels dispatch: cleanup 2025-11-23 02:36:08 -08:00
Enrico Ros 7b62c946a5 LLMs: Vendors: type the access 2025-11-23 02:35:59 -08:00
Enrico Ros 252e2fcd29 LLMs: Access extraction rewires 2025-11-23 02:28:38 -08:00
Enrico Ros aa2731bccc LLMs: Access extraction 2025-11-23 02:28:38 -08:00
Enrico Ros 282c439963 LLMs: CSF: UI configs 2025-11-23 02:28:38 -08:00
Enrico Ros e99459aba0 LLMs: CSF: vendors 2025-11-23 02:28:38 -08:00
Enrico Ros 4c35cbbe34 LLMs: CSF: access 2025-11-23 02:28:38 -08:00
Enrico Ros cab3537ae2 CSF: activate stubs 2025-11-23 02:28:38 -08:00
Enrico Ros c3f211389b AIX: edgeProcedure rename 2025-11-23 02:28:38 -08:00
Enrico Ros a4de84a842 AIX: dispatch: extract debugger / move security 2025-11-23 02:28:38 -08:00
Enrico Ros 2bf1eaaa0f Partially remove app.config 2025-11-23 01:57:29 -08:00
Enrico Ros 7f5ddd1629 Client stubs: env with log 2025-11-23 01:35:12 -08:00
Enrico Ros ed798fec65 Client stubs: PostHog 2025-11-22 19:09:38 -08:00
Enrico Ros 90386f5794 Client stubs: Env 2025-11-22 19:09:36 -08:00
Enrico Ros 8ada8811bf Build/Env: remove superfluous function 2025-11-22 19:09:33 -08:00
Enrico Ros b24badabef Revert "PostHog: server-client-safe import"
This reverts commit 2c8b713ff3.
2025-11-22 18:58:21 -08:00
Enrico Ros 4e20cb12cd Env: server-only naming 2025-11-22 16:38:05 -08:00
Enrico Ros 245da9e6cc App: server-client-safe vercel import 2025-11-22 16:30:36 -08:00
Enrico Ros a800b34aa7 App: prioritize posthog 2025-11-22 16:30:36 -08:00
Enrico Ros 50c3941f42 Posthog: client: cleanup 2025-11-22 16:30:36 -08:00
Enrico Ros 6e5d5ee36c Posthog: server: exceptions: trpc change 2025-11-22 16:25:50 -08:00
Enrico Ros 2c8b713ff3 PostHog: server-client-safe import 2025-11-22 16:06:00 -08:00
Enrico Ros 8162a6706d PostHog: server: add templates 2025-11-22 16:06:00 -08:00
Enrico Ros 952f6883fa PostHog: server: exceptions 2025-11-22 16:06:00 -08:00
Enrico Ros 373f3e3698 PostHog: server: cleanups 2025-11-22 16:06:00 -08:00
Enrico Ros 17791f631f AIX: extract consts for client-side bundles 2025-11-22 16:06:00 -08:00
Enrico Ros 6987c67cc7 AIX: Images: further resize the last generated image - can be very large, so >0.5M we resize 2025-11-21 03:11:08 -08:00
Enrico Ros 65a59e5d2d Blocks: Reasoning: mx 1.5 2025-11-21 01:58:34 -08:00
Enrico Ros 05b9a6d412 AIX: OpenAI: avoid prefixing with [summary] the reasoning 2025-11-21 01:41:16 -08:00
Enrico Ros 6608f4f164 AIX: Gemini: collapse empty text in between reasoning 2025-11-21 01:40:52 -08:00
Enrico Ros 93378ad6b0 VoidFragments -> ContentFragments 2025-11-21 01:19:23 -08:00
Enrico Ros bd4a60203e In-order rendering: transfer aux 2025-11-21 00:52:23 -08:00
Enrico Ros c9e6a62641 Fragments: Aux: add delete aux 2025-11-21 00:35:08 -08:00
Enrico Ros 68d797fa99 Fragments: Aux: show reasoning progress (assumed, not in the Aux fragment) and disable the 'realize' button while incomplete 2025-11-21 00:35:05 -08:00
Enrico Ros 08011d8cf2 ChatMessage: fix Void Fragment to text 2025-11-21 00:34:43 -08:00
Enrico Ros 2f91bf7f52 In-order rendering: cleanups 2025-11-21 00:27:52 -08:00
Enrico Ros d5182c05c1 In-order rendering of parts 2025-11-20 23:09:29 -08:00
Enrico Ros 8e0947a833 AIX: GG Transmitter: also faster body size estimation 2025-11-20 22:56:15 -08:00
Enrico Ros 1d88fc37b0 AIX: GG Transmitter: smaller echo request 2025-11-20 20:22:43 -08:00
Enrico Ros 46bd8e6f4d AIX debugger: wrap 2025-11-20 20:22:43 -08:00
Enrico Ros b95b427331 AIX debugger: unbreak layout 2025-11-20 20:22:43 -08:00
Enrico Ros 9b574c60eb Roll AIX 2025-11-20 19:52:32 -08:00
Enrico Ros a8b39cc0a4 LLMs: Gemini: support image size 2025-11-20 19:51:51 -08:00
Enrico Ros cdbc7dd9b8 AIX: Gemini: parse and recreate thought signatures 2025-11-20 19:11:42 -08:00
Enrico Ros 08dfec4fcf AIX: expand parts to include per-part vendor opaque information 2025-11-20 19:11:42 -08:00
Enrico Ros 7f4553225b AIX: transfer/reassemble per-fragment opaque vendor data 2025-11-20 19:11:42 -08:00
Enrico Ros f37e65a91e DMessageFragments: per-fragment vendor-specific 2025-11-20 19:11:42 -08:00
Enrico Ros c022f8a68c LLMs: improve params editing 2025-11-20 19:11:42 -08:00
Enrico Ros daa7a506a5 AIX: Gemini: include thoughts when thinking is requested 2025-11-20 19:11:42 -08:00
Enrico Ros f3dcf39c15 LLMs: Gemini: update cba 2025-11-20 17:01:39 -08:00
Enrico Ros 06cbef16d4 LLMs: Gemini: add Nano Banana Pro
LLMs: Gemini: show thoughts
2025-11-20 17:01:39 -08:00
Enrico Ros ab31bcd3e3 LLMs: Gemini: remove obsolete 2025-11-20 17:01:39 -08:00
Enrico Ros 563a99864f LLMs: showThoughts: default to false 2025-11-20 17:01:39 -08:00
Enrico Ros 39b8abc2c6 Fix: LLMs: cleanup stale userParameters 2025-11-20 17:01:39 -08:00
Enrico Ros f3dd837076 AIX: Gemini: UrlContext (fetching website) not supported by Nano Bananas 2025-11-20 17:01:39 -08:00
Enrico Ros d6b3a5259d fix eslint warnings 2025-11-20 17:01:39 -08:00
Enrico Ros 9fea1d5c64 LLMs: xAI: add Grok 4.1 Fast 2025-11-19 16:24:46 -08:00
Enrico Ros 0adb5355c7 Debug Wire: mark sessions 2025-11-19 15:44:13 -08:00
Enrico Ros 01d807b61e Browsing: allow time for screenshot 2025-11-19 15:44:10 -08:00
Enrico Ros 285bb812d0 LocalAI: fix list and virtualize 2025-11-19 14:43:56 -08:00
Enrico Ros d897155d6e LocalAI: fix gallery parsing 2025-11-19 14:43:53 -08:00
Enrico Ros 7154426279 packages: add react-virtual 2025-11-19 14:43:50 -08:00
Enrico Ros 4526084e4d roll packages 2025-11-19 12:20:18 -08:00
Enrico Ros 0c5c786ae3 Beam: starring selects 2025-11-19 12:14:47 -08:00
Enrico Ros 8a2c4aa356 useLLMSelect: show starred models only 2025-11-19 12:14:16 -08:00
Enrico Ros 4cba819edd State: show starred models only 2025-11-19 12:11:24 -08:00
Enrico Ros 4db42a2b29 StarIcons: improve 2025-11-19 12:11:21 -08:00
Enrico Ros fc0ee5b698 LLMs: fix OpenAI models overlap check 2025-11-19 12:11:11 -08:00
Enrico Ros 2c0c3f1c70 CC: zustand fix 2025-11-19 12:11:01 -08:00
Enrico Ros 3f3976b73c eslint: migrate to flat conf 2025-11-19 12:09:53 -08:00
Enrico Ros 82d5dcced5 LLMs: Gemini 3: fixed temperature, as Google Strongly Recommends to keep it at 1 2025-11-18 20:29:31 -08:00
Enrico Ros f4eaed694a LLMParametersEditor: allow code execution 2025-11-18 20:19:26 -08:00
Enrico Ros 05d9869326 Improve CG tool appearance 2025-11-18 20:19:09 -08:00
Enrico Ros 2675934ff8 LLMs: Gemini 3 - enable code execution 2025-11-18 20:10:01 -08:00
Enrico Ros fb6e19d3ea Roll AIX 2025-11-18 19:51:56 -08:00
Enrico Ros f1151d54e1 LLMs: Gemini 3.0 with Thinking Level, media Resolution, Google Search. Fixes #884 2025-11-18 19:51:55 -08:00
Enrico Ros 6a0fa4f9fa LLMs: Gemini Parameters 2025-11-18 19:44:16 -08:00
Enrico Ros 20d96fffc8 AIX: Gemini: wires upgrade 2025-11-18 19:42:29 -08:00
Enrico Ros ad6c06308a AIX: Gemini: -> thinkingLevel, -> mediaResolution, -> codeExecution, -> urlContext 2025-11-18 19:36:40 -08:00
Enrico Ros 84ee4171a4 AIX: Gemini: thought signature logger 2025-11-18 19:36:40 -08:00
Enrico Ros 6bc4f8a1e4 AIX: Gemini: wiretypes for thinkingLevel (param), urlContext (tool), thoughtSignature (fc invocation arg) 2025-11-18 19:36:40 -08:00
Enrico Ros 8876aa0866 RenderCode: reload button when html is rendered. Fixes #885 2025-11-18 19:36:39 -08:00
Enrico Ros 691d2e7228 Starring: models on top 2025-11-18 05:34:04 -08:00
Enrico Ros 7a12755de9 T2I: LocalAI: temporary mappings of models names and sizes to OpenAI gpt-image (GI) family equivalents
Maps OpenAI model IDs to LocalAI models:
- gpt-image-1 → stablediffusion
- gpt-image-1-mini → dreamshaper
- dall-e-3 → sd-3.5-large-ggml
- dall-e-2 → sd-3.5-medium-ggml
2025-11-18 04:36:09 -08:00
Enrico Ros 8573f56d03 T2I: Enable LocalAI, over 'stablediffusion' 2025-11-18 04:05:16 -08:00
Enrico Ros 8f3e683321 T2I: Azure disable WebP 2025-11-18 02:59:02 -08:00
Enrico Ros 64867b0b67 T2I: Azure support - LocalAI likely. Fixes #832 2025-11-18 02:59:01 -08:00
Enrico Ros e42d060e57 T2I: OpenAI: honor options 2025-11-18 02:55:47 -08:00
Enrico Ros 2ca9ab8a0c T2I: add options 2025-11-18 02:54:43 -08:00
Enrico Ros fdc0c6b371 T2I: propagate abort signal 2025-11-18 02:48:01 -08:00
Enrico Ros 8f8779c3cd Selection Operations: improve with fuzzy matching: matches more. Helps #881 2025-11-18 02:43:11 -08:00
Enrico Ros 851877ad8b LLMs: Azure: send the image_generation request anyway, for future compatibility - #832 2025-11-18 01:19:21 -08:00
Enrico Ros 8df74529ad LLMs: Azure: still inline image generation does not work - #832 2025-11-18 01:16:13 -08:00
Enrico Ros 353f51ebf0 LLMs: Azure: more explicitly named no-web-search var 2025-11-18 01:07:31 -08:00
Enrico Ros 6c5cb08118 ModelsList: fix list jumpiness on starring. Fixes #880. 2025-11-18 00:49:35 -08:00
Enrico Ros 54fee92b15 ModelsList: improve performance in large lists (e.g. OpenRouter) 2025-11-18 00:49:05 -08:00
Enrico Ros 776431c801 LLMs: Mistral: silence the off-by-2 warning 2025-11-17 14:58:49 -08:00
Enrico Ros 9f893ce999 LLMs: Groq: update models 2025-11-17 14:58:49 -08:00
Enrico Ros 820447670c LLMs registry: move 2025-11-17 05:05:27 -08:00
Enrico Ros b43c49cd64 Type annotation 2025-11-17 03:58:40 -08:00
Enrico Ros f9c3558975 Remove warning 2025-11-17 03:49:03 -08:00
Enrico Ros 1b75250824 LLMs: enumeration 2025-11-17 03:43:10 -08:00
Enrico Ros 3fa3bb5d03 LLMs: Central Dispatch 2025-11-17 03:29:40 -08:00
Enrico Ros ef0ff55f1f LLMs: extract LocalAI, LMStudio 2025-11-17 01:42:26 -08:00
Enrico Ros 66aa8ed177 Composer: autoFocus only on Desktop 2025-11-17 00:27:52 -08:00
Enrico Ros 519286bc69 DocumentAttachmentFragments: doc number limiter 2025-11-17 00:13:46 -08:00
Enrico Ros 9882f45fd2 DocumentAttachmentFragments: rename into pane 2025-11-17 00:09:31 -08:00
Enrico Ros 634f6216a0 Roll packages 2025-11-17 00:07:08 -08:00
Enrico Ros 69574a7d1c AIX: OpenRouter: fix reasoning summaries 2025-11-16 04:36:41 -08:00
Enrico Ros eddd4b9be8 ModelsServiceSelector: improve +1 2025-11-16 04:36:26 -08:00
Enrico Ros 9a9c31ff53 DocumentAttachmentFragments: performance: memo 2025-11-16 04:36:06 -08:00
Enrico Ros 41ee7a1c85 Nav support: remessage 2025-11-15 13:35:26 -08:00
Enrico Ros 2f9bbf373c Nav support: with AI triage 2025-11-15 13:26:30 -08:00
Enrico Ros d662e10ebb Support: with AI triage 2025-11-15 13:23:42 -08:00
Enrico Ros cd31092333 Update AI triage 2025-11-15 13:23:31 -08:00
Enrico Ros 1eae7ab6f3 Update AI triage 2025-11-15 13:15:51 -08:00
Enrico Ros ba378f852f Tryout AI triage 2025-11-15 12:51:56 -08:00
Enrico Ros 5cfd1e557d Update README 2025-11-15 12:46:39 -08:00
Enrico Ros df31d79eaf Update README 2025-11-15 12:21:54 -08:00
Enrico Ros 12d7304325 Update README 2025-11-15 12:19:38 -08:00
Enrico Ros 41424cbdfd Update README 2025-11-15 12:17:06 -08:00
Enrico Ros 05dda519a2 CC: add tree 2025-11-15 12:04:07 -08:00
Enrico Ros 120d39282e Add screenshot section to README
Added a section for screenshots and usage examples.
2025-11-15 11:40:34 -08:00
Enrico Ros 8e7d0fd13b Update README 2025-11-15 11:32:21 -08:00
Enrico Ros 3d979fdfbb Update Docker deployment link in README 2025-11-15 01:58:19 -08:00
Enrico Ros 6ab47ae3cb Sweet readme
Updated README to enhance project visibility and features.
2025-11-15 01:56:56 -08:00
Enrico Ros a4977b4924 AI Inspector: 5Hz updates limit 2025-11-14 17:42:43 -08:00
Enrico Ros bac9c692b8 AI Inspector: toggle visibility on shortcut 2025-11-14 17:27:23 -08:00
Enrico Ros 6ab15356e1 AI Inspector: show Desktop shortcut 2025-11-14 17:19:21 -08:00
Enrico Ros 73cc7121c3 CC: add npx eslint 2025-11-14 15:40:34 -08:00
Enrico Ros 1aeef06f49 AppBeam: nits 2025-11-14 01:51:45 -08:00
Enrico Ros 3b16bcf01d OpenAI: officially updated 5.1 models 2025-11-14 01:15:09 -08:00
Enrico Ros f6351fda41 Ph: +Megaphone 2025-11-13 01:49:34 -08:00
Enrico Ros 007e91480d Roll posthog 2025-11-13 01:49:00 -08:00
Enrico Ros 163ef9296e HBWA: note 2025-11-13 01:48:18 -08:00
Enrico Ros fa042f7d68 AIX: prenorm 2025-11-12 22:53:50 -08:00
Enrico Ros 8a11040dde optimization: faster 'hasKeys' Object non-emptiness check (avoids allocation) 2025-11-12 22:40:46 -08:00
Enrico Ros a88971d557 Models: Vendor (service) selection: vastly improve 2025-11-12 17:45:34 -08:00
Enrico Ros 5867e5fcc5 OpenRouter: more config beauty 2025-11-12 17:32:43 -08:00
Enrico Ros 20e587d6d3 OpenRouter: more config cleanup 2025-11-12 16:58:14 -08:00
Enrico Ros 6bfa8471cd Models Modal: option for fullscreen 2025-11-12 16:01:14 -08:00
Enrico Ros 5c10bce2f4 ModelsList: disable the large popups 2025-11-12 15:53:52 -08:00
Enrico Ros f1663f6668 DataStreamViz: optimize with intersection observers for 1k Beams 2025-11-12 15:39:15 -08:00
Enrico Ros 90c27e0e74 LLMs: add displayGroup 2025-11-12 15:31:32 -08:00
Enrico Ros b5eac0d907 OpenRouter: more config improvement 2025-11-12 15:20:30 -08:00
Enrico Ros 4eabe2cb3a Roll AIX 2025-11-12 15:11:17 -08:00
Enrico Ros a1c0d30a06 LLMs: GPT-5 optimistic updates (coming later this week) 2025-11-12 15:10:50 -08:00
Enrico Ros 63c9f65040 Merge remote-tracking branch 'opensource/claude/issue-879-20251112-2245' 2025-11-12 15:05:01 -08:00
Enrico Ros f58a066bff OpenRouter: improve config 2025-11-12 15:02:21 -08:00
Enrico Ros 952ea6357a tRPC: newline on dev warnings 2025-11-12 14:57:59 -08:00
Enrico Ros 6695973035 tRPC: raise the correct server-side exceptions 2025-11-12 14:57:44 -08:00
claude[bot] 3dc28635f4 Add support for GPT-5.1 models
- Add gpt-5.1-2025-11-12 (GPT-5.1 Thinking) with adaptive reasoning
- Add gpt-5.1-chat-latest (GPT-5.1 Instant) with adaptive reasoning
- Both models include full feature set: chat, vision, function calling, JSON, prompt caching, reasoning, web search
- Pricing set to match GPT-5 (to be updated when official pricing is announced)
- Added models to manual ordering list for proper UI sorting

Co-authored-by: Enrico Ros <enricoros@users.noreply.github.com>
2025-11-12 22:47:51 +00:00
Enrico Ros 0bde01a85f Llms: vector ops 2025-11-12 14:43:26 -08:00
Enrico Ros b9840c2074 Warn downgrade 2025-11-12 14:43:23 -08:00
Enrico Ros 8228a76875 LLMs: LocalAI enumeration less verbose 2025-11-11 18:40:52 -08:00
Enrico Ros 46b370a2e3 AIX: OpenAI ChatGenerate: fixup for malformed NS objects - best-effort 2025-11-11 18:37:34 -08:00
Enrico Ros 820e9513ba Fragments: Void reasoning priority 2025-11-11 18:22:14 -08:00
Enrico Ros bd71d64db3 LLMs: OpenRouter: fix context window max_tokens issue 2025-11-11 18:09:50 -08:00
Enrico Ros 9d4baf827c LLMs: OpenRouter: auto-detection of capabilities (i/o modalities, features, etc). Thanks OpenRouter, you're the best! 2025-11-11 17:47:31 -08:00
Enrico Ros d6843d7fcf AIX: OpenRouter: write/parse the new reasoning request / reasoning_details response
Removes older system(s) from OpenRouter.
2025-11-11 17:46:58 -08:00
Enrico Ros babb1dd962 LLMs: OpenRouter: parser 2025-11-11 16:46:13 -08:00
claude[bot] aa32e396a7 Tryfix speech recognition on Android 2025-11-11 14:07:51 -08:00
Enrico Ros 1068efcb49 CC: commands: openrouter 2025-11-11 14:01:08 -08:00
Enrico Ros 576c7f1458 CC: improve triage 3 2025-11-11 13:48:20 -08:00
Enrico Ros 37c857b055 CC: improve triage 2 2025-11-11 13:37:46 -08:00
claude[bot] 794dfb44d1 Add date to README
Added today's date (2025-11-11) to README.md as requested in issue #876.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-11 21:34:04 +00:00
Enrico Ros 929bb6dc66 CC: improve triage 2025-11-11 13:32:34 -08:00
Enrico Ros 28337e31eb Token Badge: show for half a cent+ 2025-11-11 12:48:32 -08:00
Enrico Ros 09a38c0e4b UserInputChecklistComponent: fix crash 2025-11-11 12:30:02 -08:00
Enrico Ros 645b8fb9cd LLMs: OpenRouter: fix 400 2025-11-11 12:14:37 -08:00
Enrico Ros 541588948c LLMs: OpenRouter: add future PKCE 2025-11-11 12:13:09 -08:00
Enrico Ros bdd6fcfbbc Ph: add Gift/Key 2025-11-11 12:13:08 -08:00
Enrico Ros 9e50286c66 LLMs: Moonshot: remove images from the request for those models 2025-11-10 02:00:42 -08:00
Enrico Ros 418e4649dc Analytics: Client send 2025-11-10 00:59:28 -08:00
Enrico Ros 4a70f20f4a Roll packages
Note: fix Posthog/nextjs-config to 1.3.2 because starting from 1.3.3 there are new dependencies not evaluated yet
2025-11-10 00:33:28 -08:00
Enrico Ros d6eabfcb6d Roll posthog 2025-11-10 00:27:07 -08:00
Enrico Ros d88889d760 Extra: disable 0.1 opacity; makes the code less usable 2025-11-10 00:07:27 -08:00
Enrico Ros 85146d8af0 LLMs: Moonshot: native search parameter (disabled here and in the model - upstream not good enough) 2025-11-09 20:16:39 -08:00
Enrico Ros 9612572f07 AIX: Moonshot: native search dispatch/parse support 2025-11-09 20:15:02 -08:00
Enrico Ros 4bb1dddf4d ApproximateCosts: only display on Extra 2025-11-09 20:15:02 -08:00
Enrico Ros b066a86962 AIX: fix exhaustive check 2025-11-09 20:15:02 -08:00
Enrico Ros 6086455782 LLMs/AIX: Moonshot AI support 2025-11-09 20:15:02 -08:00
Enrico Ros 9020b3cbad AIX: Fix Mistral's response breakage in NS mode - Fixes #873. 2025-11-09 17:23:07 -08:00
Enrico Ros 5822dea270 AIX: Further breakdown logging levels - with Retry entry points. 2025-11-09 17:14:55 -08:00
Enrico Ros c445f59664 AIX: Fix Logging, with warn only for real server-side solvable issues. Fixes #872. Fixes #874. Fixes #875 2025-11-09 17:07:14 -08:00
Enrico Ros 737e4cb4f9 AIX - fix Gemini parser. Fixes #871 2025-11-09 16:31:52 -08:00
Enrico Ros dba7368d01 Rename platform format 2025-11-07 23:35:29 -08:00
Enrico Ros 314c4cd8cc Error correction: render placeholder notification 2025-11-07 23:11:31 -08:00
Enrico Ros 3e46f99e14 AIX: Error correction: client-side reporting 2025-11-07 22:45:11 -08:00
Enrico Ros e0cc552b8d AIX: Error correction: operation reporting (Anthropic only for now, but generic) 2025-11-07 22:45:11 -08:00
Enrico Ros 6b5be403af AIX: Error correction: dispatch reporting 2025-11-07 22:45:11 -08:00
Enrico Ros 269d5989bc Fetchers: Error correction: callback before retry 2025-11-07 22:43:12 -08:00
Enrico Ros edfe3d9b65 DFragments: Error correction: keep results in placeholder fragments (backward-comp) 2025-11-07 22:43:12 -08:00
Enrico Ros ffb2c42a26 imageUtils: non-aliased rescaling, multi-pass for downscales 2025-11-07 14:11:23 -08:00
Enrico Ros b7de19b020 AIX: turn on operation-retrier for Anthropic's error-in-stream. Fixes #869 2025-11-07 12:03:16 -08:00
Enrico Ros 77cd659b39 AIX: support operation-level retrier with reassembly wipe #869 2025-11-07 12:03:12 -08:00
Enrico Ros fbba9d8357 Block parts: show the retry-followup 2025-11-07 12:02:13 -08:00
Enrico Ros f464a9efdf LLMs: listModels: openAI, deprio 401. Fixes #870 2025-11-07 11:42:22 -08:00
Enrico Ros 7ec4290582 AIX: Anthropic: retriable errors: extended parsing 2025-11-07 11:04:21 -08:00
Enrico Ros 3f887a1d3a AIX: Gemini: fix other proxy errors. Fixes #868 2025-11-07 10:50:25 -08:00
Enrico Ros ffd76dc587 *Image Captioning with a dedicated (configurable) model. Fixes #862 2025-11-05 14:15:50 -08:00
Enrico Ros d7f3594a73 FormInputKey: smaller key 2025-11-05 12:32:18 -08:00
Enrico Ros 32fa5f206b AudioGenerator: underwater 2025-11-05 10:41:53 -08:00
Enrico Ros 70d2c09e81 Env: drop requirements on non-staging/non-prod 2025-11-05 09:01:47 -08:00
Enrico Ros 17f03806d0 AIX: rename DMessage gen to throw 2025-11-05 07:07:05 -08:00
Enrico Ros b6aba0efa4 AIX: type change to DMessageGuts for correctness 2025-11-04 17:01:26 -08:00
Enrico Ros 65a5e06935 DMessage: Generator helpers 2025-11-03 14:53:53 -08:00
Enrico Ros f459cb9805 PhPencilSimple: add 2025-11-03 09:04:28 -08:00
Enrico Ros f5470aca5d AIX: OpenAI Responses: remove patch for deep-research 2025-11-02 20:12:07 -08:00
Enrico Ros c26af97fe7 LLMs: OpenAI: deep research with standard params path 2025-11-02 20:11:17 -08:00
Enrico Ros 766ec458a2 CC: add permission 2025-11-02 15:43:28 -08:00
Enrico Ros 48ff78580c typos 2025-11-02 13:49:12 -08:00
Enrico Ros 396f7524d7 Panel Resize: rename inset 2025-11-01 18:08:31 -07:00
Enrico Ros da19ef42f5 AIX: OpenAI Responses: use the web_search (non preview) tool 2025-11-01 17:19:57 -07:00
Enrico Ros 91abe5aa43 DLLM: FC/FR: rendering improvement 2025-11-01 12:05:37 -07:00
Enrico Ros 682435321b DLLM: FC/FR: rendering improvement 2025-11-01 12:01:23 -07:00
Enrico Ros 76f0d60224 LLMs/AIX: Gemini: computer use comment 2025-11-01 11:28:34 -07:00
Enrico Ros 628b88ef9f LLMs/AIX: Gemini: computer use test 2025-11-01 11:18:12 -07:00
Enrico Ros 6a792814ce LLMs/AIX: Gemini: computer use mixed-tool (hosted + client) test. 2025-11-01 11:09:41 -07:00
Enrico Ros 05ce15d677 LLMOptionsModal: auto-open if user pricing is set or custom tokens are set 2025-11-01 11:02:26 -07:00
Enrico Ros 4a9d0d4f8e LLMs: fix post user-pricing, #860 2025-11-01 10:54:55 -07:00
Enrico Ros 16f0552682 CC: unbreak settings 2025-11-01 10:34:04 -07:00
claude[bot] 9e3819b9c7 feat: Add user pricing override for hypothetical cost tracking
Add userPricing field to DLLM interface following the established pattern
for user overrides (similar to userContextTokens and userMaxOutputTokens).

This enables users to set custom pricing for local models (Ollama, LM Studio, etc.)
to track "what if" costs and compare with cloud models.

Changes:
- Added userPricing field to DLLM interface (llms.types.ts)
- Added getLLMPricing() getter function with override precedence
- Updated store to preserve userPricing during model updates
- Updated all llm.pricing access points to use getLLMPricing()
- Added pricing override UI in LLMOptionsModal (Details section)
  - Input price ($/M tokens)
  - Output price ($/M tokens)
  - Reset buttons for each field
- Cost calculations automatically use user pricing when set
- Existing cost display in tooltips works with user pricing

Resolves #860

Co-authored-by: Enrico Ros <enricoros@users.noreply.github.com>
2025-10-31 08:37:58 +00:00
Enrico Ros 233a0d4b35 LLMs: xAI: rerank 4 2025-10-29 15:53:41 -07:00
Enrico Ros bd95b808ae InlineTextareaEditable: click to edit 2025-10-29 13:53:59 -07:00
Enrico Ros 96132c4585 Export: subordinate tslug warn to the downgrade 2025-10-29 09:50:12 -07:00
Enrico Ros 3edacef572 Export: messaging 2025-10-29 09:43:34 -07:00
Enrico Ros 36889c1695 Export: robustness 2025-10-29 09:30:45 -07:00
Enrico Ros cd2c6c1d8f Export: nits 2025-10-29 09:29:06 -07:00
Enrico Ros d8c78b1a00 Export: disable when beam open, as it's not exported for now 2025-10-29 09:28:18 -07:00
Enrico Ros 74a22c26cf Export: relax file export 2025-10-29 09:26:55 -07:00
Enrico Ros f742eba4c1 Ph: Terminal Icon 2025-10-29 09:11:01 -07:00
Enrico Ros 36c2812157 Export: warn on import from a downgrade or different tenant 2025-10-29 09:07:07 -07:00
Enrico Ros d353fc4c63 Export: save file variant 1 2025-10-29 08:54:05 -07:00
Enrico Ros 98bd3d6da0 LLMs: Ollama: Update models 2025-10-28 16:36:43 -07:00
Enrico Ros cd5ec8d295 LLMs: Perplexity: Update models 2025-10-28 16:34:24 -07:00
Enrico Ros f91c6456bd LLMs: xAI: Update models 2025-10-28 16:31:53 -07:00
Enrico Ros 67af87968e workflows: CC: ollama update 2025-10-28 16:30:48 -07:00
Enrico Ros 58ea3e1b35 workflows: CC: permissions 2025-10-28 16:27:15 -07:00
Enrico Ros a9435c10e8 LLMs: OpenPipe: Update models 2025-10-28 16:23:40 -07:00
Enrico Ros a86860fe76 LLMs: Groq: Update models 2025-10-28 16:19:40 -07:00
Enrico Ros a3d707f78a LLMs: Mistral: Update models 2025-10-28 16:17:34 -07:00
Enrico Ros c502426249 LLMs: Anthropic: Update models 2025-10-28 16:17:06 -07:00
Enrico Ros 2fb5ffcecf LLMs: Anthropic: remove retired Claude 2 models 2025-10-28 16:09:36 -07:00
Enrico Ros 6d995c1253 LLMs: Anthropic: remove retired Sonnet 3 models 2025-10-28 16:08:39 -07:00
Enrico Ros a860c1c490 LLMs: Anthropic: remove retired Sonnet 3.5 models - So long and thanks!! 2025-10-28 16:06:42 -07:00
Enrico Ros 481d9cc745 LLMs: Anthropic: only display 'obsoleted models' in 2025-10-28 16:03:02 -07:00
Enrico Ros 7e53a7bc2b Server: tRPC: Retriers: carve-out 429 quota 2025-10-28 15:59:05 -07:00
Enrico Ros 4df10e3782 Lint 2025-10-28 15:59:05 -07:00
Enrico Ros 396da65178 AIX: OpenRouter: don't display processing messages 2025-10-28 15:49:37 -07:00
Enrico Ros 87e8faf383 workflows: docker: limit to 1hr 2025-10-28 13:11:28 -07:00
Enrico Ros 9eb3e6d398 workflows: CC: raise to 30min 2025-10-28 13:11:21 -07:00
312 changed files with 14750 additions and 4611 deletions
@@ -0,0 +1,49 @@
---
description: Sync OpenRouter API implementation with latest upstream documentation
argument-hint: specific feature to check
---
Review the OpenRouter implementation:
- Models list: `src/modules/llms/server/openai/openrouter.wiretypes.ts` (list API response schema)
- Chat wire types: `src/modules/aix/server/dispatch/wiretypes/openai.wiretypes.ts` (OpenAI-compatible)
- Request adapter: `src/modules/aix/server/dispatch/chatGenerate/adapters/openai.chatCompletions.ts` ('openrouter' dialect)
- Response parser: `src/modules/aix/server/dispatch/chatGenerate/parsers/openai.parser.ts` (shared OpenAI parser)
- Vendor config: `src/modules/llms/vendors/openrouter/openrouter.vendor.ts`
GOAL: Ensure complete support for OpenRouter's API including advanced features like reasoning/thinking tokens, tool use, search integration, and multi-modal capabilities. OpenRouter is OpenAI-compatible but has important extensions and differences.
Use Task tool with subagent_type=Explore and thoroughness="very thorough" to discover:
1. Map API structure - all endpoints, parameters, capabilities from https://openrouter.ai/docs
2. **Advanced features** - How to use: reasoning/thinking tokens (o1, DeepSeek R1), tool use/function calling, search integration, multi-modal (vision/audio)
3. Changelog location - How does OpenRouter communicate API updates and breaking changes?
4. Model metadata - What capabilities are exposed in the models list API? How to detect feature support?
5. OpenAI deviations - Extensions, special headers (HTTP-Referer, X-Title), response fields, streaming differences
Then check the latest API information. Try these sources (be creative if blocked):
**Primary Sources:**
- API Reference: https://openrouter.ai/docs/api-reference
- Chat Completions: https://openrouter.ai/docs/api-reference#chat-completions
- Models List: https://openrouter.ai/docs/api-reference#models-list
- Parameters Guide: https://openrouter.ai/docs/parameters
- Announcements: https://openrouter.ai/announcements (feature launches, API updates, new models)
- Models Directory: https://openrouter.ai/models (check metadata for capabilities)
**Alternative Sources:**
- GitHub: https://github.com/OpenRouterTeam (SDKs, examples, issues for recent changes)
- Web Search: "openrouter api changelog" or "openrouter reasoning tokens" or "openrouter tool use"
**If blocked:** Ask user to provide documentation.
$ARGUMENTS
Focus on discrepancies and gaps:
- **Request/Response structure**: New fields, changed requirements, streaming event types
- **Feature support**: Thinking tokens format, tool calling protocol, search parameters
- **Model capabilities**: How to detect and enable advanced features per model
- **OpenRouter extensions**: Headers, routing, fallbacks, rate limiting (free vs paid)
- **Breaking changes**: Protocol updates, deprecated fields, new required parameters
Report differences in wire types, adapter logic, parser handling, or dialect-specific quirks.
Prioritize new capabilities that improve user experience (reasoning visibility, better tool use, etc.).
When making changes, add comments with date: `// [OpenRouter, 2025-MM-DD]: explanation`
@@ -4,7 +4,7 @@ description: Update Alibaba model definitions with latest pricing and capabiliti
Update `src/modules/llms/server/openai/models/alibaba.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models & Pricing: https://www.alibabacloud.com/help/en/model-studio/models
@@ -4,7 +4,7 @@ description: Update Anthropic model definitions with latest pricing and capabili
Update `src/modules/llms/server/anthropic/anthropic.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models: https://docs.claude.com/en/docs/about-claude/models/overview
@@ -4,7 +4,7 @@ description: Update DeepSeek model definitions with latest pricing and capabilit
Update `src/modules/llms/server/openai/models/deepseek.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Pricing: https://api-docs.deepseek.com/quick_start/pricing
@@ -4,7 +4,7 @@ description: Update Gemini model definitions with latest pricing and capabilitie
Update `src/modules/llms/server/gemini/gemini.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.types.ts`, `src/modules/llms/server/llm.server.types.ts`, and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.types.ts`, `src/modules/llms/server/llm.server.types.ts`, and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models: https://ai.google.dev/gemini-api/docs/models
+1 -1
View File
@@ -4,7 +4,7 @@ description: Update Groq model definitions with latest pricing and capabilities
Update `src/modules/llms/server/openai/models/groq.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models: https://console.groq.com/docs/models
@@ -0,0 +1,19 @@
---
description: Update Kimi model definitions with latest pricing and capabilities
---
Update `src/modules/llms/server/openai/models/moonshot.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Pricing: https://platform.moonshot.ai/docs/pricing/chat
- API Reference: https://platform.moonshot.ai/docs/api/chat
**Fallbacks if blocked:** Search "moonshot kimi models latest pricing", "kimi k2 models", "moonshot api models", or search GitHub for latest model prices and context windows
**Important:**
- Review the full model list for additions, removals, and price changes
- Minimize whitespace/comment changes, focus on content
- Preserve comments to make diffs easy to review
- Flag broken links or unexpected content
@@ -4,7 +4,7 @@ description: Update Mistral model definitions with latest pricing and capabiliti
Update `src/modules/llms/server/openai/models/mistral.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models: https://docs.mistral.ai/getting-started/models/models_overview/
@@ -4,7 +4,7 @@ description: Update Ollama model definitions with latest featured models
Update `src/modules/llms/server/ollama/ollama.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Automated Workflow:**
```bash
@@ -29,6 +29,7 @@ The parser outputs: `modelName|pulls|capabilities|sizes`
**Important:**
- Skip models below 50,000 pulls (parser does this automatically)
- Skip embedding models (parser does not do this automatically)
- Sort them in the EXACT same order as the source (featured models)
- Extract tags: 'tools' → hasTools, 'vision' → hasVision, 'embedding' → isEmbeddings (note the 's'), 'thinking' → tags only
- Extract 'b' tags (1.5b, 7b, 32b) to tags field
@@ -4,7 +4,7 @@ description: Update OpenAI model definitions with latest pricing and capabilitie
Update `src/modules/llms/server/openai/models/openai.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Manual hint:** For pricing page, expand all tables before copying content.
@@ -4,7 +4,7 @@ description: Update OpenPipe model definitions with latest pricing and capabilit
Update `src/modules/llms/server/openai/models/openpipe.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Base Models: https://docs.openpipe.ai/base-models
@@ -4,7 +4,7 @@ description: Update Perplexity model definitions with latest pricing and capabil
Update `src/modules/llms/server/openai/models/perplexity.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models: https://docs.perplexity.ai/getting-started/models
+1 -1
View File
@@ -4,7 +4,7 @@ description: Update xAI model definitions with latest pricing and capabilities
Update `src/modules/llms/server/openai/models/xai.models.ts` with latest model definitions.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.data.ts` for context only. Focus on the model file, do not descend into other code.
Reference `src/modules/llms/server/llm.server.types.ts` and `src/modules/llms/server/models.mappings.ts` for context only. Focus on the model file, do not descend into other code.
**Primary Sources:**
- Models & Pricing: https://docs.x.ai/docs/models?cluster=us-east-1#detailed-pricing-for-all-grok-models
+5
View File
@@ -3,9 +3,11 @@
"allow": [
"Bash(cat:*)",
"Bash(cp:*)",
"Bash(curl:*)",
"Bash(find:*)",
"Bash(git branch:*)",
"Bash(git describe:*)",
"Bash(git grep:*)",
"Bash(git log:*)",
"Bash(git log:*)",
"Bash(git show:*)",
@@ -16,10 +18,13 @@
"Bash(npm install)",
"Bash(npm install:*)",
"Bash(npm run:*)",
"Bash(npx eslint:*)",
"Bash(npx tsc:*)",
"Bash(rg:*)",
"Bash(rm:*)",
"Bash(sed:*)",
"Bash(tree:*)",
"Read(//tmp/**)",
"WebFetch",
"WebFetch(domain:big-agi.com)",
"WebSearch",
-3
View File
@@ -1,3 +0,0 @@
{
"extends": "next/core-web-vitals"
}
+70
View File
@@ -0,0 +1,70 @@
name: 🔥 Make AI Fix This
description: Bug, question, or feedback - AI analyzes and changes Big-AGI appropriately
labels: [ 'claude-triage' ]
body:
- type: markdown
attributes:
value: |
Thanks for opening an issue! Our AI will analyze it and change Big-AGI appropriately.
**What happens next:**
- AI searches the codebase and documentation
- You get a response, typically within 30 minutes
- Ticket gets follow-up and community votes
- type: textarea
attributes:
label: What's happening?
description: Describe the bug, feature request, or question. Be as detailed as you can.
placeholder: |
Bug example: "In Beam, Anthropic models seem to have search off..."
Model request: "Add Claude Opus 4.5 out today, see https://..."
Feature example: "Add the option to save frequent prompt templates for reuse..."
validations:
required: true
- type: dropdown
attributes:
label: Where does this happen?
description: If this is a bug or issue, where are you experiencing it?
options:
- Big-AGI Pro (big-agi.com)
- Self-deployed from GitHub
- Docker deployment
- Local development
- Not applicable (question/feedback)
- Other
validations:
required: false
- type: dropdown
attributes:
label: Impact on your workflow
description: How does this affect your use of Big-AGI?
options:
- Blocking - Can't use Big-AGI
- High - Major feature broken
- Medium - Workaround exists
- Low - Minor inconvenience
- None - Just a question/suggestion
validations:
required: false
- type: textarea
attributes:
label: Environment (if applicable)
description: Device, OS, browser - only if reporting a bug
placeholder: |
Device: Macbook Pro M3
OS: macOS 15.2
Browser: Chrome 131
validations:
required: false
- type: textarea
attributes:
label: Additional context
description: Screenshots, error messages, or anything else that helps
placeholder: Paste screenshots or error messages here
validations:
required: false
+1 -1
View File
@@ -19,7 +19,7 @@ jobs:
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude'))
runs-on: ubuntu-latest
timeout-minutes: 20
timeout-minutes: 30
permissions:
contents: read
+9 -3
View File
@@ -12,13 +12,14 @@ jobs:
!contains(github.event.issue.body, '@claude')
runs-on: ubuntu-latest
timeout-minutes: 20
timeout-minutes: 30
permissions:
contents: read
issues: write
pull-requests: read
pull-requests: write
id-token: write
actions: read
steps:
- name: Checkout repository
@@ -35,6 +36,10 @@ jobs:
allowed_non_write_users: '*'
# track_progress: true # Enables tracking comments
# This is an optional setting that allows Claude to read CI results on PRs
additional_permissions: |
actions: read
prompt: |
REPO: ${{ github.repository }}
ISSUE NUMBER: #${{ github.event.issue.number }}
@@ -61,11 +66,12 @@ jobs:
- Link duplicates if found
If you're uncertain, say so and suggest next steps.
If you write any code make sure that it compiles and that you push it.
Be welcoming, helpful, professional, solution-focused and no-BS.
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
claude_args: |
--model claude-sonnet-4-5-20250929
--max-turns 60
--max-turns 75
--allowedTools "Edit,Read,Write,WebFetch,WebSearch,Bash(cat:*),Bash(cp:*),Bash(find:*),Bash(git branch:*),Bash(grep:*),Bash(ls:*),Bash(mkdir:*),Bash(npm install),Bash(npm install:*),Bash(npm run:*),Bash(gh issue:*),Bash(gh search:*),Bash(gh label:*),Bash(gh pr:*),mcp__chrome-devtools,SlashCommand"
+1
View File
@@ -23,6 +23,7 @@ env:
jobs:
build-and-push-image:
runs-on: ubuntu-latest
timeout-minutes: 60 # Max 1 hour (expected: ~25min)
permissions:
contents: read
packages: write
-3
View File
@@ -1,3 +0,0 @@
overrides=@mui/material@^5.0.0:
dependencies:
@mui/material: replaced-by=@mui/joy
+1
View File
@@ -117,6 +117,7 @@ Located in `/src/common/layout/optima/`
- `store-chats`: Conversations and messages
- `store-llms`: Model configurations
- `store-ux-labs`: UI preferences and labs features
- **Zustand pattern**: Always wrap multi-property selectors with `useShallow` from `zustand/react/shallow` to prevent re-renders on reference changes
2. **Per-Instance Stores** (Vanilla Zustand)
- `store-beam_vanilla`: Beam scatter/gather state
+164 -93
View File
@@ -1,3 +1,32 @@
<div align="center">
<img width="256" height="256" alt="Big-AGI Logo" src="https://big-agi.com/assets/logo-bright-github.svg" />
<h1><a href="https://big-agi.com">Big-AGI</a></h1>
[![Use Free ⋅ Go Pro](https://img.shields.io/badge/Use_Free-Get_Pro-d5ec31?style=for-the-badge&logo=rocket&logoColor=white&labelColor=000)](https://big-agi.com)
[![Deploy on Docker](https://img.shields.io/badge/Self--Host-Docker-blue?style=for-the-badge&logo=docker&logoColor=white&labelColor=000)](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
[![Deploy on Vercel](https://img.shields.io/badge/Vercel-Deploy-blue?style=for-the-badge&logo=vercel&logoColor=white&labelColor=000)](https://vercel.com/new/clone?repository-url=https://github.com/enricoros/big-agi)
[![Discord](https://img.shields.io/discord/1098796266906980422?style=for-the-badge&label=Discord&logo=discord&logoColor=white&labelColor=000000&color=purple)](https://discord.gg/MkH4qj2Jp9)
<br/>
[![GitHub Monthly Commits](https://img.shields.io/github/commit-activity/m/enricoros/big-agi?style=for-the-badge&x=3&logo=github&logoColor=white&label=commits&labelColor=000&color=green)](https://github.com/enricoros/big-agi/commits)
[![GHCR Pulls](https://img.shields.io/badge/ghcr.io-767k_dl-12b76a?style=for-the-badge&logo=Xdocker&logoColor=white&labelColor=000&color=A8E6CF)](https://github.com/enricoros/big-AGI/pkgs/container/big-agi)
[![Contributors](https://img.shields.io/github/contributors/enricoros/big-agi?style=for-the-badge&x=2&logo=Xgithub&logoColor=white&label=cooks&labelColor=000&color=A8E6CF)](https://github.com/enricoros/big-AGI/graphs/contributors)
[![License: MIT](https://img.shields.io/badge/License-MIT-A8E6CF?style=for-the-badge&labelColor=000)](https://opensource.org/licenses/MIT)
<br/>
[![Open an Issue](https://img.shields.io/badge/Open_Issue-AI_Will_Help-ff8c00?style=for-the-badge&logo=fireship&logoColor=fff&labelColor=8b0000)](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
[//]: # ([![Uptime Robot ratio &#40;30 days&#41;]&#40;https://img.shields.io/uptimerobot/ratio/m801796948-868b22ed7ceaa0acac4dc765?style=for-the-badge&labelColor=000&color=green&#41;]&#40;https://stats.uptimerobot.com/59MXcnmjrM&#41;)
[//]: # ([![Open Version]&#40;https://img.shields.io/github/v/release/enricoros/big-AGI?label=Open+Release&style=flat-square&logo=github&logoColor=white&labelColor=000&#41;]&#40;https://github.com/enricoros/big-AGI/releases/latest&#41;)
[//]: # (![GitHub Stars]&#40;https://img.shields.io/github/stars/enricoros/big-agi?style=flat-square&logo=github&logoColor=white&labelColor=000&color=yellow&#41;)
[//]: # ([![GitHub Forks]&#40;https://img.shields.io/github/forks/enricoros/big-agi?style=flat-square&logo=github&logoColor=white&labelColor=000&#41;]&#40;#&#41;)
[//]: # ([![Follow on X]&#40;https://img.shields.io/twitter/follow/enricoros?style=flat-square&logo=X&logoColor=white&labelColor=000&color=000&#41;]&#40;https://x.com/enricoros&#41;)
</div>
<br/>
# Big-AGI Open 🧠
This is the open-source foundation of **Big-AGI**, ___the multi-model AI workspace for experts___.
@@ -8,18 +37,71 @@ You need to think broader, decide faster, and build with confidence, then you ne
It comes packed with **world-class features** like Beam, and is praised for its **best-in-class AI chat UX**.
**As an independent, non-VC-funded project, Pro subscriptions at $10.99/mo fund development for everyone, including the free and open-source tiers.**
**What makes Big-AGI different:**
**Intelligence**: with [Beam & Merge](https://big-agi.com/beam) for multi-model de-hallucination, native search, and bleeding-edge AI models like Nano Banana, or GPT-5 Pro -
**Control**: with personas, data ownership, requests inspection, unlimited usage with API keys, and *no vendor lock-in* -
![LLM Vendors](https://img.shields.io/badge/18+_LLM_Services-500+_Models-black?style=for-the-badge&logo=anthropic&logoColor=white&labelColor=purple)&nbsp;
[![Feature Beam](https://img.shields.io/badge/AI--Validation-BEAM-000?style=for-the-badge&labelColor=purple)](https://big-agi.com/beam)&nbsp;
[![Feature Inspector](https://img.shields.io/badge/Expert_Mode-AI_Inspector-000?style=for-the-badge&labelColor=purple)](https://big-agi.com/inspector)
### What makes Big-AGI different:
**Intelligence**: with [Beam & Merge](https://big-agi.com/beam) for multi-model de-hallucination, native search, and bleeding-edge AI models like Opus 4.5, Nano Banana, Kimi K2 or GPT 5.1 -
**Control**: with personas, data ownership, requests inspection, unlimited usage with API keys, and *no vendor lock-in* -
and **Speed**: with a local-first, over-powered, zero-latency, madly optimized web app.
**Who uses Big-AGI:**
<table>
<tr>
<td align="center" width="25%">
<b>🧠 Intelligence</b><br/>
<img src="https://img.shields.io/badge/Multi--Model-Trust-4285F4?style=for-the-badge" alt="Multi-Model"/>
</td>
<td align="center" width="25%">
<b>✨ Experience</b><br/>
<img src="https://img.shields.io/badge/Clean-UX-34A853?style=for-the-badge" alt="Clean UX"/>
</td>
<td align="center" width="25%">
<b>⚡ Performance</b><br/>
<img src="https://img.shields.io/badge/Zero-Latency-EA4335?style=for-the-badge" alt="Zero Latency"/>
</td>
<td align="center" width="25%">
<b>🔒 Control</b><br/>
<img src="https://img.shields.io/badge/No-Lock--in-FBBC04?style=for-the-badge" alt="No Lock-in"/>
</td>
</tr>
<tr>
<td align="center" valign="top">
Beam & Merge<br/>
No context junk<br/>
Purest AI outputs
</td>
<td align="center" valign="top">
Flow-state interface<br/>
Highly customizable<br/>
Best-in-class UX
</td>
<td align="center" valign="top">
Local-first<br/>
Highly parallel<br/>
Madly optimized
</td>
<td align="center" valign="top">
No vendor lock-in<br/>
Your API keys<br/>
AI Inspector
</td>
</tr>
</table>
### Who uses Big-AGI:
Loved by engineers, founders, researchers, self-hosters, and IT departments for its power, reliability, and transparency.
<img width="830" height="370" alt="image" src="https://github.com/user-attachments/assets/513c4f77-0970-4a56-b23b-1416c8246174" />
Choose Big-AGI because you don't need another clone or slop - you need an AI tool that scales with you.
### Show me a screenshot:
Sure - here is a real-world screengrab as I'm writing this, while running a Beam to extract SVG from an image with Sonnet 4.5, Opus 4.1, GPT 5.1, Gemini 2.5 Pro, Nano Banana, etc.
<img alt="Real-world screen capture as of Nov 15 2025, 2am" src="https://github.com/user-attachments/assets/853f4160-27cb-4ac9-826b-402f1e63d4af" />
## Get Started
| Tier | Best For | What You Get | Setup |
@@ -31,15 +113,12 @@ Choose Big-AGI because you don't need another clone or slop - you need an AI too
\*: **Configuration requires your API keys**. *Big-AGI does not charge for model usage or limit your access*.
**Why Pro?** As an independent project, Pro subscriptions fund all development. Early subscribers shape the roadmap directly.
<a href="https://big-agi.com">
<img width="210" height="68" alt="image" src="https://github.com/user-attachments/assets/b2f8a7b8-415f-4c92-b228-4f5a54fe2bdd" />
</a>
[![Use Free ⋅ Go Pro](https://img.shields.io/badge/Use_Free-Get_Pro-d5ec31?style=for-the-badge&logo=rocket&logoColor=white&labelColor=000)](https://big-agi.com)
**Self-host and developers** (full control)
- Develop locally or self-host with Docker on your own infrastructure [guide](docs/installation.md)
- Or fork & run on Vercel:
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
- Or fork & run on Vercel:
[![Deploy on Vercel](https://img.shields.io/badge/Deploy-black?style=for-the-badge&logo=vercel&logoColor=white&labelColor=000)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI&env=OPENAI_API_KEY&envDescription=Backend%20API%20keys%2C%20optional%20and%20may%20be%20overridden%20by%20the%20UI.&envLink=https%3A%2F%2Fgithub.com%2Fenricoros%2Fbig-AGI%2Fblob%2Fmain%2Fdocs%2Fenvironment-variables.md&project-name=big-AGI)
[//]: # (**For the latest Big-AGI:**)
@@ -60,9 +139,14 @@ so you **are not vendor locked-in**, and obsessed over a powerful UI that works,
NOTE: this is a powerful tool - if you need a toy UI or clone, this ain't it.
## What's New in 2.0 · Oct 31, 2025 · Open
---
👉 **[See the full changelog](https://big-agi.com/changes)**
## Release Notes
👉 **[See the Live Release Notes](https://big-agi.com/changes)**
- Open 2.0.1: **Opus 4.5** full support, **Gemini 3 Pro** w/ code exec, **Nano Banana Pro**, **Grok 4.1**, **GPT-5.1**, **Kimi K2 Thinking** + 280 fixes
### What's New in 2.0 · Oct 31, 2025 · Open
- **Big-AGI Open** is ready and more productive and faster than ever, with:
- **Beam 2**: multi-modal, program-based, follow-ups, save presets
@@ -75,7 +159,7 @@ NOTE: this is a powerful tool - if you need a toy UI or clone, this ain't it.
<img width="830" height="385" alt="image" src="https://github.com/user-attachments/assets/ad52761d-7e3f-44d8-b41e-947ce8b4faa1" />
### Open links: 👉 [changelog](https://big-agi.com/changes) 👉 [installation](docs/installation.md) 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
#### **Open** links: 👉 [changelog](https://big-agi.com/changes) 👉 [installation](docs/installation.md) 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [documentation](docs/README.md)
**For teams and institutions:** Need shared prompts, SSO, or managed deployments? Reach out at enrico@big-agi.com. We're actively collecting requirements from research groups and IT departments.
@@ -225,96 +309,83 @@ https://github.com/enricoros/big-AGI/assets/1590910/a6b8e172-0726-4b03-a5e5-10cf
For full details and former releases, check out the [archived versions changelog](docs/changelog.md).
## 👉 Key Features
## 👉 Supported Models & Integrations
| ![Advanced AI](https://img.shields.io/badge/Advanced%20AI-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![100+ AI Models](https://img.shields.io/badge/100%2B%20AI%20Models-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![Flow-state UX](https://img.shields.io/badge/Flow--state%20UX-32383e?style=for-the-badge&logo=flow&logoColor=white) | ![Privacy First](https://img.shields.io/badge/Privacy%20First-32383e?style=for-the-badge&logo=privacy&logoColor=white) | ![Advanced Tools](https://img.shields.io/badge/Fun%20To%20Use-f22a85?style=for-the-badge&logo=tools&logoColor=white) |
Delightful UX with latest models exclusive features like Beam for **multi-model AI validation**.
> ![LLM Vendors](https://img.shields.io/badge/18_LLM_Services-500+_Models-black?style=for-the-badge&logo=openai&logoColor=white&labelColor=purple)&nbsp;
> [![Feature Beam](https://img.shields.io/badge/AI--Validation-BEAM-000?style=for-the-badge&logo=anthropic&labelColor=purple)](https://big-agi.com/beam)
| ![Advanced AI](https://img.shields.io/badge/Advanced%20AI-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![500+ AI Models](https://img.shields.io/badge/500%2B%20AI%20Models-32383e?style=for-the-badge&logo=ai&logoColor=white) | ![Flow-state UX](https://img.shields.io/badge/Flow--state%20UX-32383e?style=for-the-badge&logo=flow&logoColor=white) | ![Privacy First](https://img.shields.io/badge/Privacy%20First-32383e?style=for-the-badge&logo=privacy&logoColor=white) | ![Advanced Tools](https://img.shields.io/badge/Fun%20To%20Use-f22a85?style=for-the-badge&logo=tools&logoColor=white) |
|---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------|
| **Chat**<br/>**Call**<br/>**Beam**<br/>**Draw**, ... | Local & Cloud<br/>Open & Closed<br/>Cheap & Heavy<br/>Google, Mistral, ... | Attachments<br/>Diagrams<br/>Multi-Chat<br/>Mobile-first UI | Stored Locally<br/>Easy self-Host<br/>Local actions<br/>Data = Gold | AI Personas<br/>Voice Modes<br/>Screen Capture<br/>Camera + OCR |
![big-AGI screenshot](docs/pixels/big-AGI-compo-20240201_small.png)
You can easily configure 100s of AI models in big-AGI:
### AI Models & Vendors
| **AI models** | _supported vendors_ |
|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Opensource Servers | [LocalAI](https://localai.io/) (multimodal) · [Ollama](https://ollama.com/) |
| Local Servers | [LM Studio](https://lmstudio.ai/) |
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Anthropic](https://anthropic.com) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
| Language services | [Alibaba](https://www.alibabacloud.com/en/product/modelstudio) · [DeepSeek](https://deepseek.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) · [xAI](https://x.ai/) |
| Image services | OpenAI · Google Gemini |
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
Configure 100s of AI models from 18+ providers:
Add extra functionality with these integrations:
| **AI models** | _supported vendors_ |
|:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Opensource Servers | [LocalAI](https://localai.io/) · [Ollama](https://ollama.com/) |
| Local Servers | [LM Studio](https://lmstudio.ai/) (non-open) |
| Multimodal services | [Azure](https://azure.microsoft.com/en-us/products/ai-services/openai-service) · [Anthropic](https://anthropic.com) · [Google Gemini](https://ai.google.dev/) · [OpenAI](https://platform.openai.com/docs/overview) |
| LLM services | [Alibaba](https://www.alibabacloud.com/en/product/modelstudio) · [DeepSeek](https://deepseek.com) · [Groq](https://wow.groq.com/) · [Mistral](https://mistral.ai/) · [Moonshot](https://www.moonshot.cn/) · [OpenPipe](https://openpipe.ai/) · [OpenRouter](https://openrouter.ai/) · [Perplexity](https://www.perplexity.ai/) · [Together AI](https://www.together.ai/) · [xAI](https://x.ai/) |
| Image services | OpenAI · Google Gemini |
| Speech services | [ElevenLabs](https://elevenlabs.io) (Voice synthesis / cloning) |
| **More** | _integrations_ |
|:-------------|:---------------------------------------------------------------------------------------------------------------|
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
| Tracking | [Helicone](https://www.helicone.ai) (LLM Observability) |
### Additional Integrations
[//]: # (- [x] **Flow-state UX** for uncompromised productivity)
[//]: # (- [x] **AI Personas**: Tailor your AI interactions with customizable personas)
[//]: # (- [x] **Sleek UI/UX**: A smooth, intuitive, and mobile-responsive interface)
[//]: # (- [x] **Efficient Interaction**: Voice commands, OCR, and drag-and-drop file uploads)
[//]: # (- [x] **Privacy First**: Self-host and use your own API keys for full control)
[//]: # (- [x] **Advanced Tools**: Execute code, import PDFs, and summarize documents)
[//]: # (- [x] **Seamless Integrations**: Enhance functionality with various third-party services)
[//]: # (- [x] **Open Roadmap**: Contribute to the progress of big-AGI)
<br/>
## 🚀 Installation
To get started with big-AGI, follow our comprehensive [Installation Guide](docs/installation.md).
The guide covers various installation options, whether you're spinning it up on
your local computer, deploying on Vercel, on Cloudflare, or rolling it out
through Docker.
Whether you're a developer, system integrator, or enterprise user, you'll find step-by-step instructions
to set up big-AGI quickly and easily.
[![Installation Guide](https://img.shields.io/badge/Installation%20Guide-blue?style=for-the-badge&logo=read-the-docs&logoColor=white)](docs/installation.md)
Or bring your API keys and jump straight into our free instance on [big-AGI.com](https://big-agi.com).
<br/>
# 🌟 Get Involved!
[//]: # ([![Official Discord]&#40;https://img.shields.io/discord/1098796266906980422?label=discord&logo=discord&logoColor=%23fff&style=for-the-badge&#41;]&#40;https://discord.gg/MkH4qj2Jp9&#41;)
[![Official Discord](https://discordapp.com/api/guilds/1098796266906980422/widget.png?style=banner2)](https://discord.gg/MkH4qj2Jp9)
- [ ] 📢️ [**Chat with us** on Discord](https://discord.gg/MkH4qj2Jp9)
- [ ] ⭐ **Give us a star** on GitHub 👆
- [ ] 🚀 **Do you like code**? You'll love this gem of a project! [_Pick up a task!_](https://github.com/users/enricoros/projects/4/views/4) - _easy_ to _pro_
- [ ] 💡 Got a feature suggestion? [_Add your roadmap ideas_](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
- [ ] ✨ [Deploy](docs/installation.md) your [fork](docs/customizations.md) for your friends and family, or [customize it for work](docs/customizations.md)
<br/>
[//]: # ([![GitHub stars]&#40;https://img.shields.io/github/stars/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/stargazers&#41;)
[//]: # ([![GitHub forks]&#40;https://img.shields.io/github/forks/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/network&#41;)
[//]: # ([![GitHub pull requests]&#40;https://img.shields.io/github/issues-pr/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/pulls&#41;)
[//]: # ([![License]&#40;https://img.shields.io/github/license/enricoros/big-agi&#41;]&#40;https://github.com/enricoros/big-agi/LICENSE&#41;)
## 📜 Licensing
Big-AGI incorporates third-party software components that are subject
to separate license terms. For detailed information about these
components and their respective licenses, please refer to
the [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md).
| **More** | _integrations_ |
|:--------------|:---------------------------------------------------------------------------------------------------------------|
| Web Browse | [Browserless](https://www.browserless.io/) · [Puppeteer](https://pptr.dev/)-based |
| Web Search | [Google CSE](https://programmablesearchengine.google.com/) |
| Code Editors | [CodePen](https://codepen.io/pen/) · [StackBlitz](https://stackblitz.com/) · [JSFiddle](https://jsfiddle.net/) |
| Observability | [Helicone](https://www.helicone.ai) |
---
2023-2025 · Enrico Ros x [Big-AGI](https://big-agi.com) · Like this project? Leave a star! 💫⭐
## 🚀 Installation
Self-host with Docker, deploy on Vercel, or develop locally. Full setup guide:
[![Installation Guide](https://img.shields.io/badge/Installation%20Guide-blue?style=for-the-badge&logo=read-the-docs&logoColor=white)](docs/installation.md)
Or use the hosted version at [big-agi.com](https://big-agi.com) with your API keys.
---
## 👋 Community & Contributing
### Connect
[![Official Discord](https://discordapp.com/api/guilds/1098796266906980422/widget.png?style=banner2)](https://discord.gg/MkH4qj2Jp9)
⭐ [Star the repo](https://github.com/enricoros/big-agi) if Big-AGI is useful to you
### Contribute
**🤖 AI-Powered Issue Assistance**
When you open an issue, our custom AI triage system (powered by [Claude Code](https://github.com/anthropics/claude-code-action) with Big-AGI architecture documentation) analyzes it, searches the codebase, and provides solutions - typically within 30 minutes. We've trained the system on our modules and subsystems so it handles most issues effectively. Your feedback drives development!
[![Open an Issue](https://img.shields.io/badge/Open_Issue-AI_Will_Help-ff8c00?style=for-the-badge&logo=fireship&logoColor=fff&labelColor=8b0000)](https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml)
[![Request Feature](https://img.shields.io/badge/Request_Feature-Roadmap_Idea-orange?style=for-the-badge&logo=lightbulb&logoColor=white)](https://github.com/enricoros/big-agi/issues/new?&template=roadmap-request.md)
[![Good First Issues](https://img.shields.io/badge/Good_First_Issues-Start-blue?style=for-the-badge&logo=github&logoColor=white)](https://github.com/users/enricoros/projects/4/views/4)
[![Customization](https://img.shields.io/badge/Fork_&_Customize-Your_Own-purple?style=for-the-badge&logo=git&logoColor=white)](docs/customizations.md)
[![Roadmap](https://img.shields.io/badge/Open_Roadmap-View-0366d6?style=for-the-badge&logo=github&logoColor=white)](https://github.com/users/enricoros/projects/4/views/2)
#### Contributors
<a href="https://github.com/enricoros/big-agi/graphs/contributors">
<img src="https://contrib.rocks/image?repo=enricoros/big-agi&max=48&columns=12" />
</a>
---
## License
MIT License · [Third-Party Notices](src/modules/3rdparty/THIRD_PARTY_NOTICES.md)
**2023-2025** · Enrico Ros × [Big-AGI](https://big-agi.com)
+5 -5
View File
@@ -2,7 +2,7 @@ import { fetchRequestHandler } from '@trpc/server/adapters/fetch';
import { appRouterCloud } from '~/server/trpc/trpc.router-cloud';
import { createTRPCFetchContext } from '~/server/trpc/trpc.server';
import { posthogCaptureServerException } from '~/server/posthog/posthog.server';
import { posthogServerSendException } from '~/server/posthog/posthog.server';
const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
endpoint: '/api/cloud',
@@ -16,15 +16,15 @@ const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
console.error(`❌ tRPC-cloud failed on ${path ?? 'unk-path'}: ${error.message}`);
// -> Capture node errors
await posthogCaptureServerException(error, {
await posthogServerSendException(error, undefined, {
domain: 'trpc-onerror',
runtime: 'nodejs',
endpoint: path ?? 'unknown',
method: req.method,
url: req.url,
additionalProperties: {
errorCode: error.code,
errorType: type,
error_code: error.code,
error_type: type,
},
});
},
@@ -33,7 +33,7 @@ const handlerNodeRoutes = (req: Request) => fetchRequestHandler({
// NOTE: the following statement breaks the build on non-pro deployments, and conditionals don't work either
// so we resorted to raising the timeout from 10s to 60s in the vercel.json file instead
export const maxDuration = 60;
// export const maxDuration = 60;
export const runtime = 'nodejs';
export const dynamic = 'force-dynamic';
export { handlerNodeRoutes as GET, handlerNodeRoutes as POST };
+3 -1
View File
@@ -10,9 +10,11 @@ const handlerEdgeRoutes = (req: Request) => fetchRequestHandler({
createContext: createTRPCFetchContext,
onError:
process.env.NODE_ENV === 'development'
? ({ path, error }) => console.error(`❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
? ({ path, error }) => console.error(`\n❌ tRPC-edge failed on ${path ?? 'unk-path'}: ${error.message}`)
: undefined,
});
// NOTE: we don't set maxDuration explicitly here - however we set it in the Vercel project settings, raising to the limit of 300s
// export const maxDuration = 60;
export const runtime = 'edge';
export { handlerEdgeRoutes as GET, handlerEdgeRoutes as POST };
+1 -1
View File
@@ -54,7 +54,7 @@ If the running LocalAI instance is configured with a [Model Gallery](https://loc
At the time of writing, LocalAI does not publish the model `context window size`.
Every model is assumed to be capable of chatting, and with a context window of 4096 tokens.
Please update the [src/modules/llms/transports/server/openai/models/models.data.ts](../src/modules/llms/server/openai/models/models.data.ts)
Please update the [src/modules/llms/server/models.mappings.ts](../src/modules/llms/server/models.mappings.ts)
file with the mapping information between LocalAI model IDs and names/descriptions/tokens, etc.
# 🤝 Support
+2
View File
@@ -35,6 +35,7 @@ GROQ_API_KEY=
LOCALAI_API_HOST=
LOCALAI_API_KEY=
MISTRAL_API_KEY=
MOONSHOT_API_KEY=
OLLAMA_API_HOST=
OPENPIPE_API_KEY=
OPENROUTER_API_KEY=
@@ -105,6 +106,7 @@ requiring the user to enter an API key
| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional |
| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional |
| `MISTRAL_API_KEY` | The API key for Mistral | Optional |
| `MOONSHOT_API_KEY` | The API key for Moonshot AI | Optional |
| `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama.md) | |
| `OPENPIPE_API_KEY` | The API key for OpenPipe | Optional |
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
+1
View File
@@ -28,6 +28,7 @@ stringData:
LOCALAI_API_HOST: ""
LOCALAI_API_KEY: ""
MISTRAL_API_KEY: ""
MOONSHOT_API_KEY: ""
OLLAMA_API_HOST: ""
OPENPIPE_API_KEY: ""
OPENROUTER_API_KEY: ""
+17
View File
@@ -0,0 +1,17 @@
import { defineConfig } from "eslint/config";
import path from "node:path";
import { fileURLToPath } from "node:url";
import js from "@eslint/js";
import { FlatCompat } from "@eslint/eslintrc";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all
});
export default defineConfig([{
extends: compat.extends("next/core-web-vitals"),
}]);
+3
View File
@@ -14,6 +14,9 @@ Internal documentation for Big-AGI architecture and systems, for use by AI agent
- **[AIX.md](modules/AIX.md)** - AIX streaming architecture documentation
- **[AIX-callers-analysis.md](modules/AIX-callers-analysis.md)** - Analysis of AIX entry points, call chains, common and different rendering, error handling, etc.
#### CSF - Client-Side Fetch
- **[CSF.md](systems/client-side-fetch.md)** - Direct browser-to-API communication for LLM requests
### Systems Documentation
#### Core Platform Systems
+1 -1
View File
@@ -60,7 +60,7 @@ Shows only parameters that are:
The AIX client transforms DLLM parameters to wire protocol format. This layer handles parameter precedence rules and name transformations:
```typescript
```
// Parameter precedence: newer 4-value version takes priority over 3-value
...((llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort) ?
{ vndOaiReasoningEffort: llmVndOaiReasoningEffort4 || llmVndOaiReasoningEffort } : {})
+13
View File
@@ -0,0 +1,13 @@
# CSF - Client-Side Fetch
Client-Side Fetch (CSF) enables direct browser-to-API communication, bypassing the server for LLM requests. When enabled, the browser makes requests directly to vendor APIs (e.g., `api.openai.com`, `api.groq.com`) instead of routing through the Next.js server. This reduces latency, decreases server load, and is particularly useful for local models where the browser can communicate directly with Ollama or LM Studio.
## Implementation
CSF is implemented as an opt-in setting stored as `csf: boolean` in each vendor's service settings. The vendor interface exposes `csfAvailable?: (setup) => boolean` to determine if CSF can be enabled (typically checking if an API key or host is configured). The actual execution happens in `aix.client.direct-chatGenerate.ts` which dynamically imports when CSF is active, making direct fetch calls using the same wire protocols as the server.
All 16 supported vendors (OpenAI, Anthropic, Gemini, Ollama, LocalAI, Deepseek, Groq, Mistral, xAI, OpenRouter, Perplexity, Together AI, Alibaba, Moonshot, OpenPipe, LM Studio) support CSF. Cloud vendors require CORS support from the API provider (all tested vendors return `access-control-allow-origin: *`). Local vendors (Ollama, LocalAI, LM Studio) require CORS to be enabled on the local server.
## UI
The CSF toggle appears in each vendor's setup panel under "Advanced" settings, labeled "Direct Connection". It becomes visible when the prerequisites are met (API key present for cloud vendors, host configured for local vendors). The setting is managed through `useModelServiceClientSideFetch` hook which provides `csfAvailable`, `csfActive`, `csfToggle`, and `csfReset` for UI consumption.
+28 -7
View File
@@ -1,4 +1,5 @@
import type { NextConfig } from 'next';
import type { WebpackConfigContext } from 'next/dist/server/config-shared';
import { execSync } from 'node:child_process';
import { readFileSync } from 'node:fs';
@@ -29,7 +30,7 @@ buildType && console.log(` 🧠 big-AGI: building for ${buildType}...\n`);
/** @type {import('next').NextConfig} */
let nextConfig: NextConfig = {
reactStrictMode: true,
reactStrictMode: !process.env.NO_STRICT_MODE, // default: enabled
// [exports] https://nextjs.org/docs/advanced-features/static-html-export
...(buildType && {
@@ -47,7 +48,7 @@ let nextConfig: NextConfig = {
// NOTE: we may not be needing this anymore, as we use '@cloudflare/puppeteer'
serverExternalPackages: ['puppeteer-core'],
webpack: (config: any, { isServer }: { isServer: boolean }) => {
webpack: (config: any, { isServer, webpack /*, dev, nextRuntime*/ }: WebpackConfigContext) => {
// @mui/joy: anything material gets redirected to Joy
config.resolve.alias['@mui/material'] = '@mui/joy';
@@ -57,8 +58,28 @@ let nextConfig: NextConfig = {
layers: true,
};
// fix warnings for async functions in the browser (https://github.com/vercel/next.js/issues/64792)
// client-side bundling
if (!isServer) {
/**
* AIX client-side
* We replace certain server-only modules with client-side mocks, to reuse the exact same imports
* while avoiding importing server-only code which would break the build or break at runtime.
*/
const serverToClientMocks: ReadonlyArray<[RegExp, string]> = [
[/\/posthog\.server/, '/posthog.client-mock'],
[/\/env\.server/, '/env.client-mock'],
];
config.plugins = [
...config.plugins,
...serverToClientMocks.map(([pattern, replacement]) =>
new webpack.NormalModuleReplacementPlugin(pattern, (resource: any) => {
// console.log(' 🧠 [WEBPACK REPLACEMENT]:', resource.request, '->', resource.request.replace(pattern, replacement));
resource.request = resource.request.replace(pattern, replacement);
}),
),
];
// cosmetic: fix warnings for (absent!) top-level awaits in the browser (https://github.com/vercel/next.js/issues/64792)
config.output.environment = { ...config.output.environment, asyncFunction: true };
}
@@ -108,9 +129,9 @@ let nextConfig: NextConfig = {
// },
};
// Validate environment variables, if set at build time. Will be actually read and used at runtime.
import { verifyBuildTimeVars } from '~/server/env';
verifyBuildTimeVars();
// Validate environment variables at build time, if required. Server env vars will be actually read and used at runtime (cloud/edge).
import { env as validateEnv } from '~/server/env.server';
void validateEnv; // Triggers env validation - throws if required vars are missing
// PostHog error reporting with source maps for production builds
import { withPostHogConfig } from '@posthog/nextjs-config';
@@ -120,7 +141,7 @@ if (process.env.POSTHOG_API_KEY && process.env.POSTHOG_ENV_ID) {
personalApiKey: process.env.POSTHOG_API_KEY,
envId: process.env.POSTHOG_ENV_ID,
host: 'https://us.i.posthog.com', // backtrace upload host
verbose: false,
logLevel: 'error', // lowered, too noisy
sourcemaps: {
enabled: process.env.NODE_ENV === 'production',
project: 'big-agi',
+1364 -643
View File
File diff suppressed because it is too large Load Diff
+22 -20
View File
@@ -1,6 +1,6 @@
{
"name": "big-agi",
"version": "2.0.0",
"version": "2.0.2",
"private": true,
"author": "Enrico Ros <enrico.ros@gmail.com>",
"repository": "https://github.com/enricoros/big-agi",
@@ -14,7 +14,8 @@
"postinstall": "prisma generate --no-hints",
"db:push": "prisma db push",
"db:studio": "prisma studio",
"vercel:env:pull": "npx vercel env pull .env.development.local"
"vercel:env:pull": "npx vercel env pull .env.development.local",
"sharp:win32_x64": "npm install --os=win32 --cpu=x64 sharp"
},
"prisma": {
"schema": "src/server/prisma/schema.prisma"
@@ -32,7 +33,8 @@
"@mui/joy": "^5.0.0-beta.52",
"@next/bundle-analyzer": "~15.1.8",
"@prisma/client": "~5.22.0",
"@tanstack/react-query": "5.90.3",
"@tanstack/react-query": "5.90.10",
"@tanstack/react-virtual": "^3.13.12",
"@trpc/client": "11.5.1",
"@trpc/next": "11.5.1",
"@trpc/react-query": "11.5.1",
@@ -42,8 +44,8 @@
"browser-fs-access": "^0.38.0",
"cheerio": "^1.1.2",
"csv-stringify": "^6.6.0",
"dexie": "^4.0.11",
"dexie-react-hooks": "^1.1.7",
"dexie": "~4.0.11",
"dexie-react-hooks": "~1.1.7",
"diff": "^8.0.2",
"eventemitter3": "^5.0.1",
"idb-keyval": "^6.2.2",
@@ -52,40 +54,40 @@
"next": "~15.1.8",
"nprogress": "^0.2.0",
"pdfjs-dist": "5.4.54",
"posthog-js": "^1.275.3",
"posthog-node": "^5.10.0",
"posthog-js": "^1.298.1",
"posthog-node": "^5.14.0",
"prismjs": "^1.30.0",
"puppeteer-core": "^24.25.0",
"puppeteer-core": "^24.31.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-hook-form": "^7.65.0",
"react-hook-form": "^7.66.1",
"react-markdown": "^10.1.0",
"react-player": "^3.3.3",
"react-player": "^3.4.0",
"react-resizable-panels": "^3.0.6",
"react-timeago": "^8.3.0",
"rehype-katex": "^7.0.1",
"remark-gfm": "^4.0.1",
"remark-mark-highlight": "^0.1.1",
"remark-math": "^6.0.0",
"sharp": "^0.33.5",
"superjson": "^2.2.2",
"sharp": "^0.34.5",
"superjson": "^2.2.6",
"tesseract.js": "^6.0.1",
"tiktoken": "^1.0.22",
"turndown": "^7.2.1",
"zod": "^4.1.12",
"turndown": "^7.2.2",
"zod": "^4.1.13",
"zustand": "5.0.7"
},
"devDependencies": {
"@posthog/nextjs-config": "^1.3.2",
"@types/node": "^24.7.2",
"@posthog/nextjs-config": "^1.6.0",
"@types/node": "^24.10.1",
"@types/nprogress": "^0.2.3",
"@types/prismjs": "^1.26.5",
"@types/react": "^19.2.2",
"@types/react": "^19.2.7",
"@types/react-csv": "^1.1.10",
"@types/react-dom": "^19.2.2",
"@types/turndown": "^5.0.5",
"@types/react-dom": "^19.2.3",
"@types/turndown": "^5.0.6",
"cross-env": "^10.1.0",
"eslint": "^9.37.0",
"eslint": "^9.39.1",
"eslint-config-next": "~15.1.8",
"prettier": "^3.6.2",
"prisma": "~5.22.0",
+9 -4
View File
@@ -1,12 +1,17 @@
import * as React from 'react';
import Head from 'next/head';
import dynamic from 'next/dynamic';
import { MyAppProps } from 'next/app';
import { Analytics as VercelAnalytics } from '@vercel/analytics/next';
import { SpeedInsights as VercelSpeedInsights } from '@vercel/speed-insights/next';
import { Brand } from '~/common/app.config';
import { apiQuery } from '~/common/util/trpc.client';
// [server-client-safe] dynamic imports to avoid webpack bundling issues with next/navigation
const VercelAnalytics = dynamic(() => import('@vercel/analytics/next').then(mod => mod.Analytics), { ssr: false });
const VercelSpeedInsights = dynamic(() => import('@vercel/speed-insights/next').then(mod => mod.SpeedInsights), { ssr: false });
import 'katex/dist/katex.min.css';
import '~/common/styles/CodePrism.css';
import '~/common/styles/GithubMarkdown.css';
@@ -55,10 +60,10 @@ const Big_AGI_App = ({ Component, emotionCache, pageProps }: MyAppProps) => {
</ProviderSingleTab>
</ProviderTheming>
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
{hasGoogleAnalytics && <OptionalGoogleAnalytics />}
{hasPostHogAnalytics && <OptionalPostHogAnalytics />}
{Is.Deployment.VercelFromFrontend && <VercelAnalytics debug={false} />}
{Is.Deployment.VercelFromFrontend && <VercelSpeedInsights debug={false} sampleRate={1 / 2} />}
</>;
};
-1
View File
@@ -111,7 +111,6 @@ MyDocument.getInitialProps = async (ctx: DocumentContext) => {
<style
data-emotion={`${style.key} ${style.ids.join(' ')}`}
key={style.key}
// eslint-disable-next-line react/no-danger
dangerouslySetInnerHTML={{ __html: style.css }}
/>
));
+1 -2
View File
@@ -18,7 +18,7 @@ import { ROUTE_APP_CHAT, ROUTE_INDEX } from '~/common/app.routes';
import { Release } from '~/common/app.release';
// capabilities access
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs, useCapabilityTextToImage } from '~/common/components/useCapabilities';
import { useCapabilityBrowserSpeechRecognition, useCapabilityTextToImage } from '~/common/components/useCapabilities';
// stores access
import { getLLMsDebugInfo } from '~/common/stores/llms/store-llms';
@@ -95,7 +95,6 @@ function AppDebug() {
const cProduct = {
capabilities: {
mic: useCapabilityBrowserSpeechRecognition(),
elevenLabs: useCapabilityElevenLabs(),
textToImage: useCapabilityTextToImage(),
},
models: getLLMsDebugInfo(),
+1 -1
View File
@@ -20,7 +20,7 @@ function initTestConversation(): DConversation {
return conversation;
}
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi = createBeamVanillaStore()): BeamStoreApi {
function initTestBeamStore(messages: DMessage[], beamStore: BeamStoreApi): BeamStoreApi {
beamStore.getState().open(messages, null, false, (content) => alert(content));
return beamStore;
}
+15 -12
View File
@@ -6,13 +6,15 @@ import ChatIcon from '@mui/icons-material/Chat';
import CheckRoundedIcon from '@mui/icons-material/CheckRounded';
import CloseRoundedIcon from '@mui/icons-material/CloseRounded';
import MicIcon from '@mui/icons-material/Mic';
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
import { useSpeexGlobalEngine } from '~/modules/speex/store-module-speex';
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
import { animationColorRainbow } from '~/common/util/animUtils';
import { navigateBack } from '~/common/app.routes';
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
import { useCapabilityBrowserSpeechRecognition, useCapabilityElevenLabs } from '~/common/components/useCapabilities';
import { useCapabilityBrowserSpeechRecognition } from '~/common/components/useCapabilities';
import { useChatStore } from '~/common/stores/chat/store-chats';
import { useUICounter } from '~/common/stores/store-ui';
@@ -45,7 +47,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
// external state
const recognition = useCapabilityBrowserSpeechRecognition();
const synthesis = useCapabilityElevenLabs();
const speexGlobalEngine = useSpeexGlobalEngine();
const chatIsEmpty = useChatStore(state => {
if (!props.conversationId)
return false;
@@ -58,15 +60,16 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
const outOfTheBlue = !props.conversationId;
const overriddenEmptyChat = chatEmptyOverride || !chatIsEmpty;
const overriddenRecognition = recognitionOverride || recognition.mayWork;
const allGood = overriddenEmptyChat && overriddenRecognition && synthesis.mayWork;
const fatalGood = overriddenRecognition && synthesis.mayWork;
const synthesisShallWork = !!speexGlobalEngine;
const allGood = overriddenEmptyChat && overriddenRecognition && synthesisShallWork;
const fatalGood = overriddenRecognition && synthesisShallWork;
const handleOverrideChatEmpty = React.useCallback(() => setChatEmptyOverride(true), []);
const handleOverrideRecognition = React.useCallback(() => setRecognitionOverride(true), []);
const handleConfigureElevenLabs = React.useCallback(() => optimaOpenPreferences('voice'), []);
const handleConfigureVoice = React.useCallback(() => optimaOpenPreferences('voice'), []);
const handleFinishButton = React.useCallback(() => {
if (!allGood)
@@ -128,17 +131,17 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n
{/* Text to Speech status */}
<StatusCard
icon={<RecordVoiceOverTwoToneIcon />}
icon={<PhVoice />}
text={
(synthesis.mayWork ? 'Voice synthesis should be ready.' : 'There might be an issue with ElevenLabs voice synthesis.')
+ (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
(synthesisShallWork ? 'Voice synthesis should be ready.' : 'There might be an issue with voice synthesis.')
// + (synthesis.isConfiguredServerSide ? '' : (synthesis.isConfiguredClientSide ? '' : ' Please add your API key in the settings.'))
}
button={synthesis.mayWork ? undefined : (
<Button variant='outlined' onClick={handleConfigureElevenLabs} sx={{ mx: 1 }}>
button={synthesisShallWork ? undefined : (
<Button variant='outlined' onClick={handleConfigureVoice} sx={{ mx: 1 }}>
Configure
</Button>
)}
hasIssue={!synthesis.mayWork}
hasIssue={!synthesisShallWork}
/>
{/*<Typography>*/}
+1 -1
View File
@@ -317,7 +317,7 @@ export function Contacts(props: { setCallIntent: (intent: AppCallIntent) => void
issue={354}
text='Call App: Support thread and compatibility matrix'
note={<>
Voice input uses the HTML Web Speech API, and speech output requires an ElevenLabs API Key.
Voice input uses the HTML Web Speech API.
</>}
// note2='Please report any issues you encounter'
sx={{
+18 -31
View File
@@ -7,22 +7,22 @@ import CallEndIcon from '@mui/icons-material/CallEnd';
import CallIcon from '@mui/icons-material/Call';
import MicIcon from '@mui/icons-material/Mic';
import MicNoneIcon from '@mui/icons-material/MicNone';
import RecordVoiceOverTwoToneIcon from '@mui/icons-material/RecordVoiceOverTwoTone';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
import { useChatLLMDropdown } from '../chat/components/layout-bar/useLLMDropdown';
import { SystemPurposeId, SystemPurposes } from '../../data';
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
import { useElevenLabsVoiceDropdown } from '~/modules/elevenlabs/useElevenLabsVoiceDropdown';
import { aixChatGenerateContent_DMessage_FromConversation, AixChatGenerateContent_DMessageGuts } from '~/modules/aix/client/aix.client';
import { speakText } from '~/modules/speex/speex.client';
import type { OptimaBarControlMethods } from '~/common/layout/optima/bar/OptimaBarDropdown';
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
import { Link } from '~/common/components/Link';
import { OptimaPanelGroupedList } from '~/common/layout/optima/panel/OptimaPanelGroupedList';
import { OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
import { SpeechResult, useSpeechRecognition } from '~/common/components/speechrecognition/useSpeechRecognition';
import { conversationTitle, remapMessagesSysToUsr } from '~/common/stores/chat/chat.conversation';
import { createDMessageFromFragments, createDMessageTextContent, DMessage, messageFragmentsReduceText, messageWasInterruptedAtStart } from '~/common/stores/chat/chat.message';
@@ -43,18 +43,13 @@ import { useAppCallStore } from './state/store-app-call';
function CallMenu(props: {
pushToTalk: boolean,
setPushToTalk: (pushToTalk: boolean) => void,
override: boolean,
setOverride: (overridePersonaVoice: boolean) => void,
}) {
// external state
const { grayUI, toggleGrayUI } = useAppCallStore();
const { voicesDropdown } = useElevenLabsVoiceDropdown(false, !props.override);
const handlePushToTalkToggle = () => props.setPushToTalk(!props.pushToTalk);
const handleChangeVoiceToggle = () => props.setOverride(!props.override);
return <OptimaPanelGroupedList title='Call'>
<MenuItem onClick={handlePushToTalkToggle}>
@@ -63,17 +58,6 @@ function CallMenu(props: {
<Switch checked={props.pushToTalk} onChange={handlePushToTalkToggle} sx={{ ml: 'auto' }} />
</MenuItem>
<MenuItem onClick={handleChangeVoiceToggle}>
<ListItemDecorator><RecordVoiceOverTwoToneIcon /></ListItemDecorator>
Change Voice
<Switch checked={props.override} onChange={handleChangeVoiceToggle} sx={{ ml: 'auto' }} />
</MenuItem>
<MenuItem>
<ListItemDecorator>{' '}</ListItemDecorator>
{voicesDropdown}
</MenuItem>
<ListDivider />
<MenuItem onClick={toggleGrayUI}>
@@ -98,7 +82,6 @@ export function Telephone(props: {
const [avatarClickCount, setAvatarClickCount] = React.useState<number>(0);// const [micMuted, setMicMuted] = React.useState(false);
const [callElapsedTime, setCallElapsedTime] = React.useState<string>('00:00');
const [callMessages, setCallMessages] = React.useState<DMessage[]>([]);
const [overridePersonaVoice, setOverridePersonaVoice] = React.useState<boolean>(false);
const [personaTextInterim, setPersonaTextInterim] = React.useState<string | null>(null);
const [pushToTalk, setPushToTalk] = React.useState(true);
const [stage, setStage] = React.useState<'ring' | 'declined' | 'connected' | 'ended'>('ring');
@@ -118,7 +101,7 @@ export function Telephone(props: {
}));
const persona = SystemPurposes[props.callIntent.personaId as SystemPurposeId] ?? undefined;
const personaCallStarters = persona?.call?.starters ?? undefined;
const personaVoiceId = overridePersonaVoice ? undefined : (persona?.voices?.elevenLabs?.voiceId ?? undefined);
// const personaVoiceSelector = React.useMemo(() => personaGetVoiceSelector(persona), [persona]);
const personaSystemMessage = persona?.systemMessage ?? undefined;
// hooks and speech
@@ -165,7 +148,6 @@ export function Telephone(props: {
};
// [E] pickup -> seed message and call timer
// FIXME: Overriding the voice will reset the call - not a desired behavior
React.useEffect(() => {
if (!isConnected) return;
@@ -185,11 +167,14 @@ export function Telephone(props: {
setCallMessages([createDMessageTextContent('assistant', firstMessage)]); // [state] set assistant:hello message
// fire/forget
void elevenLabsSpeakText(firstMessage, personaVoiceId, true, true);
// fire/forget - use 'fast' priority for real-time conversation
void speakText(firstMessage,
undefined,
{ label: 'Call', priority: 'fast' },
);
return () => clearInterval(interval);
}, [isConnected, personaCallStarters, personaVoiceId]);
}, [isConnected, personaCallStarters]);
// [E] persona streaming response - upon new user message
React.useEffect(() => {
@@ -254,7 +239,7 @@ export function Telephone(props: {
'call',
callMessages[0].id,
{ abortSignal: responseAbortController.current.signal },
(update: AixChatGenerateContent_DMessage, _isDone: boolean) => {
(update: AixChatGenerateContent_DMessageGuts, _isDone: boolean) => {
const updatedText = messageFragmentsReduceText(update.fragments).trim();
if (updatedText)
setPersonaTextInterim(finalText = updatedText);
@@ -270,9 +255,12 @@ export function Telephone(props: {
fullMessage.generator = status.lastDMessage.generator;
setCallMessages(messages => [...messages, fullMessage]); // [state] append assistant:call_response
// fire/forget
// fire/forget - use 'fast' priority for real-time conversation
if (status.outcome === 'success' && finalText?.length >= 1)
void elevenLabsSpeakText(finalText, personaVoiceId, true, true);
void speakText(finalText,
undefined,
{ label: 'Call', priority: 'fast' },
);
}).catch((err: DOMException) => {
if (err?.name !== 'AbortError') {
@@ -288,7 +276,7 @@ export function Telephone(props: {
responseAbortController.current?.abort();
responseAbortController.current = null;
};
}, [isConnected, callMessages, modelId, personaVoiceId, personaSystemMessage, reMessages]);
}, [callMessages, isConnected, modelId, personaSystemMessage, reMessages]);
// [E] Message interrupter
const abortTrigger = isConnected && recognitionState.hasSpeech;
@@ -325,7 +313,6 @@ export function Telephone(props: {
<OptimaPanelIn>
<CallMenu
pushToTalk={pushToTalk} setPushToTalk={setPushToTalk}
override={overridePersonaVoice} setOverride={setOverridePersonaVoice}
/>
</OptimaPanelIn>
+8 -13
View File
@@ -10,7 +10,6 @@ import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
import type { TradeConfig } from '~/modules/trade/TradeModal';
import { downloadSingleChat, importConversationsFromFilesAtRest, openConversationsAtRestPicker } from '~/modules/trade/trade.client';
import { imaginePromptFromTextOrThrow } from '~/modules/aifn/imagine/imaginePromptFromText';
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
import { useAreBeamsOpen } from '~/modules/beam/store-beam.hooks';
import { useCapabilityTextToImage } from '~/modules/t2i/t2i.client';
@@ -21,7 +20,7 @@ import { ConversationsManager } from '~/common/chat-overlay/ConversationsManager
import { ErrorBoundary } from '~/common/components/ErrorBoundary';
import { getLLMContextTokens, LLM_IF_ANT_PromptCaching, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { OptimaDrawerIn, OptimaPanelIn, OptimaToolbarIn } from '~/common/layout/optima/portals/OptimaPortalsIn';
import { PanelResizeInset } from '~/common/components/panes/GoodPanelResizeHandler';
import { PanelResizeInset } from '~/common/components/PanelResizeInset';
import { Release } from '~/common/app.release';
import { ScrollToBottom } from '~/common/scroll-to-bottom/ScrollToBottom';
import { ScrollToBottomButton } from '~/common/scroll-to-bottom/ScrollToBottomButton';
@@ -186,6 +185,7 @@ export function AppChat() {
const beamOpenStoreInFocusedPane = focusedPaneIndex === null ? null
: !beamsOpens?.[focusedPaneIndex] ? null
: paneBeamStores?.[focusedPaneIndex] ?? null;
const focusedChatBeamOpen = focusedPaneIndex !== null && !!beamsOpens?.[focusedPaneIndex];
const {
// focused
@@ -345,11 +345,6 @@ export function AppChat() {
});
}, [handleExecuteAndOutcome]);
const handleTextSpeak = React.useCallback(async (text: string): Promise<void> => {
await elevenLabsSpeakText(text, undefined, true, true);
}, []);
// Chat actions
const handleConversationNewInFocusedPane = React.useCallback((forceNoRecycle: boolean, isIncognito: boolean) => {
@@ -479,7 +474,7 @@ export function AppChat() {
);
// Disabled by default, as it lags the opening of the drawer and immediatly vanishes during the closing animation
// Disabled by default, as it lags the opening of the drawer and immediately vanishes during the closing animation
const isDrawerOpen = true; // useOptimaDrawerOpen();
const drawerContent = React.useMemo(() => !isDrawerOpen ? null :
@@ -489,6 +484,7 @@ export function AppChat() {
activeFolderId={activeFolderId}
chatPanesConversationIds={paneUniqueConversationIds}
disableNewButton={disableNewButton}
focusedChatBeamOpen={focusedChatBeamOpen}
onConversationActivate={handleOpenConversationInFocusedPane}
onConversationBranch={handleConversationBranch}
onConversationNew={handleConversationNewInFocusedPane}
@@ -497,7 +493,7 @@ export function AppChat() {
onConversationsImportDialog={handleConversationImportDialog}
setActiveFolderId={setActiveFolderId}
/>,
[activeFolderId, disableNewButton, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
[activeFolderId, disableNewButton, focusedChatBeamOpen, focusedPaneConversationId, handleConversationBranch, handleConversationExport, handleConversationImportDialog, handleConversationNewInFocusedPane, handleDeleteConversations, handleOpenConversationInFocusedPane, isDrawerOpen, paneUniqueConversationIds],
);
const focusedChatPanelContent = React.useMemo(() => !focusedPaneConversationId ? null :
@@ -523,7 +519,7 @@ export function AppChat() {
React.useEffect(() => {
// Debug: open a null chat
if (Release.IsNodeDevBuild && intent.initialConversationId === 'null')
openConversationInFocusedPane(null! /* for debugging purporse */);
openConversationInFocusedPane(null! /* for debugging purpose */);
// Open the initial conversation if set
else if (intent.initialConversationId)
openConversationInFocusedPane(intent.initialConversationId);
@@ -651,7 +647,7 @@ export function AppChat() {
setFocusedPaneIndex(idx);
}}
onCollapse={() => {
// NOTE: despite the delay to try to let the draggin settle, there seems to be an issue with the Pane locking the screen
// NOTE: despite the delay to try to let the dragging settle, there seems to be an issue with the Pane locking the screen
// setTimeout(() => removePane(idx), 50);
// more than 2 will result in an assertion from the framework
if (chatPanes.length === 2) removePane(idx);
@@ -678,7 +674,7 @@ export function AppChat() {
// NOTE: this is a workaround for the 'stuck-after-collapse-close' issue. We will collapse the 'other' pane, which
// will get it removed (onCollapse), and somehow this pane will be stuck with a pointerEvents: 'none' style, which de-facto
// disables further interaction with the chat. This is a workaround to re-enable the pointer events.
// The root cause seems to be a Dragstate not being reset properly, however the pointerEvents has been set since 0.0.56 while
// The root cause seems to be a Drag state not being reset properly, however the pointerEvents has been set since 0.0.56 while
// it was optional before: https://github.com/bvaughn/react-resizable-panels/issues/241
pointerEvents: 'auto',
}),
@@ -723,7 +719,6 @@ export function AppChat() {
onConversationNew={handleConversationNewInFocusedPane}
onTextDiagram={handleTextDiagram}
onTextImagine={handleImagineFromText}
onTextSpeak={handleTextSpeak}
sx={chatMessageListSx}
/>
)}
+10 -13
View File
@@ -7,6 +7,7 @@ import { Box, List } from '@mui/joy';
import type { SystemPurposeExample } from '../../../data';
import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal';
import { speakText } from '~/modules/speex/speex.client';
import type { ConversationHandler } from '~/common/chat-overlay/ConversationHandler';
import type { DLLMContextTokens } from '~/common/stores/llms/llms.types';
@@ -17,8 +18,6 @@ import { createDMessageFromFragments, createDMessageTextContent, DMessage, DMess
import { createTextContentFragment, DMessageFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
import { openFileForAttaching } from '~/common/components/ButtonAttachFiles';
import { optimaOpenPreferences } from '~/common/layout/optima/useOptima';
import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating';
import { useCapabilityElevenLabs } from '~/common/components/useCapabilities';
import { useChatOverlayStore } from '~/common/chat-overlay/store-perchat_vanilla';
import { useChatStore } from '~/common/stores/chat/store-chats';
import { useScrollToBottom } from '~/common/scroll-to-bottom/useScrollToBottom';
@@ -51,7 +50,6 @@ export function ChatMessageList(props: {
onConversationNew: (forceNoRecycle: boolean, isIncognito: boolean) => void,
onTextDiagram: (diagramConfig: DiagramConfig | null) => void,
onTextImagine: (conversationId: DConversationId, selectedText: string) => Promise<void>,
onTextSpeak: (selectedText: string) => Promise<void>,
setIsMessageSelectionMode: (isMessageSelectionMode: boolean) => void,
sx?: SxProps,
}) {
@@ -65,7 +63,6 @@ export function ChatMessageList(props: {
const { notifyBooting } = useScrollToBottom();
const danger_experimentalHtmlWebUi = useChatAutoSuggestHTMLUI();
const [showSystemMessages] = useChatShowSystemMessages();
const optionalTranslationWarning = useBrowserTranslationWarning();
const { conversationMessages, historyTokenCount } = useChatStore(useShallow(({ conversations }) => {
const conversation = conversations.find(conversation => conversation.id === props.conversationId);
return {
@@ -77,10 +74,9 @@ export function ChatMessageList(props: {
_composerInReferenceToCount: state.inReferenceTo?.length ?? 0,
ephemerals: state.ephemerals?.length ? state.ephemerals : null,
})));
const { mayWork: isSpeakable } = useCapabilityElevenLabs();
// derived state
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine, onTextSpeak } = props;
const { conversationHandler, conversationId, capabilityHasT2I, onConversationBranch, onConversationExecuteHistory, onTextDiagram, onTextImagine } = props;
const composerCanAddInReferenceTo = _composerInReferenceToCount < 5;
const composerHasInReferenceto = _composerInReferenceToCount > 0;
@@ -214,12 +210,15 @@ export function ChatMessageList(props: {
}, [capabilityHasT2I, conversationId, onTextImagine]);
const handleTextSpeak = React.useCallback(async (text: string) => {
if (!isSpeakable)
return optimaOpenPreferences('voice');
// sandwich the speaking with the indicator
setIsSpeaking(true);
await onTextSpeak(text);
const result = await speakText(text, undefined, { label: 'Chat speak' });
setIsSpeaking(false);
}, [isSpeakable, onTextSpeak]);
// open voice preferences
if (!result.success && (result.errorType === 'tts-no-engine' || result.errorType === 'tts-unconfigured'))
optimaOpenPreferences('voice');
}, []);
// operate on the local selection set
@@ -326,8 +325,6 @@ export function ChatMessageList(props: {
return (
<List role='chat-messages-list' sx={listSx}>
{optionalTranslationWarning}
{props.isMessageSelectionMode && (
<MessagesSelectionHeader
hasSelected={selectedMessages.size > 0}
@@ -381,7 +378,7 @@ export function ChatMessageList(props: {
onMessageTruncate={handleMessageTruncate}
onTextDiagram={handleTextDiagram}
onTextImagine={capabilityHasT2I ? handleTextImagine : undefined}
onTextSpeak={isSpeakable ? handleTextSpeak : undefined}
onTextSpeak={handleTextSpeak}
/>
);
@@ -17,7 +17,7 @@ import { useChatAutoSuggestAttachmentPrompts, useChatMicTimeoutMsValue } from '.
import { useAgiAttachmentPrompts } from '~/modules/aifn/agiattachmentprompts/useAgiAttachmentPrompts';
import { useBrowseCapability } from '~/modules/browse/store-module-browsing';
import { DLLM, getLLMContextTokens, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { DLLM, getLLMContextTokens, getLLMPricing, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { AudioGenerator } from '~/common/util/audio/AudioGenerator';
import { AudioPlayer } from '~/common/util/audio/AudioPlayer';
import { ButtonAttachFilesMemo, openFileForAttaching } from '~/common/components/ButtonAttachFiles';
@@ -233,7 +233,7 @@ export function Composer(props: {
const tokensHistory = _historyTokenCount;
const tokensResponseMax = getModelParameterValueOrThrow('llmResponseTokens', props.chatLLM?.initialParameters, props.chatLLM?.userParameters, 0) ?? 0;
const tokenLimit = getLLMContextTokens(props.chatLLM) ?? 0;
const tokenChatPricing = props.chatLLM?.pricing?.chat;
const tokenChatPricing = getLLMPricing(props.chatLLM)?.chat;
// Effect: load initial text if queued up (e.g. by /link/share_targetF)
@@ -859,7 +859,7 @@ export function Composer(props: {
<Textarea
variant='outlined'
color={isDraw ? 'warning' : isReAct ? 'success' : undefined}
autoFocus
autoFocus={isDesktop}
minRows={isMobile ? 3.5 : isDraw ? 4 : agiAttachmentPrompts.hasData ? 3 : showChatInReferenceTo ? 4 : 5}
maxRows={isMobile ? 8 : 10}
placeholder={textPlaceholder}
@@ -905,7 +905,7 @@ export function Composer(props: {
)}
{!showChatInReferenceTo && !isDraw && tokenLimit > 0 && (
<TokenBadgeMemo hideBelowDollars={0.0001} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
<TokenBadgeMemo hideBelowDollars={0.01} chatPricing={tokenChatPricing} direct={tokensComposer} history={tokensHistory} responseMax={tokensResponseMax} limit={tokenLimit} showCost={labsShowCost} enableHover={!isMobile} showExcess absoluteBottomRight />
)}
</Box>
@@ -98,6 +98,7 @@ const converterTypeToIconMap: { [key in AttachmentDraftConverterType]: React.Com
'image-resized-high': PhotoSizeSelectLargeOutlinedIcon,
'image-resized-low': PhotoSizeSelectSmallOutlinedIcon,
'image-to-default': ImageOutlinedIcon,
'image-caption': AbcIcon,
'image-ocr': AbcIcon,
'pdf-text': PictureAsPdfIcon,
'pdf-images': PermMediaOutlinedIcon,
@@ -47,9 +47,9 @@ function TokenBadge(props: {
const showAltCosts = !!props.showCost && !!costMax && costMin !== undefined;
if (showAltCosts) {
// Note: switched to 'min cost (>= ...)' on mobile as well, to restore the former behavior, just uncomment the !props.enableHover (a proxy for isMobile)
badgeValue = (/*!props.enableHover ||*/ isHovering)
? '< ' + formatModelsCost(costMax)
: '> ' + formatModelsCost(costMin);
badgeValue =
// (/*!props.enableHover ||*/ isHovering) ? '< ' + formatModelsCost(costMax) :
'> ' + formatModelsCost(costMin);
} else {
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
@@ -77,7 +77,7 @@ function TokenBadge(props: {
slotProps={{
root: {
sx: {
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: '1rem' }),
cursor: 'help',
...(shallInvisible && {
opacity: 0,
@@ -92,6 +92,13 @@ function TokenBadge(props: {
fontFamily: 'code',
fontSize: 'xs',
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
// make it transparent over text
// backgroundColor: `rgb(var(--joy-palette-${color}-lightChannel) / 15%)`, // similar to success.50
background: 'transparent',
boxShadow: 'none', // outline
'&:hover': {
backgroundColor: `${color}.softHoverBg`,
},
},
},
}}
@@ -66,6 +66,7 @@ function ChatDrawer(props: {
activeFolderId: string | null,
chatPanesConversationIds: DConversationId[],
disableNewButton: boolean,
focusedChatBeamOpen: boolean,
onConversationActivate: (conversationId: DConversationId) => void,
onConversationBranch: (conversationId: DConversationId, messageId: string | null, addSplitPane: boolean) => void,
onConversationNew: (forceNoRecycle: boolean, isIncognito: boolean) => void,
@@ -456,7 +457,7 @@ function ChatDrawer(props: {
{/*<OpenAIIcon sx={{ ml: 'auto' }} />*/}
</ListItemButton>
<ListItemButton disabled={filteredChatsAreEmpty} onClick={handleConversationsExport} sx={{ flex: 1 }}>
<ListItemButton disabled={filteredChatsAreEmpty || props.focusedChatBeamOpen} onClick={handleConversationsExport} sx={{ flex: 1 }}>
<ListItemDecorator>
<FileUploadOutlinedIcon />
</ListItemDecorator>
@@ -4,7 +4,8 @@ import type { SxProps } from '@mui/joy/styles/types';
import { Box, Button, ColorPaletteProp } from '@mui/joy';
import type { ContentScaling } from '~/common/app.theme';
import { DMessageContentFragment, DMessageTextPart, isTextContentFragment } from '~/common/stores/chat/chat.fragments';
import type { InterleavedFragment } from '~/common/stores/chat/hooks/useFragmentBuckets';
import { DMessageTextPart, isTextContentFragment } from '~/common/stores/chat/chat.fragments';
// configuration
@@ -54,7 +55,7 @@ const optionSx: SxProps = {
};
export function optionsExtractFromFragments_dangerModifyFragment(enabled: boolean, fragments: DMessageContentFragment[]): { fragments: DMessageContentFragment[], options: string[], } {
export function optionsExtractFromFragments_dangerModifyFragment(enabled: boolean, fragments: InterleavedFragment[]): { fragments: InterleavedFragment[], options: string[] } {
if (enabled && fragments.length) {
const fragment = fragments[fragments.length - 1];
if (isTextContentFragment(fragment)) {
@@ -21,7 +21,6 @@ import InsertLinkIcon from '@mui/icons-material/InsertLink';
import MoreVertIcon from '@mui/icons-material/MoreVert';
import NotificationsActiveIcon from '@mui/icons-material/NotificationsActive';
import NotificationsOutlinedIcon from '@mui/icons-material/NotificationsOutlined';
import RecordVoiceOverOutlinedIcon from '@mui/icons-material/RecordVoiceOverOutlined';
import ReplayIcon from '@mui/icons-material/Replay';
import ReplyAllRoundedIcon from '@mui/icons-material/ReplyAllRounded';
import ReplyRoundedIcon from '@mui/icons-material/ReplyRounded';
@@ -40,6 +39,7 @@ import { CloseablePopup } from '~/common/components/CloseablePopup';
import { DMessage, DMessageId, DMessageUserFlag, DMetaReferenceItem, MESSAGE_FLAG_AIX_SKIP, MESSAGE_FLAG_NOTIFY_COMPLETE, MESSAGE_FLAG_STARRED, MESSAGE_FLAG_VND_ANT_CACHE_AUTO, MESSAGE_FLAG_VND_ANT_CACHE_USER, messageFragmentsReduceText, messageHasUserFlag } from '~/common/stores/chat/chat.message';
import { KeyStroke } from '~/common/components/KeyStroke';
import { MarkHighlightIcon } from '~/common/components/icons/MarkHighlightIcon';
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
import { Release } from '~/common/app.release';
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
import { adjustContentScaling, themeScalingMap, themeZIndexChatBubble } from '~/common/app.theme';
@@ -217,15 +217,15 @@ export function ChatMessage(props: {
const isVndAndCacheUser = !!props.showAntPromptCaching && messageHasUserFlag(props.message, MESSAGE_FLAG_VND_ANT_CACHE_USER);
const {
annotationFragments, // Web Citations, References (rendered at top)
interleavedFragments, // Reasoning, Placeholders, Text, Code, Tools (interleaved in temporal order)
imageAttachments, // Stamp-sized Images
voidFragments, // Model-Aux, Placeholders
contentFragments, // Text (Markdown + Code + ... blocks), Errors, (large) Images
nonImageAttachments, // Document Attachments, likely the User dropped them in
lastFragmentIsError,
} = useFragmentBuckets(messageFragments);
const fragmentFlattenedText = React.useMemo(() => messageFragmentsReduceText(messageFragments), [messageFragments]);
const handleHighlightSelText = useSelHighlighterMemo(messageId, selText, contentFragments, fromAssistant, props.onMessageFragmentReplace);
const handleHighlightSelText = useSelHighlighterMemo(messageId, selText, interleavedFragments.filter(f => f.ft === 'content'), fromAssistant, props.onMessageFragmentReplace);
const textSubject = selText ? selText : fragmentFlattenedText;
const isSpecialT2I = textSubject.startsWith('/draw ') || textSubject.startsWith('/imagine ') || textSubject.startsWith('/img ');
@@ -579,9 +579,9 @@ export function ChatMessage(props: {
const lookForOptions = props.onMessageContinue !== undefined && props.isBottom === true && messageGenerator?.tokenStopReason !== 'out-of-tokens' && fromAssistant && !messagePendingIncomplete && !isEditingText && uiComplexityMode !== 'minimal' && false;
const { fragments: renderContentFragments, options: continuationOptions } = React.useMemo(() => {
return optionsExtractFromFragments_dangerModifyFragment(lookForOptions, contentFragments);
}, [contentFragments, lookForOptions]);
const { fragments: renderInterleavedFragments, options: continuationOptions } = React.useMemo(() => {
return optionsExtractFromFragments_dangerModifyFragment(lookForOptions, interleavedFragments);
}, [interleavedFragments, lookForOptions]);
// style
@@ -589,7 +589,7 @@ export function ChatMessage(props: {
const listItemSx: SxProps = React.useMemo(() => ({
// vars
'--AGI-overlay-start-opacity': uiComplexityMode === 'extra' ? 0.1 : 0,
// '--AGI-overlay-start-opacity': uiComplexityMode === 'extra' ? 0.1 : 0, // disabled - looks worse
// style
backgroundColor: backgroundColor,
@@ -773,20 +773,23 @@ export function ChatMessage(props: {
/>
)}
{/* Void Fragments */}
{voidFragments.length >= 1 && (
{/* Annotation Fragments (absolute top: citations, references) */}
{annotationFragments.length >= 1 && (
<VoidFragments
voidFragments={voidFragments}
nonVoidFragmentsCount={renderContentFragments.length}
voidFragments={annotationFragments}
nonVoidFragmentsCount={interleavedFragments.filter(f => f.ft === 'content').length}
contentScaling={adjContentScaling}
uiComplexityMode={uiComplexityMode}
messageRole={messageRole}
messagePendingIncomplete={messagePendingIncomplete}
onFragmentDelete={!props.onMessageFragmentDelete ? undefined : handleFragmentDelete}
onFragmentReplace={!props.onMessageFragmentReplace ? undefined : handleFragmentReplace}
/>
)}
{/* Content Fragments */}
{/* Interleaved Fragments (reasoning + content in temporal order) */}
<ContentFragments
contentFragments={renderContentFragments}
contentFragments={renderInterleavedFragments}
showEmptyNotice={!messageFragments.length && !messagePendingIncomplete}
contentScaling={adjContentScaling}
@@ -794,6 +797,8 @@ export function ChatMessage(props: {
fitScreen={props.fitScreen}
isMobile={props.isMobile}
messageRole={messageRole}
messageGeneratorLlmId={messageGenerator?.mgt === 'aix' ? messageGenerator.aix?.mId : undefined}
messagePendingIncomplete={messagePendingIncomplete}
optiAllowSubBlocksMemo={!!messagePendingIncomplete}
disableMarkdownText={disableMarkdown || fromUser /* User messages are edited as text. Try to have them in plain text. NOTE: This may bite. */}
showUnsafeHtmlCode={props.showUnsafeHtmlCode}
@@ -1022,7 +1027,7 @@ export function ChatMessage(props: {
)}
{!!props.onTextSpeak && (
<MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <PhVoice />}</ListItemDecorator>
Speak
</MenuItem>
)}
@@ -1150,7 +1155,7 @@ export function ChatMessage(props: {
</Tooltip>}
{!!props.onTextSpeak && <Tooltip disableInteractive arrow placement='top' title='Speak'>
<IconButton color='success' onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
{!props.isSpeaking ? <RecordVoiceOverOutlinedIcon /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}
{!props.isSpeaking ? <PhVoice /> : <CircularProgress sx={{ '--CircularProgress-size': '16px' }} />}
</IconButton>
</Tooltip>}
{(!!props.onTextDiagram || !!props.onTextImagine || !!props.onTextSpeak) && <Divider />}
@@ -1190,7 +1195,7 @@ export function ChatMessage(props: {
Auto-Draw
</MenuItem>}
{!!props.onTextSpeak && <MenuItem onClick={handleOpsSpeak} disabled={!couldSpeak || props.isSpeaking}>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <RecordVoiceOverOutlinedIcon />}</ListItemDecorator>
<ListItemDecorator>{props.isSpeaking ? <CircularProgress size='sm' /> : <PhVoice />}</ListItemDecorator>
Speak
</MenuItem>}
</CloseablePopup>
@@ -7,13 +7,13 @@ import CodeIcon from '@mui/icons-material/Code';
import EditRoundedIcon from '@mui/icons-material/EditRounded';
import ImageOutlinedIcon from '@mui/icons-material/ImageOutlined';
import PictureAsPdfIcon from '@mui/icons-material/PictureAsPdf';
import RecordVoiceOverOutlinedIcon from '@mui/icons-material/RecordVoiceOverOutlined';
import TextFieldsIcon from '@mui/icons-material/TextFields';
import TextureIcon from '@mui/icons-material/Texture';
import { ContentScaling, themeScalingMap } from '~/common/app.theme';
import { DMessageAttachmentFragment, DMessageFragmentId, DVMimeType, isDocPart } from '~/common/stores/chat/chat.fragments';
import { LiveFileIcon } from '~/common/livefile/liveFile.icons';
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
import { ellipsizeMiddle } from '~/common/util/textUtils';
import { useLiveFileMetadata } from '~/common/livefile/useLiveFileMetadata';
@@ -41,7 +41,7 @@ export function buttonIconForFragment(part: DMessageAttachmentFragment['part']):
case 'image':
return ImageOutlinedIcon;
case 'audio':
return RecordVoiceOverOutlinedIcon;
return PhVoice;
default:
const _exhaustiveCheck: never = assetType;
return TextureIcon; // missing zync asset type
@@ -53,7 +53,7 @@ function _inferInitialViewAsCode(attachmentFragment: DMessageAttachmentFragment)
}
export function DocAttachmentFragment(props: {
export const DocAttachmentFragmentPane = React.memo(function DocAttachmentFragment(props: {
fragment: DMessageAttachmentFragment,
controlledEditor: boolean,
editedText?: string,
@@ -400,4 +400,4 @@ export function DocAttachmentFragment(props: {
</RenderCodePanelFrame>
);
}
});
@@ -1,5 +1,5 @@
import * as React from 'react';
import { Box } from '@mui/joy';
import { Box, Button } from '@mui/joy';
import type { ContentScaling } from '~/common/app.theme';
import type { DMessageRole } from '~/common/stores/chat/chat.message';
@@ -7,7 +7,7 @@ import { DMessageAttachmentFragment, DMessageFragmentId, isDocPart, updateFragme
import type { ChatMessageTextPartEditState } from '../ChatMessage';
import { DocAttachmentFragmentButton } from './DocAttachmentFragmentButton';
import { DocAttachmentFragment } from './DocAttachmentFragment';
import { DocAttachmentFragmentPane } from './DocAttachmentFragmentPane';
/**
@@ -15,7 +15,7 @@ import { DocAttachmentFragment } from './DocAttachmentFragment';
* When one is active, there is a content part just right under (with the collapse mechanism in case it's a user role).
* If one is clicked the content part (use ContentPartText) is displayed.
*/
export function DocumentAttachmentFragments(props: {
export const DocumentAttachmentFragments = React.memo(function DocumentAttachmentFragments(props: {
attachmentFragments: DMessageAttachmentFragment[],
messageRole: DMessageRole,
contentScaling: ContentScaling,
@@ -30,6 +30,7 @@ export function DocumentAttachmentFragments(props: {
// state
const [_activeFragmentId, setActiveFragmentId] = React.useState<DMessageFragmentId | null>(null);
const [editState, setEditState] = React.useState<ChatMessageTextPartEditState | null>(null);
const [showAllAttachments, setShowAllAttachments] = React.useState<boolean>(false);
// derived state
@@ -92,6 +93,20 @@ export function DocumentAttachmentFragments(props: {
}, []);
// pagination logic
const SHOW_LIMIT = 49;
const totalAttachments = props.attachmentFragments.length;
const hasMoreThanLimit = totalAttachments > SHOW_LIMIT + 1; // +1 to account for "show more" button
const visibleAttachments = hasMoreThanLimit && !showAllAttachments
? props.attachmentFragments.slice(0, SHOW_LIMIT)
: props.attachmentFragments;
const remainingCount = totalAttachments - SHOW_LIMIT;
const handleToggleShowAll = React.useCallback(() => {
setShowAllAttachments(prev => !prev);
}, []);
// memos
const buttonsSx = React.useMemo(() => ({
// layout
@@ -112,7 +127,7 @@ export function DocumentAttachmentFragments(props: {
{/* Document buttons */}
<Box sx={buttonsSx}>
{props.attachmentFragments.map((attachmentFragment) =>
{visibleAttachments.map((attachmentFragment) =>
<DocAttachmentFragmentButton
key={attachmentFragment.fId}
fragment={attachmentFragment}
@@ -122,11 +137,27 @@ export function DocumentAttachmentFragments(props: {
toggleSelected={handleToggleSelectedId}
/>,
)}
{/* Show more/less button */}
{hasMoreThanLimit && (
<Button
size={props.contentScaling === 'md' ? 'md' : 'sm'}
variant='soft'
onClick={handleToggleShowAll}
sx={{
minHeight: props.contentScaling === 'md' ? 40 : props.contentScaling === 'sm' ? 38 : 36,
minWidth: '64px',
fontWeight: 'md',
}}
>
{showAllAttachments ? `Show fewer docs...` : `Show ${remainingCount} more...`}
</Button>
)}
</Box>
{/* Document Viewer & Editor */}
{!!selectedFragment && isDocPart(selectedFragment.part) && (
<DocAttachmentFragment
<DocAttachmentFragmentPane
key={selectedFragment.fId /* this is here for the useLiveFile hook which otherwise would migrate state across fragments */}
fragment={selectedFragment}
controlledEditor={controlledEditor}
@@ -144,4 +175,4 @@ export function DocumentAttachmentFragments(props: {
</Box>
);
}
});
@@ -3,15 +3,44 @@ import * as React from 'react';
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
import type { ContentScaling } from '~/common/app.theme';
import type { DMessageErrorPart } from '~/common/stores/chat/chat.fragments';
import type { DMessageRole } from '~/common/stores/chat/chat.message';
import { BlockPartError_NetDisconnected } from './BlockPartError_NetDisconnected';
import { BlockPartError_RequestExceeded } from './BlockPartError_RequestExceeded';
export function BlockPartError(props: {
errorText: string,
errorHint?: DMessageErrorPart['hint'],
messageRole: DMessageRole,
messageGeneratorLlmId?: string | null,
contentScaling: ContentScaling,
}) {
// special error presentation, based on hints
switch (props.errorHint) {
case 'aix-net-disconnected':
// determine the 2 'kinds' of disconnection errors in aix.client.ts
const kind =
props.errorText.includes('**network error**') ? 'net-client-closed'
: props.errorText.includes('**connection terminated**') ? 'net-server-closed'
: 'net-unknown-closed';
// For client-side error, we don't show the _NetDisconnected component
if (kind === 'net-client-closed')
break;
return <BlockPartError_NetDisconnected disconnectionKind={kind} messageGeneratorLlmId={props.messageGeneratorLlmId} contentScaling={props.contentScaling} />;
case 'aix-request-exceeded':
return <BlockPartError_RequestExceeded messageGeneratorLlmId={props.messageGeneratorLlmId} contentScaling={props.contentScaling} />;
default:
// continue rendering generic error
break;
}
// Check if the errorText starts with '**' and has a closing '**' following Markdown rules
let textToRender = props.errorText;
let renderAsMarkdown = false;
@@ -0,0 +1,103 @@
import * as React from 'react';
import { Alert, Box, FormHelperText, Switch } from '@mui/joy';
import WifiOffRoundedIcon from '@mui/icons-material/WifiOffRounded';
import type { ContentScaling } from '~/common/app.theme';
import { useLLM } from '~/common/stores/llms/llms.hooks';
import { useModelServiceClientSideFetch } from '~/common/stores/llms/hooks/useModelServiceClientSideFetch';
/**
 * Error recovery component for "Connection terminated" errors.
 *
 * Rendered for the 'aix-net-disconnected' error hint. When the disconnection
 * happened server-side and the generator model's service supports Client-Side
 * Fetch (CSF), an experimental toggle is offered to connect directly from the
 * client and bypass server timeouts; otherwise generic troubleshooting
 * suggestions are shown.
 */
export function BlockPartError_NetDisconnected(props: {
  disconnectionKind: 'net-client-closed' | 'net-server-closed' | 'net-unknown-closed';
  messageGeneratorLlmId?: string | null;
  contentScaling: ContentScaling;
}) {

  // external state
  const model = useLLM(props.messageGeneratorLlmId) ?? null;
  const isServerSideClosed = props.disconnectionKind === 'net-server-closed'; // do not show CSF option for non-server-side
  const { csfAvailable, csfActive, csfToggle, vendorName } = useModelServiceClientSideFetch(isServerSideClosed, model);

  return (
    <Alert
      size={props.contentScaling === 'xs' ? 'sm' : 'md'}
      color='danger'
      variant='plain'
      sx={{ display: 'flex', alignItems: 'flex-start', gap: 1 }}
    >
      <Box sx={{ flex: 1, display: 'flex', flexDirection: 'column', gap: 0.5, alignItems: 'flex-start' }}>

        {/* Header */}
        <Box sx={{ display: 'flex', gap: 2 }}>
          <WifiOffRoundedIcon sx={{ flexShrink: 0, mt: 0.5 }} />
          <div>
            <Box fontSize='larger'>
              Connection Terminated
            </Box>
            <div>
              The connection was unexpectedly closed before the response completed.
            </div>
          </div>
        </Box>

        {/* Recovery options */}
        {csfAvailable ? <>

          {/* Explanation */}
          <Box color='text.tertiary' fontSize='sm' my={2}>
            <strong>Experimental:</strong> enable direct connection to {vendorName} to bypass server timeouts - then try again.
          </Box>

          {/* Toggle */}
          <Box
            sx={{
              display: 'flex',
              alignItems: 'center',
              gap: 2,
              p: 2,
              borderRadius: 'sm',
              bgcolor: 'background.popup',
              boxShadow: 'md',
              // border: '1px solid',
              // borderColor: 'divider',
            }}
          >
            <Box sx={{ flex: 1 }}>
              <Box color={!csfActive ? undefined : 'primary.solidBg'} fontWeight='lg' mb={0.5}>
                Direct Connection {csfActive && '- Now Try Again'}
              </Box>
              <FormHelperText>
                Connect directly from this client -&gt; {vendorName || 'AI service'}
              </FormHelperText>
            </Box>
            <Switch
              checked={csfActive}
              onChange={(e) => csfToggle(e.target.checked)}
            />
          </Box>

        </> : (
          <div>
            <Box sx={{ color: 'text.secondary', my: 1 }}>
              Suggestions:
            </Box>
            <Box component='ul' sx={{ color: 'text.secondary' }}>
              <li>Check your internet connection and try again</li>
              <li>The AI service may be experiencing issues - wait a moment and retry</li>
              {/* FIX: typo 'GitHib' -> 'GitHub' */}
              <li>If the issue persists, please let us know promptly on Discord or GitHub</li>
            </Box>
          </div>
        )}
      </Box>
    </Alert>
  );
}
@@ -0,0 +1,107 @@
import * as React from 'react';
import { Alert, Box, FormHelperText, Switch } from '@mui/joy';
import WarningRoundedIcon from '@mui/icons-material/WarningRounded';
import type { ContentScaling } from '~/common/app.theme';
import { useLLM } from '~/common/stores/llms/llms.hooks';
import { useModelServiceClientSideFetch } from '~/common/stores/llms/hooks/useModelServiceClientSideFetch';
/**
* Error recovery component for "Request too large" errors.
*/
export function BlockPartError_RequestExceeded(props: {
messageGeneratorLlmId?: string | null;
contentScaling: ContentScaling;
onRegenerate?: () => void;
}) {
// external state
const model = useLLM(props.messageGeneratorLlmId) ?? null;
const { csfAvailable, csfActive, csfToggle, vendorName } = useModelServiceClientSideFetch(true, model);
return (
<Alert
size={props.contentScaling === 'xs' ? 'sm' : 'md'}
color='warning'
sx={{ display: 'flex', alignItems: 'flex-start', gap: 1, border: '1px solid', borderColor: 'warning.outlinedBorder' }}
>
<WarningRoundedIcon sx={{ flexShrink: 0, mt: 0.25 }} />
<Box sx={{ flex: 1, display: 'flex', flexDirection: 'column', gap: 0.5 }}>
<Box fontSize='larger'>
Request Too Large
</Box>
<div>
Your message or attachments exceed the limit of the Vercel edge network
</div>
{/* Recovery options */}
{csfAvailable ? <>
{/* Explanation */}
<Box color='text.secondary' fontSize='sm' my={2}>
<strong>Experimental:</strong> enable Direct Connection to {vendorName} to work around size limitations.
</Box>
{/* Toggle */}
<Box
sx={{
display: 'flex',
alignItems: 'center',
gap: 2,
p: 2,
borderRadius: 'sm',
bgcolor: 'background.popup',
boxShadow: 'md',
}}
>
<Box sx={{ flex: 1 }}>
<Box color={!csfActive ? undefined : 'primary.solidBg'} fontWeight='lg' mb={0.5}>
Direct Connection {csfActive && '- Now Try Again'}
</Box>
<FormHelperText>
Connect directly from this client -&gt; {vendorName || 'AI service'}
</FormHelperText>
</Box>
<Switch
checked={csfActive}
onChange={(e) => csfToggle(e.target.checked)}
/>
</Box>
{/* Regenerate button */}
{/*{props.onRegenerate && (*/}
{/* <Button*/}
{/* size='sm'*/}
{/* variant={csfActive ? 'solid' : 'outlined'}*/}
{/* color={csfActive ? 'success' : 'neutral'}*/}
{/* startDecorator={<RefreshIcon />}*/}
{/* onClick={props.onRegenerate}*/}
{/* sx={{ alignSelf: 'flex-start' }}*/}
{/* >*/}
{/* {csfActive ? 'Regenerate with Direct Connection' : 'Regenerate'}*/}
{/* </Button>*/}
{/*)}*/}
</> : (
<Box>
<Box sx={{ color: 'text.secondary', my: 1 }}>
Suggestions:
</Box>
<Box component='ul' sx={{ color: 'text.secondary' }}>
<li>Use the cleanup button in the right pane to hide old messages</li>
<li>Remove large attachments from the conversation</li>
{/*<li>Reduce conversation length before sending</li>*/}
</Box>
</Box>
)}
</Box>
</Alert>
);
}
@@ -1,20 +1,25 @@
import * as React from 'react';
import type { ColorPaletteProp, SxProps, VariantProp } from '@mui/joy/styles/types';
import { Sheet } from '@mui/joy';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, IconButton, Sheet, Typography } from '@mui/joy';
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
import type { ContentScaling } from '~/common/app.theme';
import type { DMessageToolInvocationPart } from '~/common/stores/chat/chat.fragments';
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
import { humanReadableFunctionName } from './BlockPartToolInvocation.utils';
const keyValueGridSx = {
border: '1px solid',
borderRadius: 'sm',
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
p: 1.5,
// border: '1px solid',
// borderRadius: 'sm',
// boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
// p: 1.5,
// Grid layout with 2 columns
display: 'grid',
@@ -30,36 +35,49 @@ const keyValueGridSx = {
// },
} as const;
const _styleKeyValueGrid: SxProps = {
border: 'none',
boxShadow: 'none',
p: 0,
fontSize: '0.875em',
opacity: 0.9,
} as const;
export type KeyValueData = { label: string, value: React.ReactNode, asCode?: boolean }[];
/**
 * Renders label/value pairs in a two-column grid (label column, value column).
 *
 * Layout comes from the module-level `keyValueGridSx`, with the compact
 * overrides from `_styleKeyValueGrid` applied on top.
 */
export function KeyValueGrid(props: {
  data: KeyValueData,
}) {

  // FIX: spread the overrides into the sx object. The previous shorthand
  // property (`_styleKeyValueGrid,`) created an inert `_styleKeyValueGrid`
  // key inside sx instead of applying the border/padding/fontSize overrides.
  const gridSx = React.useMemo(() => ({
    ...keyValueGridSx,
    ..._styleKeyValueGrid,
  }), []);

  return (
    <Box sx={gridSx}>
      {props.data.map(({ label, value }, index) => (
        <React.Fragment key={index}>
          <div>{label}</div>
          <div>{value}</div>
        </React.Fragment>
      ))}
    </Box>
  );
}
@@ -70,35 +88,124 @@ export function BlockPartToolInvocation(props: {
onDoubleClick?: (event: React.MouseEvent) => void;
}) {
const part = props.toolInvocationPart;
// state
const [expanded, setExpanded] = React.useState(false);
const kvData: KeyValueData = React.useMemo(() => {
switch (part.invocation.type) {
// external state
const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
// memo name
const { id: iId, invocation } = props.toolInvocationPart;
const { humanName, originalName } = React.useMemo(() => {
const invocationType = invocation.type;
const originalName = invocationType === 'function_call' ? invocation.name : 'code_execution';
const humanName = humanReadableFunctionName(originalName, invocationType, 'invocation');
return { humanName, originalName };
}, [invocation]);
// memo details
const detailsData: KeyValueData = React.useMemo(() => {
switch (invocation.type) {
case 'function_call':
return [
{ label: 'Name', value: <strong>{part.invocation.name}</strong> },
{ label: 'Args', value: part.invocation.args || 'None', asCode: true },
{ label: 'Id', value: part.id },
{ label: 'Name', value: invocation.name },
{ label: 'Args', value: invocation.args || 'None', asCode: true },
{ label: 'ID', value: iId },
];
case 'code_execution':
return [
{ label: 'Language', value: part.invocation.language },
{ label: 'Author', value: part.invocation.author },
{ label: 'Language', value: invocation.language },
{ label: 'Author', value: invocation.author },
{
label: 'Code',
value: <div style={{ whiteSpace: 'pre-wrap' }}>{part.invocation.code.trim()}</div>,
value: <div style={{ whiteSpace: 'pre-wrap' }}>{invocation.code.trim()}</div>,
},
{ label: 'Id', value: part.id },
{ label: 'ID', value: iId },
];
}
}, [part]);
}, [invocation, iId]);
const toggleExpanded = React.useCallback((event: React.MouseEvent) => {
event.stopPropagation();
setExpanded(prev => !prev);
}, []);
return (
<BlocksContainer onDoubleClick={props.onDoubleClick}>
<KeyValueGrid
data={kvData}
contentScaling={props.contentScaling}
/>
</BlocksContainer>
<BlocksContainer onDoubleClick={props.onDoubleClick}><Box /*sx={{ px: 1.5 }}*/>
<Sheet
variant='soft'
sx={{
borderLeft: '3px solid',
borderLeftColor: 'primary.softBg',
borderRadius: 'sm',
pl: 1,
pr: 2,
py: 0.75,
fontSize,
lineHeight,
display: 'flex',
flexDirection: 'column',
...(expanded ? {
border: '1px solid',
borderColor: 'primary.outlinedBorder',
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
} : {}),
}}
>
{/* Compact header */}
<Box
sx={{
display: 'flex',
alignItems: 'center',
gap: 1,
cursor: 'pointer',
'&:hover': { '& .expand-icon': { opacity: 1 } },
}}
onClick={toggleExpanded}
>
<IconButton
size='sm'
className='expand-icon'
sx={{
minWidth: 'auto',
minHeight: 'auto',
padding: 0,
opacity: expanded ? 1 : 0.5,
transition: 'opacity 0.2s',
}}
>
{expanded ? <KeyboardArrowDownIcon fontSize='small' /> : <KeyboardArrowRightIcon fontSize='small' />}
</IconButton>
{/*<Tooltip title={humanName !== originalName ? `Original: ${originalName}` : undefined} placement='top'>*/}
<Typography level='body-sm' sx={{ fontWeight: 'md' }}>
{humanName}
</Typography>
{/*</Tooltip>*/}
</Box>
{/* Expanded details */}
<ExpanderControlledBox expanded={expanded}>
{expanded && <Box sx={{ mt: 1, ml: 2.625, pl: 1 }}>
<KeyValueGrid
data={detailsData}
// contentScaling={props.contentScaling}
// stableSx={_styleKeyValueGrid}
/>
</Box>}
</ExpanderControlledBox>
</Sheet>
</Box></BlocksContainer>
);
}
@@ -0,0 +1,85 @@
//
// Utilities for rendering tool invocations
//
/**
* [EDITORIAL] Known hosted tool name translations
*
* This mapping provides human-readable names for actual hosted tools
* from AI model providers. Only add entries for confirmed provider-hosted tools.
*
* Note: Tool calls != Function calls
* - Tool calls: Provider-hosted tools (e.g., Anthropic's computer use, Gemini's code execution)
* - Function calls: User/app-defined functions that the model can invoke
*/
const KNOWN_TOOL_TRANSLATIONS: Record<string, string> = {
// Anthropic Computer Use Tools (hosted)
'computer': 'Computer Use',
'computer_20241022': 'Computer Use',
'bash': 'Bash',
'bash_20241022': 'Bash',
'text_editor': 'Text Editor',
'text_editor_20241022': 'Text Editor',
// Gemini Tools (hosted)
'code_execution': 'Code Execution',
'google_search_retrieval': 'Google Search',
// Add other confirmed provider-hosted tools here as discovered
} as const;
/**
* Translate a function/tool name to a human-readable format
*
* First checks for known hosted tools, then applies heuristics for function names
*/
export function humanReadableFunctionName(name: string, invocationType: 'function_call' | 'code_execution', phase: 'invocation' | 'response'): string {
if (invocationType === 'code_execution')
return phase === 'invocation' ? 'Generated code' : 'Executed code';
// check for known hosted tools
if (KNOWN_TOOL_TRANSLATIONS[name])
return KNOWN_TOOL_TRANSLATIONS[name];
// apply heuristics for user-defined function names
if (name.startsWith('get_'))
return _toTitleCase(name.substring(4));
if (name.startsWith('fetch_'))
return _toTitleCase(name.substring(6));
if (name.startsWith('search_'))
return _toTitleCase(name.substring(7)) + ' Search';
return _toTitleCase(name);
}
/**
* Get function display name and color
*/
export function functionNameAppearance(environment: 'upstream' | 'server' | 'client'): {
label: string;
color: 'primary' | 'neutral' | 'success';
} {
switch (environment) {
case 'upstream':
return { label: 'Hosted', color: 'primary' };
case 'server':
return { label: 'Server', color: 'neutral' };
case 'client':
return { label: 'Client', color: 'success' };
}
}
function _toTitleCase(fName: string): string {
// snake_case -> Title Case
if (fName.includes('_'))
return fName
.split('_')
.map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
.join(' ');
// camelCase -> Title Case
const withSpaces = fName.replace(/([A-Z])/g, ' $1').trim();
return withSpaces.charAt(0).toUpperCase() + withSpaces.slice(1);
}
@@ -1,10 +1,17 @@
import * as React from 'react';
import { Box, Chip, IconButton, Sheet, Typography } from '@mui/joy';
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
import KeyboardArrowRightIcon from '@mui/icons-material/KeyboardArrowRight';
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
import type { ContentScaling } from '~/common/app.theme';
import type { DMessageToolResponsePart } from '~/common/stores/chat/chat.fragments';
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
import { functionNameAppearance, humanReadableFunctionName } from './BlockPartToolInvocation.utils';
import { KeyValueData, KeyValueGrid } from './BlockPartToolInvocation';
@@ -14,36 +21,148 @@ export function BlockPartToolResponse(props: {
onDoubleClick?: (event: React.MouseEvent) => void;
}) {
const part = props.toolResponsePart;
// state
const [expanded, setExpanded] = React.useState(false);
const kvData: KeyValueData = React.useMemo(() => {
switch (part.response.type) {
// external state
const { fontSize, lineHeight } = useScaledTypographySx(props.contentScaling, false, false);
// memo name
const { id: rId, response, environment, error: rError } = props.toolResponsePart;
const { humanName, originalName, envInfo } = React.useMemo(() => {
const invocationType = response.type;
const originalName = invocationType === 'function_call' ? response.name : 'code_execution';
const humanName = humanReadableFunctionName(originalName, invocationType, 'response');
const envInfo = functionNameAppearance(environment);
return { humanName, originalName, envInfo };
}, [response, environment]);
// memo details data
const detailsData: KeyValueData = React.useMemo(() => {
switch (response.type) {
case 'function_call':
return [
{ label: 'Id', value: part.id },
{ label: 'Name', value: <strong>{part.response.name}</strong> },
{ label: 'Response', value: part.response.result, asCode: true },
...(!part.error ? [] : [{ label: 'Error', value: part.error }]),
{ label: 'Environment', value: part.environment },
{ label: 'Function', value: response.name },
{ label: 'Result', value: response.result, asCode: true },
...(!rError ? [] : [{ label: 'Error', value: String(rError) }]),
{ label: 'Environment', value: envInfo.label },
{ label: 'ID', value: rId },
];
case 'code_execution':
return [
{ label: 'Id', value: part.id },
{ label: 'Response', value: part.response.result, asCode: true },
...(!part.error ? [] : [{ label: 'Error', value: part.error }]),
{ label: 'Executor', value: part.response.executor },
{ label: 'Environment', value: part.environment },
{ label: 'Result', value: response.result, asCode: true },
...(!rError ? [] : [{ label: 'Error', value: String(rError) }]),
{ label: 'Executor', value: response.executor },
{ label: 'Environment', value: envInfo.label },
{ label: 'ID', value: rId },
];
}
}, [part]);
}, [envInfo.label, rError, rId, response]);
// memo border color
const borderColor = React.useMemo(() => {
if (rError) return 'danger.softBg';
switch (environment) {
case 'upstream':
return 'primary.softBg'; // Hosted - blue
case 'server':
return 'neutral.softBg'; // Server - gray
case 'client':
return 'success.softBg'; // Client - green
}
}, [rError, environment]);
const toggleExpanded = React.useCallback((event: React.MouseEvent) => {
event.stopPropagation();
setExpanded(prev => !prev);
}, []);
return (
<BlocksContainer onDoubleClick={props.onDoubleClick}>
<KeyValueGrid
data={kvData}
contentScaling={props.contentScaling}
color={part.error ? 'danger' : 'primary'}
/>
</BlocksContainer>
<BlocksContainer onDoubleClick={props.onDoubleClick}><Box /*sx={{ px: 1.5 }}*/>
<Sheet
variant='soft'
color={rError ? 'danger' : undefined}
sx={{
borderLeft: '3px solid',
borderLeftColor: borderColor,
borderRadius: 'sm',
pl: 1,
pr: 2,
py: 0.75,
fontSize,
lineHeight,
display: 'flex',
flexDirection: 'column',
...(expanded ? {
border: '1px solid',
borderColor: 'primary.outlinedBorder',
boxShadow: 'inset 2px 0 4px -2px rgba(0, 0, 0, 0.2)',
} : {}),
}}
>
{/* Compact header */}
<Box
sx={{
display: 'flex',
alignItems: 'center',
gap: 1,
cursor: 'pointer',
'&:hover': { '& .expand-icon': { opacity: 1 } },
}}
onClick={toggleExpanded}
>
<IconButton
size='sm'
className='expand-icon'
sx={{
minWidth: 'auto',
minHeight: 'auto',
padding: 0,
opacity: expanded ? 1 : 0.5,
transition: 'opacity 0.2s',
}}
>
{expanded ? <KeyboardArrowDownIcon fontSize='small' /> : <KeyboardArrowRightIcon fontSize='small' />}
</IconButton>
{/*<Tooltip title={humanName !== originalName ? `Original: ${originalName}` : undefined} placement='top'>*/}
<Typography level='body-sm' sx={{ fontWeight: 'md' }}>
{humanName}
</Typography>
{/*</Tooltip>*/}
{rError && (
<Chip size='sm' color='danger' variant='soft'>
Error
</Chip>
)}
<Chip size='sm' color={envInfo.color} variant='soft' sx={{ ml: 'auto' }}>
{envInfo.label}
</Chip>
</Box>
{/* Expanded details */}
<ExpanderControlledBox expanded={expanded}>
{expanded && <Box sx={{ mt: 1, ml: 2.625, pl: 1 }}>
<KeyValueGrid
data={detailsData}
// contentScaling={props.contentScaling}
// stableSx={_styleKeyValueGrid}
/>
</Box>}
</ExpanderControlledBox>
</Sheet>
</Box></BlocksContainer>
);
}
@@ -6,16 +6,21 @@ import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRendere
import type { ContentScaling, UIComplexityMode } from '~/common/app.theme';
import type { DMessageRole } from '~/common/stores/chat/chat.message';
import { DMessageContentFragment, DMessageFragmentId, isTextPart } from '~/common/stores/chat/chat.fragments';
import type { InterleavedFragment } from '~/common/stores/chat/hooks/useFragmentBuckets';
import { DMessageContentFragment, DMessageFragmentId, isTextContentFragment, isTextPart, isVoidPlaceholderFragment } from '~/common/stores/chat/chat.fragments';
import { Release } from '~/common/app.release';
import type { ChatMessageTextPartEditState } from '../ChatMessage';
import { BlockEdit_TextFragment } from './BlockEdit_TextFragment';
import { BlockOpEmpty } from './BlockOpEmpty';
import { BlockPartError } from './BlockPartError';
import { BlockPartImageRef } from './BlockPartImageRef';
import { BlockPartModelAux } from '../fragments-void/BlockPartModelAux';
import { BlockPartPlaceholder } from '../fragments-void/BlockPartPlaceholder';
import { BlockPartText_AutoBlocks } from './BlockPartText_AutoBlocks';
import { BlockPartToolInvocation } from './BlockPartToolInvocation';
import { BlockPartToolResponse } from './BlockPartToolResponse';
import { humanReadableFunctionName } from './BlockPartToolInvocation.utils';
const _editLayoutSx: SxProps = {
@@ -42,7 +47,7 @@ const _endLayoutSx: SxProps = {
export function ContentFragments(props: {
contentFragments: DMessageContentFragment[]
contentFragments: InterleavedFragment[]
showEmptyNotice: boolean,
contentScaling: ContentScaling,
@@ -50,6 +55,8 @@ export function ContentFragments(props: {
fitScreen: boolean,
isMobile: boolean,
messageRole: DMessageRole,
messagePendingIncomplete?: boolean,
messageGeneratorLlmId?: string | null,
optiAllowSubBlocksMemo?: boolean,
disableMarkdownText: boolean,
enhanceCodeBlocks: boolean,
@@ -76,8 +83,18 @@ export function ContentFragments(props: {
const isEditingText = !!props.textEditsState;
const enableRestartFromEdit = !fromAssistant && props.messageRole !== 'system';
// solo placeholder - dataStreamViz trigger
const showDataStreamViz =
!Release.Features.LIGHTER_ANIMATIONS
&& props.uiComplexityMode !== 'minimal'
&& props.contentFragments.length === 1
// && props.noVoidFragments // not needed, we have all the interleaved fragments here
&& isVoidPlaceholderFragment(props.contentFragments[0]);
// Content Fragments Edit Zero-State: button to create a new TextContentFragment
if (isEditingText && isEmpty)
if (isEditingText && !props.contentFragments.some(isTextContentFragment))
return !props.onFragmentAddBlank ? null : (
<Button aria-label='message body empty' variant='plain' color='neutral' onClick={props.onFragmentAddBlank} sx={{ justifyContent: 'flex-start' }}>
add text ...
@@ -92,7 +109,7 @@ export function ContentFragments(props: {
if (!props.showEmptyNotice && isEmpty)
return null;
return <Box aria-label='message body' sx={isEditingText ? _editLayoutSx : fromAssistant ? _startLayoutSx : _endLayoutSx}>
return <Box aria-label='message body' sx={(showDataStreamViz || isEditingText) ? _editLayoutSx : fromAssistant ? _startLayoutSx : _endLayoutSx}>
{/* Empty Message Block - if empty */}
{props.showEmptyNotice && (
@@ -103,35 +120,107 @@ export function ContentFragments(props: {
/>
)}
{props.contentFragments.map((fragment) => {
{props.contentFragments.map((fragment, fragmentIndex) => {
// simplify
const { fId, part } = fragment;
const { fId, ft } = fragment;
// Determine the text to edit based on the part type
let editText = '';
let editLabel;
if (isTextPart(part))
editText = part.text;
else if (part.pt === 'error')
editText = part.error;
else if (part.pt === 'tool_invocation') {
if (part.invocation.type === 'function_call') {
editText = part.invocation.args /* string | null */ || '';
editLabel = `[Invocation] Function Call: \`${part.invocation.name}\``;
} else {
editText = part.invocation.code;
editLabel = `[Invocation] Code Execution: \`${part.invocation.language}\``;
}
} else if (part.pt === 'tool_response') {
if (!part.error) {
editText = part.response.result;
editLabel = `[Response]: ${part.response.type === 'function_call' ? 'Function Call' : 'Code Execution'}: \`${part.id}\``;
// VOID FRAGMENTS (reasoning, placeholders - interleaved with content)
if (ft === 'void') {
const { part } = fragment;
switch (part.pt) {
// Handled by VoidFragments
// case 'annotations':
// console.warn('[DEV] ContentFragments: annotations fragment found in interleaved list');
// return null;
case 'ma':
return (
<BlockPartModelAux
key={fId}
fragmentId={fId}
auxType={part.aType}
auxText={part.aText}
auxHasSignature={part.textSignature !== undefined}
auxRedactedDataCount={part.redactedData?.length ?? 0}
messagePendingIncomplete={!!props.messagePendingIncomplete}
zenMode={props.uiComplexityMode === 'minimal'}
contentScaling={props.contentScaling}
isLastFragment={fragmentIndex === props.contentFragments.length - 1}
onFragmentDelete={props.onFragmentDelete}
onFragmentReplace={props.onFragmentReplace}
/>
);
case 'ph':
return (
<BlockPartPlaceholder
key={fId}
placeholderText={part.pText}
placeholderType={part.pType}
placeholderModelOp={part.modelOp}
placeholderAixControl={part.aixControl}
messageRole={props.messageRole}
contentScaling={props.contentScaling}
showAsItalic
showAsDataStreamViz={showDataStreamViz}
/>
);
case '_pt_sentinel':
return null;
default:
const _exhaustiveVoidCheck: never = part;
// fallthrough - we don't handle these here anymore
case 'annotations':
return (
<ScaledTextBlockRenderer
key={fId}
text={`Unknown Void Fragment: ${(part as any)?.pt}`}
contentScaling={props.contentScaling}
textRenderVariant='text'
showAsDanger
/>
);
}
}
// CONTENT FRAGMENTS (text, code, tool calls, images, errors)
const { part } = fragment;
// editing for text parts, tool invocations, or tool responses
if (props.textEditsState && !!props.setEditedText && (isTextPart(part) || part.pt === 'error' || part.pt === 'tool_invocation' || part.pt === 'tool_response')) {
if (props.textEditsState && !!props.setEditedText && (
isTextPart(part) || part.pt === 'error' || part.pt === 'tool_invocation' || part.pt === 'tool_response'
)) {
// Determine the text to edit based on the part type
let editText = '';
let editLabel;
if (isTextPart(part)) {
editText = part.text;
} else if (part.pt === 'error') {
editText = part.error;
} else if (part.pt === 'tool_invocation') {
if (part.invocation.type === 'function_call') {
editText = part.invocation.args /* string | null */ || '';
const humanName = humanReadableFunctionName(part.invocation.name, 'function_call', 'invocation');
editLabel = `[Invocation] ${humanName} · \`${part.invocation.name}\``;
} else {
editText = part.invocation.code;
const humanName = humanReadableFunctionName('code_execution', 'code_execution', 'invocation');
editLabel = `[Invocation] ${humanName} · \`${part.invocation.language}\``;
}
} else if (part.pt === 'tool_response') {
if (!part.error) {
editText = part.response.result;
const responseName = part.response.type === 'function_call' ? part.response.name : 'code_execution';
const humanName = humanReadableFunctionName(responseName, part.response.type, 'response');
editLabel = `[Response] ${humanName} · \`${part.id}\``;
}
}
return (
<BlockEdit_TextFragment
key={'edit-' + fId}
@@ -155,7 +244,9 @@ export function ContentFragments(props: {
<BlockPartError
key={fId}
errorText={part.error}
errorHint={part.hint}
messageRole={props.messageRole}
messageGeneratorLlmId={props.messageGeneratorLlmId}
contentScaling={props.contentScaling}
/>
);
@@ -165,7 +256,7 @@ export function ContentFragments(props: {
const rt = part.rt;
switch (rt) {
case 'zync':
const zt = part.zType
const zt = part.zType;
switch (zt) {
case 'asset':
// TODO: [ASSET] future: implement rendering for the real Reference to Zync Asset
@@ -170,7 +170,9 @@ export function BlockPartModelAnnotations(props: {
return null;
return (
<Box>
<Box
sx={{ mx: 1.5 }}
>
{/* Row of favicons */}
<Button
@@ -3,6 +3,7 @@ import * as React from 'react';
import type { ColorPaletteProp } from '@mui/joy/styles/types';
import { Box, Chip, Typography } from '@mui/joy';
import AllInclusiveIcon from '@mui/icons-material/AllInclusive';
import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline';
import TextFieldsIcon from '@mui/icons-material/TextFields';
import { RenderMarkdown } from '~/modules/blocks/markdown/RenderMarkdown';
@@ -11,6 +12,7 @@ import { useScaledTypographySx } from '~/modules/blocks/blocks.styles';
import { ConfirmationModal } from '~/common/components/modals/ConfirmationModal';
import { ExpanderControlledBox } from '~/common/components/ExpanderControlledBox';
import { adjustContentScaling, ContentScaling } from '~/common/app.theme';
import { animationSpinHalfPause } from '~/common/util/animUtils';
import { createTextContentFragment, DMessageContentFragment, DMessageFragmentId } from '~/common/stores/chat/chat.fragments';
import { useOverlayComponents } from '~/common/layout/overlays/useOverlayComponents';
@@ -32,17 +34,29 @@ const _styles = {
chip: {
px: 1.5,
py: 0.375,
my: '1px', // to not crop the outline on mobile
my: '1px', // to not crop the outline on mobile, or on beam
outline: '1px solid',
outlineColor: `${REASONING_COLOR}.solidBg`, // .outlinedBorder
boxShadow: `1px 2px 4px -3px var(--joy-palette-${REASONING_COLOR}-solidBg)`,
} as const,
chipDisabled: {
px: 1.5,
py: 0.375,
my: '1px', // to not crop the outline on mobile, or on beam
} as const,
chipIcon: {
fontSize: '1rem',
mr: 0.5,
} as const,
chipIconPending: {
fontSize: '1rem',
mr: 0.5,
animation: `${animationSpinHalfPause} 2s ease-in-out infinite`,
} as const,
chipExpanded: {
mt: '1px', // need to copy the `chip` mt
px: 1.5,
@@ -93,8 +107,11 @@ export function BlockPartModelAux(props: {
auxText: string,
auxHasSignature: boolean,
auxRedactedDataCount: number,
messagePendingIncomplete: boolean,
zenMode: boolean,
contentScaling: ContentScaling,
isLastFragment: boolean,
onFragmentDelete?: (fragmentId: DMessageFragmentId) => void,
onFragmentReplace?: (fragmentId: DMessageFragmentId, newFragment: DMessageContentFragment) => void,
}) {
@@ -115,7 +132,8 @@ export function BlockPartModelAux(props: {
// handlers
const { onFragmentReplace } = props;
const { onFragmentDelete, onFragmentReplace } = props;
const showDelete = !!onFragmentDelete;
const showInline = !!onFragmentReplace;
const handleToggleExpanded = React.useCallback(() => {
@@ -123,6 +141,23 @@ export function BlockPartModelAux(props: {
setExpanded(on => !on);
}, []);
const handleDelete = React.useCallback(() => {
if (!onFragmentDelete) return;
showPromisedOverlay('chat-message-delete-aux', {}, ({ onResolve, onUserReject }) =>
<ConfirmationModal
open onClose={onUserReject} onPositive={() => onResolve(true)}
confirmationText={<>
Delete this {typeText.toLowerCase()} completely?
<br />
This action cannot be undone.
</>}
positiveActionText='Delete'
/>,
).then(() => {
onFragmentDelete(props.fragmentId);
}).catch(() => null /* ignore closure */);
}, [onFragmentDelete, props.fragmentId, showPromisedOverlay, typeText]);
const handleInline = React.useCallback(() => {
if (!onFragmentReplace) return;
showPromisedOverlay('chat-message-inline-aux', {}, ({ onResolve, onUserReject }) =>
@@ -149,29 +184,52 @@ export function BlockPartModelAux(props: {
{/* Chip to expand/collapse */}
<Box sx={{ display: 'flex', flexWrap: 'wrap', gap: 1, alignItems: 'center', justifyContent: 'space-between' }}>
<Chip
color={REASONING_COLOR}
color={props.isLastFragment ? REASONING_COLOR : 'neutral'}
variant={expanded ? 'solid' : 'soft'}
size='sm'
onClick={handleToggleExpanded}
sx={expanded ? _styles.chipExpanded : _styles.chip}
startDecorator={<AllInclusiveIcon sx={_styles.chipIcon} /* sx={{ color: expanded ? undefined : REASONING_COLOR }} */ />}
sx={expanded ? _styles.chipExpanded : props.isLastFragment ? _styles.chip : _styles.chipDisabled}
startDecorator={
<AllInclusiveIcon
sx={(props.messagePendingIncomplete && !expanded && props.isLastFragment) ? _styles.chipIconPending : _styles.chipIcon}
/* sx={{ color: expanded ? undefined : REASONING_COLOR }} */
/>
}
// startDecorator='🧠'
>
Show {typeText}
</Chip>
{expanded && showInline && !!props.auxText && (
<Chip
color={REASONING_COLOR}
variant='soft'
size='sm'
disabled={!onFragmentReplace}
onClick={!onFragmentReplace ? undefined : handleInline}
endDecorator={<TextFieldsIcon />}
sx={_styles.chip}
>
Make Regular Text
</Chip>
{expanded && (showInline || showDelete) && !!props.auxText && (
<Box sx={{ display: 'flex', gap: 1 }}>
{/* Make inline */}
{showInline && <Chip
color={REASONING_COLOR}
variant='soft'
size='sm'
disabled={!onFragmentReplace || props.messagePendingIncomplete}
onClick={!onFragmentReplace ? undefined : handleInline}
endDecorator={<TextFieldsIcon />}
sx={(!onFragmentReplace || props.messagePendingIncomplete) ? _styles.chipDisabled : _styles.chip}
>
Make Regular Text
</Chip>}
{/* Delete */}
{showDelete && <Chip
color={REASONING_COLOR}
variant='soft'
size='sm'
disabled={!onFragmentDelete || props.messagePendingIncomplete}
onClick={!onFragmentDelete ? undefined : handleDelete}
endDecorator={<DeleteOutlineIcon />}
sx={(!onFragmentDelete || props.messagePendingIncomplete) ? _styles.chipDisabled : _styles.chip}
>
Delete
</Chip>}
</Box>
)}
</Box>
@@ -5,13 +5,14 @@ import { Box, Chip } from '@mui/joy';
import BrushRoundedIcon from '@mui/icons-material/BrushRounded';
import CodeIcon from '@mui/icons-material/Code';
import HourglassEmptyIcon from '@mui/icons-material/HourglassEmpty';
import RepeatIcon from '@mui/icons-material/Repeat';
import SearchRoundedIcon from '@mui/icons-material/SearchRounded';
import { BlocksContainer } from '~/modules/blocks/BlocksContainers';
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
import type { DMessageRole } from '~/common/stores/chat/chat.message';
import type { DVoidPlaceholderModelOp } from '~/common/stores/chat/chat.fragments';
import type { DVoidPlaceholderModelOp, DVoidPlaceholderPart } from '~/common/stores/chat/chat.fragments';
import { adjustContentScaling, ContentScaling, themeScalingMap } from '~/common/app.theme';
import { DataStreamViz } from '~/common/components/DataStreamViz';
import { animationSpinHalfPause } from '~/common/util/animUtils';
@@ -31,6 +32,10 @@ const _styles = {
outline: '1px solid',
outlineColor: 'primary.solidBg', // .outlinedBorder
boxShadow: `1px 2px 4px -3px var(--joy-palette-primary-solidBg)`,
// wrap text if needed - introduced for retry error messages
whiteSpace: 'normal',
wordBreak: 'break-word',
} as const,
followUpChipIcon: {
@@ -113,8 +118,9 @@ function ModelOperationChip(props: {
export function BlockPartPlaceholder(props: {
placeholderText: string,
placeholderType?: 'chat-gen-follow-up',
placeholderType?: DVoidPlaceholderPart['pType'],
placeholderModelOp?: DVoidPlaceholderModelOp,
placeholderAixControl?: DVoidPlaceholderPart['aixControl'],
messageRole: DMessageRole,
contentScaling: ContentScaling,
showAsItalic?: boolean,
@@ -146,7 +152,8 @@ export function BlockPartPlaceholder(props: {
// Type-based visualization
if (props.placeholderType === 'chat-gen-follow-up') return (
const isFollowUp = props.placeholderType === 'chat-gen-follow-up';
if (isFollowUp) return (
<Chip
color='primary'
variant='soft'
@@ -158,6 +165,34 @@ export function BlockPartPlaceholder(props: {
</Chip>
);
// AIX Control renderer (e.g., error correction retry)
if (props.placeholderAixControl?.ctl === 'ec-retry') {
const { rScope, rCauseHttp, rCauseConn } = props.placeholderAixControl;
const color = rScope === 'srv-dispatch' ? 'primary' : rScope === 'srv-op' ? 'warning' : 'danger';
return (
<Chip
// size='sm'
color={color}
variant='soft'
startDecorator={<div style={{ opacity: 0.75 }}>{rCauseHttp || rCauseConn || rScope}</div>}
endDecorator={<RepeatIcon style={{ opacity: 0.5 }} />}
onClick={() => console.log({ props })}
sx={{
gap: 1.5,
px: 1.5,
py: 0.375,
my: '1px', // to not crop the outline on mobile, or on beam
boxShadow: `1px 2px 4px -3px var(--joy-palette-${color}-solidBg)`,
// wrap text if needed - introduced for retry error messages
whiteSpace: 'normal',
wordBreak: 'break-word',
}}
>
{props.placeholderText}
</Chip>
);
}
// Model operation renderer
if (props.placeholderModelOp)
return (
@@ -6,29 +6,16 @@ import { Box } from '@mui/joy';
import { ScaledTextBlockRenderer } from '~/modules/blocks/ScaledTextBlockRenderer';
import type { ContentScaling, UIComplexityMode } from '~/common/app.theme';
import type { DMessageContentFragment, DMessageFragmentId, DMessageVoidFragment } from '~/common/stores/chat/chat.fragments';
import type { DMessageRole } from '~/common/stores/chat/chat.message';
import { DMessageContentFragment, DMessageFragmentId, DMessageVoidFragment, isPlaceholderPart } from '~/common/stores/chat/chat.fragments';
import { Release } from '~/common/app.release';
import { BlockPartModelAux } from './BlockPartModelAux';
import { BlockPartPlaceholder } from './BlockPartPlaceholder';
import { BlockPartModelAnnotations } from './BlockPartModelAnnotations';
const editLayoutSx: SxProps = {
const startLayoutSx: SxProps = {
display: 'grid',
gap: 1.5, // see why we give more space on ChatMessage
// horizontal separator between messages (second part+ and before)
// '& > *:not(:first-of-type)': {
// borderTop: '1px solid',
// borderTopColor: 'background.level3',
// },
};
const startLayoutSx: SxProps = {
...editLayoutSx,
// NOTE: we used to have 'flex-start' here, but it was causing the Annotation fragment to not be able to
// stretch to the full with of this 'void fragments' container.
// So now we don't have 'flex-start' anymore, and we may expect issues with other Fragment kinds?
@@ -36,7 +23,7 @@ const startLayoutSx: SxProps = {
};
const endLayoutSx: SxProps = {
...editLayoutSx,
...startLayoutSx,
justifyContent: 'flex-end',
};
@@ -47,6 +34,7 @@ const endLayoutSx: SxProps = {
*
* In the future we can revisit this decision in case Content fragments and *Void Fragments** are
* interleaved - but for now, Void fragments will be grouped together at the top.
* ^ 2025-11-20: NOTE: Lol, yes we did
*/
export function VoidFragments(props: {
@@ -56,21 +44,16 @@ export function VoidFragments(props: {
contentScaling: ContentScaling,
uiComplexityMode: UIComplexityMode,
messageRole: DMessageRole,
messagePendingIncomplete?: boolean,
onFragmentDelete?: (fragmentId: DMessageFragmentId) => void,
onFragmentReplace?: (fragmentId: DMessageFragmentId, newFragment: DMessageContentFragment) => void,
}) {
const showDataStreamViz =
!Release.Features.LIGHTER_ANIMATIONS
&& props.uiComplexityMode !== 'minimal'
&& props.voidFragments.length === 1 && props.nonVoidFragmentsCount === 0
&& isPlaceholderPart(props.voidFragments[0].part);
const fromAssistant = props.messageRole === 'assistant';
return <Box aria-label='message void' sx={showDataStreamViz ? editLayoutSx : fromAssistant ? startLayoutSx : endLayoutSx}>
return <Box aria-label='message void' sx={fromAssistant ? startLayoutSx : endLayoutSx}>
{props.voidFragments.map(({ fId, part }) => {
switch (part.pt) {
@@ -84,41 +67,15 @@ export function VoidFragments(props: {
/>
);
case 'ma':
return (
<BlockPartModelAux
key={fId}
fragmentId={fId}
auxType={part.aType}
auxText={part.aText}
auxHasSignature={part.textSignature !== undefined}
auxRedactedDataCount={part.redactedData?.length ?? 0}
zenMode={props.uiComplexityMode === 'minimal'}
contentScaling={props.contentScaling}
onFragmentReplace={props.onFragmentReplace}
/>
);
case 'ph':
return (
<BlockPartPlaceholder
key={fId}
placeholderText={part.pText}
placeholderType={part.pType}
placeholderModelOp={part.modelOp}
messageRole={props.messageRole}
contentScaling={props.contentScaling}
showAsItalic
showAsDataStreamViz={showDataStreamViz}
/>
);
case '_pt_sentinel':
return null;
default:
// noinspection JSUnusedLocalSymbols
const _exhaustiveVoidFragmentCheck: never = part;
// fallthrough - we don't handle these here anymore
case 'ma':
case 'ph':
return (
<ScaledTextBlockRenderer
key={fId}
@@ -7,6 +7,14 @@ import { wrapWithMarkdownSyntax } from '~/modules/blocks/markdown/markdown.wrapp
import { BUBBLE_MIN_TEXT_LENGTH } from './ChatMessage';
/**
* Text matching strategy for selection highlighting:
* - 'exact': Direct substring match in source (former behavior)
* - 'md-approx': Markdown-approximate match - finds rendered text in decorated source (new behavior)
*/
const MATCH_METHOD: 'exact' | 'md-approx' = 'md-approx';
/* Note: future evolution of Marking:
* 'data-purpose'?: 'review' | 'important' | 'note'; // Purpose of the highlight
* 'data-user-id'?: string; // Unique user identifier
@@ -27,6 +35,112 @@ const APPLY_CUT = (_text: string) => ''; // Cut removes the text entirely
type HighlightTool = 'highlight' | 'strike' | 'strong' | 'cut';
// -- Matcher algorithms --
/**
* Result from text matching: the source substring and the inner text to apply tools to
*/
interface MatchResult {
sourceText: string; // Text in source (may include decorators)
selText: string; // Text to apply tool to (decorators stripped)
leadingDecorators: string;
trailingDecorators: string;
}
/**
* Finds text using exact substring matching.
*/
function findExactMatch(needle: string, haystack: string): MatchResult | null {
const firstIndex = haystack.indexOf(needle);
if (firstIndex === -1) return null;
// Ensure uniqueness - only one occurrence
if (haystack.indexOf(needle, firstIndex + 1) !== -1) return null;
return {
sourceText: needle,
selText: needle,
leadingDecorators: '',
trailingDecorators: '',
};
}
/**
* Finds text in source markdown by stripping decorators and tracking positions.
* Returns the source substring (including decorators) that renders to the needle text.
*/
function findInMarkdownSource(needle: string, haystack: string): MatchResult | null {
// 1. strip markdown decorators while tracking positions
let stripped = '';
const posMap: number[] = []; // stripped char index -> haystack char index
let i = 0;
while (i < haystack.length) {
const char = haystack[i];
// skip common markdown decorator characters
if (char === '*' || char === '_' || char === '~' || char === '`') {
i++;
continue;
}
// regular character - track position
stripped += char;
posMap.push(i);
i++;
}
// if the needle is empty after stripping -- nothing we can do here
const idx = stripped.indexOf(needle);
if (idx === -1) {
// not found - need a different approach
return null;
}
// ensure uniqueness - only one occurrence
if (stripped.indexOf(needle, idx + 1) !== -1) {
// multiple occurrences - need a different approach
return null;
}
// map back to source positions
const startPos = posMap[idx];
const endIdx = idx + needle.length - 1;
const endPos = endIdx < posMap.length ? posMap[endIdx] + 1 : haystack.length;
// expand to include surrounding markdown decorators
let actualStart = startPos;
let actualEnd = endPos;
// walk backwards to include opening decorators
while (actualStart > 0) {
const prevChar = haystack[actualStart - 1];
if (prevChar === '*' || prevChar === '_' || prevChar === '~' || prevChar === '`')
actualStart--;
else
break;
}
// walk forwards to include closing decorators
while (actualEnd < haystack.length) {
const nextChar = haystack[actualEnd];
if (nextChar === '*' || nextChar === '_' || nextChar === '~' || nextChar === '`')
actualEnd++;
else
break;
}
const sourceText = haystack.substring(actualStart, actualEnd);
const leadingDecorators = sourceText.match(/^[*_~`]+/)?.[0] || '';
const trailingDecorators = sourceText.match(/[*_~`]+$/)?.[0] || '';
const selText = sourceText.slice(leadingDecorators.length, sourceText.length - trailingDecorators.length);
return { sourceText, selText, leadingDecorators, trailingDecorators };
}
export function useSelHighlighterMemo(
messageId: DMessageId,
selText: string | null,
@@ -44,31 +158,35 @@ export function useSelHighlighterMemo(
const highlightFunction = fragments.reduce((acc: false /* not found */ | ((tool: HighlightTool) => void) | true /* more than one */, fragment) => {
if (!acc && isTextContentFragment(fragment)) {
const fragmentText = fragment.part.text;
let index = fragmentText.indexOf(selText);
const match = MATCH_METHOD === 'md-approx'
? findInMarkdownSource(selText, fragmentText)
: findExactMatch(selText, fragmentText);
while (index !== -1) {
// If we've found more than one occurrence, we can stop
if (match) {
// If we already found one, this is a duplicate
if (acc) return true;
index = fragmentText.indexOf(selText, index + 1);
const { sourceText, selText, leadingDecorators, trailingDecorators } = match;
// Tool application function
acc = (tool: HighlightTool) => {
// Apply the tool
const highlighted =
// Apply the tool to the inner text
const selProcessed =
tool === 'highlight' ? APPLY_HTML_HIGHLIGHT(selText)
: tool === 'strike' ? APPLY_HTML_STRIKE(selText)
: tool === 'strong' ? APPLY_MD_STRONG(selText)
: tool === 'cut' ? APPLY_CUT(selText)
: selText;
// Reconstruct with original decorators
const reconstructed = leadingDecorators + selProcessed + trailingDecorators;
// Toggle, if the tooled text is already present (except for cut which always removes)
const newFragmentText =
tool === 'cut' ? fragmentText.replace(selText, highlighted) // Cut always removes text
: fragmentText.includes(highlighted) ? fragmentText.replace(highlighted, selText) // toggles selection
: fragmentText.replace(selText, highlighted);
tool === 'cut' ? fragmentText.replace(sourceText, reconstructed) // Cut always removes text
: fragmentText.includes(reconstructed) ? fragmentText.replace(reconstructed, sourceText) // toggles selection
: fragmentText.replace(sourceText, reconstructed);
// Replace the whole fragment within the message
onMessageFragmentReplace(messageId, fragment.fId, createTextContentFragment(newFragmentText));
+3 -3
View File
@@ -1,4 +1,4 @@
import { AixChatGenerateContent_DMessage, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
import { AixChatGenerateContent_DMessageGuts, aixChatGenerateContent_DMessage_FromConversation } from '~/modules/aix/client/aix.client';
import { autoChatFollowUps } from '~/modules/aifn/auto-chat-follow-ups/autoChatFollowUps';
import { autoConversationTitle } from '~/modules/aifn/autotitle/autoTitle';
@@ -19,7 +19,7 @@ export const CHATGENERATE_RESPONSE_PLACEHOLDER = '...'; // 💫 ..., 🖊️ ...
export interface PersonaProcessorInterface {
handleMessage(accumulatedMessage: AixChatGenerateContent_DMessage, messageComplete: boolean): void;
handleMessage(accumulatedMessage: AixChatGenerateContent_DMessageGuts, messageComplete: boolean): void;
}
@@ -72,7 +72,7 @@ export async function runPersonaOnConversationHead(
'conversation',
conversationId,
{ abortSignal: abortController.signal, throttleParallelThreads: parallelViewCount },
(messageOverwrite: AixChatGenerateContent_DMessage, messageComplete: boolean) => {
(messageOverwrite: AixChatGenerateContent_DMessageGuts, messageComplete: boolean) => {
// Note: there was an abort check here, but it removed the last packet, which contained the cause and final text.
// if (abortController.signal.aborted)
@@ -1,9 +1,8 @@
import { elevenLabsSpeakText } from '~/modules/elevenlabs/elevenlabs.client';
import type { AixChatGenerateContent_DMessageGuts } from '~/modules/aix/client/aix.client';
import { speakText } from '~/modules/speex/speex.client';
import { isTextContentFragment } from '~/common/stores/chat/chat.fragments';
import type { AixChatGenerateContent_DMessage } from '~/modules/aix/client/aix.client';
import type { PersonaProcessorInterface } from '../chat-persona';
@@ -16,7 +15,7 @@ export class PersonaChatMessageSpeak implements PersonaProcessorInterface {
constructor(private autoSpeakType: AutoSpeakType) {
}
handleMessage(accumulatedMessage: Partial<AixChatGenerateContent_DMessage>, messageComplete: boolean) {
handleMessage(accumulatedMessage: Partial<AixChatGenerateContent_DMessageGuts>, messageComplete: boolean) {
if (this.autoSpeakType === 'off' || this.spokenLine) return;
// Require a Content.Text first fragment
@@ -58,7 +57,7 @@ export class PersonaChatMessageSpeak implements PersonaProcessorInterface {
#speak(text: string) {
console.log('📢 TTS:', text);
this.spokenLine = true;
// fire/forget: we don't want to stall this loop
void elevenLabsSpeakText(text, undefined, false, true);
// fire/forget: we don't want to stall streaming
void speakText(text, undefined, { label: 'Chat message' });
}
}
+1 -1
View File
@@ -283,7 +283,7 @@ export function AppNews() {
</Box>
)}
{idx === 0 && <Divider sx={{ my: 6, mx: 6 }}/>}
{idx === 1 && <Divider sx={{ my: 6, mx: 6 }}/>}
</React.Fragment>;
})}
+15
View File
@@ -71,6 +71,21 @@ export const DevNewsItem: NewsItem = {
// news and feature surfaces
export const NewsItems: NewsItem[] = [
{
versionCode: '2.0.2',
versionName: 'Heavy Critters',
versionDate: new Date('2025-12-01T06:00:00Z'), // 2.0.2
// versionDate: new Date('2025-11-24T23:30:00Z'), // 2.0.1
items: [
{ text: <><B>New in 2.0.2</B> Speech synthesis with Web Speech, LocalAI, OpenAI and more</> },
{ text: <><B>Opus 4.5</B>, <B>Gemini 3 Pro</B>, <B>Nano Banana Pro</B>, <B>Grok 4.1</B>, <B>GPT-5.1</B>, <B>Kimi K2</B></> },
{ text: <><B>Image Generation</B> with Azure and LocalAI providers, in addition to OpenAI</> },
{ text: <>Enhanced <B>OpenRouter</B> integration with auto-capabilities and reasoning</> },
{ text: <>Call transcripts, generate persona images, search button in beams</> },
{ text: <>Starred models, errors resilience, 278 fixes</> },
{ text: <ExternalLink href='https://github.com/enricoros/big-agi/issues/new?template=ai-triage.yml'>AI-Automatic feature development</ExternalLink> },
],
},
{
versionCode: '2.0.0',
versionName: 'Open',
@@ -129,6 +129,13 @@ export function AppChatSettingsAI() {
</>}
/>
<FormControlDomainModel
domainId='imageCaption'
title='Vision model'
description='Image captioning'
tooltip='Vision model used to generate text descriptions of images when the Caption (Text) attachment option is selected.'
/>
{labsDevMode && (
<FormControlDomainModel
domainId='primaryChat'
+58 -20
View File
@@ -1,18 +1,17 @@
import * as React from 'react';
import { Accordion, AccordionDetails, accordionDetailsClasses, AccordionGroup, AccordionSummary, accordionSummaryClasses, Avatar, Box, Button, ListItemContent, styled, Tab, TabList, TabPanel, Tabs } from '@mui/joy';
import { Accordion, AccordionDetails, AccordionGroup, AccordionSummary, accordionSummaryClasses, Avatar, Box, Button, ListItemContent, styled, Tab, TabList, TabPanel, Tabs, Typography } from '@mui/joy';
import AddIcon from '@mui/icons-material/Add';
import AutoAwesomeIcon from '@mui/icons-material/AutoAwesome';
import KeyboardCommandKeyOutlinedIcon from '@mui/icons-material/KeyboardCommandKeyOutlined';
import LanguageRoundedIcon from '@mui/icons-material/LanguageRounded';
import MicIcon from '@mui/icons-material/Mic';
import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver';
import ScienceIcon from '@mui/icons-material/Science';
import SearchIcon from '@mui/icons-material/Search';
import TerminalOutlinedIcon from '@mui/icons-material/TerminalOutlined';
import { BrowseSettings } from '~/modules/browse/BrowseSettings';
import { DallESettings } from '~/modules/t2i/dalle/DallESettings';
import { ElevenlabsSettings } from '~/modules/elevenlabs/ElevenlabsSettings';
import { GoogleSearchSettings } from '~/modules/google/GoogleSearchSettings';
import { T2ISettings } from '~/modules/t2i/T2ISettings';
@@ -20,14 +19,15 @@ import type { PreferencesTabId } from '~/common/layout/optima/store-layout-optim
import { AppBreadcrumbs } from '~/common/components/AppBreadcrumbs';
import { DarkModeToggleButton, darkModeToggleButtonSx } from '~/common/components/DarkModeToggleButton';
import { GoodModal } from '~/common/components/modals/GoodModal';
import { Is } from '~/common/util/pwaUtils';
import { PhVoice } from '~/common/components/icons/phosphor/PhVoice';
import { optimaActions } from '~/common/layout/optima/useOptima';
import { useIsMobile } from '~/common/components/useMatchMedia';
import { AppChatSettingsAI } from './AppChatSettingsAI';
import { AppChatSettingsUI } from './settings-ui/AppChatSettingsUI';
import { UxLabsSettings } from './UxLabsSettings';
import { VoiceSettings } from './VoiceSettings';
import { VoiceInSettings } from './VoiceInSettings';
import { VoiceOutSettings } from './VoiceOutSettings';
// configuration
@@ -44,7 +44,11 @@ const Topics = styled(AccordionGroup)({
// larger summary, with a spinning icon
[`& .${accordionSummaryClasses.button}`]: {
minHeight: 64,
minHeight: '52px',
border: 'none',
paddingRight: '0.75rem',
backgroundColor: 'rgba(var(--joy-palette-primary-lightChannel) / 0.2)',
gap: '1rem',
},
[`& .${accordionSummaryClasses.indicator}`]: {
transition: '0.2s',
@@ -52,11 +56,6 @@ const Topics = styled(AccordionGroup)({
[`& [aria-expanded="true"] .${accordionSummaryClasses.indicator}`]: {
transform: 'rotate(45deg)',
},
// larger padded block
[`& .${accordionDetailsClasses.content}.${accordionDetailsClasses.expanded}`]: {
paddingBlock: '1rem',
},
});
function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode, startCollapsed?: boolean, children?: React.ReactNode }) {
@@ -92,9 +91,9 @@ function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode
>
{!!props.icon && (
<Avatar
size='sm'
color={COLOR_TOPIC_ICON}
variant={expanded ? 'plain' /* was: soft */ : 'plain'}
// size='sm'
>
{props.icon}
</Avatar>
@@ -109,7 +108,7 @@ function Topic(props: { title?: React.ReactNode, icon?: string | React.ReactNode
slotProps={{
content: {
sx: {
px: { xs: 1.5, md: 2 },
p: { xs: 1.5, md: 2.5 },
},
},
}}
@@ -153,6 +152,7 @@ const _styles = {
tabsListTab: {
// borderRadius: '2rem',
borderRadius: 'sm',
fontSize: 'sm',
flex: 1,
p: 0,
'&[aria-selected="true"]': {
@@ -251,7 +251,7 @@ export function SettingsModal(props: {
<Tab value='tools' disableIndicator sx={_styles.tabsListTab}>Tools</Tab>
</TabList>
<TabPanel value='chat' variant='outlined' sx={_styles.tabPanel}>
<TabPanel value='chat' color='primary' variant='outlined' sx={_styles.tabPanel}>
<Topics>
<Topic>
<AppChatSettingsUI />
@@ -268,18 +268,18 @@ export function SettingsModal(props: {
</Topics>
</TabPanel>
<TabPanel value='voice' variant='outlined' sx={_styles.tabPanel}>
<TabPanel value='voice' color='primary' variant='outlined' sx={_styles.tabPanel}>
<Topics>
<Topic icon={/*'🎙️'*/ <MicIcon />} title='Microphone'>
<VoiceSettings />
<VoiceInSettings isMobile={isMobile} />
</Topic>
<Topic icon={/*'📢'*/ <RecordVoiceOverIcon />} title='ElevenLabs API'>
<ElevenlabsSettings />
<Topic icon={/*'📢'*/ <PhVoice />} title={'Speech'/*<>Voices <GoodBadge badge='New' /></>*/}>
<VoiceOutSettings isMobile={isMobile} />
</Topic>
</Topics>
</TabPanel>
<TabPanel value='draw' variant='outlined' sx={_styles.tabPanel}>
<TabPanel value='draw' color='primary' variant='outlined' sx={_styles.tabPanel}>
<Topics>
<Topic>
<T2ISettings />
@@ -290,7 +290,45 @@ export function SettingsModal(props: {
</Topics>
</TabPanel>
<TabPanel value='tools' variant='outlined' sx={_styles.tabPanel}>
<TabPanel value='tools' color='primary' variant='outlined' sx={_styles.tabPanel}>
{/* Search Modifier Info */}
<Box sx={{
p: 2,
borderRadius: 'calc(var(--joy-radius-md) - 1px)',
// backgroundColor: 'background.level1',
display: 'flex',
alignItems: 'center',
gap: 2,
}}>
<Button
variant='soft'
color='success'
startDecorator={<SearchIcon />}
sx={{
// this is copied frmo ButtonSearchControl._styles.desktop
minWidth: 100,
justifyContent: 'flex-start',
borderRadius: '18px',
pointerEvents: 'none',
'[data-joy-color-scheme="light"] &': {
bgcolor: '#d5ec31',
},
boxShadow: 'inset 0 2px 4px -1px rgba(0,0,0,0.15)',
textWrap: 'nowrap',
}}
>
Search
</Button>
<Box sx={{ flex: 1 }}>
<Typography level='body-sm' sx={{ fontWeight: 'md', mb: 0.5 }}>
Use the Search button
</Typography>
<Typography level='body-xs' sx={{ color: 'text.secondary' }}>
Modern AI models have native search built-in. Click the Search button when chatting to enable real-time web search.
</Typography>
</Box>
</Box>
<Topics>
<Topic icon={<LanguageRoundedIcon />} title='Load Web Pages (with images)' startCollapsed>
<BrowseSettings />
@@ -38,6 +38,7 @@ const shortcutsMd = platformAwareKeystrokes(`
| Ctrl + , | Preferences |
| Ctrl + Shift + M | 🧠 Models |
| Ctrl + Shift + O | 💬 Options (current Chat Model) |
| Ctrl + Shift + A | Toggle AI Request Inspector |
| Ctrl + Shift + + | Increase Text Size |
| Ctrl + Shift + - | Decrease Text Size |
| Ctrl + Shift + / | Shortcuts |
@@ -0,0 +1,54 @@
import * as React from 'react';
import { FormControl } from '@mui/joy';
import { useChatMicTimeoutMs } from '../chat/store-app-chat';
import type { FormRadioOption } from '~/common/components/forms/FormRadioControl';
import { FormChipControl } from '~/common/components/forms/FormChipControl';
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
import { LanguageSelect } from '~/common/components/LanguageSelect';
const _minTimeouts: ReadonlyArray<FormRadioOption<string>> = [
{ value: '600', label: '0.6s', description: 'Best for quick calls' },
{ value: '2000', label: '2s', description: 'Standard' },
{ value: '5000', label: '5s', description: 'Breathe' },
{ value: '15000', label: '15s', description: 'Best for thinking' },
] as const;
export function VoiceInSettings(props: { isMobile: boolean }) {
// external state
const [chatTimeoutMs, setChatTimeoutMs] = useChatMicTimeoutMs();
// derived - converts from string keys to numbers and vice versa
const chatTimeoutValue: string = '' + chatTimeoutMs;
const setChatTimeoutValue = React.useCallback((value: string) => {
value && setChatTimeoutMs(parseInt(value));
}, [setChatTimeoutMs]);
return <>
<FormControl orientation='horizontal' sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
<FormLabelStart
title='Language'
description='Mic and voice'
// tooltip='For Microphone input and Voice output. Microphone support varies by browser (iPhone/Safari lacks speech input).'
/>
<LanguageSelect />
</FormControl>
{!props.isMobile && (
<FormChipControl
title='Timeout'
// color='primary'
options={_minTimeouts}
value={chatTimeoutValue}
onChange={setChatTimeoutValue}
/>
)}
</>;
}
@@ -0,0 +1,46 @@
import { SpeexConfigureEngines } from '~/modules/speex/components/SpeexConfigureEngines';
import { useSpeexEngines } from '~/modules/speex/store-module-speex';
import { ChatAutoSpeakType, useChatAutoAI } from '../chat/store-app-chat';
import { FormRadioOption } from '~/common/components/forms/FormRadioControl';
import { FormChipControl } from '~/common/components/forms/FormChipControl';
const _autoSpeakOptions: FormRadioOption<ChatAutoSpeakType>[] = [
{ value: 'off', label: 'No', description: 'Off' },
{ value: 'firstLine', label: 'Start', description: 'First paragraph' },
{ value: 'all', label: 'Full', description: 'Complete response' },
] as const;
/**
* Voice output settings - Auto-speak mode and TTS engine configuration
*/
export function VoiceOutSettings(props: { isMobile: boolean }) {
// external state
const { autoSpeak, setAutoSpeak } = useChatAutoAI();
// external state - module
const hasEngines = useSpeexEngines().length > 0;
return <>
{/* Auto-speak setting */}
<FormChipControl
title='Speak Chats'
size='md'
// color='primary'
tooltip={!hasEngines ? 'No voice engines available. Configure a TTS service or use system voice.' : undefined}
disabled={!hasEngines}
options={_autoSpeakOptions}
value={autoSpeak}
onChange={setAutoSpeak}
/>
{/* Engine configuration */}
<SpeexConfigureEngines isMobile={props.isMobile} />
</>;
}
-47
View File
@@ -1,47 +0,0 @@
import * as React from 'react';
import { FormControl } from '@mui/joy';
import { useChatMicTimeoutMs } from '../chat/store-app-chat';
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
import { FormRadioControl } from '~/common/components/forms/FormRadioControl';
import { LanguageSelect } from '~/common/components/LanguageSelect';
import { useIsMobile } from '~/common/components/useMatchMedia';
/**
 * Voice input settings: spoken-language selection (shared by ASR and TTS)
 * and, on desktop only, the microphone auto-stop timeout.
 */
export function VoiceSettings() {

  // external state
  const isMobile = useIsMobile();
  const [chatTimeoutMs, setChatTimeoutMs] = useChatMicTimeoutMs();

  // the store keeps the timeout as a number, while the radio control deals in string keys
  const chatTimeoutValue: string = '' + chatTimeoutMs;
  // radix 10: never rely on parseInt's implicit base detection
  const setChatTimeoutValue = (value: string) => value && setChatTimeoutMs(parseInt(value, 10));

  return <>

    {/* LanguageSelect: moved from the UI settings (where it logically belongs), just to group things better from an UX perspective */}
    <FormControl orientation='horizontal' sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
      <FormLabelStart title='Language'
                      description='ASR and TTS'
                      tooltip='Currently for Microphone input and Voice output. Microphone support varies by browser (iPhone/Safari lacks speech input). We will use the ElevenLabs MultiLanguage model if a language other than English is selected.' />
      <LanguageSelect />
    </FormControl>

    {/* Mic timeout: hidden on mobile */}
    {!isMobile && <FormRadioControl
      title='Mic Timeout'
      description={chatTimeoutMs < 1000 ? 'Best for quick calls' : chatTimeoutMs > 5000 ? 'Best for thinking' : 'Standard'}
      options={[
        { value: '600', label: '.6s' },
        { value: '2000', label: '2s' },
        { value: '5000', label: '5s' },
        { value: '15000', label: '15s' },
      ]}
      value={chatTimeoutValue} onChange={setChatTimeoutValue}
    />}

  </>;
}
+11 -3
View File
@@ -23,8 +23,8 @@ export const Release = {
// this is here to trigger revalidation of data, e.g. models refresh
Monotonics: {
Aix: 37,
NewsVersion: 200,
Aix: 43,
NewsVersion: 202,
},
// Frontend: pretty features
@@ -59,6 +59,14 @@ export const Release = {
export const BaseProduct = {
ProductName: 'Big-AGI',
ProductURL: 'https://big-agi.com',
PrivacyPolicy: 'https://big-agi.com/privacy',
TermsOfService: 'https://big-agi.com/terms',
// ecosystem
DocsBaseSite: 'https://big-agi.com/docs',
OpenSupportDiscord: 'https://discord.gg/MkH4qj2Jp9',
OpenSourceRepo: 'https://github.com/enricoros/big-agi',
ReleaseNotes: '',
SupportForm: (_userId?: string) => 'https://github.com/enricoros/big-AGI/issues/new',
SupportForm: (_userId?: string) => 'https://github.com/enricoros/big-AGI/issues/new?template=ai-triage.yml',
} as const;
@@ -2,12 +2,13 @@ import type { FileWithHandle } from 'browser-fs-access';
import { callBrowseFetchPageOrThrow } from '~/modules/browse/browse.client';
import { extractYoutubeVideoIDFromURL } from '~/modules/youtube/youtube.utils';
import { imageCaptionFromImageOrThrow } from '~/modules/aifn/image-caption/imageCaptionFromImage';
import { youTubeGetVideoData } from '~/modules/youtube/useYouTubeTranscript';
import type { CommonImageMimeTypes } from '~/common/util/imageUtils';
import { Is } from '~/common/util/pwaUtils';
import { PLATFORM_IMAGE_MIMETYPE } from '~/common/util/imageUtils';
import { agiCustomId, agiUuid } from '~/common/util/idUtils';
import { convert_Base64DataURL_To_Base64WithMimeType, convert_Base64WithMimeType_To_Blob } from '~/common/util/blobUtils';
import { getDomainModelConfiguration } from '~/common/stores/llms/hooks/useModelDomain';
import { htmlTableToMarkdown } from '~/common/util/htmlTableToMarkdown';
import { humanReadableHyphenated } from '~/common/util/textUtils';
import { pdfToImageDataURLs, pdfToText } from '~/common/util/pdfUtils';
@@ -21,9 +22,6 @@ import { guessInputContentTypeFromMime, heuristicMimeTypeFixup, mimeTypeIsDocX,
import { imageDataToImageAttachmentFragmentViaDBlob } from './attachment.dblobs';
// configuration
export const DEFAULT_ADRAFT_IMAGE_MIMETYPE: CommonImageMimeTypes = !Is.Browser.Safari ? 'image/webp' : 'image/jpeg';
export const DEFAULT_ADRAFT_IMAGE_QUALITY = 0.96;
const PDF_IMAGE_PAGE_SCALE = 1.5;
const PDF_IMAGE_QUALITY = 0.5;
const ENABLE_TEXT_AND_IMAGES = false; // [PROD] ?
@@ -279,11 +277,13 @@ export function attachmentDefineConverters(source: AttachmentDraftSource, input:
// Images (Known/Unknown)
case input.mimeType.startsWith('image/'):
const inputImageMimeSupported = mimeTypeIsSupportedImage(input.mimeType);
const visionModelMissing = !getDomainModelConfiguration('imageCaption', true, true);
converters.push({ id: 'image-resized-high', name: 'Image (high detail)', disabled: !inputImageMimeSupported });
converters.push({ id: 'image-resized-low', name: 'Image (low detail)', disabled: !inputImageMimeSupported });
converters.push({ id: 'image-original', name: 'Image (original quality)', disabled: !inputImageMimeSupported });
if (!inputImageMimeSupported)
converters.push({ id: 'image-to-default', name: `As Image (${DEFAULT_ADRAFT_IMAGE_MIMETYPE})` });
converters.push({ id: 'image-to-default', name: `As Image (${PLATFORM_IMAGE_MIMETYPE})` });
converters.push({ id: 'image-caption', name: 'Caption (Text)', disabled: visionModelMissing });
converters.push({ id: 'unhandled', name: 'No Image' });
converters.push({ id: 'image-ocr', name: 'Add Text (OCR)', isCheckbox: true });
break;
@@ -561,7 +561,7 @@ export async function attachmentPerformConversion(
// image converted (potentially unsupported mime)
case 'image-to-default':
if (!_expectBlob(input.data, 'image-to-default')) return;
const imageCastF = await imageDataToImageAttachmentFragmentViaDBlob(input.mimeType, input.data, source, title, caption, DEFAULT_ADRAFT_IMAGE_MIMETYPE, false);
const imageCastF = await imageDataToImageAttachmentFragmentViaDBlob(input.mimeType, input.data, source, title, caption, PLATFORM_IMAGE_MIMETYPE, false);
if (imageCastF)
newFragments.push(imageCastF);
break;
@@ -590,6 +590,35 @@ export async function attachmentPerformConversion(
}
break;
// image to caption
case 'image-caption':
if (!_expectBlob(input.data, 'Image captioning converter')) break;
try {
const abortController = new AbortController();
const captionText = await imageCaptionFromImageOrThrow(
input.data,
input.mimeType,
attachment.id,
abortController.signal,
progress => edit(attachment.id, { outputsConversionProgress: progress / 100 }),
);
// if we're here we shall have valid text
newFragments.push(createDocAttachmentFragment(
title,
caption + ' (Caption)',
DVMimeType.TextPlain,
createDMessageDataInlineText(captionText || 'This image could not be described', 'text/plain'),
refString,
DOCPART_DEFAULT_VERSION,
{ ...docMeta, srcOcrFrom: 'image-caption' },
));
} catch (error: any) {
console.log('[DEV] Failed to caption image:', error);
const errorText = `[Captioning failed: ${error?.message || String(error)}]`;
newFragments.push(createDocAttachmentFragment(title, caption + ' (Error)', DVMimeType.TextPlain, createDMessageDataInlineText(errorText, 'text/plain'), refString, DOCPART_DEFAULT_VERSION, { ...docMeta, srcOcrFrom: 'image-caption' }));
}
break;
// pdf to text
case 'pdf-text':
@@ -610,7 +639,7 @@ export async function attachmentPerformConversion(
if (!_expectBlob(input.data, 'PDF images converter')) break;
// Convert Blob to ArrayBuffer for PDF.js
try {
const imageDataURLs = await pdfToImageDataURLs(await input.data.arrayBuffer(), DEFAULT_ADRAFT_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
const imageDataURLs = await pdfToImageDataURLs(await input.data.arrayBuffer(), PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
edit(attachment.id, { outputsConversionProgress: progress });
});
for (const pdfPageImage of imageDataURLs) {
@@ -634,7 +663,7 @@ export async function attachmentPerformConversion(
// duplicated from 'pdf-images' (different progress update)
const imageFragments: DMessageAttachmentFragment[] = [];
const imageDataURLs = await pdfToImageDataURLs(pdfArrayBufferForImages, DEFAULT_ADRAFT_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
const imageDataURLs = await pdfToImageDataURLs(pdfArrayBufferForImages, PLATFORM_IMAGE_MIMETYPE, PDF_IMAGE_QUALITY, PDF_IMAGE_PAGE_SCALE, (progress) => {
edit(attachment.id, { outputsConversionProgress: progress / 2 }); // Update progress (0% to 50%)
});
for (const pdfPageImage of imageDataURLs) {
@@ -136,7 +136,7 @@ export type AttachmentDraftConverter = {
export type AttachmentDraftConverterType =
| 'text' | 'rich-text' | 'rich-text-cleaner' | 'rich-text-table'
| 'image-original' | 'image-resized-high' | 'image-resized-low' | 'image-ocr' | 'image-to-default'
| 'image-original' | 'image-resized-high' | 'image-resized-low' | 'image-ocr' | 'image-caption' | 'image-to-default'
| 'pdf-text' | 'pdf-images' | 'pdf-text-and-images'
| 'docx-to-html'
| 'url-page-text' | 'url-page-markdown' | 'url-page-html' | 'url-page-null' | 'url-page-image'
+16 -7
View File
@@ -9,10 +9,10 @@ export const hasPostHogAnalytics = !!process.env.NEXT_PUBLIC_POSTHOG_KEY;
// global to survive route changes
let _posthog: undefined | PostHog | null = undefined; // underined: not loaded, null: loading or opt-out, PostHog: loaded
let _posthog: undefined | PostHog | null = undefined; // undefined: not loaded, null: loading or opt-out, PostHog: loaded
// unused yet
// noinspection JSUnusedGlobalSymbols - unused yet
export function posthogAnalyticsOptOut() {
if (isBrowser) {
localStorage.setItem('app-analytics-posthog-optout', 'true');
@@ -20,10 +20,10 @@ export function posthogAnalyticsOptOut() {
}
}
// unused yet
export function posthogCaptureEvent(eventName: string, properties?: Properties) {
export function posthogCaptureEvent(eventName: string, properties?: Properties, options?: { sendInstantly?: boolean }) {
if (isBrowser && hasPostHogAnalytics) {
_posthog?.capture(eventName, properties);
// For events before navigation (e.g., login button clicks), send immediately
_posthog?.capture(eventName, properties, options?.sendInstantly ? { send_instantly: true } : undefined);
}
}
@@ -33,8 +33,9 @@ export function posthogCaptureException(error: Error | unknown, additionalProper
}
}
// unused yet
/**
* Posthog Identify - Login
*/
export function posthogUser(userId: string, userProperties?: Record<string, any>) {
if (isBrowser && hasPostHogAnalytics) {
_posthog?.identify(userId, {
@@ -45,6 +46,14 @@ export function posthogUser(userId: string, userProperties?: Record<string, any>
}
}
/**
* Posthog Reset - Logout
*/
export function posthogReset() {
  // only in-browser and when a PostHog key is configured; presumably clears the
  // identity set by posthogUser (logout) — NOTE(review): confirm against SDK docs
  if (isBrowser && hasPostHogAnalytics)
    _posthog?.reset();
}
/**
* PostHog Analytics implementation - with dynamic loading
+44 -1
View File
@@ -55,6 +55,7 @@ export function DataStreamViz(props: { height: number, speed?: number }) {
const tokensRef = React.useRef<Token[]>([]);
const lastTimeRef = React.useRef<number>(0);
const lastTokenTimeRef = React.useRef<number>(0);
const isVisibleRef = React.useRef<boolean>(true);
// derived
const dpr = window.devicePixelRatio || 1;
@@ -162,6 +163,9 @@ export function DataStreamViz(props: { height: number, speed?: number }) {
// Animation function
const animate = React.useCallback((currentTime: number) => {
// early exit if not visible or no animation ID (component unmounting)
if (!isVisibleRef.current || !animationRef.current) return;
const canvas = canvasRef.current;
if (!canvas) return;
const ctx = canvas.getContext('2d');
@@ -199,10 +203,46 @@ export function DataStreamViz(props: { height: number, speed?: number }) {
}
lastTimeRef.current = currentTime;
animationRef.current = requestAnimationFrame(animate);
// only schedule next frame if still visible
if (isVisibleRef.current)
animationRef.current = requestAnimationFrame(animate);
}, [createToken, drawGrid, drawToken]);
// [effect] Detect visibility
React.useEffect(() => {
const container = containerRef.current;
if (!container) return;
const observer = new IntersectionObserver(([entry]) => {
const visible = entry.isIntersecting;
isVisibleRef.current = visible;
if (visible) {
// restart animation when becoming visible (cancel any existing first)
cancelAnimationFrame(animationRef.current);
animationRef.current = requestAnimationFrame(animate);
} else {
// stop animation and clear memory when going off-screen
cancelAnimationFrame(animationRef.current);
animationRef.current = 0;
tokensRef.current = [];
lastTimeRef.current = 0;
lastTokenTimeRef.current = 0;
}
}, {
threshold: 0.1, // Trigger when at least 10% visible
rootMargin: '50px', // Start animating slightly before entering viewport
});
observer.observe(container);
return () => {
observer.disconnect();
};
}, [animate]);
// Canvas setup and animation effect
React.useEffect(() => {
const container = containerRef.current;
if (!container) return;
@@ -216,11 +256,14 @@ export function DataStreamViz(props: { height: number, speed?: number }) {
handleResize();
window.addEventListener('resize', handleResize);
// start initial animation (cancel any existing first to prevent duplicate loops)
cancelAnimationFrame(animationRef.current);
animationRef.current = requestAnimationFrame(animate);
return () => {
window.removeEventListener('resize', handleResize);
cancelAnimationFrame(animationRef.current);
animationRef.current = 0; // Prevent RAF callbacks after unmount
};
}, [animate, props.height, setupCanvas]);
+38
View File
@@ -0,0 +1,38 @@
import * as React from 'react';
import { Chip, ChipProps } from '@mui/joy';
/**
 * Inline status badge (e.g. 'New', 'Beta') rendered as a small Joy Chip.
 * Defaults to a lime/yellow-green 'new' look when no color is provided.
 */
export function GoodBadge(props: {
  badge: React.ReactNode;
  color?: ChipProps['color'];
  variant?: ChipProps['variant'];
  sx?: ChipProps['sx'];
}) {

  // when no explicit color is set, override with the 'new' palette below
  const hasExplicitColor = props.color !== undefined;

  return (
    <Chip
      size='sm'
      color={props.color ?? 'success'}
      variant={props.variant ?? 'soft'}
      sx={{
        ml: 1.5,
        fontSize: 'xs',
        fontWeight: 'md',
        borderRadius: 'xs',
        px: 1,
        py: 0.25,
        // default "new" color - lime/yellow-green
        ...(!hasExplicitColor && {
          bgcolor: '#d5ec31',
          color: 'primary.softColor',
        }),
        ...props.sx,
      }}
    >
      {props.badge}
    </Chip>
  );
}
@@ -0,0 +1,59 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { InlineTextarea } from './InlineTextarea';
/**
 * Click-to-edit text: renders a custom display until clicked, then swaps in an
 * InlineTextarea. Commits via onSave unless the trimmed text is empty or unchanged.
 */
export function InlineTextareaEditable(props: {
  value: string;
  onSave: (newValue: string) => void;
  renderDisplay: (onClickEdit: (event: React.MouseEvent) => void) => React.ReactNode;
  placeholder?: string;
  disabled?: boolean;
  textareaSx?: SxProps;
}) {

  // state
  const [editMode, setEditMode] = React.useState(false);

  // keep the latest value visible to the stable save handler
  const latestValueRef = React.useRef(props.value);
  latestValueRef.current = props.value;

  // handlers
  const { onSave, disabled } = props;

  const handleStartEdit = React.useCallback((event: React.MouseEvent) => {
    // shift-click is reserved for debug/inspect
    if (disabled || event.shiftKey) return;
    setEditMode(true);
  }, [disabled]);

  const handleCommit = React.useCallback((editedText: string) => {
    setEditMode(false);
    const cleaned = editedText.trim();
    // skip no-op saves: empty or identical to the current value
    if (cleaned && cleaned !== latestValueRef.current)
      onSave(cleaned);
  }, [onSave]);

  const handleDismiss = React.useCallback(() => setEditMode(false), []);

  // render
  if (!editMode)
    return props.renderDisplay(handleStartEdit);

  return (
    <InlineTextarea
      initialText={props.value}
      placeholder={props.placeholder}
      onEdit={handleCommit}
      onCancel={handleDismiss}
      sx={props.textareaSx}
    />
  );
}
+22 -8
View File
@@ -1,6 +1,6 @@
import * as React from 'react';
import { Option, Select } from '@mui/joy';
import { Option, optionClasses, Select, SelectSlotsAndSlotProps } from '@mui/joy';
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
import { useUIPreferencesStore } from '~/common/stores/store-ui';
@@ -10,6 +10,20 @@ import { useUIPreferencesStore } from '~/common/stores/store-ui';
import languages from './Languages.json';
// copied from useLLMSelect.tsx - inspired by optimaSelectSlotProps.listbox
const _selectSlotProps: SelectSlotsAndSlotProps<false>['slotProps'] = {
root: { sx: { minWidth: 200 } },
listbox: {
sx: {
boxShadow: 'xl',
[`& .${optionClasses.root}`]: {
maxWidth: 'min(640px, calc(100dvw - 0.25rem))',
},
},
} as const,
} as const;
export function LanguageSelect() {
// external state
@@ -32,19 +46,19 @@ export function LanguageSelect() {
</Option>
) : (
Object.entries(localesOrCode).map(([country, code]) => (
<Option key={code} value={code}>
<Option key={code} value={code} label={language}>
{`${language} (${country})`}
</Option>
))
)), []);
return (
<Select value={preferredLanguage} onChange={handleLanguageChanged}
indicator={<KeyboardArrowDownIcon />}
slotProps={{
root: { sx: { minWidth: 200 } },
indicator: { sx: { opacity: 0.5 } },
}}>
<Select
value={preferredLanguage}
onChange={handleLanguageChanged}
indicator={<KeyboardArrowDownIcon />}
slotProps={_selectSlotProps}
>
{languageOptions}
</Select>
);
+7 -2
View File
@@ -32,12 +32,17 @@ export const StarredNoXL2 = React.memo(function StarredNoXL2() {
return <StarIcon sx={starIconStyles.starredNoXl2} />;
});
export const UnStarred = React.memo(function UnStarred() {
const UnStarredNoXL2 = React.memo(function UnStarred() {
return <StarBorderIcon />;
})
export const StarredState = React.memo(function StarredState({ isStarred }: { isStarred: boolean }) {
return isStarred ? <Starred /> : <UnStarred />;
return isStarred ? <Starred /> : <UnStarredNoXL2 />;
});
// have an unstyled that just returns StarIcon or StarBorderIcon and we can use with our own styles and props {...}
export const StarIconUnstyled = React.memo(function StarIconUnstyled({ isStarred }: { isStarred: boolean }) {
return isStarred ? <StarIcon /> : <StarBorderIcon />;
});
@@ -21,6 +21,13 @@ const _styles = {
gap: 1,
} as const,
chipGroupEnd: {
display: 'flex',
flexWrap: 'wrap',
justifyContent: 'flex-end',
gap: 1,
} as const,
chip: {
'--Chip-minHeight': '1.75rem', // this makes it prob better
px: 1.5,
@@ -36,6 +43,7 @@ export const FormChipControl = <TValue extends string>(props: {
// specific
size?: 'sm' | 'md' | 'lg',
color?: ColorPaletteProp,
alignEnd?: boolean,
// =FormRadioControl
title: string | React.JSX.Element;
description?: string | React.JSX.Element;
@@ -48,6 +56,9 @@ export const FormChipControl = <TValue extends string>(props: {
const { onChange } = props;
const selectedOption = props.options.find(option => option.value === props.value);
const description = selectedOption?.description ?? props.description;
const handleChipClick = React.useCallback((value: Immutable<TValue>) => {
if (!props.disabled)
onChange(value);
@@ -55,8 +66,8 @@ export const FormChipControl = <TValue extends string>(props: {
return (
<FormControl orientation='horizontal' disabled={props.disabled} sx={_styles.control}>
{(!!props.title || !!props.description) && <FormLabelStart title={props.title} description={props.description} tooltip={props.tooltip} />}
<Box sx={_styles.chipGroup}>
{(!!props.title || !!description) && <FormLabelStart title={props.title} description={description} tooltip={props.tooltip} />}
<Box sx={props.alignEnd ? _styles.chipGroupEnd : _styles.chipGroup}>
{props.options.map((option) => (
<Chip
key={'opt-' + option.value}
+4 -4
View File
@@ -13,8 +13,8 @@ const slotPropsInputSx: InputSlotsAndSlotProps['slotProps'] = {
input: {
sx: {
width: '100%',
}
}
},
},
} as const;
@@ -40,7 +40,7 @@ export function FormInputKey(props: {
const endDecorator = React.useMemo(() => !!props.value && !props.noKey && (
<IconButton onClick={() => setIsVisible(!isVisible)}>
{isVisible ? <VisibilityIcon sx={{ fontSize: 'lg'}} /> : <VisibilityOffIcon sx={{ fontSize: 'md' }} />}
{isVisible ? <VisibilityIcon sx={{ fontSize: 'lg' }} /> : <VisibilityOffIcon sx={{ fontSize: 'md' }} />}
</IconButton>
), [props.value, props.noKey, isVisible]);
@@ -78,7 +78,7 @@ export function FormInputKey(props: {
placeholder={props.required ? props.placeholder ? 'required: ' + props.placeholder : 'required' : props.placeholder || '...'}
type={(isVisible || !!props.noKey) ? 'text' : 'password'}
error={props.isError}
startDecorator={!props.noKey && <KeyIcon sx={{ fontSize: 'lg' }} />}
startDecorator={!props.noKey && <KeyIcon sx={{ fontSize: 'md' }} />}
endDecorator={endDecorator}
slotProps={slotPropsInputSx}
/>
@@ -10,6 +10,7 @@ import { FormLabelStart } from './FormLabelStart';
export type FormRadioOption<T extends string> = {
value: T,
label: string | React.JSX.Element,
description?: string,
disabled?: boolean
};
@@ -23,18 +24,24 @@ export const FormRadioControl = <TValue extends string>(props: {
options: Immutable<FormRadioOption<TValue>[]>;
value?: TValue;
onChange: (value: TValue) => void;
}) =>
<FormControl size={props.size} orientation='horizontal' disabled={props.disabled} sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
{(!!props.title || !!props.description) && <FormLabelStart title={props.title} description={props.description} tooltip={props.tooltip} />}
<RadioGroup
size={props.size}
orientation='horizontal'
value={props.value}
onChange={(event: React.ChangeEvent<HTMLInputElement>) => event.target.value && props.onChange(event.target.value as TValue)}
sx={{ flexWrap: 'wrap' }}
>
{props.options.map((option) =>
<Radio key={'opt-' + option.value} value={option.value} label={option.label} disabled={option.disabled || props.disabled} />,
)}
</RadioGroup>
</FormControl>;
}) => {
const selectedOption = props.options.find(option => option.value === props.value);
const description = selectedOption?.description ?? props.description;
return (
<FormControl size={props.size} orientation='horizontal' disabled={props.disabled} sx={{ justifyContent: 'space-between', alignItems: 'center' }}>
{(!!props.title || !!description) && <FormLabelStart title={props.title} description={description} tooltip={props.tooltip} />}
<RadioGroup
size={props.size}
orientation='horizontal'
value={props.value}
onChange={(event: React.ChangeEvent<HTMLInputElement>) => event.target.value && props.onChange(event.target.value as TValue)}
sx={{ flexWrap: 'wrap', gap: 1 }}
>
{props.options.map((option) =>
<Radio key={'opt-' + option.value} value={option.value} label={option.label} disabled={option.disabled || props.disabled} />,
)}
</RadioGroup>
</FormControl>
);
};
@@ -0,0 +1,95 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { FormControl, IconButton, Input } from '@mui/joy';
import KeyIcon from '@mui/icons-material/Key';
import VisibilityIcon from '@mui/icons-material/Visibility';
import VisibilityOffIcon from '@mui/icons-material/VisibilityOff';
import { FormLabelStart } from './FormLabelStart';
// Shared sx styles for the secret-field layout
const _styles = {
  // label + input on a single row, wrapping on narrow widths
  formControl: {
    flexWrap: 'wrap',
    justifyContent: 'space-between',
    alignItems: 'center',
  },
  // default input style: stretch to fill the remaining row space
  inputDefault: {
    flexGrow: 1,
  },
} as const satisfies Record<string, SxProps>;
/**
 * Secret/API key form field with visibility toggle.
 * Same inline layout as FormTextField but with secret-specific features:
 * - Password masking with visibility toggle
 * - Key icon (customizable)
 * - Password manager integration
 */
export function FormSecretField(props: {
  autoCompleteId: string;
  title: string | React.JSX.Element;
  description?: string | React.JSX.Element;
  tooltip?: string | React.JSX.Element;
  placeholder?: string;
  value: string;
  onChange: (text: string) => void;
  // Behavior
  required?: boolean;
  disabled?: boolean;
  isError?: boolean;
  // Appearance
  inputSx?: SxProps;
  /** Custom start decorator, or false to hide. Default: KeyIcon */
  startDecorator?: React.ReactNode | false;
}) {

  // state: whether the secret is shown in clear text
  const [revealed, setRevealed] = React.useState(false);

  // derived
  const acId = 'secret-' + props.autoCompleteId;

  // human-readable pseudo-username, so password managers label the entry nicely
  const ghost = props.autoCompleteId.replace(/-key$/, '').replace(/-/g, ' ');

  // show the reveal toggle only when there is something to reveal
  const endDecorator = React.useMemo(() => {
    if (!props.value) return false;
    return (
      <IconButton size='sm' onClick={() => setRevealed(on => !on)}>
        {revealed ? <VisibilityIcon sx={{ fontSize: 'md' }} /> : <VisibilityOffIcon sx={{ fontSize: 'md' }} />}
      </IconButton>
    );
  }, [props.value, revealed]);

  return (
    <FormControl
      id={acId}
      orientation='horizontal'
      disabled={props.disabled}
      sx={_styles.formControl}
    >

      <FormLabelStart title={props.title} description={props.description} tooltip={props.tooltip} />

      {/* Hidden username field for password manager association */}
      <input
        type='text'
        autoComplete='username'
        value={ghost}
        readOnly
        tabIndex={-1}
        style={{ display: 'none' }}
      />

      <Input
        name={acId}
        type={revealed ? 'text' : 'password'}
        autoComplete='new-password'
        variant='outlined'
        placeholder={(props.required && !props.placeholder) ? 'required' : props.placeholder}
        error={props.isError}
        value={props.value}
        onChange={event => props.onChange(event.target.value)}
        startDecorator={props.startDecorator ?? <KeyIcon sx={{ fontSize: 'md' }} />}
        endDecorator={endDecorator}
        sx={props.inputSx ?? _styles.inputDefault}
      />

    </FormControl>
  );
}
@@ -24,6 +24,7 @@ export function FormSliderControl(props: {
startAdornment?: React.ReactNode,
endAdornment?: React.ReactNode,
styleNoTrack?: boolean,
sliderSx?: SxProps,
}) {
@@ -66,8 +67,7 @@ export function FormSliderControl(props: {
onChange={handleChange}
onChangeCommitted={handleChangeCommitted}
valueLabelDisplay={props.valueLabelDisplay}
sx={props.styleNoTrack ? _styleNoTrack : undefined}
// sx={{ py: 1, mt: 1.1 }}
sx={props.styleNoTrack ? _styleNoTrack : props.sliderSx}
/>
{props.endAdornment}
</FormControl>
+13 -7
View File
@@ -6,11 +6,16 @@ import { FormControl, Input } from '@mui/joy';
import { FormLabelStart } from './FormLabelStart';
const formControlSx: SxProps = {
flexWrap: 'wrap',
justifyContent: 'space-between',
alignItems: 'center',
};
const _styles = {
formControl: {
flexWrap: 'wrap',
justifyContent: 'space-between',
alignItems: 'center',
},
inputDefault: {
flexGrow: 1,
},
} as const satisfies Record<string, SxProps>;
/**
@@ -23,6 +28,7 @@ export function FormTextField(props: {
tooltip?: string | React.JSX.Element,
placeholder?: string, isError?: boolean, disabled?: boolean,
value: string | undefined, onChange: (text: string) => void,
inputSx?: SxProps,
}) {
const acId = 'text-' + props.autoCompleteId;
return (
@@ -30,7 +36,7 @@ export function FormTextField(props: {
id={acId}
orientation='horizontal'
disabled={props.disabled}
sx={formControlSx}
sx={_styles.formControl}
>
<FormLabelStart title={props.title} description={props.description} tooltip={props.tooltip} />
<Input
@@ -39,7 +45,7 @@ export function FormTextField(props: {
autoComplete='off'
variant='outlined' placeholder={props.placeholder} error={props.isError}
value={props.value} onChange={event => props.onChange(event.target.value)}
sx={{ flexGrow: 1 }}
sx={props.inputSx ?? _styles.inputDefault}
/>
</FormControl>
);
@@ -0,0 +1,38 @@
import * as React from 'react';
import { FormSwitchControl } from './FormSwitchControl';
/**
 * Reusable toggle for enabling client-side API fetch.
 * Appears with animation when client key is present.
 */
export function SetupFormClientSideToggle(props: {
  visible: boolean;
  checked: boolean;
  onChange: (on: boolean) => void;
  helpText: string;
  disabled?: boolean;
}) {

  // CSS grid-rows trick: animating 1fr <-> 0fr yields a smooth height reveal
  const revealStyle: React.CSSProperties = {
    display: 'grid',
    gridTemplateRows: props.visible ? '1fr' : '0fr',
    transition: 'grid-template-rows 0.3s cubic-bezier(0.4, 0, 0.2, 1)',
  };

  return (
    <div style={revealStyle}>
      <div style={{ overflow: 'hidden' }}>
        <FormSwitchControl
          title='Direct Connection'
          description={props.checked ? 'Connect from browser' : 'Via server (default)'}
          tooltip={props.helpText}
          checked={props.checked}
          onChange={props.onChange}
          disabled={props.disabled}
        />
      </div>
    </div>
  );
}
+41 -9
View File
@@ -1,7 +1,7 @@
import * as React from 'react';
import type { SxProps } from '@mui/joy/styles/types';
import { Box, Chip, ColorPaletteProp, FormControl, IconButton, ListDivider, ListItemDecorator, Option, optionClasses, Select, SelectSlotsAndSlotProps, SvgIconProps, VariantProp } from '@mui/joy';
import { Box, Chip, ColorPaletteProp, FormControl, IconButton, ListDivider, ListItem, ListItemButton, ListItemDecorator, Option, Select, SelectSlotsAndSlotProps, SvgIconProps, VariantProp, optionClasses } from '@mui/joy';
import ArrowForwardRoundedIcon from '@mui/icons-material/ArrowForwardRounded';
import AutoModeIcon from '@mui/icons-material/AutoMode';
import BuildCircleIcon from '@mui/icons-material/BuildCircle';
@@ -11,12 +11,13 @@ import { findModelVendor } from '~/modules/llms/vendors/vendors.registry';
import { llmsGetVendorIcon, LLMVendorIcon } from '~/modules/llms/components/LLMVendorIcon';
import type { DModelDomainId } from '~/common/stores/llms/model.domains.types';
import { DLLM, DLLMId, LLM_IF_OAI_Reasoning, LLM_IF_Outputs_Audio, LLM_IF_Outputs_Image, LLM_IF_Tools_WebSearch } from '~/common/stores/llms/llms.types';
import { DLLM, DLLMId, getLLMPricing, LLM_IF_OAI_Reasoning, LLM_IF_Outputs_Audio, LLM_IF_Outputs_Image, LLM_IF_Tools_WebSearch } from '~/common/stores/llms/llms.types';
import { PhGearSixIcon } from '~/common/components/icons/phosphor/PhGearSixIcon';
import { StarredNoXL2 } from '~/common/components/StarIcons';
import { StarIconUnstyled, StarredNoXL2 } from '~/common/components/StarIcons';
import { TooltipOutlined } from '~/common/components/TooltipOutlined';
import { getChatLLMId, llmsStoreActions } from '~/common/stores/llms/store-llms';
import { optimaActions, optimaOpenModels } from '~/common/layout/optima/useOptima';
import { useUIPreferencesStore } from '~/common/stores/store-ui';
import { useVisibleLLMs } from '~/common/stores/llms/llms.hooks';
import { FormLabelStart } from './FormLabelStart';
@@ -58,6 +59,16 @@ const _styles = {
backgroundColor: 'background.popup',
boxShadow: 'xs',
},
listFooter: {
// '--ListItem-minHeight': '2.25rem',
borderTop: '1px solid',
borderTopColor: 'divider',
// pb: 0,
position: 'sticky',
bottom: 0,
backgroundColor: 'background.surface',
zIndex: 1,
},
listVendor: {
// see OptimaBarDropdown's _styles.separator
fontSize: 'sm',
@@ -130,6 +141,7 @@ interface LLMSelectOptions {
isHorizontal?: boolean;
autoRefreshDomain?: DModelDomainId;
appendConfigureModels?: boolean; // appends a bottom option to open the Models panel
showStarFilter?: boolean; // show a button to filter starred models only
}
/**
@@ -145,14 +157,18 @@ export function useLLMSelect(
options: LLMSelectOptions,
): [DLLM | null, React.JSX.Element | null, React.FunctionComponent<SvgIconProps> | undefined] {
// options
const { label, larger = false, disabled = false, placeholder = LLM_TEXT_PLACEHOLDER, isHorizontal = false, autoRefreshDomain, appendConfigureModels = false, showStarFilter = false } = options;
// state
const [controlledOpen, setControlledOpen] = React.useState(false);
// external state
const _filteredLLMs = useVisibleLLMs(llmId);
const starredOnly = useUIPreferencesStore(state => showStarFilter && state.showModelsStarredOnly);
// const modelsStarredOnTop = useUIPreferencesStore(state => state.modelsStarredOnTop); // unsupported, this creates some issues with groups I believe
const { llms: _filteredLLMs, hasStarred } = useVisibleLLMs(llmId, starredOnly, false);
// derived state
const { label, larger = false, disabled = false, placeholder = LLM_TEXT_PLACEHOLDER, isHorizontal = false, autoRefreshDomain, appendConfigureModels = false } = options;
const noIcons = false; //smaller;
const llm = !llmId ? null : _filteredLLMs.find(llm => llm.id === llmId) ?? null;
const isReasoning = !LLM_SELECT_SHOW_REASONING_ICON ? false : llm?.interfaces?.includes(LLM_IF_OAI_Reasoning) ?? false;
@@ -182,7 +198,7 @@ export function useLLMSelect(
let features = '';
const isNotSymlink = !llm.label.startsWith('🔗');
const seemsFree = !!llm.pricing?.chat?._isFree;
const seemsFree = !!getLLMPricing(llm)?.chat?._isFree;
if (isNotSymlink) {
// check features
if (seemsFree) features += 'free ';
@@ -209,7 +225,7 @@ export function useLLMSelect(
>
{!noIcons && (
<ListItemDecorator>
{llm.userStarred ? <StarredNoXL2 /> : vendor?.id ? <LLMVendorIcon vendorId={vendor.id} /> : null}
{(llm.userStarred && !starredOnly) ? <StarredNoXL2 /> : vendor?.id ? <LLMVendorIcon vendorId={vendor.id} /> : null}
</ListItemDecorator>
)}
{/*<Tooltip title={llm.description}>*/}
@@ -244,7 +260,7 @@ export function useLLMSelect(
return acc;
}, [] as React.JSX.Element[]);
}, [_filteredLLMs, llmId, noIcons, optimizeToSingleVisibleId]);
}, [_filteredLLMs, llmId, noIcons, optimizeToSingleVisibleId, starredOnly]);
const onSelectChange = React.useCallback((_event: unknown, value: DLLMId | null) => {
@@ -297,10 +313,26 @@ export function useLLMSelect(
</Option>
)}
{/* Star Filter Toggle - shown at the top of the list only if visible */}
{showStarFilter && hasStarred && !optimizeToSingleVisibleId && (
<ListItem key='star-filter-toggle' sx={_styles.listFooter}>
<ListItemButton
variant={starredOnly ? 'soft' : 'plain'}
onClick={useUIPreferencesStore.getState().toggleShowModelsStarredOnly}
// sx={{ backgroundColor: 'background.surface', position: 'sticky', top: 0, zIndex: 1 }}
>
<ListItemDecorator>
<StarIconUnstyled isStarred={starredOnly} />
</ListItemDecorator>
{starredOnly ? 'Showing: Starred' : 'Showing: All'}
</ListItemButton>
</ListItem>
)}
</Select>
{/*</Box>*/}
</FormControl>
), [appendConfigureModels, autoRefreshDomain, controlledOpen, disabled, hasNoModels, isHorizontal, isReasoning, label, larger, llmId, onSelectChange, optimizeToSingleVisibleId, options.color, options.sx, options.variant, optionsArray, placeholder, showNoOptions]);
), [appendConfigureModels, autoRefreshDomain, controlledOpen, disabled, hasNoModels, hasStarred, isHorizontal, isReasoning, label, larger, llmId, onSelectChange, optimizeToSingleVisibleId, options.color, options.sx, options.variant, optionsArray, placeholder, showNoOptions, showStarFilter, starredOnly]);
// Memo the vendor icon for the chat LLM
const chatLLMVendorIconFC = React.useMemo(() => {
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - gift
*/
export function PhGift(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M216,72H180.92c.39-.33.79-.65,1.17-1A29.53,29.53,0,0,0,192,49.57,32.62,32.62,0,0,0,158.44,16,29.53,29.53,0,0,0,137,25.91a54.94,54.94,0,0,0-9,14.48,54.94,54.94,0,0,0-9-14.48A29.53,29.53,0,0,0,97.56,16,32.62,32.62,0,0,0,64,49.57,29.53,29.53,0,0,0,73.91,71c.38.33.78.65,1.17,1H40A16,16,0,0,0,24,88v32a16,16,0,0,0,16,16v64a16,16,0,0,0,16,16H200a16,16,0,0,0,16-16V136a16,16,0,0,0,16-16V88A16,16,0,0,0,216,72ZM149,36.51a13.69,13.69,0,0,1,10-4.5h.49A16.62,16.62,0,0,1,176,49.08a13.69,13.69,0,0,1-4.5,10c-9.49,8.4-25.24,11.36-35,12.4C137.7,60.89,141,45.5,149,36.51Zm-64.09.36A16.63,16.63,0,0,1,96.59,32h.49a13.69,13.69,0,0,1,10,4.5c8.39,9.48,11.35,25.2,12.39,34.92-9.72-1-25.44-4-34.92-12.39a13.69,13.69,0,0,1-4.5-10A16.6,16.6,0,0,1,84.87,36.87ZM40,88h80v32H40Zm16,48h64v64H56Zm144,64H136V136h64Zm16-80H136V88h80v32Z' />
</SvgIcon>
);
}
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - key
*/
export function PhKey(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M216.57,39.43A80,80,0,0,0,83.91,120.78L28.69,176A15.86,15.86,0,0,0,24,187.31V216a16,16,0,0,0,16,16H72a8,8,0,0,0,8-8V208H96a8,8,0,0,0,8-8V184h16a8,8,0,0,0,5.66-2.34l9.56-9.57A79.73,79.73,0,0,0,160,176h.1A80,80,0,0,0,216.57,39.43ZM224,98.1c-1.09,34.09-29.75,61.86-63.89,61.9H160a63.7,63.7,0,0,1-23.65-4.51,8,8,0,0,0-8.84,1.68L116.69,168H96a8,8,0,0,0-8,8v16H72a8,8,0,0,0-8,8v16H40V187.31l58.83-58.82a8,8,0,0,0,1.68-8.84A63.72,63.72,0,0,1,96,95.92c0-34.14,27.81-62.8,61.9-63.89A64,64,0,0,1,224,98.1ZM192,76a12,12,0,1,1-12-12A12,12,0,0,1,192,76Z' />
</SvgIcon>
);
}
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - megaphone
*/
export function PhMegaphone(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M248,120a48.05,48.05,0,0,0-48-48H160.2c-2.91-.17-53.62-3.74-101.91-44.24A16,16,0,0,0,32,40V200a16,16,0,0,0,26.29,12.25c37.77-31.68,77-40.76,93.71-43.3v31.72A16,16,0,0,0,159.12,214l11,7.33A16,16,0,0,0,194.5,212l11.77-44.36A48.07,48.07,0,0,0,248,120ZM48,199.93V40h0c42.81,35.91,86.63,45,104,47.24v65.48C134.65,155,90.84,164.07,48,199.93Zm131,8,0,.11-11-7.33V168h21.6ZM200,152H168V88h32a32,32,0,1,1,0,64Z' />
</SvgIcon>
);
}
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - pencil-simple
*/
export function PhPencilSimple(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M227.31,73.37,182.63,28.68a16,16,0,0,0-22.63,0L36.69,152A15.86,15.86,0,0,0,32,163.31V208a16,16,0,0,0,16,16H92.69A15.86,15.86,0,0,0,104,219.31L227.31,96a16,16,0,0,0,0-22.63ZM92.69,208H48V163.31l88-88L180.69,120ZM192,108.68,147.31,64l24-24L216,84.68Z' />
</SvgIcon>
);
}
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - terminal
*/
export function PhTerminal(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M117.31,134l-72,64a8,8,0,1,1-10.63-12L100,128,34.69,70A8,8,0,1,1,45.32,58l72,64a8,8,0,0,1,0,12ZM216,184H120a8,8,0,0,0,0,16h96a8,8,0,0,0,0-16Z' />
</SvgIcon>
);
}
@@ -0,0 +1,14 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
/*
* Source: 'https://phosphoricons.com/' - user-sound
*/
export function PhVoice(props: SvgIconProps) {
return (
<SvgIcon viewBox='0 0 256 256' stroke='none' fill='currentColor' width='24' height='24' {...props}>
<path d='M144,165.68a68,68,0,1,0-71.9,0c-20.65,6.76-39.23,19.39-54.17,37.17a8,8,0,0,0,12.25,10.3C50.25,189.19,77.91,176,108,176s57.75,13.19,77.88,37.15a8,8,0,1,0,12.25-10.3C183.18,185.07,164.6,172.44,144,165.68ZM56,108a52,52,0,1,1,52,52A52.06,52.06,0,0,1,56,108ZM207.36,65.6a108.36,108.36,0,0,1,0,84.8,8,8,0,0,1-7.36,4.86,8,8,0,0,1-7.36-11.15,92.26,92.26,0,0,0,0-72.22,8,8,0,0,1,14.72-6.29ZM248,108a139,139,0,0,1-11.29,55.15,8,8,0,0,1-14.7-6.3,124.43,124.43,0,0,0,0-97.7,8,8,0,1,1,14.7-6.3A139,139,0,0,1,248,108Z' />
</SvgIcon>
);
}
@@ -0,0 +1,9 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
export function ElevenLabsIcon(props: SvgIconProps) {
return <SvgIcon viewBox='0 0 24 24' width='24' height='24' fill='currentColor' {...props}>
<path d='M7 4h3v16H7V4zm7 0h3v16h-3V4z' />
</SvgIcon>;
}
+11
View File
@@ -0,0 +1,11 @@
import * as React from 'react';
import { SvgIcon, SvgIconProps } from '@mui/joy';
// from https://platform.moonshot.ai/lightmode.svg - 2025-11-09
export function MoonshotIcon(props: SvgIconProps) {
return <SvgIcon viewBox='0 0 465 470' width='24' height='24' strokeWidth={0} stroke='none' fill='currentColor' strokeLinecap='butt' strokeLinejoin='miter' {...props}>
<path fillRule='evenodd' clipRule='evenodd'
d='M418.766 93.78C388.626 53.17 345.086 22.07 292.446 7.97001C239.806 -6.12999 186.536 -0.969994 140.136 19.13L418.766 93.78ZM40.7758 100.85C61.3758 70.84 88.5858 46.43 119.776 29.15L325.036 84.15C311.566 91.92 297.136 105.31 285.966 120.03L454.736 165.25C459.526 181.08 462.646 197.49 463.956 214.24L40.7758 100.85ZM456.686 292.45C454.306 301.35 451.436 309.99 448.116 318.34L2.44584 198.93C3.74584 190.03 5.58584 181.12 7.96584 172.22C13.5658 151.31 21.8458 131.84 32.3458 114.08L250.436 172.52C241.706 185 233.756 198.56 226.756 213.07L460.516 275.71C459.446 281.3 458.166 286.88 456.676 292.47L456.686 292.45ZM13.4858 310.23C2.67584 279.96 -1.93416 247.22 0.745843 213.95L206.426 269.06C204.946 273.74 203.556 278.48 202.266 283.29C199.646 293.08 197.506 302.85 195.846 312.55L417.946 372.06C407.606 385.79 395.826 398.23 382.896 409.21L13.4858 310.23ZM172.206 456.69C102.096 437.9 48.1158 388.98 20.4758 327.59L187.846 372.44C187.576 388.05 188.546 403.26 190.676 417.85L312.286 450.43C268.876 466.39 220.286 469.57 172.206 456.69Z' />
</SvgIcon>;
}

Some files were not shown because too many files have changed in this diff Show More