Merge branch 'release-1.16.0'

This commit is contained in:
Enrico Ros
2024-05-08 15:11:10 -07:00
13 changed files with 161 additions and 69 deletions
+14 -8
View File
@@ -17,18 +17,23 @@ Or fork & run on Vercel
## 👉 [roadmap](https://github.com/users/enricoros/projects/4/views/2) 👉 [installation](docs/installation.md) 👉 [documentation](docs/README.md)
> Note: bigger better features (incl. Beam-2) are being cooked outside of `main`.
[//]: # (big-AGI is an open book; see the **[ready-to-ship and future ideas](https://github.com/users/enricoros/projects/4/views/2)** in our open roadmap)
#### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
- Support for the newly released Gemini Pro 1.5 models
- Support for the new OpenAI 2024-04-09 Turbo models
- Ctrl+S and Ctrl+O to save/load chats on desktop
- Resilience fixes after the large success of 1.15.0
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
- Major enhancements to the Auto-Diagrams tool
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
- More: code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, [#517](https://github.com/enricoros/big-AGI/issues/517), [#507](https://github.com/enricoros/big-AGI/pull/507)
- Update Anthropic, Groq, Ollama, OpenAI, OpenRouter, Perplexity models
- Developers: update the LLMs data structures
> Note: Beam-2 and new larger features are being cooked outside of `main`.
### 3,000 Commits Milestone · April 7, 2024
#### 3,000 Commits Milestone · April 7, 2024
![big-AGI Milestone](https://github.com/enricoros/big-AGI/assets/32999/47fddbb1-9bd6-4b58-ace4-781dfcb80923)
@@ -42,6 +47,7 @@ Or fork & run on Vercel
- Message **Starring ⭐**: star important messages within chats, to attach them later. [#476](https://github.com/enricoros/big-AGI/issues/476)
- Enhanced the default Persona
- Fixes to Gemini models and SVGs, improvements to UI and icons
- 1.15.1: Support for Gemini Pro 1.5 and OpenAI Turbo models
- Beast release, over 430 commits, 10,000+ lines changed: [release notes](https://github.com/enricoros/big-AGI/releases/tag/v1.15.0), and changes [v1.14.1...v1.15.0](https://github.com/enricoros/big-AGI/compare/v1.14.1...v1.15.0)
### What's New in 1.14.1 · March 7, 2024 · Modelmorphic
+18
View File
@@ -10,6 +10,24 @@ by release.
- milestone: [1.16.0](https://github.com/enricoros/big-agi/milestone/16)
- work in progress: [big-AGI open roadmap](https://github.com/users/enricoros/projects/4/views/2), [help here](https://github.com/users/enricoros/projects/4/views/4)
### What's New in 1.16.0 · May 9, 2024 · Crystal Clear
- [Beam](https://big-agi.com/blog/beam-multi-model-ai-reasoning) core and UX improvements based on user feedback
- Chat cost estimation 💰 (enable it in Labs / hover the token counter)
- Major enhancements to the Auto-Diagrams tool
- Save/load chat files with Ctrl+S / Ctrl+O on desktop
- YouTube Transcriber Persona for chatting with video content, [#500](https://github.com/enricoros/big-AGI/pull/500)
- Improved formula rendering (LaTeX), and dark-mode diagrams, [#508](https://github.com/enricoros/big-AGI/issues/508), [#520](https://github.com/enricoros/big-AGI/issues/520)
- More: code soft-wrap, chat text selection toolbar, 3x faster on Apple silicon, [#517](https://github.com/enricoros/big-AGI/issues/517), [#507](https://github.com/enricoros/big-AGI/pull/507)
- Update Anthropic, Groq, Ollama, OpenAI, OpenRouter, Perplexity models
- Developers: update the LLMs data structures
### What's New in 1.15.1 · April 10, 2024 (minor release, models support)
- Support for the newly released Gemini Pro 1.5 models
- Support for the new OpenAI 2024-04-09 Turbo models
- Resilience fixes after the large success of 1.15.0
### What's New in 1.15.0 · April 1, 2024 · Beam
- ⚠️ [**Beam**: the multi-model AI chat](https://big-agi.com/blog/beam-multi-model-ai-reasoning). find better answers, faster - a game-changer for brainstorming, decision-making, and creativity. [#443](https://github.com/enricoros/big-AGI/issues/443)
+2 -2
View File
@@ -1,12 +1,12 @@
{
"name": "big-agi",
"version": "1.15.1",
"version": "1.16.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "big-agi",
"version": "1.15.1",
"version": "1.16.0",
"hasInstallScript": true,
"dependencies": {
"@emotion/cache": "^11.11.0",
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "big-agi",
"version": "1.15.1",
"version": "1.16.0",
"private": true,
"author": "Enrico Ros <enrico.ros@gmail.com>",
"repository": "https://github.com/enricoros/big-agi",
Binary file not shown.

After

Width:  |  Height:  |  Size: 248 KiB

@@ -117,9 +117,10 @@ export function Composer(props: {
// external state
const { openPreferencesTab /*, setIsFocusedMode*/ } = useOptimaLayout();
const { labsAttachScreenCapture, labsCameraDesktop } = useUXLabsStore(useShallow(state => ({
const { labsAttachScreenCapture, labsCameraDesktop, labsShowCost } = useUXLabsStore(useShallow(state => ({
labsAttachScreenCapture: state.labsAttachScreenCapture,
labsCameraDesktop: state.labsCameraDesktop,
labsShowCost: state.labsShowCost,
})));
const timeToShowTips = useAppStateStore(state => state.usageCount > 2);
const { novel: explainShiftEnter, touch: touchShiftEnter } = useUICounter('composer-shift-enter');
@@ -675,8 +676,8 @@ export function Composer(props: {
<TokenProgressbarMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} tokenPriceIn={tokenPriceIn} tokenPriceOut={tokenPriceOut} />
)}
{!showChatReplyTo && !!tokenLimit && (
<TokenBadgeMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} tokenPriceIn={tokenPriceIn} tokenPriceOut={tokenPriceOut} showExcess absoluteBottomRight />
{!showChatReplyTo && tokenLimit > 0 && (
<TokenBadgeMemo direct={tokensComposer} history={tokensHistory} responseMax={tokensReponseMax} limit={tokenLimit} tokenPriceIn={tokenPriceIn} tokenPriceOut={tokenPriceOut} showCost={labsShowCost} showExcess absoluteBottomRight />
)}
</Box>
@@ -8,8 +8,20 @@ function alignRight(value: number, columnSize: number = 8) {
return str.padStart(columnSize);
}
/**
 * Renders a dollar amount for compact display.
 * Amounts of one dollar or more are shown in dollars with two decimals;
 * smaller amounts are shown in cents, with an extra decimal of precision
 * below one cent (cost < $0.01).
 */
function formatCost(cost: number) {
  // dollar range: fixed two decimals
  if (cost >= 1)
    return '$ ' + cost.toFixed(2);
  // sub-dollar range: display in cents, finer precision under 1¢
  const cents = cost * 100;
  const centDecimals = cost < 0.010 ? 2 : 1;
  return cents.toFixed(centDecimals) + ' ¢';
}
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number, tokenPriceIn?: number, tokenPriceOut?: number): { color: ColorPaletteProp; message: string; remainingTokens: number } {
export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, historyTokens?: number, responseMaxTokens?: number, tokenPriceIn?: number, tokenPriceOut?: number): {
color: ColorPaletteProp,
message: string,
remainingTokens: number,
costMax?: number,
costMin?: number,
} {
const usedInputTokens = directTokens + (historyTokens || 0);
const usedMaxTokens = usedInputTokens + (responseMaxTokens || 0);
const remainingTokens = tokenLimit - usedMaxTokens;
@@ -18,6 +30,10 @@ export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, h
// message
let message: string = gteLimit ? '⚠️ ' : '';
// costs
let costMax: number | undefined = undefined;
let costMin: number | undefined = undefined;
// no limit: show used tokens only
if (!tokenLimit) {
message += `Requested: ${usedMaxTokens.toLocaleString()} tokens`;
@@ -33,24 +49,26 @@ export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, h
// add the price, if available
if (tokenPriceIn || tokenPriceOut) {
const costIn = tokenPriceIn ? usedInputTokens * tokenPriceIn / 1E6 : undefined;
costMin = tokenPriceIn ? usedInputTokens * tokenPriceIn / 1E6 : undefined;
const costOutMax = (tokenPriceOut && responseMaxTokens) ? responseMaxTokens * tokenPriceOut / 1E6 : undefined;
if (costIn || costOutMax) {
if (costMin || costOutMax) {
message += `\n\n\n▶ Chat Turn Cost (max, approximate)\n`;
if (costIn) message += '\n' +
if (costMin) message += '\n' +
` Input tokens: ${alignRight(usedInputTokens)}\n` +
` Input Price $/M: ${tokenPriceIn!.toFixed(2).padStart(8)}\n` +
` Input cost: ${('$' + costIn!.toFixed(3)).padStart(8)}\n`;
` Input cost: ${('$' + costMin!.toFixed(4)).padStart(8)}\n`;
if (costOutMax) message += '\n' +
` Max output tokens: ${alignRight(responseMaxTokens!)}\n` +
` Output Price $/M: ${tokenPriceOut!.toFixed(2).padStart(8)}\n` +
` Max output cost: ${('$' + costOutMax!.toFixed(3)).padStart(8)}\n`;
` Max output cost: ${('$' + costOutMax!.toFixed(4)).padStart(8)}\n`;
const costMax = costIn && costOutMax ? costIn + costOutMax : undefined;
if (costMin) message += '\n' +
` > Min turn cost: ${formatCost(costMin).padStart(8)}`;
costMax = (costMin && costOutMax) ? costMin + costOutMax : undefined;
if (costMax) message += '\n' +
` = Max turn cost: ${('$' + costMax.toFixed(4)).padStart(8)}`;
` < Max turn cost: ${formatCost(costMax).padStart(8)}`;
}
}
}
@@ -69,11 +87,11 @@ export function tokensPrettyMath(tokenLimit: number | 0, directTokens: number, h
? 'warning'
: 'primary';
return { color, message, remainingTokens };
return { color, message, remainingTokens, costMax, costMin };
}
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.JSX.Element }) =>
export const TokenTooltip = (props: { message: string | null, color: ColorPaletteProp, placement?: 'top' | 'top-end', children: React.ReactElement }) =>
<Tooltip
placement={props.placement}
variant={props.color !== 'primary' ? 'solid' : 'soft'} color={props.color}
@@ -104,41 +122,57 @@ function TokenBadge(props: {
tokenPriceIn?: number,
tokenPriceOut?: number,
showCost?: boolean
showExcess?: boolean,
absoluteBottomRight?: boolean,
inline?: boolean,
}) {
const { message, color, remainingTokens } = tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
const { message, color, remainingTokens, costMax, costMin } =
tokensPrettyMath(props.limit, props.direct, props.history, props.responseMax, props.tokenPriceIn, props.tokenPriceOut);
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
? Math.abs(remainingTokens)
: props.direct;
let badgeValue: string;
const showAltCosts = !!props.showCost && !!costMax && costMin !== undefined;
if (showAltCosts) {
badgeValue = '< ' + formatCost(costMax);
} else {
// show the direct tokens, unless we exceed the limit and 'showExcess' is enabled
const value = (props.showExcess && (props.limit && remainingTokens <= 0))
? Math.abs(remainingTokens)
: props.direct;
badgeValue = value.toLocaleString();
}
const shallHide = !props.direct && remainingTokens >= 0 && !showAltCosts;
if (shallHide) return null;
return (
<Badge
variant='solid' color={color} max={100000}
invisible={!props.direct && remainingTokens >= 0}
badgeContent={
<TokenTooltip color={color} message={message} placement='top-end'>
<span>{value.toLocaleString()}</span>
</TokenTooltip>
}
sx={{
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
cursor: 'help',
}}
slotProps={{
badge: {
sx: {
// the badge (not the tooltip)
fontFamily: 'code',
fontSize: 'sm',
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
<TokenTooltip color={color} message={message} placement='top-end'>
<Badge
variant='soft' color={color} max={1000000}
// invisible={shallHide}
badgeContent={badgeValue}
slotProps={{
root: {
sx: {
...((props.absoluteBottomRight) && { position: 'absolute', bottom: 8, right: 8 }),
cursor: 'help',
},
},
},
}}
/>
badge: {
sx: {
// the badge (not the tooltip)
// boxShadow: 'sm',
fontFamily: 'code',
fontSize: 'xs',
...((props.absoluteBottomRight || props.inline) && { position: 'static', transform: 'none' }),
},
},
}}
/>
</TokenTooltip>
);
}
+2 -2
View File
@@ -111,7 +111,7 @@ export function AppNews() {
return <React.Fragment key={idx}>
{/* Inject the Beam item here*/}
{idx === 0 && (
{idx === 2 && (
<Box sx={{ mb: 3 }}>
{beamNewsCallout}
</Box>
@@ -191,7 +191,7 @@ export function AppNews() {
onClick={() => setLastNewsIdx(index => index + NEWS_LOAD_STEP)}
endDecorator={<ExpandMoreIcon />}
>
Load Previous News
Previous News
</Button>
)}
+1 -1
View File
@@ -14,7 +14,7 @@ export const beamNewsCallout =
<Card variant='solid' invertedColors>
<CardContent sx={{ gap: 2 }}>
<Typography level='title-lg'>
Beam - just launched in 1.15
Beam - launched in 1.15
</Typography>
<Typography level='body-sm'>
Beam is a world-first, multi-model AI chat modality that accelerates the discovery of superior solutions by leveraging the collective strengths of diverse LLMs.
+33 -13
View File
@@ -17,8 +17,12 @@ import { Link } from '~/common/components/Link';
import { clientUtmSource } from '~/common/util/pwaUtils';
import { platformAwareKeystrokes } from '~/common/components/KeyStroke';
import { beamBlogUrl } from './beam.data';
// Cover Images
// A landscape image of a capybara made entirely of clear, translucent crystal, wearing oversized black sunglasses, sitting at a sleek, minimalist desk. The desk is bathed in a soft, ethereal light emanating from within the capybara, symbolizing clarity and transparency. The capybara is typing on a futuristic, holographic keyboard, with floating code snippets and diagrams surrounding it, illustrating an improved developer experience and Auto-Diagrams feature. The background is a clean, white space with subtle, geometric patterns. Close-up photography style with a bokeh effect.
import coverV116 from '../../../public/images/covers/release-cover-v1.16.0.png';
// (not exactly) Imagine a futuristic, holographically bounded space. Inside this space, four capybaras stand. Three of them are in various stages of materialization, their forms made up of thousands of tiny, vibrant particles of electric blues, purples, and greens. These particles represent the merging of different intelligent inputs, symbolizing the concept of 'Beaming'. Positioned slightly towards the center and ahead of the others, the fourth capybara is fully materialized and composed of shimmering golden cotton candy, representing the optimal solution the 'Beam' feature seeks to achieve. The golden capybara gazes forward confidently, embodying a target achieved. Illuminated grid lines softly glow on the floor and walls of the setting, amplifying the futuristic aspect. In front of the golden capybara, floating, holographic interfaces depict complex networks of points and lines symbolizing the solution space 'Beaming' explores. The capybara interacts with these interfaces, implying the user's ability to control and navigate towards the best outcomes.
import coverV115 from '../../../public/images/covers/release-cover-v1.15.0.png';
// An image of a capybara sculpted entirely from iridescent blue cotton candy, gazing into a holographic galaxy of floating AI model icons (representing various AI models like Perplexity, Groq, etc.). The capybara is wearing a lightweight, futuristic headset, and its paws are gesturing as if orchestrating the movement of the models in the galaxy. The backdrop is minimalist, with occasional bursts of neon light beams, creating a sense of depth and wonder. Close-up photography, bokeh effect, with a dark but vibrant background to make the colors pop.
@@ -27,7 +31,6 @@ import coverV114 from '../../../public/images/covers/release-cover-v1.14.0.png';
import coverV113 from '../../../public/images/covers/release-cover-v1.13.0.png';
// An image of a capybara sculpted entirely from black cotton candy, set against a minimalist backdrop with splashes of bright, contrasting sparkles. The capybara is calling on a 3D origami old-school pink telephone and the camera is zooming on the telephone. Close up photography, bokeh, white background.
import coverV112 from '../../../public/images/covers/release-cover-v1.12.0.png';
import { beamBlogUrl } from './beam.data';
interface NewsItem {
@@ -57,7 +60,24 @@ export const NewsItems: NewsItem[] = [
]
}*/
{
versionCode: '1.15.1',
versionCode: '1.16',
versionName: 'Crystal Clear',
versionDate: new Date('2024-05-09T00:00:00Z'),
versionCoverImage: coverV116,
items: [
{ text: <><B href={beamBlogUrl} wow>Beam</B> core and UX improvements based on user feedback</>, issue: 470, icon: ChatBeamIcon },
{ text: <>Chat <B>cost estimation</B>, enable in labs 💰</> },
{ text: <>Major <B>Auto-Diagrams</B> enhancements</> },
{ text: <>Save/load chat files with Ctrl+S / O</>, issue: 466 },
{ text: <><B issue={500}>YouTube Transcriber</B> persona: chat with videos</>, issue: 500 },
{ text: <>Improved <B issue={508}>formula render</B>, dark-mode diagrams</>, issue: 508 },
{ text: <>More: <B issue={517}>code soft-wrap</B>, selection toolbar, <B issue={507}>3x faster</B> on Apple silicon</>, issue: 507 },
{ text: <>Updated <B>Anthropic</B>, <B>Groq</B>, <B>Ollama</B>, <B>OpenAI</B>, <B>OpenRouter</B>, <B>Perplexity</B> models</> },
{ text: <>Developers: update LLMs data structures</>, dev: true },
],
},
{
versionCode: '1.15',
versionName: 'Beam',
versionDate: new Date('2024-04-10T08:00:00Z'),
versionCoverImage: coverV115,
@@ -73,7 +93,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.14.1',
versionCode: '1.14',
versionName: 'Modelmorphic',
versionCoverImage: coverV114,
versionDate: new Date('2024-03-07T08:00:00Z'),
@@ -92,7 +112,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.13.0',
versionCode: '1.13',
versionName: 'Multi + Mind',
versionMoji: '🧠🔀',
versionDate: new Date('2024-02-08T07:47:00Z'),
@@ -108,7 +128,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.12.0',
versionCode: '1.12',
versionName: 'AGI Hotline',
versionMoji: '✨🗣️',
versionDate: new Date('2024-01-26T12:30:00Z'),
@@ -127,7 +147,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.11.0',
versionCode: '1.11',
versionName: 'Singularity',
versionMoji: '🌌🌠',
versionDate: new Date('2024-01-16T06:30:00Z'),
@@ -141,7 +161,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.10.0',
versionCode: '1.10',
versionName: 'The Year of AGI',
// versionMoji: '🎊✨',
versionDate: new Date('2024-01-06T08:00:00Z'),
@@ -155,7 +175,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.9.0',
versionCode: '1.9',
versionName: 'Creative Horizons',
// versionMoji: '🎨🌌',
versionDate: new Date('2023-12-28T22:30:00Z'),
@@ -170,7 +190,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.8.0',
versionCode: '1.8',
versionName: 'To The Moon And Back',
// versionMoji: '🚀🌕🔙❤️',
versionDate: new Date('2023-12-20T09:30:00Z'),
@@ -187,7 +207,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.7.0',
versionCode: '1.7',
versionName: 'Attachment Theory',
// versionDate: new Date('2023-12-11T06:00:00Z'), // 1.7.3
versionDate: new Date('2023-12-10T12:00:00Z'), // 1.7.0
@@ -203,7 +223,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.6.0',
versionCode: '1.6',
versionName: 'Surf\'s Up',
versionDate: new Date('2023-11-28T21:00:00Z'),
items: [
@@ -218,7 +238,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.5.0',
versionCode: '1.5',
versionName: 'Loaded!',
versionDate: new Date('2023-11-19T21:00:00Z'),
items: [
@@ -234,7 +254,7 @@ export const NewsItems: NewsItem[] = [
],
},
{
versionCode: '1.4.0',
versionCode: '1.4',
items: [
{ text: <><B>Share and clone</B> conversations, with public links</> },
{ text: <><B code='/docs/config-azure-openai.md'>Azure</B> models, incl. gpt-4-32k</> },
+2 -2
View File
@@ -19,8 +19,8 @@ const shortcutsMd = platformAwareKeystrokes(`
| Ctrl + Shift + V | Attach clipboard (better than Ctrl + V) |
| Ctrl + M | Microphone (voice typing) |
| **Chats** | |
| Ctrl + O | Open Chat ... |
| Ctrl + S | Save Chat ... |
| Ctrl + O | Open Chat File ... |
| Ctrl + S | Save Chat File ... |
| Ctrl + Alt + N | **New** chat |
| Ctrl + Alt + X | **Reset** chat |
| Ctrl + Alt + D | **Delete** chat |
@@ -2,6 +2,7 @@ import * as React from 'react';
import { FormControl, Typography } from '@mui/joy';
import AddAPhotoIcon from '@mui/icons-material/AddAPhoto';
import LocalAtmOutlinedIcon from '@mui/icons-material/LocalAtmOutlined';
import ScreenshotMonitorIcon from '@mui/icons-material/ScreenshotMonitor';
import SpeedIcon from '@mui/icons-material/Speed';
import TitleIcon from '@mui/icons-material/Title';
@@ -26,6 +27,7 @@ export function UxLabsSettings() {
labsCameraDesktop, setLabsCameraDesktop,
labsChatBarAlt, setLabsChatBarAlt,
labsHighPerformance, setLabsHighPerformance,
labsShowCost, setLabsShowCost,
} = useUXLabsStore();
return <>
@@ -52,6 +54,11 @@ export function UxLabsSettings() {
checked={labsCameraDesktop} onChange={setLabsCameraDesktop}
/>}
<FormSwitchControl
title={<><LocalAtmOutlinedIcon sx={{ fontSize: 'lg', mr: 0.5, mb: 0.25 }} />Cost of messages</>} description={labsShowCost ? 'Show when available' : 'Disabled'}
checked={labsShowCost} onChange={setLabsShowCost}
/>
{/*
Other Graduated (removed or backlog):
- <Link href='https://github.com/enricoros/big-AGI/issues/359' target='_blank'>Draw App</Link>
+6
View File
@@ -21,6 +21,9 @@ interface UXLabsStore {
labsHighPerformance: boolean;
setLabsHighPerformance: (labsHighPerformance: boolean) => void;
labsShowCost: boolean;
setLabsShowCost: (labsShowCost: boolean) => void;
}
export const useUXLabsStore = create<UXLabsStore>()(
@@ -39,6 +42,9 @@ export const useUXLabsStore = create<UXLabsStore>()(
labsHighPerformance: false,
setLabsHighPerformance: (labsHighPerformance: boolean) => set({ labsHighPerformance }),
labsShowCost: false,
setLabsShowCost: (labsShowCost: boolean) => set({ labsShowCost }),
}),
{
name: 'app-ux-labs',