DLLMs: Recyclable Parameters configurator

Enrico Ros
2025-01-09 02:32:32 -08:00
parent 792f3f20f7
commit e5e9f489d3
3 changed files with 81 additions and 29 deletions
@@ -0,0 +1,41 @@
+import * as React from 'react';
+
+import type { DLLM } from '~/common/stores/llms/llms.types';
+import type { DModelParameterId, DModelParameterValues } from '~/common/stores/llms/llms.parameters';
+import { InlineError } from '~/common/components/InlineError';
+import { llmsStoreActions } from '~/common/stores/llms/store-llms';
+
+import { LLMParametersEditor } from './LLMParametersEditor';
+
+
+export function LLMOptionsGlobal(props: { llm: DLLM }) {
+
+  // derived input
+  const llm = props.llm;
+  const llmId = llm?.id ?? null;
+
+  // handlers
+
+  const handleChangeParameter = React.useCallback((partial: Partial<DModelParameterValues>) => {
+    llmsStoreActions().updateLLMUserParameters(llmId, partial);
+  }, [llmId]);
+
+  const handleRemoveParameter = React.useCallback((parameterId: DModelParameterId) => {
+    llmsStoreActions().deleteLLMUserParameter(llmId, parameterId);
+  }, [llmId]);
+
+  if (!llmId)
+    return <InlineError error='No model selected' />;
+
+  return (
+    <LLMParametersEditor
+      maxOutputTokens={llm.maxOutputTokens}
+      parameterSpecs={llm.parameterSpecs}
+      baselineParameters={llm.initialParameters}
+      parameters={llm.userParameters}
+      onChangeParameter={handleChangeParameter}
+      onRemoveParameter={handleRemoveParameter}
+    />
+  );
+}
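
Aside on reuse (a sketch, not part of this commit): since LLMParametersEditor no longer reads or writes the llms store itself, LLMOptionsGlobal above is just one possible host. The hypothetical component below drives the same editor from plain local React state, e.g. to stage parameter overrides without touching any store; the component name, baseline values, and constants are made up for the demo:

import * as React from 'react';

import type { DModelParameterId, DModelParameterValues } from '~/common/stores/llms/llms.parameters';

import { LLMParametersEditor } from './LLMParametersEditor';

// Hypothetical host: keeps the overrides in local state instead of the
// llms store, which is the reuse the decoupled editor enables.
export function LocalParametersDemo() {

  // store-free parameter overrides, same shape the store would keep
  const [parameters, setParameters] = React.useState<DModelParameterValues | undefined>(undefined);

  const handleChange = React.useCallback((partial: DModelParameterValues) => {
    setParameters(prev => ({ ...prev, ...partial }));
  }, []);

  const handleRemove = React.useCallback((parameterId: DModelParameterId) => {
    setParameters(prev => {
      if (!prev) return prev;
      const next = { ...prev };
      delete next[parameterId];
      return next;
    });
  }, []);

  return (
    <LLMParametersEditor
      maxOutputTokens={4096}                                                // made-up constant
      parameterSpecs={[]}                                                   // no vendor params in this sketch
      baselineParameters={{ llmTemperature: 0.5, llmResponseTokens: 1024 }} // made-up defaults
      parameters={parameters}
      onChangeParameter={handleChange}
      onRemoveParameter={handleRemove}
    />
  );
}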
@@ -13,7 +13,7 @@ import { GoodModal } from '~/common/components/modals/GoodModal';
 import { llmsStoreActions } from '~/common/stores/llms/store-llms';
 import { useDefaultLLMIDs, useLLM } from '~/common/stores/llms/llms.hooks';
 
-import { LLMOptions } from './LLMOptions';
+import { LLMOptionsGlobal } from './LLMOptionsGlobal';
 
 
 function prettyPricingComponent(pricingChatGenerate: DPricingChatGenerate): React.ReactNode {
@@ -98,7 +98,7 @@ export function LLMOptionsModal(props: { id: DLLMId, onClose: () => void }) {
     >
 
       <Box sx={{ display: 'grid', gap: 'var(--Card-padding)' }}>
-        <LLMOptions llm={llm} />
+        <LLMOptionsGlobal llm={llm} />
       </Box>
 
       <Divider />
@@ -3,48 +3,59 @@ import * as React from 'react';
 
 import { IconButton, Tooltip } from '@mui/joy';
 import LocalFireDepartmentIcon from '@mui/icons-material/LocalFireDepartment';
 
-import type { DLLM } from '~/common/stores/llms/llms.types';
-import { DModelParameterSpec, FALLBACK_LLM_PARAM_RESPONSE_TOKENS, FALLBACK_LLM_PARAM_TEMPERATURE, getAllModelParameterValues } from '~/common/stores/llms/llms.parameters';
+import { DModelParameterId, DModelParameterSpec, DModelParameterValues, FALLBACK_LLM_PARAM_RESPONSE_TOKENS, FALLBACK_LLM_PARAM_TEMPERATURE, getAllModelParameterValues } from '~/common/stores/llms/llms.parameters';
 import { FormSelectControl } from '~/common/components/forms/FormSelectControl';
 import { FormSliderControl } from '~/common/components/forms/FormSliderControl';
 import { InlineError } from '~/common/components/InlineError';
-import { llmsStoreActions } from '~/common/stores/llms/store-llms';
 
 
-const reasoningEffortOptions = [
-  { value: 'high', label: 'High', description: 'Deep, thorough analysis' },
-  { value: 'medium', label: 'Medium', description: 'Balanced reasoning depth' },
-  { value: 'low', label: 'Low', description: 'Quick, concise responses' },
-  { value: 'unspecified', label: 'Default', description: 'Default value (unset)' },
+const _UNSPECIFIED = '_UNSPECIFIED' as const;
+
+const _reasoningEffortOptions = [
+  { value: 'high', label: 'High', description: 'Deep, thorough analysis' } as const,
+  { value: 'medium', label: 'Medium', description: 'Balanced reasoning depth' } as const,
+  { value: 'low', label: 'Low', description: 'Quick, concise responses' } as const,
+  { value: _UNSPECIFIED, label: 'Default', description: 'Default value (unset)' } as const,
 ] as const;
 
 
-export function LLMOptions(props: { llm: DLLM }) {
+export function LLMParametersEditor(props: {
+  // consts
+  maxOutputTokens: number | null,
+  parameterSpecs: DModelParameterSpec<DModelParameterId>[],
+  baselineParameters: DModelParameterValues,
+  // value and onChange for the parameters
+  parameters: undefined | DModelParameterValues,
+  onChangeParameter: (parameterValue: DModelParameterValues) => void,
+  onRemoveParameter: (parameterId: DModelParameterId) => void,
+}) {
 
-  // input state
-  const { id: llmId, maxOutputTokens, initialParameters, userParameters, parameterSpecs } = props.llm;
+  // derived input
+  const { maxOutputTokens, parameterSpecs, baselineParameters, parameters, onChangeParameter, onRemoveParameter } = props;
 
-  // external state
-  const { updateLLMUserParameters, deleteLLMUserParameter } = llmsStoreActions();
 
-  const allParameters = getAllModelParameterValues(initialParameters, userParameters);
+  const allParameters = getAllModelParameterValues(baselineParameters, parameters);
 
   // derived state
-  const llmTemperature = allParameters?.llmTemperature ?? FALLBACK_LLM_PARAM_TEMPERATURE;
-  const llmResponseTokens = allParameters?.llmResponseTokens ?? FALLBACK_LLM_PARAM_RESPONSE_TOKENS;
-  const llmVndOaiReasoningEffort = allParameters?.llmVndOaiReasoningEffort;
+  const llmTemperature = allParameters.llmTemperature ?? FALLBACK_LLM_PARAM_TEMPERATURE;
+  const llmResponseTokens = allParameters.llmResponseTokens ?? FALLBACK_LLM_PARAM_RESPONSE_TOKENS;
+  const llmVndOaiReasoningEffort = allParameters.llmVndOaiReasoningEffort;
   const tempAboveOne = llmTemperature > 1;
 
   // more state (here because the initial state depends on props)
-  const [overheat, setOverheat] = React.useState(llmTemperature > 1);
+  const [overheat, setOverheat] = React.useState(tempAboveOne);
 
   // handlers
 
   const handleOverheatToggle = React.useCallback(() => {
+
     // snap to 1 when disabling overheating
     if (overheat && tempAboveOne)
-      updateLLMUserParameters(llmId, { llmTemperature: 1 });
-    setOverheat(!overheat);
-  }, [llmId, overheat, tempAboveOne, updateLLMUserParameters]);
+      onChangeParameter({ llmTemperature: 1 });
+
+    // toggle overheating
+    setOverheat(on => !on);
+  }, [onChangeParameter, overheat, tempAboveOne]);
 
   // find the reasoning effort parameter spec
@@ -60,7 +71,7 @@ export function LLMOptions(props: { llm: DLLM }) {
         min={0} max={overheat ? 2 : 1} step={0.1} defaultValue={0.5}
         valueLabelDisplay='on'
         value={llmTemperature}
-        onChange={value => updateLLMUserParameters(llmId, { llmTemperature: value })}
+        onChange={value => onChangeParameter({ llmTemperature: value })}
         endAdornment={
           <Tooltip title={overheat ? 'Disable LLM Overheating' : 'Increase Max LLM Temperature to 2'} sx={{ p: 1 }}>
             <IconButton
@@ -80,7 +91,7 @@ export function LLMOptions(props: { llm: DLLM }) {
         min={256} max={maxOutputTokens} step={256} defaultValue={1024}
         valueLabelDisplay='on'
         value={llmResponseTokens}
-        onChange={value => updateLLMUserParameters(llmId, { llmResponseTokens: value })}
+        onChange={value => onChangeParameter({ llmResponseTokens: value })}
       />
     ) : (
       <InlineError error='Max Output Tokens: Token computations are disabled because this model does not declare the context window size.' />
@@ -90,14 +101,14 @@ export function LLMOptions(props: { llm: DLLM }) {
       <FormSelectControl
         title='Reasoning Effort'
         tooltip='Controls how much effort the model spends on reasoning'
-        value={llmVndOaiReasoningEffort ?? 'unspecified'}
+        value={llmVndOaiReasoningEffort ?? _UNSPECIFIED}
         onChange={(value) => {
-          if (value === 'unspecified' || !value)
-            deleteLLMUserParameter(llmId, 'llmVndOaiReasoningEffort');
+          if (value === _UNSPECIFIED || !value)
+            onRemoveParameter('llmVndOaiReasoningEffort');
           else
-            updateLLMUserParameters(llmId, { 'llmVndOaiReasoningEffort': value });
+            onChangeParameter({ 'llmVndOaiReasoningEffort': value });
         }}
-        options={reasoningEffortOptions}
+        options={_reasoningEffortOptions}
       />
     )}
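
A note on the _UNSPECIFIED sentinel (explanatory sketch, not part of the commit): a controlled select needs a concrete value for every state, so "no user override" is encoded as a dedicated marker string rather than undefined, and picking it is translated back into a parameter removal. The same pattern in isolation, with hypothetical names:

// Standalone illustration of the sentinel pattern used by the editor above.
const UNSET = '_UNSET' as const;

type Effort = 'high' | 'medium' | 'low';

// store -> UI: a missing override is shown as the sentinel option
function toSelectValue(stored: Effort | undefined): Effort | typeof UNSET {
  return stored ?? UNSET;
}

// UI -> store: picking the sentinel deletes the override, anything else sets it
function fromSelectValue(picked: Effort | typeof UNSET, remove: () => void, set: (e: Effort) => void): void {
  if (picked === UNSET)
    remove();
  else
    set(picked);
}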