diff --git a/src/common/stores/llms/llms.types.ts b/src/common/stores/llms/llms.types.ts
index d00f5b515..399bd1c29 100644
--- a/src/common/stores/llms/llms.types.ts
+++ b/src/common/stores/llms/llms.types.ts
@@ -143,7 +143,6 @@ export type DModelInterfaceV1 =
   | 'ant-tools-search'
   | 'oai-chat-vision'
   | 'oai-chat-reasoning'
-  | 'oai-complete'
   | 'ant-prompt-caching'
   | 'oai-prompt-caching'
   | 'oai-realtime'
@@ -174,7 +173,6 @@ export const LLM_IF_Outputs_Audio: DModelInterfaceV1 = 'outputs-audio';
 export const LLM_IF_Outputs_Image: DModelInterfaceV1 = 'outputs-image';
 export const LLM_IF_Outputs_NoText: DModelInterfaceV1 = 'outputs-no-text';
 export const LLM_IF_Tools_WebSearch: DModelInterfaceV1 = 'tools-web-search';
-export const LLM_IF_OAI_Complete: DModelInterfaceV1 = 'oai-complete';
 export const LLM_IF_ANT_PromptCaching: DModelInterfaceV1 = 'ant-prompt-caching';
 export const LLM_IF_OAI_PromptCaching: DModelInterfaceV1 = 'oai-prompt-caching';
 export const LLM_IF_OAI_Responses: DModelInterfaceV1 = 'oai-responses';
@@ -213,8 +211,6 @@ export const LLMS_ALL_INTERFACES = [
   LLM_IF_HOTFIX_StripImages, // remove images from input (e.g. o3-mini-2025-01-31)
   LLM_IF_HOTFIX_StripSys0, // strip system instruction (e.g. Gemini Image Generation 2025-03-13), excludes Sys0ToUsr0
   LLM_IF_HOTFIX_Sys0ToUsr0, // downgrade system to user messages for this model (e.g. o1-mini-2024-09-12)
-  // old/unused
-  LLM_IF_OAI_Complete, // UNUSED - older text completion, pre-chats
 ] as const;
 
 // Future changes?
diff --git a/src/modules/llms/models-modal/ModelsList.tsx b/src/modules/llms/models-modal/ModelsList.tsx
index 630f3b3a4..25be60fc9 100644
--- a/src/modules/llms/models-modal/ModelsList.tsx
+++ b/src/modules/llms/models-modal/ModelsList.tsx
@@ -9,7 +9,7 @@ import VisibilityOutlinedIcon from '@mui/icons-material/VisibilityOutlined';
 
 import type { DModelsServiceId } from '~/common/stores/llms/llms.service.types';
 import { isLLMChatFree_cached } from '~/common/stores/llms/llms.pricing';
-import { DLLM, DLLMId, getLLMContextTokens, getLLMLabel, getLLMMaxOutputTokens, isLLMHidden, LLM_IF_ANT_PromptCaching, LLM_IF_GEM_CodeExecution, LLM_IF_OAI_Complete, LLM_IF_OAI_Fn, LLM_IF_OAI_Json, LLM_IF_OAI_PromptCaching, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision, LLM_IF_Outputs_Audio, LLM_IF_Outputs_Image, LLM_IF_Tools_WebSearch } from '~/common/stores/llms/llms.types';
+import { DLLM, DLLMId, getLLMContextTokens, getLLMLabel, getLLMMaxOutputTokens, isLLMHidden, LLM_IF_ANT_PromptCaching, LLM_IF_GEM_CodeExecution, LLM_IF_OAI_Fn, LLM_IF_OAI_Json, LLM_IF_OAI_PromptCaching, LLM_IF_OAI_Reasoning, LLM_IF_OAI_Vision, LLM_IF_Outputs_Audio, LLM_IF_Outputs_Image, LLM_IF_Tools_WebSearch } from '~/common/stores/llms/llms.types';
 import { GoodTooltip } from '~/common/components/GoodTooltip';
 import { PhGearSixIcon } from '~/common/components/icons/phosphor/PhGearSixIcon';
 import { STAR_EMOJI, StarredToggle, starredToggleStyle } from '~/common/components/StarIcons';
@@ -155,7 +155,6 @@ export const ModelItem = React.memo(function ModelItem(props: {
       // Ignored
       case LLM_IF_OAI_Json:
       case LLM_IF_OAI_Fn:
-      case LLM_IF_OAI_Complete:
       case LLM_IF_GEM_CodeExecution:
         return null;
     }