Move server-side functions around

This commit is contained in:
Enrico Ros
2025-05-29 16:22:08 -07:00
parent 0d41c92c01
commit 088e67c235
5 changed files with 25 additions and 23 deletions
-16
View File
@@ -97,17 +97,6 @@ export function asValidURL(textString: string | null, relaxProtocol: boolean = f
}
}
/**
 * Add https if missing, and remove trailing slash if present and the path starts with a slash.
 */
export function fixupHost(host: string, apiPath: string): string {
  // Bare hostnames (no protocol) get a default https:// prefix.
  let fixed = host.startsWith('http') ? host : `https://${host}`;
  // Avoid a '//' when the API path supplies its own leading slash.
  if (fixed.endsWith('/') && apiPath.startsWith('/'))
    fixed = fixed.slice(0, -1);
  return fixed;
}
/**
* Extracts URLs from a text string.
*/
@@ -192,11 +181,6 @@ export function base64ToArrayBuffer(base64Data: string) {
return base64ToUint8Array(base64Data).buffer;
}
/** Decodes a base64 payload into a Blob tagged with the given MIME type. */
export function base64ToBlob(base64Data: string, mimeType: string) {
  const decodedBytes = Buffer.from(base64Data, 'base64');
  return new Blob([decodedBytes], { type: mimeType });
}
/**
* Creates a Blob Object URL (that can be opened in a new tab with window.open, for instance) from a Data URL
@@ -5,11 +5,11 @@ import { env } from '~/server/env';
import { fetchJsonOrTRPCThrow } from '~/server/trpc/trpc.router.fetchers';
import { LLM_IF_ANT_PromptCaching, LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { fixupHost } from '~/common/util/urlUtils';
import { ListModelsResponse_schema, ModelDescriptionSchema } from '../llm.server.types';
import { hardcodedAnthropicModels, hardcodedAnthropicVariants } from './anthropic.models';
import { fixupHost } from '~/modules/llms/server/openai/openai.router';
// configuration and defaults
@@ -8,10 +8,9 @@ import { fetchJsonOrTRPCThrow } from '~/server/trpc/trpc.router.fetchers';
import { GeminiWire_API_Models_List, GeminiWire_Safety } from '~/modules/aix/server/dispatch/wiretypes/gemini.wiretypes';
import { fixupHost } from '~/common/util/urlUtils';
import { ListModelsResponse_schema } from '../llm.server.types';
import { geminiDevCheckForParserMisses_DEV, geminiDevCheckForSuperfluousModels_DEV, geminiFilterModels, geminiModelsAddVariants, geminiModelToModelDescription, geminiSortModels } from './gemini.models';
import { fixupHost } from '~/modules/llms/server/openai/openai.router';
// Default hosts
@@ -6,12 +6,12 @@ import { fetchJsonOrTRPCThrow, fetchTextOrTRPCThrow } from '~/server/trpc/trpc.r
import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
import { capitalizeFirstLetter } from '~/common/util/textUtils';
import { fixupHost } from '~/common/util/urlUtils';
import { ListModelsResponse_schema } from '../llm.server.types';
import { OLLAMA_BASE_MODELS, OLLAMA_PREV_UPDATE } from './ollama.models';
import { wireOllamaListModelsSchema, wireOllamaModelInfoSchema } from './ollama.wiretypes';
import { fixupHost } from '~/modules/llms/server/openai/openai.router';
// Default hosts
@@ -10,7 +10,6 @@ import type { T2ICreateImageAsyncStreamOp } from '~/modules/t2i/t2i.server';
import { heartbeatsWhileAwaiting } from '~/modules/aix/server/dispatch/heartbeatsWhileAwaiting';
import { Brand } from '~/common/app.config';
import { base64ToBlob, fixupHost } from '~/common/util/urlUtils';
import { OpenAIWire_API_Images_Generations, OpenAIWire_API_Models_List, OpenAIWire_API_Moderations_Create } from '~/modules/aix/server/dispatch/wiretypes/openai.wiretypes';
@@ -61,6 +60,18 @@ export type OpenAIAccessSchema = z.infer<typeof openAIAccessSchema>;
// export type OpenAIHistorySchema = z.infer<typeof openAIHistorySchema>;
// Fixup host function
/** Add https if missing, and remove trailing slash if present and the path starts with a slash. */
export function fixupHost(host: string, apiPath: string): string {
  // Default to https:// when the caller passed a protocol-less host.
  const withProtocol = !host.startsWith('http') ? `https://${host}` : host;
  // Drop a trailing '/' that would collide with the path's leading '/'.
  return withProtocol.endsWith('/') && apiPath.startsWith('/')
    ? withProtocol.slice(0, -1)
    : withProtocol;
}
// Router Input Schemas
const listModelsInputSchema = z.object({
@@ -334,7 +345,7 @@ export const llmOpenAIRouter = createTRPCRouter({
const { base64, mimeType } = editConfig.inputImages[i];
requestBody.append(
imagesCount === 1 ? 'image' : 'image[]',
base64ToBlob(base64, mimeType),
server_base64ToBlob(base64, mimeType),
`image_${i}.${mimeType.split('/')[1] || 'png'}`, // important to be a unique filename
);
}
@@ -343,7 +354,7 @@ export const llmOpenAIRouter = createTRPCRouter({
if (editConfig.maskImage)
requestBody.append(
'mask',
base64ToBlob(editConfig.maskImage.base64, editConfig.maskImage.mimeType),
server_base64ToBlob(editConfig.maskImage.base64, editConfig.maskImage.mimeType),
`mask.${editConfig.maskImage.mimeType.split('/')[1] || 'png'}`,
);
}
@@ -468,6 +479,7 @@ const DEFAULT_PERPLEXITY_HOST = 'https://api.perplexity.ai';
const DEFAULT_TOGETHERAI_HOST = 'https://api.together.xyz';
const DEFAULT_XAI_HOST = 'https://api.x.ai';
/**
* Get a random key from a comma-separated list of API keys
* @param multiKeyString Comma-separated string of API keys
@@ -764,3 +776,10 @@ async function openaiPOSTOrThrow<TOut extends object, TPostBody extends object |
const { headers, url } = openAIAccess(access, modelRefId, apiPath);
return await fetchJsonOrTRPCThrow<TOut, TPostBody>({ url, method: 'POST', headers, body, name: `OpenAI/${serverCapitalizeFirstLetter(access.dialect)}` });
}
/** @serverSide Buffer is a Node.js API, not a Browser API. */
function server_base64ToBlob(base64Data: string, mimeType: string) {
  // Buffer performs the base64 decode; Blob wraps the raw bytes with the MIME type.
  return new Blob([Buffer.from(base64Data, 'base64')], { type: mimeType });
}