diff --git a/docs/README.md b/docs/README.md
index 6be2c07c4..f0456b99c 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -18,7 +18,7 @@ Detailed guides to configure AI models and advanced features in big-AGI.
- **Cloud AI Services**:
- **[Azure OpenAI](config-azure-openai.md)**
- **[OpenRouter](config-openrouter.md)**
- - Easy API key setup: **Anthropic**, **Deepseek**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **OpenPipe**, **Perplexity**, **TogetherAI**
+ - Easy API key setup: **Anthropic**, **Deepseek**, **Google AI**, **Groq**, **Mistral**, **OpenAI**, **OpenPipe**, **Perplexity**, **TogetherAI**, **xAI**
- **Local AI Integrations**:
diff --git a/docs/environment-variables.md b/docs/environment-variables.md
index 264531f0e..688c70547 100644
--- a/docs/environment-variables.md
+++ b/docs/environment-variables.md
@@ -38,6 +38,7 @@ OPENPIPE_API_KEY=
OPENROUTER_API_KEY=
PERPLEXITY_API_KEY=
TOGETHERAI_API_KEY=
+XAI_API_KEY=
# Model Observability: Helicone
HELICONE_API_KEY=
@@ -102,6 +103,7 @@ requiring the user to enter an API key
| `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional |
| `PERPLEXITY_API_KEY` | The API key for Perplexity | Optional |
| `TOGETHERAI_API_KEY` | The API key for Together AI | Optional |
+| `XAI_API_KEY` | The API key for xAI | Optional |
### LLM Observability: Helicone
diff --git a/docs/k8s/env-secret.yaml b/docs/k8s/env-secret.yaml
index 2cc48e33d..7b4caabd9 100644
--- a/docs/k8s/env-secret.yaml
+++ b/docs/k8s/env-secret.yaml
@@ -20,6 +20,7 @@ stringData:
AZURE_OPENAI_API_KEY: ""
ANTHROPIC_API_KEY: ""
ANTHROPIC_API_HOST: ""
+ DEEPSEEK_API_KEY: ""
GEMINI_API_KEY: ""
GROQ_API_KEY: ""
LOCALAI_API_HOST: ""
@@ -30,7 +31,7 @@ stringData:
OPENROUTER_API_KEY: ""
PERPLEXITY_API_KEY: ""
TOGETHERAI_API_KEY: ""
- DEEPSEEK_API_KEY: ""
+ XAI_API_KEY: ""
# Browse
PUPPETEER_WSS_ENDPOINT: ""
diff --git a/src/common/components/icons/vendors/XAIIcon.tsx b/src/common/components/icons/vendors/XAIIcon.tsx
new file mode 100644
index 000000000..9bf1edc85
--- /dev/null
+++ b/src/common/components/icons/vendors/XAIIcon.tsx
@@ -0,0 +1,11 @@
+import * as React from 'react';
+
+import { SvgIcon, SvgIconProps } from '@mui/joy';
+
+export function XAIIcon(props: SvgIconProps) {
+  return (
+    <SvgIcon viewBox='0 0 24 24' width='24' height='24' {...props}>
+      {/* xAI logomark path data (not recovered in this excerpt) */}
+    </SvgIcon>
+  );
+}
\ No newline at end of file
diff --git a/src/common/util/dMessageUtils.tsx b/src/common/util/dMessageUtils.tsx
index 12189ce44..a6de90162 100644
--- a/src/common/util/dMessageUtils.tsx
+++ b/src/common/util/dMessageUtils.tsx
@@ -354,6 +354,8 @@ export function prettyShortChatModelName(model: string | undefined): string {
// [Ollama]
if (model.includes(':'))
return model.replace(':latest', '').replaceAll(':', ' ');
+ // [xAI]
+ if (model.includes('grok-beta')) return 'Grok Beta';
return model;
}
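For context, ids containing ':' never reach the new branch, since the Ollama check above runs first. A couple of hypothetical spot-checks (not part of this PR):

```ts
import { prettyShortChatModelName } from '~/common/util/dMessageUtils';

prettyShortChatModelName('grok-beta');     // -> 'Grok Beta' (new [xAI] branch)
prettyShortChatModelName('llama3:latest'); // -> 'llama3' (Ollama branch matches ':' first)
prettyShortChatModelName('another-model'); // -> 'another-model' (falls through unchanged)
```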
diff --git a/src/modules/aix/AIX.README.md b/src/modules/aix/AIX.README.md
index f73cae894..56367632d 100644
--- a/src/modules/aix/AIX.README.md
+++ b/src/modules/aix/AIX.README.md
@@ -35,6 +35,7 @@ Built with tRPC, it manages the lifecycle of AI-generated content from request t
| OpenRouter | ✅ | ❌ (inconsistent) | | ✅ | Yes + 📦 | |
| Perplexity | ✅ | ❌ (rejected) | | ✅ | Yes + 📦 | |
| TogetherAI | ✅ | ✅ | | ✅ | Yes + 📦 | |
+| xAI | | | | | | |
| Ollama (2) | ❌ (broken) | ? | | | | |
Notes:
diff --git a/src/modules/aix/server/dispatch/chatGenerate/chatGenerate.dispatch.ts b/src/modules/aix/server/dispatch/chatGenerate/chatGenerate.dispatch.ts
index 98bd575dc..6efa06b1f 100644
--- a/src/modules/aix/server/dispatch/chatGenerate/chatGenerate.dispatch.ts
+++ b/src/modules/aix/server/dispatch/chatGenerate/chatGenerate.dispatch.ts
@@ -75,6 +75,7 @@ export function createChatGenerateDispatch(access: AixAPI_Access, model: AixAPI_
case 'openrouter':
case 'perplexity':
case 'togetherai':
+ case 'xai':
return {
request: {
...openAIAccess(access, model.id, '/v1/chat/completions'),
diff --git a/src/modules/backend/backend.router.ts b/src/modules/backend/backend.router.ts
index 2c3a30295..5c75b9eff 100644
--- a/src/modules/backend/backend.router.ts
+++ b/src/modules/backend/backend.router.ts
@@ -63,6 +63,7 @@ export const backendRouter = createTRPCRouter({
hasLlmOpenRouter: !!env.OPENROUTER_API_KEY,
hasLlmPerplexity: !!env.PERPLEXITY_API_KEY,
hasLlmTogetherAI: !!env.TOGETHERAI_API_KEY,
+ hasLlmXAI: !!env.XAI_API_KEY,
// others
hasDB: (!!env.MDB_URI) || (!!env.POSTGRES_PRISMA_URL && !!env.POSTGRES_URL_NON_POOLING),
hasBrowsing: !!env.PUPPETEER_WSS_ENDPOINT,
diff --git a/src/modules/backend/store-backend-capabilities.ts b/src/modules/backend/store-backend-capabilities.ts
index beb6a9a96..4e11ed648 100644
--- a/src/modules/backend/store-backend-capabilities.ts
+++ b/src/modules/backend/store-backend-capabilities.ts
@@ -22,6 +22,7 @@ export interface BackendCapabilities {
hasLlmOpenRouter: boolean;
hasLlmPerplexity: boolean;
hasLlmTogetherAI: boolean;
+ hasLlmXAI: boolean;
// others
hasDB: boolean;
hasBrowsing: boolean;
@@ -61,6 +62,7 @@ const useBackendCapabilitiesStore = create<BackendCapabilitiesStore>()(
hasLlmOpenRouter: false,
hasLlmPerplexity: false,
hasLlmTogetherAI: false,
+ hasLlmXAI: false,
hasDB: false,
hasBrowsing: false,
hasGoogleCustomSearch: false,
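This flag is the same capability the vendor declares via `hasBackendCapKey: 'hasLlmXAI'` later in this PR. A minimal sketch of the round trip, assuming the module's usual `getBackendCapabilities()` accessor:

```ts
import { getBackendCapabilities } from '~/modules/backend/store-backend-capabilities';

// server: backend.router.ts sets hasLlmXAI from !!env.XAI_API_KEY
// client: this store caches it; the vendor reads it via hasBackendCapKey
const backendHasXAIKey = getBackendCapabilities().hasLlmXAI;
// XAIServiceSetup then derives: needsUserKey = !serviceHasBackendCap
```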
diff --git a/src/modules/llms/server/openai/models/models.data.ts b/src/modules/llms/server/openai/models/models.data.ts
index 0468db9e3..5a77fbbb1 100644
--- a/src/modules/llms/server/openai/models/models.data.ts
+++ b/src/modules/llms/server/openai/models/models.data.ts
@@ -579,13 +579,25 @@ export function fromManualMapping(mappings: ManualMappings, id: string, created?
// check whether this is a partial map, which indicates an unknown/new variant
const suffix = id.slice(known.idPrefix.length).trim();
+ // full label
+ label = label
+ + (suffix ? ` [${suffix.replaceAll('-', ' ').trim()}]` : '')
+ + (known.isLatest ? ' 🌟' : '')
+ + (known.isLegacy ? /*' 💩'*/ ' [legacy]' : '');
+
+ // set the date in YYYY-MM-DD format if available and requested
+ // if (label.indexOf('{{Created}}') !== -1) {
+ // const targetDate = updated || created;
+ // if (targetDate)
+ // label = label.replace('{{Created}}', `(${new Date(targetDate * 1000).toISOString().slice(0, 10)})`);
+ // else
+ // label = label.replace('{{Created}}', '');
+ // }
+
// create the model description
const md: ModelDescriptionSchema = {
id,
- label: label
- + (suffix ? ` [${suffix.replaceAll('-', ' ').trim()}]` : '')
- + (known.isLatest ? ' 🌟' : '')
- + (known.isLegacy ? /*' 💩'*/ ' [legacy]' : ''),
+ label,
created: created || 0,
updated: updated || created || 0,
description: known.description,
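To make the relocated label assembly concrete, here is a hypothetical call with a partial prefix match (id and mapping fields invented for the example):

```ts
import { fromManualMapping } from '~/modules/llms/server/openai/models/models.data';

// 'grok-beta-vision' matches the 'grok-beta' prefix; the '-vision' suffix becomes ' [vision]'
const md = fromManualMapping(
  [{ idPrefix: 'grok-beta', label: 'Grok Beta', description: 'example', contextWindow: 131072, interfaces: [] }],
  'grok-beta-vision',
);
// md.label === 'Grok Beta [vision]'
// an exact 'grok-beta' id would yield plain 'Grok Beta' (plus ' 🌟' / ' [legacy]' when flagged)
```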
diff --git a/src/modules/llms/server/openai/models/xai.models.ts b/src/modules/llms/server/openai/models/xai.models.ts
new file mode 100644
index 000000000..f2887d630
--- /dev/null
+++ b/src/modules/llms/server/openai/models/xai.models.ts
@@ -0,0 +1,87 @@
+import { z } from 'zod';
+
+import { fetchJsonOrTRPCThrow } from '~/server/api/trpc.router.fetchers';
+
+import { LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, LLM_IF_OAI_Vision } from '~/common/stores/llms/llms.types';
+
+import type { ModelDescriptionSchema } from '../../llm.server.types';
+import { fromManualMapping, ManualMappings } from './models.data';
+import { openAIAccess, OpenAIAccessSchema } from '../openai.router';
+
+
+// Known xAI Models - Manual Mappings
+// List on: https://console.x.ai/team/_TEAM_ID_/models
+const _knownXAIChatModels: ManualMappings = [
+ {
+ idPrefix: 'grok-beta',
+ label: `Grok Beta`,
+ description: 'xAI\'s flagship model with real-time knowledge from the X platform. Supports text generation with a 131K token context window.',
+ contextWindow: 131072, // 131,072 tokens as shown in the Context column
+ maxCompletionTokens: 16384,
+ interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn],
+ chatPrice: { input: 5, output: 15 },
+ },
+];
+
+
+//
+export async function xaiModelDescriptions(access: OpenAIAccessSchema): Promise<ModelDescriptionSchema[]> {
+
+ // List models
+ const { headers, url } = openAIAccess(access, null, '/v1/language-models');
+ const modelsResponse = await fetchJsonOrTRPCThrow({ url, headers, name: 'xAI' });
+
+ const xaiModels = wireXAIModelsListSchema.parse(modelsResponse);
+
+ return xaiModels.models.map(model => fromManualMapping(_knownXAIChatModels, model.id, model.created, undefined, {
+ idPrefix: model.id,
+ label: `${model.id} ${model.version || ''}`, // {{Created}}`,
+ description: `xAI model ${model.id}`,
+ contextWindow: 16384,
+ interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, ...(model.input_modalities?.includes('image') ? [LLM_IF_OAI_Vision] : [])],
+ ...(model.prompt_text_token_price && model.completion_text_token_price && {
+ chatPrice: {
+ input: model.prompt_text_token_price / 10000, // FIXME: SCALE UNKNOWN for now
+ output: model.completion_text_token_price / 10000,
+ },
+ }),
+ }));
+}
+
+export function xaiModelSort(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number {
+ return b.label.localeCompare(a.label);
+}
+
+
+// not much for wiretypes, so we embed them locally
+export const wireTogetherAIListOutputSchema = z.array(z.object({
+ id: z.string(),
+ object: z.literal('model'),
+ created: z.number(),
+}));
+
+export const wireXAIModelSchema = z.object({
+ id: z.string(),
+ object: z.literal('model'),
+ owned_by: z.literal('xai').or(z.string()),
+
+ // timestamps
+ created: z.number().optional(),
+ updated: z.number().optional(),
+ version: z.string().optional(),
+
+ // modalities
+ input_modalities: z.array(z.string()), // relaxing it
+ output_modalities: z.array(z.string()), // relaxing it
+ // input_modalities: z.array(z.enum(['text'])),
+ // output_modalities: z.array(z.enum(['text'])),
+
+ // pricing - FIXME: SCALE UNKNOWN for now
+ prompt_text_token_price: z.number().optional(),
+ prompt_image_token_price: z.number().optional(),
+ completion_text_token_price: z.number().optional(),
+});
+
+export const wireXAIModelsListSchema = z.object({
+ models: z.array(wireXAIModelSchema),
+});
\ No newline at end of file
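For reference, a payload shape that `wireXAIModelsListSchema` accepts. All values below are illustrative rather than captured from the live `/v1/language-models` endpoint, and the pricing scale is still the open FIXME noted above:

```ts
import { wireXAIModelsListSchema } from '~/modules/llms/server/openai/models/xai.models';

const example = {
  models: [{
    id: 'grok-beta',
    object: 'model',
    owned_by: 'xai',
    created: 1730000000,
    input_modalities: ['text'],
    output_modalities: ['text'],
    prompt_text_token_price: 50000,      // scale unknown, per the FIXME
    completion_text_token_price: 150000, // scale unknown, per the FIXME
  }],
};
wireXAIModelsListSchema.parse(example); // parses without throwing
```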
diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts
index 256655bcf..349f22644 100644
--- a/src/modules/llms/server/openai/openai.router.ts
+++ b/src/modules/llms/server/openai/openai.router.ts
@@ -18,10 +18,11 @@ import { mistralModelsSort, mistralModelToModelDescription } from './models/mist
import { openAIModelFilter, openAIModelToModelDescription } from './models/openai.models';
import { perplexityAIModelDescriptions, perplexityAIModelSort } from './models/perplexity.models';
import { wilreLocalAIModelsApplyOutputSchema, wireLocalAIModelsAvailableOutputSchema, wireLocalAIModelsListOutputSchema } from './localai.wiretypes';
+import { xaiModelDescriptions, xaiModelSort } from './models/xai.models';
const openAIDialects = z.enum([
- 'azure', 'deepseek', 'groq', 'lmstudio', 'localai', 'mistral', 'openai', 'openpipe', 'openrouter', 'perplexity', 'togetherai',
+ 'azure', 'deepseek', 'groq', 'lmstudio', 'localai', 'mistral', 'openai', 'openpipe', 'openrouter', 'perplexity', 'togetherai', 'xai',
]);
export type OpenAIDialects = z.infer<typeof openAIDialects>;
@@ -124,6 +125,9 @@ export const llmOpenAIRouter = createTRPCRouter({
if (access.dialect === 'perplexity')
return { models: perplexityAIModelDescriptions().sort(perplexityAIModelSort) };
+ // [xAI]: custom models listing
+ if (access.dialect === 'xai')
+ return { models: (await xaiModelDescriptions(access)).sort(xaiModelSort) };
// [OpenAI-dialects]: fetch openAI-style for all but Azure (will be then used in each dialect)
const openAIWireModelsResponse = await openaiGETOrThrow(access, '/v1/models');
@@ -373,6 +377,7 @@ const DEFAULT_OPENPIPE_HOST = 'https://app.openpipe.ai/api';
const DEFAULT_OPENROUTER_HOST = 'https://openrouter.ai/api';
const DEFAULT_PERPLEXITY_HOST = 'https://api.perplexity.ai';
const DEFAULT_TOGETHERAI_HOST = 'https://api.together.xyz';
+const DEFAULT_XAI_HOST = 'https://api.x.ai';
export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | null, apiPath: string): { headers: HeadersInit, url: string } {
switch (access.dialect) {
@@ -577,6 +582,19 @@ export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | nu
url: togetherHost + apiPath,
};
+
+ case 'xai':
+ const xaiKey = access.oaiKey || env.XAI_API_KEY || '';
+ if (!xaiKey)
+ throw new Error('Missing xAI API Key. Add it on the UI (Models Setup) or server side (your deployment).');
+ return {
+ headers: {
+ 'Content-Type': 'application/json',
+ 'Authorization': `Bearer ${xaiKey}`,
+ },
+ url: DEFAULT_XAI_HOST + apiPath,
+ };
+
}
}
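A sketch of what the new `case 'xai'` resolves to (key invented). Note the branch always targets `DEFAULT_XAI_HOST` and ignores a custom `oaiHost`, consistent with the host field staying commented out in the setup UI later in this PR:

```ts
import { openAIAccess } from '~/modules/llms/server/openai/openai.router';

const { headers, url } = openAIAccess(
  { dialect: 'xai', oaiKey: 'xai-0123456789', oaiOrg: '', oaiHost: '', heliKey: '', moderationCheck: false },
  null, // modelRefId is not used by this branch
  '/v1/chat/completions',
);
// url     -> 'https://api.x.ai/v1/chat/completions'
// headers -> { 'Content-Type': 'application/json', Authorization: 'Bearer xai-0123456789' }
```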
diff --git a/src/modules/llms/vendors/vendors.registry.ts b/src/modules/llms/vendors/vendors.registry.ts
index 0d9652b50..2c3ce02ac 100644
--- a/src/modules/llms/vendors/vendors.registry.ts
+++ b/src/modules/llms/vendors/vendors.registry.ts
@@ -12,6 +12,7 @@ import { ModelVendorOpenPipe } from './openpipe/openpipe.vendor';
import { ModelVendorOpenRouter } from './openrouter/openrouter.vendor';
import { ModelVendorPerplexity } from './perplexity/perplexity.vendor';
import { ModelVendorTogetherAI } from './togetherai/togetherai.vendor';
+import { ModelVendorXAI } from './xai/xai.vendor';
import type { IModelVendor } from './IModelVendor';
@@ -31,6 +32,7 @@ export type ModelVendorId =
| 'openrouter'
| 'perplexity'
| 'togetherai'
+ | 'xai'
;
/** Global: Vendor Instances Registry **/
@@ -49,6 +51,7 @@ const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, IModelVendor> = {
openrouter: ModelVendorOpenRouter,
perplexity: ModelVendorPerplexity,
togetherai: ModelVendorTogetherAI,
+ xai: ModelVendorXAI,
} as Record<ModelVendorId, IModelVendor>;
diff --git a/src/modules/llms/vendors/xai/XAIServiceSetup.tsx b/src/modules/llms/vendors/xai/XAIServiceSetup.tsx
new file mode 100644
index 000000000..c431bcc15
--- /dev/null
+++ b/src/modules/llms/vendors/xai/XAIServiceSetup.tsx
@@ -0,0 +1,70 @@
+import * as React from 'react';
+
+import type { DModelsServiceId } from '~/common/stores/llms/modelsservice.types';
+import { AlreadySet } from '~/common/components/AlreadySet';
+import { ExternalLink } from '~/common/components/ExternalLink';
+import { FormInputKey } from '~/common/components/forms/FormInputKey';
+import { InlineError } from '~/common/components/InlineError';
+import { SetupFormRefetchButton } from '~/common/components/forms/SetupFormRefetchButton';
+
+import { ApproximateCosts } from '../ApproximateCosts';
+import { useLlmUpdateModels } from '../../llm.client.hooks';
+import { useServiceSetup } from '../useServiceSetup';
+
+import { ModelVendorXAI } from './xai.vendor';
+
+
+// configuration
+const EXTERNAL_LINK_XAI_API_KEYS = 'https://console.x.ai/';
+
+
+export function XAIServiceSetup(props: { serviceId: DModelsServiceId }) {
+
+ // external state
+ const { service, serviceAccess, serviceHasBackendCap, serviceHasLLMs, serviceSetupValid, updateSettings } =
+ useServiceSetup(props.serviceId, ModelVendorXAI);
+
+ // derived state
+ const { oaiKey: xaiKey } = serviceAccess;
+ const needsUserKey = !serviceHasBackendCap;
+
+ // key validation
+ const shallFetchSucceed = !needsUserKey || (!!xaiKey && serviceSetupValid);
+ const showKeyError = !!xaiKey && !serviceSetupValid;
+
+ // fetch models
+ const { isFetching, refetch, isError, error } =
+ useLlmUpdateModels(!serviceHasLLMs && shallFetchSucceed, service);
+
+  return <>
+
+    <ApproximateCosts serviceId={service?.id} />
+
+    <FormInputKey
+      autoCompleteId='xai-key' label='xAI API Key'
+      rightLabel={<>{needsUserKey
+        ? !xaiKey && <ExternalLink href={EXTERNAL_LINK_XAI_API_KEYS}>get a key</ExternalLink>
+        : <AlreadySet />}
+      </>}
+      value={xaiKey}
+      onChange={(value) => updateSettings({ xaiKey: value })}
+      required={needsUserKey}
+      isError={showKeyError}
+      placeholder='Your xAI API Key'
+    />
+
+    {/*<FormTextField*/}
+    {/*  title='API Host'*/}
+    {/*  value={xaiHost}*/}
+    {/*  onChange={text => updateSettings({ xaiHost: text })}*/}
+    {/*/>*/}
+
+    <SetupFormRefetchButton refetch={refetch} disabled={!shallFetchSucceed || isFetching} loading={isFetching} error={isError} />
+
+    {isError && <InlineError error={error} />}
+
+  </>;
+}
diff --git a/src/modules/llms/vendors/xai/xai.vendor.ts b/src/modules/llms/vendors/xai/xai.vendor.ts
new file mode 100644
index 000000000..f4fe89478
--- /dev/null
+++ b/src/modules/llms/vendors/xai/xai.vendor.ts
@@ -0,0 +1,44 @@
+import { XAIIcon } from '~/common/components/icons/vendors/XAIIcon';
+
+import type { IModelVendor } from '../IModelVendor';
+import type { OpenAIAccessSchema } from '../../server/openai/openai.router';
+
+import { DOpenAILLMOptions, ModelVendorOpenAI } from '../openai/openai.vendor';
+import { OpenAILLMOptions } from '../openai/OpenAILLMOptions';
+
+import { XAIServiceSetup } from './XAIServiceSetup';
+
+
+export interface DXAIServiceSettings {
+ xaiKey: string;
+}
+
+export const ModelVendorXAI: IModelVendor<DXAIServiceSettings, OpenAIAccessSchema, DOpenAILLMOptions> = {
+ id: 'xai',
+ name: 'xAI',
+ displayRank: 15,
+ location: 'cloud',
+ instanceLimit: 1,
+ hasBackendCapKey: 'hasLlmXAI',
+
+ // Components
+ Icon: XAIIcon,
+ ServiceSetupComponent: XAIServiceSetup,
+ LLMOptionsComponent: OpenAILLMOptions,
+
+ // functions
+ initializeSetup: () => ({ xaiKey: '' }),
+ validateSetup: setup => setup.xaiKey?.length >= 80, // we assume all API keys are 80 chars+ - we won't have a strict validation
+ getTransportAccess: (partialSetup) => ({
+ dialect: 'xai',
+ oaiKey: partialSetup?.xaiKey || '',
+ oaiOrg: '',
+ oaiHost: '',
+ heliKey: '',
+ moderationCheck: false,
+ }),
+
+ // OpenAI transport ('xai' dialect in 'access')
+ rpcUpdateModelsOrThrow: ModelVendorOpenAI.rpcUpdateModelsOrThrow,
+
+};
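Since the vendor object is exported, its two pure functions can be exercised directly. A couple of hypothetical checks:

```ts
import { ModelVendorXAI } from '~/modules/llms/vendors/xai/xai.vendor';

// length heuristic only, per the 80-chars+ assumption above
ModelVendorXAI.validateSetup?.({ xaiKey: 'xai-' + 'a'.repeat(80) }); // true
ModelVendorXAI.validateSetup?.({ xaiKey: 'too-short' });             // false

// transport access is plain OpenAI-style, tagged with the 'xai' dialect
ModelVendorXAI.getTransportAccess({ xaiKey: 'xai-...' });
// -> { dialect: 'xai', oaiKey: 'xai-...', oaiOrg: '', oaiHost: '', heliKey: '', moderationCheck: false }
```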
diff --git a/src/server/env.mjs b/src/server/env.mjs
index a06dd6b59..ccecd58f4 100644
--- a/src/server/env.mjs
+++ b/src/server/env.mjs
@@ -60,6 +60,9 @@ export const env = createEnv({
// LLM: Together AI
TOGETHERAI_API_KEY: z.string().optional(),
+ // LLM: xAI
+ XAI_API_KEY: z.string().optional(),
+
// Helicone - works on both OpenAI and Anthropic vendors
HELICONE_API_KEY: z.string().optional(),