Mirror of https://github.com/enricoros/big-AGI.git, synced 2026-05-11 06:00:15 -07:00.
Perplexity: updated models. Fixes #642.
This commit is contained in:
@@ -2,77 +2,90 @@ import type { ModelDescriptionSchema } from '../../llm.server.types';
|
||||
|
||||
import { LLM_IF_OAI_Chat } from '~/common/stores/llms/llms.types';
|
||||
|
||||
|
||||
const _knownPerplexityChatModels: ModelDescriptionSchema[] = [
|
||||
// Perplexity models
|
||||
// Perplexity Sonar Online Models
|
||||
{
|
||||
id: 'llama-3-sonar-small-32k-chat',
|
||||
label: 'Sonar Small Chat',
|
||||
description: 'Llama 3 Sonar Small 32k Chat',
|
||||
contextWindow: 32768,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
},
|
||||
{
|
||||
id: 'llama-3-sonar-small-32k-online',
|
||||
id: 'llama-3.1-sonar-small-128k-online',
|
||||
label: 'Sonar Small Online 🌐',
|
||||
description: 'Llama 3 Sonar Small 32k Online',
|
||||
contextWindow: 28000,
|
||||
description: 'Llama 3.1 Sonar Small 128k Online',
|
||||
contextWindow: 127072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 0.2, /* Price per 1M tokens in USD */ output: 0.2 /* Same as input *//* perRequest: 0.005, // $5 per 1000 requests = $0.005 per request */ },
|
||||
},
|
||||
{
|
||||
id: 'llama-3-sonar-large-32k-chat',
|
||||
label: 'Sonar Large Chat',
|
||||
description: 'Llama 3 Sonar Large 32k Chat',
|
||||
contextWindow: 32768,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
},
|
||||
{
|
||||
id: 'llama-3-sonar-large-32k-online',
|
||||
id: 'llama-3.1-sonar-large-128k-online',
|
||||
label: 'Sonar Large Online 🌐',
|
||||
description: 'Llama 3 Sonar Large 32k Online',
|
||||
contextWindow: 28000,
|
||||
description: 'Llama 3.1 Sonar Large 128k Online',
|
||||
contextWindow: 127072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 1, output: 1/*perRequest: 0.005,*/ },
|
||||
},
|
||||
{
|
||||
id: 'llama-3.1-sonar-huge-128k-online',
|
||||
label: 'Sonar Huge Online 🌐',
|
||||
description: 'Llama 3.1 Sonar Huge 128k Online',
|
||||
contextWindow: 127072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 5, output: 5/*perRequest: 0.005,*/ },
|
||||
},
|
||||
|
||||
// Open models
|
||||
// Perplexity Sonar Chat Models
|
||||
{
|
||||
id: 'llama-3-8b-instruct',
|
||||
label: 'Llama 3 8B Instruct',
|
||||
description: 'Llama 3 8B Instruct',
|
||||
contextWindow: 8192,
|
||||
id: 'llama-3.1-sonar-small-128k-chat',
|
||||
label: 'Sonar Small Chat',
|
||||
description: 'Llama 3.1 Sonar Small 128k Chat',
|
||||
contextWindow: 127072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 0.2, output: 0.2 },
|
||||
},
|
||||
{
|
||||
id: 'llama-3-70b-instruct',
|
||||
label: 'Llama 3 70B Instruct',
|
||||
description: 'Llama 3 70B Instruct',
|
||||
contextWindow: 8192,
|
||||
id: 'llama-3.1-sonar-large-128k-chat',
|
||||
label: 'Sonar Large Chat',
|
||||
description: 'Llama 3.1 Sonar Large 128k Chat',
|
||||
contextWindow: 127072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 1, output: 1 },
|
||||
},
|
||||
|
||||
// Open-Source Models
|
||||
{
|
||||
id: 'llama-3.1-8b-instruct',
|
||||
label: 'Llama 3.1 8B Instruct',
|
||||
description: 'Llama 3.1 8B Instruct',
|
||||
contextWindow: 131072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 0.2, output: 0.2 },
|
||||
},
|
||||
{
|
||||
id: 'mixtral-8x7b-instruct',
|
||||
label: 'Mixtral 8x7B Instruct',
|
||||
description: 'Mixtral 8x7B Instruct',
|
||||
contextWindow: 16384,
|
||||
id: 'llama-3.1-70b-instruct',
|
||||
label: 'Llama 3.1 70B Instruct',
|
||||
description: 'Llama 3.1 70B Instruct',
|
||||
contextWindow: 131072,
|
||||
interfaces: [LLM_IF_OAI_Chat],
|
||||
chatPrice: { input: 1, output: 1 },
|
||||
},
|
||||
];
|
||||
|
||||
const perplexityAIModelFamilyOrder = [
|
||||
'llama-3-sonar-large', 'llama-3-sonar-small', 'llama-3', 'mixtral', '',
|
||||
'llama-3.1-sonar-huge',
|
||||
'llama-3.1-sonar-large',
|
||||
'llama-3.1-sonar-small',
|
||||
'llama-3.1',
|
||||
'',
|
||||
];
|
||||
|
||||
export function perplexityAIModelDescriptions() {
|
||||
// change this implementation once upstream implements some form of models listing
|
||||
// Returns the list of known Perplexity models
|
||||
return _knownPerplexityChatModels;
|
||||
}
|
||||
|
||||
export function perplexityAIModelSort(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number {
|
||||
const aPrefixIndex = perplexityAIModelFamilyOrder.findIndex(prefix => a.id.startsWith(prefix));
|
||||
const bPrefixIndex = perplexityAIModelFamilyOrder.findIndex(prefix => b.id.startsWith(prefix));
|
||||
// sort by family
|
||||
if (aPrefixIndex !== -1 && bPrefixIndex !== -1)
|
||||
if (aPrefixIndex !== bPrefixIndex)
|
||||
return aPrefixIndex - bPrefixIndex;
|
||||
// then by reverse label
|
||||
const aPrefixIndex = perplexityAIModelFamilyOrder.findIndex((prefix) => a.id.startsWith(prefix));
|
||||
const bPrefixIndex = perplexityAIModelFamilyOrder.findIndex((prefix) => b.id.startsWith(prefix));
|
||||
// Sort by family order
|
||||
if (aPrefixIndex !== -1 && bPrefixIndex !== -1) {
|
||||
if (aPrefixIndex !== bPrefixIndex) return aPrefixIndex - bPrefixIndex;
|
||||
}
|
||||
// Then sort by label in reverse order
|
||||
return b.label.localeCompare(a.label);
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user