Tokens: rename the text methods

This commit is contained in:
Enrico Ros
2024-05-24 22:25:14 -07:00
parent 23ca49128a
commit 2396966740
6 changed files with 12 additions and 12 deletions
@@ -29,7 +29,7 @@ import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptim
import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition';
import { animationEnterBelow } from '~/common/util/animUtils';
import { conversationTitle, DConversationId } from '~/common/stores/chat/chat.conversation';
-import { countModelTokens } from '~/common/util/token-counter';
+import { textTokensForLLMId } from '~/common/util/token-counter';
import { getConversation, useChatStore } from '~/common/stores/chat/store-chats';
import { isMacUser } from '~/common/util/pwaUtils';
import { launchAppCall } from '~/common/app.routes';
@@ -175,7 +175,7 @@ export function Composer(props: {
const tokensComposerText = React.useMemo(() => {
if (!debouncedText || !chatLLMId)
return 0;
-return countModelTokens(debouncedText, chatLLMId, 'composer text') ?? 0;
+return textTokensForLLMId(debouncedText, chatLLMId, 'composer text') ?? 0;
}, [chatLLMId, debouncedText]);
let tokensComposer = tokensComposerText + llmAttachmentDrafts.llmTokenCountApprox;
if (tokensComposer > 0)
@@ -3,7 +3,7 @@ import * as React from 'react';
import { DLLM, DLLMId, LLM_IF_OAI_Vision } from '~/modules/llms/store-llms';
import type { DAttachmentPart } from '~/common/stores/chat/chat.message';
-import { countModelTokens } from '~/common/util/token-counter';
+import { textTokensForLLMId } from '~/common/util/token-counter';
import type { AttachmentDraft, AttachmentDraftId } from '~/common/attachment-drafts/attachment.types';
@@ -68,7 +68,7 @@ function toLLMAttachment(attachmentDraft: AttachmentDraft, llmSupportedOutputPar
const llmTokenCountApprox = llmForTokenCount
? attachmentCollapsedParts.reduce((acc, output) => {
if (output.atype === 'atext')
-return acc + (countModelTokens(output.text, llmForTokenCount, 'attachments tokens count') ?? 0);
+return acc + (textTokensForLLMId(output.text, llmForTokenCount, 'attachments tokens count') ?? 0);
console.warn('Unhandled token preview for output type:', output.atype);
return acc;
}, 0)
+2 -2
View File
@@ -3,7 +3,7 @@ import * as React from 'react';
import { Box, Container, FormControl, Textarea, Typography } from '@mui/joy';
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
-import { countTokenizerTokens, preloadTiktokenLibrary } from '~/common/util/token-counter';
+import { textTokensForEncodingId, preloadTiktokenLibrary } from '~/common/util/token-counter';
import { lineHeightTextareaMd } from '~/common/app.theme';
import { useTokenizerSelect } from '~/common/components/forms/useTokenizerSelect';
@@ -47,7 +47,7 @@ export function AppTokens() {
const updateTokenDetails = (text: string, encodingId: string | null) => {
if (encodingId) {
-const details = countTokenizerTokens(text, encodingId, 'AppTokens');
+const details = textTokensForEncodingId(text, encodingId, 'AppTokens');
setTokenDetails(details || []);
}
};
+2 -2
View File
@@ -7,7 +7,7 @@ import { DLLMId, getChatLLMId } from '~/modules/llms/store-llms';
import { SystemPurposeId } from '../../../data';
import { backupIdbV3, idbStateStorage } from '~/common/util/idbUtils';
-import { countModelTokens } from '~/common/util/token-counter';
+import { textTokensForLLMId } from '~/common/util/token-counter';
import { DMessage, DMessageId, DMessageMetadata, reduceContentToText } from './chat.message';
import { conversationTitle, convertCConversation_V3_V4, createDConversation, DConversation, DConversationId, duplicateCConversation } from './chat.conversation';
@@ -341,7 +341,7 @@ function updateMessageTokenCount(message: DMessage, llmId: DLLMId | null, forceU
const messageTextParts = reduceContentToText(message.content, '');
// TODO: handle attachments too
-message.tokenCount = countModelTokens(messageTextParts, llmId, debugFrom) ?? 0;
+message.tokenCount = textTokensForLLMId(messageTextParts, llmId, debugFrom) ?? 0;
}
return message.tokenCount;
}
+2 -2
View File
@@ -67,7 +67,7 @@ const tokenizerCache: { [encodingId: string]: Tiktoken } = {};
* @param {string} debugFrom - Debug information.
* @returns {number | null} The token count or null if not ready.
*/
-export function countModelTokens(text: string, llmId: DLLMId, debugFrom: string): number | null {
+export function textTokensForLLMId(text: string, llmId: DLLMId, debugFrom: string): number | null {
// The library shall have been preloaded - if not, attempt to start its loading and return null to indicate we're not ready to count
if (!encoding_for_model || !get_encoding) {
if (!informTheUser) {
@@ -114,7 +114,7 @@ export function countModelTokens(text: string, llmId: DLLMId, debugFrom: string)
* @param {string} debugFrom - Debug information.
* @returns {Array<{ token: number, bytes: string }> | null} The detailed token information or null if not ready.
*/
-export function countTokenizerTokens(text: string, encodingId: string, debugFrom: string): Array<TokenChunk> | null {
+export function textTokensForEncodingId(text: string, encodingId: string, debugFrom: string): Array<TokenChunk> | null {
// The library shall have been preloaded - if not, attempt to start its loading and return null to indicate we're not ready to count
if (!get_encoding) {
if (!informTheUser) {
@@ -10,7 +10,7 @@ import { TokenBadgeMemo } from '../../../apps/chat/components/composer/TokenBadg
import { FormLabelStart } from '~/common/components/forms/FormLabelStart';
import { GoodModal } from '~/common/components/GoodModal';
import { Section } from '~/common/components/Section';
-import { countModelTokens } from '~/common/util/token-counter';
+import { textTokensForLLMId } from '~/common/util/token-counter';
import { lineHeightTextareaMd } from '~/common/app.theme';
import { summerizeToFitContextBudget } from './summerize';
@@ -48,7 +48,7 @@ export function ContentReducer(props: {
const [processing, setProcessing] = React.useState(false);
// derived state
-const reducedTokens = reducerModelId ? countModelTokens(reducedText, reducerModelId, 'content reducer reduce') ?? 0 : 0;
+const reducedTokens = reducerModelId ? textTokensForLLMId(reducedText, reducerModelId, 'content reducer reduce') ?? 0 : 0;
const remainingTokens = props.tokenLimit - reducedTokens;