diff --git a/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx b/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx
index 4f1cf3573..1559d34f9 100644
--- a/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx
+++ b/src/modules/llms/oobabooga/OobaboogaSourceSetup.tsx
@@ -18,7 +18,7 @@ export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {

   // external state
   const {
-    source, sourceLLMs, updateSetup, normSetup,
+    source, updateSetup, normSetup,
   } = useSourceSetup(props.sourceId, normalizeOobaboogaSetup);

@@ -38,9 +38,9 @@ export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {

   return

-
+
       You can use a running
-      text-generation-webui instance as a source for models.
+      text-generation-webui instance as a source for local models.
       Follow the instructions to set up the server.

@@ -61,6 +61,11 @@ export function OobaboogaSourceSetup(props: { sourceId: DModelSourceId }) {
     />

+
+      Note: The active model must be selected on the Oobabooga server, as it does not support switching models via API. Concurrent
+      model execution is also not supported.
+