This commit is contained in:
Patrick Wiltrout 2024-11-20 01:39:50 +07:00 committed by GitHub
commit e7c12945df
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 24 additions and 7 deletions

View file

@ -9,4 +9,8 @@ ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
[API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
[OLLAMA_PARAMS]
TEMPERATURE = 0.7 # ollama default temp is 0.8
NUM_CTX = 2_048 # ollama num_ctx default is 2048

View file

@ -18,6 +18,10 @@ interface Config {
SEARXNG: string;
OLLAMA: string;
};
OLLAMA_PARAMS: {
TEMPERATURE: number;
NUM_CTX: number;
}
}
type RecursivePartial<T> = {
@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
/** Returns the configured Ollama API endpoint URL (empty string when unset). */
export const getOllamaApiEndpoint = () => {
  const config = loadConfig();
  return config.API_ENDPOINTS.OLLAMA;
};

/** Returns the sampling temperature configured for Ollama chat models. */
export const getModelTemperature = () => {
  const config = loadConfig();
  return config.OLLAMA_PARAMS.TEMPERATURE;
};

/** Returns the context window size (num_ctx) configured for Ollama chat models. */
export const getModelNumCtx = () => {
  const config = loadConfig();
  return config.OLLAMA_PARAMS.NUM_CTX;
};
export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig();

View file

@ -1,10 +1,11 @@
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { getOllamaApiEndpoint } from '../../config';
import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
import logger from '../../utils/logger';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
export const loadOllamaChatModels = async () => {
const ollamaEndpoint = getOllamaApiEndpoint();
if (!ollamaEndpoint) return {};
@ -16,20 +17,24 @@ export const loadOllamaChatModels = async () => {
});
const { models: ollamaModels } = (await response.json()) as any;
const chatModels = ollamaModels.reduce((acc, model) => {
const modelTemperature = getModelTemperature();
const modelNumCtx = getModelNumCtx();
acc[model.model] = {
displayName: model.name,
model: new ChatOllama({
baseUrl: ollamaEndpoint,
model: model.model,
temperature: 0.7,
temperature: modelTemperature,
numCtx: modelNumCtx,
}),
};
return acc;
}, {});
return chatModels;
} catch (err) {
logger.error(`Error loading Ollama models: ${err}`);

View file

@ -281,7 +281,7 @@ const SettingsDialog = ({
? chatModelProvider.map((model) => ({
value: model.name,
label: model.displayName,
}))
})).sort((a, b) => a.label.localeCompare(b.label))
: [
{
value: '',
@ -392,7 +392,7 @@ const SettingsDialog = ({
? embeddingModelProvider.map((model) => ({
label: model.displayName,
value: model.name,
}))
})).sort((a, b) => a.label.localeCompare(b.label))
: [
{
label: 'No embedding models available',