From 2709c71b85f4647d4385f69544c6e097a6a65e13 Mon Sep 17 00:00:00 2001
From: Patrick Wiltrout
Date: Fri, 8 Nov 2024 14:29:20 -0500
Subject: [PATCH] added ollama model param configs to config.toml for temp and
 num_ctx

---
 src/config.ts               |  8 ++++++++
 src/lib/providers/ollama.ts | 13 +++++++++----
 2 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/src/config.ts b/src/config.ts
index bb69335..c3f0dfb 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -18,6 +18,10 @@ interface Config {
     SEARXNG: string;
     OLLAMA: string;
   };
+  OLLAMA_PARAMS: {
+    TEMPERATURE: number;
+    NUM_CTX: number;
+  }
 }
 
 type RecursivePartial<T> = {
@@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
 
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
 
+export const getModelTemperature = () => loadConfig().OLLAMA_PARAMS.TEMPERATURE;
+
+export const getModelNumCtx = () => loadConfig().OLLAMA_PARAMS.NUM_CTX;
+
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
 
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index ed68bfa..b3753eb 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,10 +1,11 @@
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint } from '../../config';
+import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
 
+
   if (!ollamaEndpoint) return {};
 
@@ -16,20 +17,24 @@ export const loadOllamaChatModels = async () => {
     });
 
     const { models: ollamaModels } = (await response.json()) as any;
-
+
     const chatModels = ollamaModels.reduce((acc, model) => {
+      const modelTemperature = getModelTemperature();
+      const modelNumCtx = getModelNumCtx();
       acc[model.model] = {
         displayName: model.name,
         model: new ChatOllama({
           baseUrl: ollamaEndpoint,
           model: model.model,
-          temperature: 0.7,
+          temperature: modelTemperature,
+          numCtx: modelNumCtx,
         }),
       };
 
       return acc;
     }, {});
-
+
+    return chatModels;
   } catch (err) {
     logger.error(`Error loading Ollama models: ${err}`);
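
Note: the new getters read OLLAMA_PARAMS from config.toml, so a deployment
needs a matching section in that file. A minimal sketch of what that section
would look like, assuming the loader maps top-level TOML tables directly onto
the Config interface (key names taken from the interface above; the values
shown are illustrative defaults, not part of this patch):

    # config.toml -- hypothetical OLLAMA_PARAMS section; tune values as needed
    [OLLAMA_PARAMS]
    TEMPERATURE = 0.7   # sampling temperature forwarded to ChatOllama
    NUM_CTX = 2048      # context window size forwarded as numCtx

If the section is absent, loadConfig().OLLAMA_PARAMS is undefined and both
getters throw, so adding the section to the repository's sample config as
well seems prudent.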