added ollama model param configs to config.toml for temperature and num_ctx

Patrick Wiltrout 2024-11-08 14:29:20 -05:00
parent 1e99fe8d69
commit 2709c71b85
2 changed files with 17 additions and 4 deletions

@@ -18,6 +18,10 @@ interface Config {
     SEARXNG: string;
     OLLAMA: string;
   };
+  OLLAMA_PARAMS: {
+    TEMPERATURE: number;
+    NUM_CTX: number;
+  }
 }
 
 type RecursivePartial<T> = {
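
For reference, the new `OLLAMA_PARAMS` fields in the `Config` interface would map onto a config.toml section along these lines (a sketch only; the section layout and example values are assumptions, not taken from this commit — 0.7 mirrors the previously hardcoded temperature, 2048 is Ollama's default context window):

```toml
# Hypothetical [OLLAMA_PARAMS] section mirroring the new Config fields.
[OLLAMA_PARAMS]
TEMPERATURE = 0.7   # sampling temperature; 0.7 was the old hardcoded value
NUM_CTX = 2048      # context window size; 2048 is Ollama's default
```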
@@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
+
+export const getModelTemperature = () => loadConfig().OLLAMA_PARAMS.TEMPERATURE;
+
+export const getModelNumCtx = () => loadConfig().OLLAMA_PARAMS.NUM_CTX;
 
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
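
Note that both getters dereference `OLLAMA_PARAMS` unconditionally, so they throw if that section is missing from config.toml entirely. A caller-side fallback for absent keys might look like this (a sketch; the default values are assumptions, not part of this commit):

```typescript
import { getModelNumCtx, getModelTemperature } from '../../config';

// Hypothetical fallback for TEMPERATURE/NUM_CTX keys left out of an existing
// [OLLAMA_PARAMS] section. If the whole section is missing, the getters throw
// a TypeError before ?? can apply, so that case still needs config validation.
const temperature = getModelTemperature() ?? 0.7; // previous hardcoded value
const numCtx = getModelNumCtx() ?? 2048; // Ollama's default context window
```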

@@ -1,10 +1,11 @@
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint } from '../../config';
+import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
 
   if (!ollamaEndpoint) return {};
@@ -16,20 +17,24 @@ export const loadOllamaChatModels = async () => {
     });
 
     const { models: ollamaModels } = (await response.json()) as any;
 
     const chatModels = ollamaModels.reduce((acc, model) => {
+      const modelTemperature = getModelTemperature();
+      const modelNumCtx = getModelNumCtx();
       acc[model.model] = {
         displayName: model.name,
         model: new ChatOllama({
           baseUrl: ollamaEndpoint,
           model: model.model,
-          temperature: 0.7,
+          temperature: modelTemperature,
+          numCtx: modelNumCtx,
         }),
       };
 
       return acc;
     }, {});
 
     return chatModels;
   } catch (err) {
     logger.error(`Error loading Ollama models: ${err}`);
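
Since temperature and context size are global settings rather than per-model values, the two getter calls could also be hoisted out of the `reduce` callback so the config lookup runs once rather than once per model (a minor refactor sketch, not part of this commit):

```typescript
// Read the shared Ollama parameters once, before iterating over the models.
const modelTemperature = getModelTemperature();
const modelNumCtx = getModelNumCtx();

const chatModels = ollamaModels.reduce((acc, model) => {
  acc[model.model] = {
    displayName: model.name,
    model: new ChatOllama({
      baseUrl: ollamaEndpoint,
      model: model.model,
      temperature: modelTemperature,
      numCtx: modelNumCtx,
    }),
  };

  return acc;
}, {});
```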