Added Ollama model parameter configs (temperature and num_ctx) to config.toml
This commit is contained in:
parent
1e99fe8d69
commit
2709c71b85
2 changed files with 17 additions and 4 deletions
|
@ -18,6 +18,10 @@ interface Config {
|
|||
SEARXNG: string;
|
||||
OLLAMA: string;
|
||||
};
|
||||
OLLAMA_PARAMS: {
|
||||
TEMPERATURE: number;
|
||||
NUM_CTX: number;
|
||||
}
|
||||
}
|
||||
|
||||
type RecursivePartial<T> = {
|
||||
|
@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
|
|||
|
||||
/** Returns the Ollama API endpoint URL configured under API_ENDPOINTS in config.toml. */
export const getOllamaApiEndpoint = () => {
  const { API_ENDPOINTS } = loadConfig();
  return API_ENDPOINTS.OLLAMA;
};
|
||||
|
||||
/** Returns the sampling temperature configured for Ollama models in config.toml. */
export const getModelTemperature = () => {
  const { OLLAMA_PARAMS } = loadConfig();
  return OLLAMA_PARAMS.TEMPERATURE;
};
|
||||
|
||||
/** Returns the context-window size (num_ctx) configured for Ollama models in config.toml. */
export const getModelNumCtx = () => {
  const { OLLAMA_PARAMS } = loadConfig();
  return OLLAMA_PARAMS.NUM_CTX;
};
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
const currentConfig = loadConfig();
|
||||
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
|
||||
import { getOllamaApiEndpoint } from '../../config';
|
||||
import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
|
||||
import logger from '../../utils/logger';
|
||||
import { ChatOllama } from '@langchain/community/chat_models/ollama';
|
||||
|
||||
export const loadOllamaChatModels = async () => {
|
||||
const ollamaEndpoint = getOllamaApiEndpoint();
|
||||
|
||||
|
||||
if (!ollamaEndpoint) return {};
|
||||
|
||||
try {
|
||||
|
@ -18,18 +19,22 @@ export const loadOllamaChatModels = async () => {
|
|||
const { models: ollamaModels } = (await response.json()) as any;
|
||||
|
||||
const chatModels = ollamaModels.reduce((acc, model) => {
|
||||
const modelTemperature = getModelTemperature();
|
||||
const modelNumCtx = getModelNumCtx();
|
||||
acc[model.model] = {
|
||||
displayName: model.name,
|
||||
model: new ChatOllama({
|
||||
baseUrl: ollamaEndpoint,
|
||||
model: model.model,
|
||||
temperature: 0.7,
|
||||
temperature: modelTemperature,
|
||||
numCtx: modelNumCtx,
|
||||
}),
|
||||
};
|
||||
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
|
||||
return chatModels;
|
||||
} catch (err) {
|
||||
logger.error(`Error loading Ollama models: ${err}`);
|
||||
|
|
Loading…
Add table
Reference in a new issue