added ollama model param configs to config.toml for temp and num_ctx

parent 1e99fe8d69
commit 2709c71b85

2 changed files with 17 additions and 4 deletions
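The commit title points at config.toml; the first hunk below adds the matching shape to the Config interface. A minimal sketch of the new section a config.toml would carry, assuming the loader maps TOML tables onto Config keys one-to-one (the sample values are placeholders, not taken from this diff):

    [OLLAMA_PARAMS]
    TEMPERATURE = 0.7  # sampling temperature handed to ChatOllama
    NUM_CTX = 2048     # context window size, forwarded as numCtx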
@@ -18,6 +18,10 @@ interface Config {
     SEARXNG: string;
     OLLAMA: string;
   };
+  OLLAMA_PARAMS: {
+    TEMPERATURE: number;
+    NUM_CTX: number;
+  }
 }
 
 type RecursivePartial<T> = {
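The getters in the next hunk lean on loadConfig() returning the shape above. The loader itself is outside this diff; a sketch of how it presumably materializes Config, assuming TOML parsing along the lines of @iarna/toml (the path and parser choice are assumptions):

    import fs from 'fs';
    import toml from '@iarna/toml';

    // Read config.toml and cast the parsed TOML tables to the Config
    // interface declared above. Only the interface is part of this diff.
    const loadConfig = (): Config =>
      toml.parse(fs.readFileSync('config.toml', 'utf-8')) as unknown as Config;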
@@ -45,6 +49,10 @@ export const getSearxngApiEndpoint = () =>
 
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
 
+export const getModelTemperature = () => loadConfig().OLLAMA_PARAMS.TEMPERATURE;
+
+export const getModelNumCtx = () => loadConfig().OLLAMA_PARAMS.NUM_CTX;
+
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
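One review caveat: these getters assume OLLAMA_PARAMS is present in the parsed config, so an older config.toml without the new table yields undefined. A hedged sketch of a fallback variant, should that matter (the 0.7 default mirrors the hardcoded value this commit removes from the provider; 2048 is Ollama's usual num_ctx default):

    // Hypothetical defensive variants; the committed code has no fallback.
    export const getModelTemperature = () =>
      loadConfig().OLLAMA_PARAMS?.TEMPERATURE ?? 0.7;
    export const getModelNumCtx = () =>
      loadConfig().OLLAMA_PARAMS?.NUM_CTX ?? 2048;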
@@ -1,10 +1,11 @@
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint } from '../../config';
+import { getModelNumCtx, getModelTemperature, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
 
   if (!ollamaEndpoint) return {};
 
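The hunk below opens at the closing brace of a fetch call that the diff context elides. For orientation, Ollama lists installed models at GET /api/tags, so the surrounding code presumably looks like this (the headers are an assumption):

    // Fetch the installed model list from the configured Ollama endpoint.
    const response = await fetch(`${ollamaEndpoint}/api/tags`, {
      headers: { 'Content-Type': 'application/json' },
    });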
@@ -16,20 +17,24 @@ export const loadOllamaChatModels = async () => {
     });
 
     const { models: ollamaModels } = (await response.json()) as any;
 
     const chatModels = ollamaModels.reduce((acc, model) => {
+      const modelTemperature = getModelTemperature();
+      const modelNumCtx = getModelNumCtx();
       acc[model.model] = {
         displayName: model.name,
         model: new ChatOllama({
           baseUrl: ollamaEndpoint,
           model: model.model,
-          temperature: 0.7,
+          temperature: modelTemperature,
+          numCtx: modelNumCtx,
         }),
       };
 
       return acc;
     }, {});
 
     return chatModels;
   } catch (err) {
     logger.error(`Error loading Ollama models: ${err}`);
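Net effect of the hunk: the hardcoded temperature of 0.7 becomes configurable, and the context window becomes settable at all. Worth noting that both getters run once per model inside the reduce, and each call presumably re-reads config.toml via loadConfig(), so hoisting them above the loop would be cheaper. A standalone sketch of the per-model construction the provider now performs (endpoint and model name are placeholders):

    import { ChatOllama } from '@langchain/community/chat_models/ollama';

    // Values mirror what the provider now reads from config.toml via
    // getModelTemperature() / getModelNumCtx().
    const chat = new ChatOllama({
      baseUrl: 'http://localhost:11434',
      model: 'llama3',
      temperature: 0.7, // sampling temperature
      numCtx: 2048, // context window, sent to Ollama as num_ctx
    });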