feat: add keep alive strategy for ollama api

This commit is contained in:
mok-liee 2024-10-04 15:14:17 +02:00
parent 1680a1786e
commit 6d6c7122b9
5 changed files with 67 additions and 2 deletions

View file

@@ -10,3 +10,6 @@ ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
[API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
[OLLAMA]
KEEP_ALIVE = "5m"

View file

@@ -14,6 +14,9 @@ interface Config {
GROQ: string;
ANTHROPIC: string;
};
OLLAMA: {
KEEP_ALIVE: string;
};
API_ENDPOINTS: {
SEARXNG: string;
OLLAMA: string;
@@ -45,6 +48,8 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
export const getOllamaKeepAliveStrategy = () => loadConfig().OLLAMA.KEEP_ALIVE;
export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig();

View file

@@ -1,5 +1,5 @@
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { getOllamaApiEndpoint } from '../../config';
import { getOllamaApiEndpoint, getOllamaKeepAliveStrategy } from '../../config';
import logger from '../../utils/logger';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
@@ -24,6 +24,7 @@ export const loadOllamaChatModels = async () => {
baseUrl: ollamaEndpoint,
model: model.model,
temperature: 0.7,
keepAlive: getOllamaKeepAliveStrategy(),
}),
};
@@ -57,6 +58,7 @@ export const loadOllamaEmbeddingsModels = async () => {
model: new OllamaEmbeddings({
baseUrl: ollamaEndpoint,
model: model.model,
keepAlive: getOllamaKeepAliveStrategy(),
}),
};

View file

@@ -9,6 +9,7 @@ import {
getAnthropicApiKey,
getOpenaiApiKey,
updateConfig,
getOllamaKeepAliveStrategy,
} from '../config';
import logger from '../utils/logger';
@@ -50,6 +51,7 @@ router.get('/', async (_, res) => {
config['openaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['ollamaKeepAliveStrategy'] = getOllamaKeepAliveStrategy();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
@@ -72,6 +74,9 @@ router.post('/', async (req, res) => {
API_ENDPOINTS: {
OLLAMA: config.ollamaApiUrl,
},
OLLAMA: {
KEEP_ALIVE: config.ollamaKeepAliveStrategy,
},
};
updateConfig(updatedConfig);

View file

@@ -54,6 +54,7 @@ interface SettingsType {
embeddingModelProviders: {
[key: string]: [Record<string, any>];
};
ollamaKeepAliveStrategy: string;
openaiApiKey: string;
groqApiKey: string;
anthropicApiKey: string;
@@ -78,6 +79,10 @@ const SettingsDialog = ({
const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
null,
);
const [selectedOllamaKeepAliveStrategy, setSelectedOllamaKeepAliveStrategy] =
useState<string | null>(null);
const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
useState<string | null>(null);
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
@@ -124,6 +129,10 @@ const SettingsDialog = ({
(data.chatModelProviders &&
data.chatModelProviders[chatModelProvider]?.[0].name) ||
'';
const ollamaKeepAliveStrategy =
localStorage.getItem('ollamaKeepAliveStrategy') ||
data.ollamaKeepAliveStrategy ||
'';
const embeddingModelProvider =
localStorage.getItem('embeddingModelProvider') ||
defaultEmbeddingModelProvider ||
@@ -136,6 +145,7 @@ const SettingsDialog = ({
setSelectedChatModelProvider(chatModelProvider);
setSelectedChatModel(chatModel);
setSelectedOllamaKeepAliveStrategy(ollamaKeepAliveStrategy);
setSelectedEmbeddingModelProvider(embeddingModelProvider);
setSelectedEmbeddingModel(embeddingModel);
setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
@@ -164,6 +174,10 @@ const SettingsDialog = ({
localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
localStorage.setItem('chatModel', selectedChatModel!);
localStorage.setItem(
'ollamaKeepAliveStrategy',
selectedOllamaKeepAliveStrategy!,
);
localStorage.setItem(
'embeddingModelProvider',
selectedEmbeddingModelProvider!,
@@ -293,6 +307,42 @@ const SettingsDialog = ({
/>
</div>
)}
{selectedChatModelProvider &&
selectedChatModelProvider === 'ollama' && (
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
KeepAlive Strategy
</p>
<Select
value={selectedOllamaKeepAliveStrategy ?? undefined}
onChange={(e) => {
setSelectedOllamaKeepAliveStrategy(
e.target.value,
);
setConfig({
...config,
ollamaKeepAliveStrategy: e.target.value,
});
}}
options={[
{
value: '5m',
label: '5 Minutes',
},
{
value: '60m',
label: '1 Hour',
},
{
value: '-1m',
label: 'Forever',
},
]}
/>
</div>
)}
{selectedChatModelProvider &&
selectedChatModelProvider === 'custom_openai' && (
<>