feat(agents): support local LLMs
parent 28a7175afc
commit d37a1a8020
15 changed files with 135 additions and 100 deletions
58  src/lib/providers.ts  Normal file
@@ -0,0 +1,58 @@
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { getOllamaApiEndpoint, getOpenaiApiKey } from '../config';

export const getAvailableProviders = async () => {
  const openAIApiKey = getOpenaiApiKey();
  const ollamaEndpoint = getOllamaApiEndpoint();

  const models = {};

  if (openAIApiKey) {
    models['openai'] = {
      'gpt-3.5-turbo': new ChatOpenAI({
        openAIApiKey,
        modelName: 'gpt-3.5-turbo',
        temperature: 0.7,
      }),
      'gpt-4': new ChatOpenAI({
        openAIApiKey,
        modelName: 'gpt-4',
        temperature: 0.7,
      }),
      embeddings: new OpenAIEmbeddings({
        openAIApiKey,
        modelName: 'text-embedding-3-large',
      }),
    };
  }

  if (ollamaEndpoint) {
    try {
      const response = await fetch(`${ollamaEndpoint}/api/tags`);

      const { models: ollamaModels } = (await response.json()) as any;

      models['ollama'] = ollamaModels.reduce((acc, model) => {
        acc[model.model] = new ChatOllama({
          baseUrl: ollamaEndpoint,
          model: model.model,
          temperature: 0.7,
        });
        return acc;
      }, {});

      if (Object.keys(models['ollama']).length > 0) {
        models['ollama']['embeddings'] = new OllamaEmbeddings({
          baseUrl: ollamaEndpoint,
          model: models['ollama'][Object.keys(models['ollama'])[0]].model,
        });
      }
    } catch (err) {
      console.log(err);
    }
  }

  return models;
};
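For orientation, a minimal sketch (not part of this commit) of how a caller might consume getAvailableProviders to resolve a chat model; the import path and the loadChatModel helper are hypothetical. Note that each provider's embeddings instance is stored alongside its chat models under the same key, so a caller has to filter it out when listing selectable models.

import { getAvailableProviders } from '../lib/providers';

// Hypothetical call site: resolve a chat model by provider/model name,
// falling back to the first configured provider and its first chat model.
const loadChatModel = async (provider?: string, model?: string) => {
  const providers = await getAvailableProviders();

  const providerName = provider ?? Object.keys(providers)[0];
  const providerModels = providers[providerName];
  if (!providerModels) {
    throw new Error(`Provider "${providerName}" is not configured`);
  }

  // 'embeddings' lives next to the chat models, so skip it when choosing.
  const modelName =
    model ?? Object.keys(providerModels).find((key) => key !== 'embeddings');
  if (!modelName || !providerModels[modelName]) {
    throw new Error(`No chat model available for provider "${providerName}"`);
  }

  return providerModels[modelName];
};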