feat: support deepinfra as a service provider
parent 2c5ca94b3c
commit b0fba3d5c7

5 changed files with 100 additions and 0 deletions
@@ -15,6 +15,7 @@ interface Config {
     GROQ: string;
     ANTHROPIC: string;
     GEMINI: string;
+    DEEPINFRA: string;
   };
   API_ENDPOINTS: {
     SEARXNG: string;
@@ -46,6 +47,8 @@ export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
 
 export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
 
+export const getDeepInfraApiKey = () => loadConfig().API_KEYS.DEEPINFRA;
+
 export const getSearxngApiEndpoint = () =>
   process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
 
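For illustration, the new accessor follows the same guard pattern the provider loaders below rely on: a missing key simply yields an empty provider. A minimal sketch (the relative import path is an assumption for this example):

// Minimal sketch, assuming the accessor above lives in src/config.ts.
import { getDeepInfraApiKey } from './config';

const key = getDeepInfraApiKey();
if (!key) {
  // The loaders below return {} in this case, so DeepInfra
  // models are simply hidden from the model list.
  console.warn('DEEPINFRA API key not set in config.');
}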
src/lib/providers/deepinfra.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
+import { DeepInfraEmbeddings } from '@langchain/community/embeddings/deepinfra';
+import { ChatDeepInfra } from '@langchain/community/chat_models/deepinfra';
+import { getDeepInfraApiKey } from '../../config';
+import logger from '../../utils/logger';
+
+export const loadDeepInfraChatModels = async () => {
+  const deepinfraApiKey = getDeepInfraApiKey();
+
+  if (!deepinfraApiKey) return {};
+
+  try {
+    const chatModels = {
+      'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo': {
+        displayName: 'LLaMA 3.1 70B Turbo',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo': {
+        displayName: 'LLaMA 3.1 8B Turbo',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-70B-Instruct': {
+        displayName: 'LLaMA 3.1 70B',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-70B-Instruct',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-8B-Instruct': {
+        displayName: 'LLaMA 3.1 8B',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-8B-Instruct',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+    };
+
+    return chatModels;
+  } catch (err) {
+    logger.error(`Error loading DeepInfra models: ${err}`);
+    return {};
+  }
+};
+
+export const loadDeepInfraEmbeddingsModels = async () => {
+  const deepinfraApiKey = getDeepInfraApiKey();
+
+  if (!deepinfraApiKey) return {};
+
+  try {
+    const embeddingModels = {
+      'BAAI/bge-m3': {
+        displayName: 'BAAI/bge-m3',
+        model: new DeepInfraEmbeddings({
+          apiToken: deepinfraApiKey,
+          modelName: 'BAAI/bge-m3',
+        }),
+      },
+    };
+
+    return embeddingModels;
+  } catch (err) {
+    logger.error(`Error loading DeepInfra embeddings model: ${err}`);
+    return {};
+  }
+};
+
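As a quick sanity check, the new loader can be exercised on its own. A minimal sketch, assuming it is run from the repository root with a DEEPINFRA key configured (the import path and prompt are illustrative, not part of this commit):

// Each entry in the returned map wraps a LangChain chat model,
// so the standard invoke() API applies.
import { loadDeepInfraChatModels } from './src/lib/providers/deepinfra';

const main = async () => {
  const models = await loadDeepInfraChatModels();
  const entry = models['meta-llama/Meta-Llama-3.1-8B-Instruct'];

  if (!entry) {
    console.log('No DEEPINFRA key configured; loader returned {}.');
    return;
  }

  const res = await entry.model.invoke('Reply with one word: ping');
  console.log(`${entry.displayName}: ${res.content}`);
};

main();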
@@ -4,6 +4,7 @@ import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadTransformersEmbeddingsModels } from './transformers';
 import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
+import { loadDeepInfraChatModels, loadDeepInfraEmbeddingsModels } from './deepinfra';
 
 const chatModelProviders = {
   openai: loadOpenAIChatModels,
@@ -11,6 +12,7 @@ const chatModelProviders = {
   ollama: loadOllamaChatModels,
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
+  deepinfra: loadDeepInfraChatModels,
 };
 
 const embeddingModelProviders = {
@@ -18,6 +20,7 @@ const embeddingModelProviders = {
   local: loadTransformersEmbeddingsModels,
   ollama: loadOllamaEmbeddingsModels,
   gemini: loadGeminiEmbeddingsModels,
+  deepinfra: loadDeepInfraEmbeddingsModels,
 };
 
 export const getAvailableChatModelProviders = async () => {
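The aggregator truncated above is where the new registry entries take effect. A simplified sketch of the aggregation pattern, not the verbatim function body (the real implementation may also handle custom endpoints):

export const getAvailableChatModelProviders = async () => {
  const models: Record<string, Record<string, unknown>> = {};

  // Each loader returns {} when its API key is absent, so empty
  // providers drop out of the final map automatically.
  for (const [provider, loader] of Object.entries(chatModelProviders)) {
    const providerModels = await loader();
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  return models;
};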
@@ -71,6 +71,7 @@ router.post('/', async (req, res) => {
     GROQ: config.groqApiKey,
     ANTHROPIC: config.anthropicApiKey,
     GEMINI: config.geminiApiKey,
+    DEEPINFRA: config.deepinfraApiKey,
   },
   API_ENDPOINTS: {
     OLLAMA: config.ollamaApiUrl,
@@ -64,6 +64,7 @@ interface SettingsType {
   groqApiKey: string;
   anthropicApiKey: string;
   geminiApiKey: string;
+  deepinfraApiKey: string;
   ollamaApiUrl: string;
 }
 
@@ -493,6 +494,22 @@ const SettingsDialog = ({
                 }
               />
             </div>
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                DeepInfra API Key
+              </p>
+              <Input
+                type="text"
+                placeholder="DeepInfra API key"
+                defaultValue={config.deepinfraApiKey}
+                onChange={(e) =>
+                  setConfig({
+                    ...config,
+                    deepinfraApiKey: e.target.value,
+                  })
+                }
+              />
+            </div>
           </div>
         )}
         {isLoading && (