From 0b059bb71bbf501f33bbd281585b40e64f52df5c Mon Sep 17 00:00:00 2001 From: joe Date: Mon, 29 Apr 2024 16:39:18 +0800 Subject: [PATCH] add openai custom base uri --- sample.config.toml | 1 + src/lib/providers.ts | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/sample.config.toml b/sample.config.toml index 2d09b4b..79c3148 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -6,6 +6,7 @@ CHAT_MODEL = "gpt-3.5-turbo" # Name of the model to use [API_KEYS] OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef +OPENAI_BASE_URL = "" # Optional custom OpenAI base URL, e.g. a Cloudflare AI Gateway endpoint [API_ENDPOINTS] SEARXNG = "http://localhost:32768" # SearxNG API URL diff --git a/src/lib/providers.ts b/src/lib/providers.ts index c730da8..e05182c 100644 --- a/src/lib/providers.ts +++ b/src/lib/providers.ts @@ -1,10 +1,11 @@ import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; import { ChatOllama } from '@langchain/community/chat_models/ollama'; import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama'; -import { getOllamaApiEndpoint, getOpenaiApiKey } from '../config'; +import { getOllamaApiEndpoint, getOpenaiApiBaseUrl, getOpenaiApiKey } from '../config'; export const getAvailableProviders = async () => { const openAIApiKey = getOpenaiApiKey(); + const openAIBaseUrl = getOpenaiApiBaseUrl(); const ollamaEndpoint = getOllamaApiEndpoint(); const models = {}; @@ -16,15 +17,20 @@ export const getAvailableProviders = async () => { openAIApiKey, modelName: 'gpt-3.5-turbo', temperature: 0.7, + }), 'gpt-4': new ChatOpenAI({ openAIApiKey, modelName: 'gpt-4', temperature: 0.7, + configuration: { + baseURL: openAIBaseUrl, + }, }), embeddings: new OpenAIEmbeddings({ openAIApiKey, modelName: 'text-embedding-3-large', + }), }; } catch (err) {