Add OpenaiURL
This commit is contained in:
parent 476303f52b
commit e980945d59
4 changed files with 159 additions and 168 deletions
@@ -16,6 +16,7 @@ interface Config {
   API_ENDPOINTS: {
     SEARXNG: string;
     OLLAMA: string;
+    OPENAI: string;
   };
 }
 
@@ -41,6 +42,8 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
 
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
 
+export const getOpenaiApiEndpoint = () => loadConfig().API_ENDPOINTS.OPENAI;
+
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
 
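The new OPENAI entry mirrors the existing OLLAMA one: it lives under API_ENDPOINTS in the loaded config and is exposed through a one-line getter. A minimal usage sketch, not part of the commit, assuming the same '../config' import path used by the other changed files; the empty-string fallback is illustrative only:

import { getOpenaiApiEndpoint } from '../config';

// Read the user-configured OpenAI-compatible base URL from the loaded config.
// An empty value would mean "use the provider's default endpoint".
const openAIEndpoint = getOpenaiApiEndpoint();
console.log(openAIEndpoint || '(no custom OpenAI endpoint configured)');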
@@ -6,6 +6,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiApiEndpoint,
 } from '../config';
 import logger from '../utils/logger';
 
@@ -13,6 +14,7 @@ export const getAvailableChatModelProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
+  const openAIEndpoint = getOpenaiApiEndpoint();
 
   const models = {};
 
@@ -23,22 +25,38 @@ export const getAvailableChatModelProviders = async () => {
         openAIApiKey,
         modelName: 'gpt-3.5-turbo',
         temperature: 0.7,
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
       'GPT-4': new ChatOpenAI({
         openAIApiKey,
         modelName: 'gpt-4',
         temperature: 0.7,
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
       'GPT-4 turbo': new ChatOpenAI({
         openAIApiKey,
         modelName: 'gpt-4-turbo',
         temperature: 0.7,
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
       'GPT-4 omni': new ChatOpenAI({
         openAIApiKey,
         modelName: 'gpt-4o',
         temperature: 0.7,
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
     };
   } catch (err) {
     logger.error(`Error loading OpenAI models: ${err}`);
@@ -117,6 +135,8 @@ export const getAvailableChatModelProviders = async () => {
     }
   }
 
+
+
   models['custom_openai'] = {};
 
   return models;
@@ -125,6 +145,7 @@ export const getAvailableChatModelProviders = async () => {
 export const getAvailableEmbeddingModelProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
+  const openAIEndpoint = getOpenaiApiEndpoint();
 
   const models = {};
 
@@ -134,11 +155,19 @@ export const getAvailableEmbeddingModelProviders = async () => {
       'Text embedding 3 small': new OpenAIEmbeddings({
         openAIApiKey,
         modelName: 'text-embedding-3-small',
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
       'Text embedding 3 large': new OpenAIEmbeddings({
         openAIApiKey,
         modelName: 'text-embedding-3-large',
-      }),
+      },
+      {
+        baseURL:openAIEndpoint,
+      },
+      ),
     };
   } catch (err) {
     logger.error(`Error loading OpenAI embeddings: ${err}`);
@@ -167,6 +196,7 @@ export const getAvailableEmbeddingModelProviders = async () => {
     }
   }
 
+
   try {
     models['local'] = {
       'BGE Small': new HuggingFaceTransformersEmbeddings({
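The edits above turn each new ChatOpenAI({...}) and new OpenAIEmbeddings({...}) call into a two-argument call: the first object keeps the model fields, and the second is forwarded to the underlying OpenAI client, whose baseURL option points it at the configured OpenAI-compatible endpoint. A standalone sketch of the resulting call shape, assuming the @langchain/openai package; the key, model names, and URL are placeholders, not values from this commit:

import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';

// The second constructor argument is passed through to the OpenAI client,
// so `baseURL` redirects requests to a custom OpenAI-compatible server.
const chat = new ChatOpenAI(
  {
    openAIApiKey: 'sk-placeholder',
    modelName: 'gpt-3.5-turbo',
    temperature: 0.7,
  },
  {
    baseURL: 'http://localhost:8080/v1', // placeholder endpoint
  },
);

const embeddings = new OpenAIEmbeddings(
  { openAIApiKey: 'sk-placeholder', modelName: 'text-embedding-3-small' },
  { baseURL: 'http://localhost:8080/v1' }, // placeholder endpoint
);

When the configured endpoint is an empty string, behaviour depends on how the client treats an empty baseURL, which is why leaving the value unset in the config is the safer default.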
@@ -7,6 +7,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiApiEndpoint,
   updateConfig,
 } from '../config';
 
@@ -38,7 +39,7 @@ router.get('/', async (_, res) => {
   config['openaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
-
+  config['openaiApiUrl'] = getOpenaiApiEndpoint();
   res.status(200).json(config);
 });
 
@@ -52,6 +53,7 @@ router.post('/', async (req, res) => {
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,
+      OPENAI: config.openaiApiUrl,
     },
   };
 
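On the API side, the new value is returned from the GET handler as openaiApiUrl and accepted back in the POST body, where it is written to API_ENDPOINTS.OPENAI via updateConfig. A rough sketch of the round trip from a client's point of view; the /api/config path and port are assumptions, since this commit only shows the router handlers:

// Hypothetical client-side round trip against the config router.
async function updateOpenaiUrl(newUrl: string) {
  const res = await fetch('http://localhost:3001/api/config');
  const config = await res.json();
  // config.openaiApiUrl now comes back alongside openaiApiKey, ollamaApiUrl and groqApiKey.

  await fetch('http://localhost:3001/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...config, openaiApiUrl: newUrl }),
  });
}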
@@ -1,52 +1,6 @@
-import { cn } from '@/lib/utils';
 import { Dialog, Transition } from '@headlessui/react';
 import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
-import React, {
-  Fragment,
-  useEffect,
-  useMemo,
-  useState,
-  type SelectHTMLAttributes,
-} from 'react';
-import ThemeSwitcher from './theme/Switcher';
+import React, { Fragment, useEffect, useState } from 'react';
 
-interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
-
-const Input = ({ className, ...restProps }: InputProps) => {
-  return (
-    <input
-      {...restProps}
-      className={cn(
-        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
-        className,
-      )}
-    />
-  );
-};
-
-interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
-  options: { value: string; label: string; disabled?: boolean }[];
-}
-
-export const Select = ({ className, options, ...restProps }: SelectProps) => {
-  return (
-    <select
-      {...restProps}
-      className={cn(
-        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
-        className,
-      )}
-    >
-      {options.map(({ label, value, disabled }) => {
-        return (
-          <option key={value} value={value} disabled={disabled}>
-            {label}
-          </option>
-        );
-      })}
-    </select>
-  );
-};
-
 interface SettingsType {
   chatModelProviders: {
@@ -58,6 +12,7 @@ interface SettingsType {
   openaiApiKey: string;
   groqApiKey: string;
   ollamaApiUrl: string;
+  openaiApiUrl: string;
 }
 
 const SettingsDialog = ({
@@ -191,7 +146,7 @@ const SettingsDialog = ({
   leaveFrom="opacity-100"
   leaveTo="opacity-0"
 >
-  <div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
+  <div className="fixed inset-0 bg-black/50" />
 </Transition.Child>
 <div className="fixed inset-0 overflow-y-auto">
   <div className="flex min-h-full items-center justify-center p-4 text-center">
@@ -204,24 +159,18 @@ const SettingsDialog = ({
   leaveFrom="opacity-100 scale-200"
   leaveTo="opacity-0 scale-95"
 >
-  <Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
-    <Dialog.Title className="text-xl font-medium leading-6 dark:text-white">
+  <Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-[#111111] border border-[#1c1c1c] p-6 text-left align-middle shadow-xl transition-all">
+    <Dialog.Title className="text-xl font-medium leading-6 text-white">
       Settings
     </Dialog.Title>
     {config && !isLoading && (
       <div className="flex flex-col space-y-4 mt-6">
-        <div className="flex flex-col space-y-1">
-          <p className="text-black/70 dark:text-white/70 text-sm">
-            Theme
-          </p>
-          <ThemeSwitcher />
-        </div>
         {config.chatModelProviders && (
           <div className="flex flex-col space-y-1">
-            <p className="text-black/70 dark:text-white/70 text-sm">
+            <p className="text-white/70 text-sm">
               Chat model Provider
             </p>
-            <Select
+            <select
               value={selectedChatModelProvider ?? undefined}
               onChange={(e) => {
                 setSelectedChatModelProvider(e.target.value);
@@ -229,99 +178,97 @@ const SettingsDialog = ({
                   config.chatModelProviders[e.target.value][0],
                 );
               }}
-              options={Object.keys(config.chatModelProviders).map(
-                (provider) => ({
-                  value: provider,
-                  label:
-                    provider.charAt(0).toUpperCase() +
-                    provider.slice(1),
-                }),
+              className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+            >
+              {Object.keys(config.chatModelProviders).map(
+                (provider) => (
+                  <option key={provider} value={provider}>
+                    {provider.charAt(0).toUpperCase() +
+                      provider.slice(1)}
+                  </option>
+                ),
               )}
-            />
+            </select>
           </div>
         )}
         {selectedChatModelProvider &&
           selectedChatModelProvider != 'custom_openai' && (
            <div className="flex flex-col space-y-1">
-              <p className="text-black/70 dark:text-white/70 text-sm">
-                Chat Model
-              </p>
-              <Select
+              <p className="text-white/70 text-sm">Chat Model</p>
+              <select
                value={selectedChatModel ?? undefined}
                onChange={(e) =>
                  setSelectedChatModel(e.target.value)
                }
-                options={(() => {
-                  const chatModelProvider =
+                className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+              >
+                {config.chatModelProviders[
+                  selectedChatModelProvider
+                ] ? (
+                  config.chatModelProviders[
+                    selectedChatModelProvider
+                  ].length > 0 ? (
                     config.chatModelProviders[
                       selectedChatModelProvider
-                    ];
-
-                  return chatModelProvider
-                    ? chatModelProvider.length > 0
-                      ? chatModelProvider.map((model) => ({
-                          value: model,
-                          label: model,
-                        }))
-                      : [
-                          {
-                            value: '',
-                            label: 'No models available',
-                            disabled: true,
-                          },
-                        ]
-                    : [
-                        {
-                          value: '',
-                          label:
-                            'Invalid provider, please check backend logs',
-                          disabled: true,
-                        },
-                      ];
-                })()}
-              />
+                    ].map((model) => (
+                      <option key={model} value={model}>
+                        {model}
+                      </option>
+                    ))
+                  ) : (
+                    <option value="" disabled>
+                      No models available
+                    </option>
+                  )
+                ) : (
+                  <option value="" disabled>
+                    Invalid provider, please check backend logs
+                  </option>
+                )}
+              </select>
            </div>
          )}
         {selectedChatModelProvider &&
           selectedChatModelProvider === 'custom_openai' && (
            <>
              <div className="flex flex-col space-y-1">
-                <p className="text-black/70 dark:text-white/70 text-sm">
-                  Model name
-                </p>
-                <Input
+                <p className="text-white/70 text-sm">Model name</p>
+                <input
                  type="text"
                  placeholder="Model name"
                  defaultValue={selectedChatModel!}
                  onChange={(e) =>
                    setSelectedChatModel(e.target.value)
                  }
+                  className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                />
              </div>
              <div className="flex flex-col space-y-1">
-                <p className="text-black/70 dark:text-white/70 text-sm">
+                <p className="text-white/70 text-sm">
                  Custom OpenAI API Key
                </p>
-                <Input
+                <input
                  type="text"
                  placeholder="Custom OpenAI API Key"
                  defaultValue={customOpenAIApiKey!}
                  onChange={(e) =>
                    setCustomOpenAIApiKey(e.target.value)
                  }
+                  className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                />
              </div>
              <div className="flex flex-col space-y-1">
-                <p className="text-black/70 dark:text-white/70 text-sm">
+                <p className="text-white/70 text-sm">
                  Custom OpenAI Base URL
                </p>
-                <Input
+                <input
                  type="text"
                  placeholder="Custom OpenAI Base URL"
                  defaultValue={customOpenAIBaseURL!}
                  onChange={(e) =>
                    setCustomOpenAIBaseURL(e.target.value)
                  }
+                  className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                />
              </div>
            </>
@@ -329,10 +276,10 @@ const SettingsDialog = ({
         {/* Embedding models */}
         {config.embeddingModelProviders && (
           <div className="flex flex-col space-y-1">
-            <p className="text-black/70 dark:text-white/70 text-sm">
+            <p className="text-white/70 text-sm">
               Embedding model Provider
             </p>
-            <Select
+            <select
               value={selectedEmbeddingModelProvider ?? undefined}
               onChange={(e) => {
                 setSelectedEmbeddingModelProvider(e.target.value);
@@ -340,63 +287,58 @@ const SettingsDialog = ({
                   config.embeddingModelProviders[e.target.value][0],
                 );
               }}
-              options={Object.keys(
-                config.embeddingModelProviders,
-              ).map((provider) => ({
-                label:
-                  provider.charAt(0).toUpperCase() +
-                  provider.slice(1),
-                value: provider,
-              }))}
-            />
+              className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+            >
+              {Object.keys(config.embeddingModelProviders).map(
+                (provider) => (
+                  <option key={provider} value={provider}>
+                    {provider.charAt(0).toUpperCase() +
+                      provider.slice(1)}
+                  </option>
+                ),
+              )}
+            </select>
           </div>
         )}
         {selectedEmbeddingModelProvider && (
           <div className="flex flex-col space-y-1">
-            <p className="text-black/70 dark:text-white/70 text-sm">
-              Embedding Model
-            </p>
-            <Select
+            <p className="text-white/70 text-sm">Embedding Model</p>
+            <select
               value={selectedEmbeddingModel ?? undefined}
               onChange={(e) =>
                 setSelectedEmbeddingModel(e.target.value)
               }
-              options={(() => {
-                const embeddingModelProvider =
+              className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+            >
+              {config.embeddingModelProviders[
+                selectedEmbeddingModelProvider
+              ] ? (
+                config.embeddingModelProviders[
+                  selectedEmbeddingModelProvider
+                ].length > 0 ? (
                   config.embeddingModelProviders[
                     selectedEmbeddingModelProvider
-                  ];
-
-                return embeddingModelProvider
-                  ? embeddingModelProvider.length > 0
-                    ? embeddingModelProvider.map((model) => ({
-                        label: model,
-                        value: model,
-                      }))
-                    : [
-                        {
-                          label: 'No embedding models available',
-                          value: '',
-                          disabled: true,
-                        },
-                      ]
-                  : [
-                      {
-                        label:
-                          'Invalid provider, please check backend logs',
-                        value: '',
-                        disabled: true,
-                      },
-                    ];
-              })()}
-            />
+                  ].map((model) => (
+                    <option key={model} value={model}>
+                      {model}
+                    </option>
+                  ))
+                ) : (
+                  <option value="" disabled selected>
+                    No embedding models available
+                  </option>
+                )
+              ) : (
+                <option value="" disabled selected>
+                  Invalid provider, please check backend logs
+                </option>
+              )}
+            </select>
           </div>
         )}
         <div className="flex flex-col space-y-1">
-          <p className="text-black/70 dark:text-white/70 text-sm">
-            OpenAI API Key
-          </p>
-          <Input
+          <p className="text-white/70 text-sm">OpenAI API Key</p>
+          <input
             type="text"
             placeholder="OpenAI API Key"
             defaultValue={config.openaiApiKey}
@@ -406,13 +348,12 @@ const SettingsDialog = ({
                 openaiApiKey: e.target.value,
               })
             }
+            className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
           />
         </div>
         <div className="flex flex-col space-y-1">
-          <p className="text-black/70 dark:text-white/70 text-sm">
-            Ollama API URL
-          </p>
-          <Input
+          <p className="text-white/70 text-sm">Ollama API URL</p>
+          <input
             type="text"
             placeholder="Ollama API URL"
             defaultValue={config.ollamaApiUrl}
@@ -422,13 +363,27 @@ const SettingsDialog = ({
                 ollamaApiUrl: e.target.value,
               })
             }
+            className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
           />
         </div>
         <div className="flex flex-col space-y-1">
-          <p className="text-black/70 dark:text-white/70 text-sm">
-            GROQ API Key
-          </p>
-          <Input
+          <p className="text-white/70 text-sm">OpenAI API URL</p>
+          <input
+            type="text"
+            placeholder="OpenAI API URL"
+            defaultValue={config.openaiApiUrl}
+            onChange={(e) =>
+              setConfig({
+                ...config,
+                openaiApiUrl: e.target.value,
+              })
+            }
+            className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+          />
+        </div>
+        <div className="flex flex-col space-y-1">
+          <p className="text-white/70 text-sm">GROQ API Key</p>
+          <input
             type="text"
             placeholder="GROQ API Key"
             defaultValue={config.groqApiKey}
@@ -438,17 +393,18 @@ const SettingsDialog = ({
                 groqApiKey: e.target.value,
               })
             }
+            className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
           />
         </div>
       </div>
     )}
     {isLoading && (
-      <div className="w-full flex items-center justify-center mt-6 text-black/70 dark:text-white/70 py-6">
+      <div className="w-full flex items-center justify-center mt-6 text-white/70 py-6">
        <RefreshCcw className="animate-spin" />
      </div>
    )}
     <div className="w-full mt-6 space-y-2">
-      <p className="text-xs text-black/50 dark:text-white/50">
+      <p className="text-xs text-white/50">
        We'll refresh the page after updating the settings.
      </p>
      <button