/*
 * SettingsDialog — a Headless UI modal that lets the user pick chat /
 * embedding model providers and models, and enter API keys (OpenAI, Groq,
 * custom OpenAI) and an Ollama API URL.
 *
 * Visible data flow (grounded in the code below):
 *   - On open, `fetchConfig` GETs `${process.env.NEXT_PUBLIC_API_URL}/config`,
 *     stores the result in `config`, and seeds the provider/model selections
 *     from localStorage with fallbacks to the first provider/model returned.
 *   - `handleSubmit` POSTs `config` back to the same endpoint, persists the
 *     selections to localStorage, then closes the dialog and reloads the page
 *     (`window.location.reload()`), even when the POST throws (finally block).
 *
 * NOTE(review): this file appears to have been DAMAGED by an extraction step
 * that deleted everything between angle brackets and collapsed lines:
 *   - `interface InputProperties extends React.InputHTMLAttributes {}` has
 *     lost its type argument (presumably `<HTMLInputElement>` — confirm);
 *   - every `useState(null)` has lost its generic (presumably
 *     `<string | null>` / `<SettingsType | null>` — confirm);
 *   - `Input` and `Select` have empty `return ( );` bodies — their JSX is gone;
 *   - because the file is collapsed onto one line, the mid-line
 *     `// eslint-disable-next-line react-hooks/exhaustive-deps` comment now
 *     comments out the remainder of that physical line (including
 *     `}, [isOpen]);` and `handleSubmit`), which was not the author's intent;
 *   - the lines after `return (` contain only the text content of the
 *     original JSX tree (labels, inline onChange handlers), with all tags
 *     removed.
 * The file cannot compile in this state. Restore it from version control
 * rather than attempting to reconstruct the missing markup here; the code
 * below is kept byte-identical on purpose.
 */
/* eslint-disable unicorn/no-nested-ternary */ import { cn } from "@/lib/utils"; import { Dialog, Transition } from "@headlessui/react"; import { CloudUpload, RefreshCcw, RefreshCw } from "lucide-react"; import React, { Fragment, useEffect, useState, type SelectHTMLAttributes } from "react"; import ThemeSwitcher from "./theme/Switcher"; interface InputProperties extends React.InputHTMLAttributes {} const Input = ({ className, ...restProperties }: InputProperties) => { return ( ); }; interface SelectProperties extends SelectHTMLAttributes { options: { value: string; label: string; disabled?: boolean }[]; } export const Select = ({ className, options, ...restProperties }: SelectProperties) => { return ( ); }; interface SettingsType { chatModelProviders: { [key: string]: string[]; }; embeddingModelProviders: { [key: string]: string[]; }; openaiApiKey: string; groqApiKey: string; ollamaApiUrl: string; } const SettingsDialog = ({ isOpen, setIsOpen }: { isOpen: boolean; setIsOpen: (isOpen: boolean) => void }) => { const [config, setConfig] = useState(null); const [selectedChatModelProvider, setSelectedChatModelProvider] = useState(null); const [selectedChatModel, setSelectedChatModel] = useState(null); const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] = useState(null); const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState(null); const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState(""); const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState(""); const [isLoading, setIsLoading] = useState(false); const [isUpdating, setIsUpdating] = useState(false); useEffect(() => { if (isOpen) { const fetchConfig = async () => { setIsLoading(true); const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, { headers: { "Content-Type": "application/json", }, }); const data = (await res.json()) as SettingsType; setConfig(data); const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {}); const 
embeddingModelProvidersKeys = Object.keys(data.embeddingModelProviders || {}); const defaultChatModelProvider = chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : ""; const defaultEmbeddingModelProvider = embeddingModelProvidersKeys.length > 0 ? embeddingModelProvidersKeys[0] : ""; const chatModelProvider = localStorage.getItem("chatModelProvider") || defaultChatModelProvider || ""; const chatModel = localStorage.getItem("chatModel") || (data.chatModelProviders && data.chatModelProviders[chatModelProvider]?.[0]) || ""; const embeddingModelProvider = localStorage.getItem("embeddingModelProvider") || defaultEmbeddingModelProvider || ""; const embeddingModel = localStorage.getItem("embeddingModel") || (data.embeddingModelProviders && data.embeddingModelProviders[embeddingModelProvider]?.[0]) || ""; setSelectedChatModelProvider(chatModelProvider); setSelectedChatModel(chatModel); setSelectedEmbeddingModelProvider(embeddingModelProvider); setSelectedEmbeddingModel(embeddingModel); setCustomOpenAIApiKey(localStorage.getItem("openAIApiKey") || ""); setCustomOpenAIBaseURL(localStorage.getItem("openAIBaseURL") || ""); setIsLoading(false); }; fetchConfig(); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [isOpen]); const handleSubmit = async () => { setIsUpdating(true); try { await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, { method: "POST", headers: { "Content-Type": "application/json", }, body: JSON.stringify(config), }); localStorage.setItem("chatModelProvider", selectedChatModelProvider!); localStorage.setItem("chatModel", selectedChatModel!); localStorage.setItem("embeddingModelProvider", selectedEmbeddingModelProvider!); localStorage.setItem("embeddingModel", selectedEmbeddingModel!); localStorage.setItem("openAIApiKey", customOpenAIApiKey!); localStorage.setItem("openAIBaseURL", customOpenAIBaseURL!); } catch (error) { console.log(error); } finally { setIsUpdating(false); setIsOpen(false); window.location.reload(); } }; return ( 
// NOTE(review): everything below is the residual content of the stripped JSX
// tree (section labels, inline onChange handlers, option-list builders for
// chat/embedding model selects, and API-key/URL inputs bound to `config`).
// Kept byte-identical; do not edit — restore the original markup from VCS.
setIsOpen(false)}>
Settings {config && !isLoading && (

Theme

{config.chatModelProviders && (

Chat model Provider

setSelectedChatModel(e.target.value)} options={(() => { const chatModelProvider = config.chatModelProviders[selectedChatModelProvider]; return chatModelProvider ? chatModelProvider.length > 0 ? chatModelProvider.map(model => ({ value: model, label: model, })) : [ { value: "", label: "No models available", disabled: true, }, ] : [ { value: "", label: "Invalid provider, please check backend logs", disabled: true, }, ]; })()} />
)} {selectedChatModelProvider && selectedChatModelProvider === "custom_openai" && ( <>

Model name

setSelectedChatModel(e.target.value)} />

Custom OpenAI API Key

setCustomOpenAIApiKey(e.target.value)} />

Custom OpenAI Base URL

setCustomOpenAIBaseURL(e.target.value)} />
)} {/* Embedding models */} {config.embeddingModelProviders && (

Embedding model Provider

setSelectedEmbeddingModel(e.target.value)} options={(() => { const embeddingModelProvider = config.embeddingModelProviders[selectedEmbeddingModelProvider]; return embeddingModelProvider ? embeddingModelProvider.length > 0 ? embeddingModelProvider.map(model => ({ label: model, value: model, })) : [ { label: "No embedding models available", value: "", disabled: true, }, ] : [ { label: "Invalid provider, please check backend logs", value: "", disabled: true, }, ]; })()} />
)}

OpenAI API Key

setConfig({ ...config, openaiApiKey: e.target.value, }) } />

Ollama API URL

setConfig({ ...config, ollamaApiUrl: e.target.value, }) } />

GROQ API Key

setConfig({ ...config, groqApiKey: e.target.value, }) } />
)} {isLoading && (
)}

We'll refresh the page after updating the settings.

); }; export default SettingsDialog;