feat(video-search): handle custom OpenAI
parent 540f38ae68
commit 7c6ee2ead1
5 changed files with 94 additions and 25 deletions
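The UI previously sent the model selection as flat `chat_model` / `chat_model_provider` fields. After this change every call site nests the selection in a single `chatModel` object and, only when the selected provider is `custom_openai`, attaches the base URL and API key read from localStorage. A minimal sketch of the request shape the hunks below converge on (the types are illustrative only; the repo does not declare them, and `Message` is stubbed here):

// Illustrative only: the body shape assembled by SearchImages, SearchVideos
// and getSuggestions after this change. Not a type declared in the repo.
type Message = { role: string; content: string }; // stand-in for the app's own Message type

interface ChatModelPayload {
  provider: string | null;             // localStorage 'chatModelProvider'
  model: string | null;                // localStorage 'chatModel'
  customOpenAIBaseURL?: string | null; // only when provider === 'custom_openai'
  customOpenAIKey?: string | null;     // only when provider === 'custom_openai'
}

interface SearchRequestBody {
  query?: string;           // /images and /videos only
  chatHistory: Message[];   // renamed from chat_history
  chatModel: ChatModelPayload;
}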
@@ -186,10 +186,10 @@ const MessageBox = ({
          <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
            <SearchImages
              query={history[messageIndex - 1].content}
-              chat_history={history.slice(0, messageIndex - 1)}
+              chatHistory={history.slice(0, messageIndex - 1)}
            />
            <SearchVideos
-              chat_history={history.slice(0, messageIndex - 1)}
+              chatHistory={history.slice(0, messageIndex - 1)}
              query={history[messageIndex - 1].content}
            />
          </div>
@@ -13,10 +13,10 @@ type Image = {

const SearchImages = ({
  query,
-  chat_history,
+  chatHistory,
}: {
  query: string;
-  chat_history: Message[];
+  chatHistory: Message[];
}) => {
  const [images, setImages] = useState<Image[] | null>(null);
  const [loading, setLoading] = useState(false);
@@ -33,6 +33,9 @@ const SearchImages = ({
    const chatModelProvider = localStorage.getItem('chatModelProvider');
    const chatModel = localStorage.getItem('chatModel');

+    const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+    const customOpenAIKey = localStorage.getItem('openAIApiKey');
+
    const res = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/images`,
      {
@@ -42,9 +45,15 @@ const SearchImages = ({
        },
        body: JSON.stringify({
          query: query,
-          chat_history: chat_history,
-          chat_model_provider: chatModelProvider,
-          chat_model: chatModel,
+          chatHistory: chatHistory,
+          chatModel: {
+            provider: chatModelProvider,
+            model: chatModel,
+            ...(chatModelProvider === 'custom_openai' && {
+              customOpenAIBaseURL: customOpenAIBaseURL,
+              customOpenAIKey: customOpenAIKey,
+            }),
+          },
        }),
      },
    );
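Note that the object passed to JSON.stringify is now identical in SearchImages, SearchVideos and getSuggestions. A possible follow-up (not part of this commit; the helper name and placement are invented for illustration) would be to build it in one place:

// Hypothetical shared helper; it does not exist in the repo. It reads the
// same localStorage keys the three call sites read and returns the nested
// chatModel object the backend now expects.
const getChatModelPayload = () => {
  const chatModelProvider = localStorage.getItem('chatModelProvider');
  const chatModel = localStorage.getItem('chatModel');
  const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
  const customOpenAIKey = localStorage.getItem('openAIApiKey');

  return {
    provider: chatModelProvider,
    model: chatModel,
    ...(chatModelProvider === 'custom_openai' && {
      customOpenAIBaseURL,
      customOpenAIKey,
    }),
  };
};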
@@ -26,10 +26,10 @@ declare module 'yet-another-react-lightbox' {

const Searchvideos = ({
  query,
-  chat_history,
+  chatHistory,
}: {
  query: string;
-  chat_history: Message[];
+  chatHistory: Message[];
}) => {
  const [videos, setVideos] = useState<Video[] | null>(null);
  const [loading, setLoading] = useState(false);
@@ -46,6 +46,9 @@ const Searchvideos = ({
    const chatModelProvider = localStorage.getItem('chatModelProvider');
    const chatModel = localStorage.getItem('chatModel');

+    const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+    const customOpenAIKey = localStorage.getItem('openAIApiKey');
+
    const res = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/videos`,
      {
@@ -55,9 +58,15 @@ const Searchvideos = ({
        },
        body: JSON.stringify({
          query: query,
-          chat_history: chat_history,
-          chat_model_provider: chatModelProvider,
-          chat_model: chatModel,
+          chatHistory: chatHistory,
+          chatModel: {
+            provider: chatModelProvider,
+            model: chatModel,
+            ...(chatModelProvider === 'custom_openai' && {
+              customOpenAIBaseURL: customOpenAIBaseURL,
+              customOpenAIKey: customOpenAIKey,
+            }),
+          },
        }),
      },
    );
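The conditional spread means the custom OpenAI fields are omitted entirely for every other provider rather than being sent as null. Two illustrative /videos request bodies (all values invented):

// Illustrative request bodies only; provider, model and key values are made up.
const defaultProviderBody = {
  query: 'example query',
  chatHistory: [],
  chatModel: { provider: 'openai', model: 'gpt-4o-mini' },
};

const customOpenAIBody = {
  query: 'example query',
  chatHistory: [],
  chatModel: {
    provider: 'custom_openai',
    model: 'my-local-model',
    customOpenAIBaseURL: 'http://localhost:8080/v1',
    customOpenAIKey: 'sk-example',
  },
};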
@@ -4,15 +4,24 @@ export const getSuggestions = async (chatHisory: Message[]) => {
  const chatModel = localStorage.getItem('chatModel');
  const chatModelProvider = localStorage.getItem('chatModelProvider');

+  const customOpenAIKey = localStorage.getItem('openAIApiKey');
+  const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+
  const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
-      chat_history: chatHisory,
-      chat_model: chatModel,
-      chat_model_provider: chatModelProvider,
+      chatHistory: chatHisory,
+      chatModel: {
+        provider: chatModelProvider,
+        model: chatModel,
+        ...(chatModelProvider === 'custom_openai' && {
+          customOpenAIKey,
+          customOpenAIBaseURL,
+        }),
+      },
    }),
  });

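This commit only changes what the UI sends; the backend routes have to read the nested object instead of the old flat fields. How they do that is outside this diff, so the following narrowing helper is purely an illustration (names and shape are assumptions, not the repo's actual validation):

// Hypothetical narrowing helper for the nested chatModel object, for
// illustration only; the backend may validate the request body differently.
type ChatModelBody = {
  provider: string;
  model: string;
  customOpenAIBaseURL?: string;
  customOpenAIKey?: string;
};

const isCustomOpenAI = (
  m: ChatModelBody,
): m is ChatModelBody &
  Required<Pick<ChatModelBody, 'customOpenAIBaseURL' | 'customOpenAIKey'>> =>
  m.provider === 'custom_openai' &&
  typeof m.customOpenAIBaseURL === 'string' &&
  typeof m.customOpenAIKey === 'string';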