From ba7b92ffded3cf0e764b1101200376b9e7451e19 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 10:53:27 +0530
Subject: [PATCH 001/256] feat(providers): add `Content-Type` header

---
 src/lib/providers.ts | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/src/lib/providers.ts b/src/lib/providers.ts
index d6904c0..751d23f 100644
--- a/src/lib/providers.ts
+++ b/src/lib/providers.ts
@@ -90,7 +90,11 @@ export const getAvailableChatModelProviders = async () => {
 
   if (ollamaEndpoint) {
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`);
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
 
       const { models: ollamaModels } = (await response.json()) as any;
 
@@ -137,7 +141,11 @@ export const getAvailableEmbeddingModelProviders = async () => {
 
   if (ollamaEndpoint) {
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`);
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
 
       const { models: ollamaModels } = (await response.json()) as any;
 

From 6e61c88c9e655e238cabf5a4ac50481c2bdeb48d Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:28:46 +0530
Subject: [PATCH 002/256] feat(error-object): add `key`

---
 src/websocket/connectionManager.ts |  3 ++-
 src/websocket/messageHandler.ts    | 30 ++++++++++++++++++++++++++----
 2 files changed, 28 insertions(+), 5 deletions(-)

diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts
index 88efb6b..daa4cbc 100644
--- a/src/websocket/connectionManager.ts
+++ b/src/websocket/connectionManager.ts
@@ -70,7 +70,8 @@ export const handleConnection = async (
     ws.send(
       JSON.stringify({
         type: 'error',
-        data: 'Invalid LLM or embeddings model selected',
+        data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
+        key: 'INVALID_MODEL_SELECTED',
       }),
     );
     ws.close();
diff --git a/src/websocket/messageHandler.ts b/src/websocket/messageHandler.ts
index 537651f..98f67c2 100644
--- a/src/websocket/messageHandler.ts
+++ b/src/websocket/messageHandler.ts
@@ -57,7 +57,13 @@ const handleEmitterEvents = (
   });
   emitter.on('error', (data) => {
     const parsedData = JSON.parse(data);
-    ws.send(JSON.stringify({ type: 'error', data: parsedData.data }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: parsedData.data,
+        key: 'CHAIN_ERROR',
+      }),
+    );
   });
 };
 
@@ -73,7 +79,11 @@ export const handleMessage = async (
 
     if (!parsedMessage.content)
       return ws.send(
-        JSON.stringify({ type: 'error', data: 'Invalid message format' }),
+        JSON.stringify({
+          type: 'error',
+          data: 'Invalid message format',
+          key: 'INVALID_FORMAT',
+        }),
       );
 
     const history: BaseMessage[] = parsedMessage.history.map((msg) => {
@@ -99,11 +109,23 @@ export const handleMessage = async (
         );
         handleEmitterEvents(emitter, ws, id);
       } else {
-        ws.send(JSON.stringify({ type: 'error', data: 'Invalid focus mode' }));
+        ws.send(
+          JSON.stringify({
+            type: 'error',
+            data: 'Invalid focus mode',
+            key: 'INVALID_FOCUS_MODE',
+          }),
+        );
       }
     }
   } catch (err) {
-    ws.send(JSON.stringify({ type: 'error', data: 'Invalid message format' }));
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Invalid message format',
+        key: 'INVALID_FORMAT',
+      }),
+    );
     logger.error(`Failed to handle message: ${err}`);
   }
 };

From 94ea6c372a5c42afb2f6f9cc6942d57681a5e9fb Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:29:40 +0530
Subject: [PATCH 003/256] feat(chat-window): clear storage after error
---
 ui/components/ChatWindow.tsx | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index 79f93a8..ca1aaa3 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -108,6 +108,9 @@ const useSocket = (url: string) => {
         const parsedData = JSON.parse(e.data);
         if (parsedData.type === 'error') {
           toast.error(parsedData.data);
+          if (parsedData.key === 'INVALID_MODEL_SELECTED') {
+            localStorage.clear();
+          }
         }
       };
     };

From 9b088cd1614cfb96eab27506a8d74d816d4551ca Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 5 May 2024 16:35:06 +0530
Subject: [PATCH 004/256] feat(package): bump version

---
 package.json    | 2 +-
 ui/package.json | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/package.json b/package.json
index a82ee31..6838da8 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.3.1",
+  "version": "1.3.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
diff --git a/ui/package.json b/ui/package.json
index 31dd895..2a0ba02 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.3.1",
+  "version": "1.3.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {

From f28257b480b4706e0d9199418123122dc723f31e Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:34:59 +0530
Subject: [PATCH 005/256] feat(settings): fetch localStorage at state change

---
 ui/components/SettingsDialog.tsx | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index f77a95c..d704488 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -53,6 +53,14 @@ const SettingsDialog = ({
       });
       const data = await res.json();
       setConfig(data);
+      setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
+      setSelectedChatModel(localStorage.getItem('chatModel'));
+      setSelectedEmbeddingModelProvider(
+        localStorage.getItem('embeddingModelProvider'),
+      );
+      setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
+      setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
+      setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
       setIsLoading(false);
     };
 
@@ -61,17 +69,6 @@ const SettingsDialog = ({
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [isOpen]);
 
-  useEffect(() => {
-    setSelectedChatModelProvider(localStorage.getItem('chatModelProvider'));
-    setSelectedChatModel(localStorage.getItem('chatModel'));
-    setSelectedEmbeddingModelProvider(
-      localStorage.getItem('embeddingModelProvider'),
-    );
-    setSelectedEmbeddingModel(localStorage.getItem('embeddingModel'));
-    setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey'));
-    setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl'));
-  }, []);
-
   const handleSubmit = async () => {
     setIsUpdating(true);
 

From 38b19956771e77a14cabfcd4f82ab67418bb8225 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:36:13 +0530
Subject: [PATCH 006/256] feat(package): bump version

---
 package.json    | 2 +-
 ui/package.json | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/package.json b/package.json
index 6838da8..36e35d9 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.3.2",
+  "version": "1.3.3",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
diff --git a/ui/package.json b/ui/package.json
index 2a0ba02..99b2334 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.3.2",
+  "version": "1.3.3",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {

From b4d787d33359f09d7ffb1a193157204fe028bed5 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Mon, 6 May 2024 12:58:40 +0530
Subject: [PATCH 007/256] feat(readme): add troubleshooting

---
 README.md | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/README.md b/README.md
index 7c0b09a..fb01b4b 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,8 @@
 - [Installation](#installation)
   - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
   - [Non-Docker Installation](#non-docker-installation)
+  - [Troubleshooting](#troubleshooting)
+    - [Ollama connection errors](#ollama-connection-errors)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
 - [Support Us](#support-us)
@@ -90,6 +92,19 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 
 **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies.
 
+### Troubleshooting
+Enocuntered a bug or an issue? Feel free to join our [Discord community](https://discord.gg/2bdhg2R3ze) where we can do our best to help you.
+
+#### Ollama connection errors
+
+If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following:
+
+On Windows: `http://host.docker.internal:11434`
+On Mac: `http://host.docker.internal:11434`
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434` + +You need to edit the ports accordingly. + ## One-Click Deployment [![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267) From ed47191d9baec836dc6a526032d3055084bc6d40 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Mon, 6 May 2024 13:00:07 +0530 Subject: [PATCH 008/256] feat(readme): update readme --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.md b/README.md index fb01b4b..750006c 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,6 @@ - [Installation](#installation) - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended) - [Non-Docker Installation](#non-docker-installation) - - [Troubleshooting](#troubleshooting) - [Ollama connection errors](#ollama-connection-errors) - [One-Click Deployment](#one-click-deployment) - [Upcoming Features](#upcoming-features) @@ -92,9 +91,6 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. -### Troubleshooting -Enocuntered a bug or an issue? Feel free to join our [Discord community](https://discord.gg/2bdhg2R3ze) where we can do our best to help you. - #### Ollama connection errors If you're facing an Ollama connection error, it is often related to the backend not being able to connect to Ollama's API. How can you fix it? You can fix it by updating your Ollama API URL in the settings menu to the following: From e8fe74ae7c82c4a3f9399a3dcd03eaa7793c329b Mon Sep 17 00:00:00 2001 From: ItzCrazyKns Date: Mon, 6 May 2024 19:59:13 +0530 Subject: [PATCH 009/256] feat(ws-managers): implement better error handling --- src/websocket/connectionManager.ts | 125 ++++++++++++++++------------- ui/components/ChatWindow.tsx | 4 +- 2 files changed, 71 insertions(+), 58 deletions(-) diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts index daa4cbc..5cb075b 100644 --- a/src/websocket/connectionManager.ts +++ b/src/websocket/connectionManager.ts @@ -14,74 +14,87 @@ export const handleConnection = async ( ws: WebSocket, request: IncomingMessage, ) => { - const searchParams = new URL(request.url, `http://${request.headers.host}`) - .searchParams; + try { + const searchParams = new URL(request.url, `http://${request.headers.host}`) + .searchParams; - const [chatModelProviders, embeddingModelProviders] = await Promise.all([ - getAvailableChatModelProviders(), - getAvailableEmbeddingModelProviders(), - ]); + const [chatModelProviders, embeddingModelProviders] = await Promise.all([ + getAvailableChatModelProviders(), + getAvailableEmbeddingModelProviders(), + ]); - const chatModelProvider = - searchParams.get('chatModelProvider') || Object.keys(chatModelProviders)[0]; - const chatModel = - searchParams.get('chatModel') || - Object.keys(chatModelProviders[chatModelProvider])[0]; + const chatModelProvider = + searchParams.get('chatModelProvider') || + Object.keys(chatModelProviders)[0]; + const chatModel = + searchParams.get('chatModel') || + Object.keys(chatModelProviders[chatModelProvider])[0]; - const embeddingModelProvider = - searchParams.get('embeddingModelProvider') || - Object.keys(embeddingModelProviders)[0]; - const embeddingModel = - searchParams.get('embeddingModel') || - Object.keys(embeddingModelProviders[embeddingModelProvider])[0]; + const embeddingModelProvider = + 
searchParams.get('embeddingModelProvider') || + Object.keys(embeddingModelProviders)[0]; + const embeddingModel = + searchParams.get('embeddingModel') || + Object.keys(embeddingModelProviders[embeddingModelProvider])[0]; - let llm: BaseChatModel | undefined; - let embeddings: Embeddings | undefined; + let llm: BaseChatModel | undefined; + let embeddings: Embeddings | undefined; - if ( - chatModelProviders[chatModelProvider] && - chatModelProviders[chatModelProvider][chatModel] && - chatModelProvider != 'custom_openai' - ) { - llm = chatModelProviders[chatModelProvider][chatModel] as - | BaseChatModel - | undefined; - } else if (chatModelProvider == 'custom_openai') { - llm = new ChatOpenAI({ - modelName: chatModel, - openAIApiKey: searchParams.get('openAIApiKey'), - temperature: 0.7, - configuration: { - baseURL: searchParams.get('openAIBaseURL'), - }, - }); - } + if ( + chatModelProviders[chatModelProvider] && + chatModelProviders[chatModelProvider][chatModel] && + chatModelProvider != 'custom_openai' + ) { + llm = chatModelProviders[chatModelProvider][chatModel] as + | BaseChatModel + | undefined; + } else if (chatModelProvider == 'custom_openai') { + llm = new ChatOpenAI({ + modelName: chatModel, + openAIApiKey: searchParams.get('openAIApiKey'), + temperature: 0.7, + configuration: { + baseURL: searchParams.get('openAIBaseURL'), + }, + }); + } - if ( - embeddingModelProviders[embeddingModelProvider] && - embeddingModelProviders[embeddingModelProvider][embeddingModel] - ) { - embeddings = embeddingModelProviders[embeddingModelProvider][ - embeddingModel - ] as Embeddings | undefined; - } + if ( + embeddingModelProviders[embeddingModelProvider] && + embeddingModelProviders[embeddingModelProvider][embeddingModel] + ) { + embeddings = embeddingModelProviders[embeddingModelProvider][ + embeddingModel + ] as Embeddings | undefined; + } - if (!llm || !embeddings) { + if (!llm || !embeddings) { + ws.send( + JSON.stringify({ + type: 'error', + data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.', + key: 'INVALID_MODEL_SELECTED', + }), + ); + ws.close(); + } + + ws.on( + 'message', + async (message) => + await handleMessage(message.toString(), ws, llm, embeddings), + ); + + ws.on('close', () => logger.debug('Connection closed')); + } catch (err) { ws.send( JSON.stringify({ type: 'error', - data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.', - key: 'INVALID_MODEL_SELECTED', + data: 'Internal server error.', + key: 'INTERNAL_SERVER_ERROR', }), ); ws.close(); + logger.error(err); } - - ws.on( - 'message', - async (message) => - await handleMessage(message.toString(), ws, llm, embeddings), - ); - - ws.on('close', () => logger.debug('Connection closed')); }; diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx index ca1aaa3..6f58757 100644 --- a/ui/components/ChatWindow.tsx +++ b/ui/components/ChatWindow.tsx @@ -50,13 +50,13 @@ const useSocket = (url: string) => { !chatModelProviders || Object.keys(chatModelProviders).length === 0 ) - return console.error('No chat models available'); + return toast.error('No chat models available'); if ( !embeddingModelProviders || Object.keys(embeddingModelProviders).length === 0 ) - return console.error('No embedding models available'); + return toast.error('No embedding models available'); chatModelProvider = Object.keys(chatModelProviders)[0]; chatModel = Object.keys(chatModelProviders[chatModelProvider])[0]; From 4cb0aeeee3e9a9085e95eebcf72f28d3bc2609be Mon Sep 17 
00:00:00 2001 From: ItzCrazyKns Date: Mon, 6 May 2024 20:00:56 +0530 Subject: [PATCH 010/256] feat(settings): conditionally pick selected models --- ui/components/SettingsDialog.tsx | 59 ++++++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx index d704488..634a163 100644 --- a/ui/components/SettingsDialog.tsx +++ b/ui/components/SettingsDialog.tsx @@ -33,12 +33,8 @@ const SettingsDialog = ({ const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState< string | null >(null); - const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState( - null, - ); - const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState( - null, - ); + const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState(''); + const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState(''); const [isLoading, setIsLoading] = useState(false); const [isUpdating, setIsUpdating] = useState(false); @@ -51,16 +47,49 @@ const SettingsDialog = ({ 'Content-Type': 'application/json', }, }); - const data = await res.json(); + + const data = (await res.json()) as SettingsType; setConfig(data); - setSelectedChatModelProvider(localStorage.getItem('chatModelProvider')); - setSelectedChatModel(localStorage.getItem('chatModel')); - setSelectedEmbeddingModelProvider( - localStorage.getItem('embeddingModelProvider'), + + const chatModelProvidersKeys = Object.keys( + data.chatModelProviders || {}, ); - setSelectedEmbeddingModel(localStorage.getItem('embeddingModel')); - setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey')); - setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl')); + const embeddingModelProvidersKeys = Object.keys( + data.embeddingModelProviders || {}, + ); + + const defaultChatModelProvider = + chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : ''; + const defaultEmbeddingModelProvider = + embeddingModelProvidersKeys.length > 0 + ? embeddingModelProvidersKeys[0] + : ''; + + const chatModelProvider = + localStorage.getItem('chatModelProvider') || + defaultChatModelProvider || + ''; + const chatModel = + localStorage.getItem('chatModel') || + (data.chatModelProviders && + data.chatModelProviders[chatModelProvider]?.[0]) || + ''; + const embeddingModelProvider = + localStorage.getItem('embeddingModelProvider') || + defaultEmbeddingModelProvider || + ''; + const embeddingModel = + localStorage.getItem('embeddingModel') || + (data.embeddingModelProviders && + data.embeddingModelProviders[embeddingModelProvider]?.[0]) || + ''; + + setSelectedChatModelProvider(chatModelProvider); + setSelectedChatModel(chatModel); + setSelectedEmbeddingModelProvider(embeddingModelProvider); + setSelectedEmbeddingModel(embeddingModel); + setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || ''); + setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || ''); setIsLoading(false); }; @@ -223,7 +252,7 @@ const SettingsDialog = ({

- Custom OpenAI API Key (optional)
+ Custom OpenAI API Key

Date: Mon, 6 May 2024 20:01:57 +0530 Subject: [PATCH 011/256] feat(package): bump version --- package.json | 2 +- ui/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 36e35d9..ca01c44 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-backend", - "version": "1.3.3", + "version": "1.3.4", "license": "MIT", "author": "ItzCrazyKns", "scripts": { diff --git a/ui/package.json b/ui/package.json index 99b2334..7775d23 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-frontend", - "version": "1.3.3", + "version": "1.3.4", "license": "MIT", "author": "ItzCrazyKns", "scripts": { From 01fc683d321fd1d338e50d865d7198c44a0bc455 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Tue, 7 May 2024 06:35:39 +0800 Subject: [PATCH 012/256] fix(SettingDialog): use `value` instead of `selected` props in { setSelectedChatModelProvider(e.target.value); setSelectedChatModel( @@ -180,13 +181,7 @@ const SettingsDialog = ({ > {Object.keys(config.chatModelProviders).map( (provider) => ( - @@ -200,6 +195,7 @@ const SettingsDialog = ({

Chat Model

{ setSelectedEmbeddingModelProvider(e.target.value); setSelectedEmbeddingModel( @@ -297,13 +290,7 @@ const SettingsDialog = ({ > {Object.keys(config.embeddingModelProviders).map( (provider) => ( - @@ -316,6 +303,7 @@ const SettingsDialog = ({

Embedding Model

{Object.keys(config.chatModelProviders).map( (provider) => ( @@ -193,13 +193,15 @@ const SettingsDialog = ({ {selectedChatModelProvider && selectedChatModelProvider != 'custom_openai' && (
-

Chat Model

+

+ Chat Model +

setSelectedChatModel(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
-

+

Custom OpenAI API Key

setCustomOpenAIApiKey(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
-

+

Custom OpenAI Base URL

setCustomOpenAIBaseURL(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
@@ -275,7 +279,7 @@ const SettingsDialog = ({ {/* Embedding models */} {config.embeddingModelProviders && (
-

+

Embedding model Provider

setSelectedEmbeddingModel(e.target.value) } - className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" + className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" > {config.embeddingModelProviders[ selectedEmbeddingModelProvider @@ -336,7 +342,9 @@ const SettingsDialog = ({
)}
-

OpenAI API Key

+

+ OpenAI API Key +

-

Ollama API URL

+

+ Ollama API URL +

-

GROQ API Key

+

+ GROQ API Key +

)} {isLoading && ( -
+
)}
-

+

We'll refresh the page after updating the settings.

+ ); +} const Sidebar = ({ children }: { children: React.ReactNode }) => { const segments = useSelectedLayoutSegments(); @@ -38,31 +45,39 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => { return (
-
+
- + -
+ {navLinks.map((link, i) => ( {link.active && ( -
+
)} ))} -
- setIsSettingsOpen(!isSettingsOpen)} - className="text-white cursor-pointer" - /> + + + + + + setIsSettingsOpen(!isSettingsOpen)} + className="cursor-pointer" + /> + + {
-
+
{navLinks.map((link, i) => ( Date: Fri, 24 May 2024 21:58:14 +0800 Subject: [PATCH 052/256] refactor(SettingDialog): extract reduplicate code to common component DO NOT REPEAT YOURSELF! --- ui/components/SettingsDialog.tsx | 213 ++++++++++++++++++------------- 1 file changed, 127 insertions(+), 86 deletions(-) diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx index 1942179..4d80b9c 100644 --- a/ui/components/SettingsDialog.tsx +++ b/ui/components/SettingsDialog.tsx @@ -1,6 +1,51 @@ +import { cn } from '@/lib/utils'; import { Dialog, Transition } from '@headlessui/react'; import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react'; -import React, { Fragment, useEffect, useState } from 'react'; +import React, { + Fragment, + useEffect, + useMemo, + useState, + type SelectHTMLAttributes, +} from 'react'; + +interface InputProps extends React.InputHTMLAttributes {} + +function Input({ className, ...restProps }: InputProps) { + return ( + + ); +} + +interface SelectProps extends SelectHTMLAttributes { + options: { value: string; label: string; disabled?: boolean }[]; +} + +function Select({ className, options, ...restProps }: SelectProps) { + return ( + + ); +} interface SettingsType { chatModelProviders: { @@ -169,7 +214,7 @@ const SettingsDialog = ({

Chat model Provider

- + />
)} {selectedChatModelProvider && @@ -196,37 +239,40 @@ const SettingsDialog = ({

Chat Model

- + ]; + + return chatModelProvider + ? chatModelProvider.length > 0 + ? chatModelProvider.map((model) => ({ + value: model, + label: model, + })) + : [ + { + value: '', + label: 'No models available', + disabled: true, + }, + ] + : [ + { + value: '', + label: + 'Invalid provider, please check backend logs', + disabled: true, + }, + ]; + })()} + />
)} {selectedChatModelProvider && @@ -236,42 +282,39 @@ const SettingsDialog = ({

Model name

- setSelectedChatModel(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />

Custom OpenAI API Key

- setCustomOpenAIApiKey(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />

Custom OpenAI Base URL

- setCustomOpenAIBaseURL(e.target.value) } - className="bg-primaryLight dark:bg-primaryDark px-3 py-2 flex items-center overflow-hidden border border-light dark:border-dark dark:text-white rounded-lg text-sm" />
@@ -282,7 +325,7 @@ const SettingsDialog = ({

Embedding model Provider

- + options={Object.keys( + config.embeddingModelProviders, + ).map((provider) => ({ + label: + provider.charAt(0).toUpperCase() + + provider.slice(1), + value: provider, + }))} + />
)} {selectedEmbeddingModelProvider && ( @@ -308,44 +349,47 @@ const SettingsDialog = ({

Embedding Model

- + ]; + + return embeddingModelProvider + ? embeddingModelProvider.length > 0 + ? embeddingModelProvider.map((model) => ({ + label: model, + value: model, + })) + : [ + { + label: 'No embedding models available', + value: '', + disabled: true, + }, + ] + : [ + { + label: + 'Invalid provider, please check backend logs', + value: '', + disabled: true, + }, + ]; + })()} + />
)}

OpenAI API Key

-

Ollama API URL

-

GROQ API Key

-
From 89c30530bc44013d6c7d088f8a7011c11c4c450c Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Fri, 24 May 2024 22:08:47 +0800 Subject: [PATCH 053/256] update(Navbar): update Navbar light mode background --- ui/components/Navbar.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/components/Navbar.tsx b/ui/components/Navbar.tsx index c07d6fd..f95a455 100644 --- a/ui/components/Navbar.tsx +++ b/ui/components/Navbar.tsx @@ -38,7 +38,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => { }, []); return ( -
+
Date: Fri, 24 May 2024 22:41:06 +0800 Subject: [PATCH 054/256] update(SearchVideos): video cover label style adapt light mode --- ui/components/SearchVideos.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/components/SearchVideos.tsx b/ui/components/SearchVideos.tsx index 05c3180..eaad078 100644 --- a/ui/components/SearchVideos.tsx +++ b/ui/components/SearchVideos.tsx @@ -118,7 +118,7 @@ const Searchvideos = ({ alt={video.title} className="relative h-full w-full aspect-video object-cover rounded-lg" /> -
+

Video

From 382fa295e57593fb80b9371af47dbd1e9abfecee Mon Sep 17 00:00:00 2001 From: Devin Stokes Date: Fri, 24 May 2024 08:19:15 -0700 Subject: [PATCH 055/256] fix: add extra_hosts to docker-compose.yaml to allow connection to ollama --- docker-compose.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose.yaml b/docker-compose.yaml index ac83575..d8e1047 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -18,6 +18,8 @@ services: - searxng ports: - 3001:3001 + extra_hosts: + - "host.docker.internal:host-gateway" networks: - perplexica-network @@ -32,6 +34,8 @@ services: - perplexica-backend ports: - 3000:3000 + extra_hosts: + - "host.docker.internal:host-gateway" networks: - perplexica-network From c97a4347230f505a68339d0c08d13baf2cf67696 Mon Sep 17 00:00:00 2001 From: WanQuanXie Date: Sat, 25 May 2024 06:57:24 +0800 Subject: [PATCH 056/256] fix(ui): hover style class uses --- ui/components/MessageActions/Rewrite.tsx | 2 +- ui/components/MessageBox.tsx | 4 ++-- ui/components/MessageInputActions.tsx | 2 +- ui/components/MessageSources.tsx | 6 +++--- ui/components/SearchImages.tsx | 4 ++-- ui/components/SearchVideos.tsx | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index b5f865b..76a422f 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,7 +10,7 @@ const Rewrite = ({ return ( */} @@ -127,7 +127,7 @@ const MessageBox = ({ start(); } }} - className="p-2 text-black/70 dark:text-white/70 rounded-xl hover(bg-secondLight dark:bg-secondDark) transition duration-200 hover:text-black dark:hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-secondLight dark:hover:bg-secondDark transition duration-200 hover:text-black dark:hover:text-white" > {speechStatus === 'started' ? ( diff --git a/ui/components/MessageInputActions.tsx b/ui/components/MessageInputActions.tsx index 22fc708..8b6d784 100644 --- a/ui/components/MessageInputActions.tsx +++ b/ui/components/MessageInputActions.tsx @@ -85,7 +85,7 @@ export const Focus = ({ {focusMode !== 'webSearch' ? (
diff --git a/ui/components/MessageSources.tsx b/ui/components/MessageSources.tsx index 476c73c..292b8c6 100644 --- a/ui/components/MessageSources.tsx +++ b/ui/components/MessageSources.tsx @@ -20,7 +20,7 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
{sources.slice(0, 3).map((source, i) => ( { {sources.length > 3 && ( diff --git a/ui/components/MessageActions/Rewrite.tsx b/ui/components/MessageActions/Rewrite.tsx index 76a422f..80fadb3 100644 --- a/ui/components/MessageActions/Rewrite.tsx +++ b/ui/components/MessageActions/Rewrite.tsx @@ -10,7 +10,7 @@ const Rewrite = ({ return ( */} @@ -127,7 +127,7 @@ const MessageBox = ({ start(); } }} - className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-secondLight dark:hover:bg-secondDark transition duration-200 hover:text-black dark:hover:text-white" + className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white" > {speechStatus === 'started' ? ( @@ -144,7 +144,7 @@ const MessageBox = ({ message.role === 'assistant' && !loading && ( <> -
+
@@ -156,7 +156,7 @@ const MessageBox = ({ className="flex flex-col space-y-3 text-sm" key={i} > -
+
{ sendMessage(suggestion); diff --git a/ui/components/MessageBoxLoading.tsx b/ui/components/MessageBoxLoading.tsx index 3a80fe8..caa6f18 100644 --- a/ui/components/MessageBoxLoading.tsx +++ b/ui/components/MessageBoxLoading.tsx @@ -1,9 +1,9 @@ const MessageBoxLoading = () => { return ( -
-
-
-
+
+
+
+
); }; diff --git a/ui/components/MessageInput.tsx b/ui/components/MessageInput.tsx index 4fbea7c..d215787 100644 --- a/ui/components/MessageInput.tsx +++ b/ui/components/MessageInput.tsx @@ -40,7 +40,7 @@ const MessageInput = ({ } }} className={cn( - 'bg-primaryLight dark:bg-primaryDark p-4 flex items-center overflow-hidden border border-light dark:border-dark', + 'bg-light-primary dark:bg-dark-primary p-4 flex items-center overflow-hidden border border-light-300 dark:border-dark-200', mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full', )} > diff --git a/ui/components/MessageInputActions.tsx b/ui/components/MessageInputActions.tsx index 8b6d784..80b2797 100644 --- a/ui/components/MessageInputActions.tsx +++ b/ui/components/MessageInputActions.tsx @@ -16,7 +16,7 @@ export const Attach = () => { return ( @@ -85,7 +85,7 @@ export const Focus = ({ {focusMode !== 'webSearch' ? (
@@ -109,7 +109,7 @@ export const Focus = ({ leaveTo="opacity-0 translate-y-1" > -
+
{focusModes.map((mode, i) => ( setFocusMode(mode.key)} @@ -117,8 +117,8 @@ export const Focus = ({ className={cn( 'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition', focusMode === mode.key - ? 'bg-primaryLight dark:bg-primaryDark' - : 'hover:bg-primaryLight dark:bg-primaryDark', + ? 'bg-light-primary dark:bg-dark-primary' + : 'hover:bg-light-primary dark:bg-dark-primary', )} >
Copilot {
{sources.slice(0, 3).map((source, i) => ( { {sources.length > 3 && (