Merge branch 'ItzCrazyKns:master' into master

Author: Ber Gutman, 2024-10-21 15:15:42 +02:00 (committed by GitHub)
Commit: 3d3e5ebec2 (GPG key ID: B5690EEEBB952194)
9 changed files with 56 additions and 31 deletions

View file

@@ -17,6 +17,9 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:

View file

@@ -13,6 +13,7 @@
- [Ollama Connection Errors](#ollama-connection-errors)
- [Using as a Search Engine](#using-as-a-search-engine)
- [Using Perplexica's API](#using-perplexicas-api)
- [Expose Perplexica to a network](#expose-perplexica-to-network)
- [One-Click Deployment](#one-click-deployment)
- [Upcoming Features](#upcoming-features)
- [Support Us](#support-us)
@@ -135,6 +136,10 @@ Perplexica also provides an API for developers looking to integrate its powerful
For more details, check out the full documentation [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/API/SEARCH.md).
## Expose Perplexica to network
You can access Perplexica over your home network by following our networking guide [here](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md).
## One-Click Deployment
[![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267)
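
For developers who want a concrete starting point with the API mentioned above, here is a minimal sketch of a search request in TypeScript. The URL, port, and body fields are illustrative assumptions, not the documented contract; the SEARCH.md documentation linked above is authoritative.

  // Hypothetical sketch of querying a locally hosted Perplexica-style search API.
  // The endpoint path, port, and request fields below are assumptions for illustration;
  // consult docs/API/SEARCH.md for the real parameters.
  // Uses the global fetch available in Node 18+ and browsers.
  async function search(query: string): Promise<unknown> {
    const res = await fetch('http://localhost:3001/api/search', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ query, focusMode: 'webSearch' }), // field names assumed
    });
    if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
    return res.json();
  }

  search('What is Perplexica?').then(console.log).catch(console.error);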

View file

@@ -1,4 +1,4 @@
FROM node:slim
FROM node:18-slim
WORKDIR /home/perplexica
@@ -10,7 +10,7 @@ COPY yarn.lock /home/perplexica/
RUN mkdir /home/perplexica/data
RUN yarn install --frozen-lockfile
RUN yarn install --frozen-lockfile --network-timeout 600000
RUN yarn build
CMD ["yarn", "start"]

View file

@@ -1,6 +1,6 @@
{
"name": "perplexica-backend",
"version": "1.9.0",
"version": "1.9.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {

View file

@@ -22,12 +22,12 @@ export const loadGroqChatModels = async () => {
},
),
},
'llama-3.2-11b-text-preview': {
displayName: 'Llama 3.2 11B Text',
'llama-3.2-11b-vision-preview': {
displayName: 'Llama 3.2 11B Vision',
model: new ChatOpenAI(
{
openAIApiKey: groqApiKey,
modelName: 'llama-3.2-11b-text-preview',
modelName: 'llama-3.2-11b-vision-preview',
temperature: 0.7,
},
{
@@ -35,12 +35,12 @@ export const loadGroqChatModels = async () => {
},
),
},
'llama-3.2-90b-text-preview': {
displayName: 'Llama 3.2 90B Text',
'llama-3.2-90b-vision-preview': {
displayName: 'Llama 3.2 90B Vision',
model: new ChatOpenAI(
{
openAIApiKey: groqApiKey,
modelName: 'llama-3.2-90b-text-preview',
modelName: 'llama-3.2-90b-vision-preview',
temperature: 0.7,
},
{
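
The hunk above swaps the retired llama-3.2 text-preview model IDs for their vision-preview counterparts. As a rough illustration of the pattern the loader follows, the sketch below builds one such entry with LangChain's ChatOpenAI pointed at an OpenAI-compatible base URL; the base URL value and the surrounding loader shape are assumptions rather than the file's exact contents.

  // Sketch of a single Groq chat-model entry, assuming LangChain's ChatOpenAI client
  // and an OpenAI-compatible endpoint; the real loader builds several entries keyed by model ID.
  import { ChatOpenAI } from '@langchain/openai';

  const groqApiKey = process.env.GROQ_API_KEY;

  export const llama90bVision = {
    displayName: 'Llama 3.2 90B Vision',
    model: new ChatOpenAI(
      {
        openAIApiKey: groqApiKey,
        modelName: 'llama-3.2-90b-vision-preview',
        temperature: 0.7,
      },
      {
        baseURL: 'https://api.groq.com/openai/v1', // assumed OpenAI-compatible Groq endpoint
      },
    ),
  };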

View file

@@ -10,8 +10,8 @@ import type { BaseChatModel } from '@langchain/core/language_models/chat_models'
import type { Embeddings } from '@langchain/core/embeddings';
import logger from '../utils/logger';
import db from '../db';
import { chats, messages } from '../db/schema';
import { eq } from 'drizzle-orm';
import { chats, messages as messagesSchema } from '../db/schema';
import { eq, asc, gt } from 'drizzle-orm';
import crypto from 'crypto';
type Message = {
@@ -71,7 +71,7 @@ const handleEmitterEvents = (
emitter.on('end', () => {
ws.send(JSON.stringify({ type: 'messageEnd', messageId: messageId }));
db.insert(messages)
db.insert(messagesSchema)
.values({
content: recievedMessage,
chatId: chatId,
@@ -106,7 +106,9 @@ export const handleMessage = async (
const parsedWSMessage = JSON.parse(message) as WSMessage;
const parsedMessage = parsedWSMessage.message;
const id = crypto.randomBytes(7).toString('hex');
const humanMessageId =
parsedMessage.messageId ?? crypto.randomBytes(7).toString('hex');
const aiMessageId = crypto.randomBytes(7).toString('hex');
if (!parsedMessage.content)
return ws.send(
@@ -141,7 +143,7 @@ export const handleMessage = async (
parsedWSMessage.optimizationMode,
);
handleEmitterEvents(emitter, ws, id, parsedMessage.chatId);
handleEmitterEvents(emitter, ws, aiMessageId, parsedMessage.chatId);
const chat = await db.query.chats.findFirst({
where: eq(chats.id, parsedMessage.chatId),
@@ -159,18 +161,29 @@ export const handleMessage = async (
.execute();
}
const messageExists = await db.query.messages.findFirst({
where: eq(messagesSchema.messageId, humanMessageId),
});
if (!messageExists) {
await db
.insert(messages)
.insert(messagesSchema)
.values({
content: parsedMessage.content,
chatId: parsedMessage.chatId,
messageId: id,
messageId: humanMessageId,
role: 'user',
metadata: JSON.stringify({
createdAt: new Date(),
}),
})
.execute();
} else {
await db
.delete(messagesSchema)
.where(gt(messagesSchema.id, messageExists.id))
.execute();
}
} else {
ws.send(
JSON.stringify({
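
The largest change in this file separates the client-supplied human message ID from a freshly generated AI message ID and rewinds the chat when a message is resent: if a row with that messageId already exists, every row stored after it is deleted before the new answer is generated. A condensed sketch of that branch, reusing the Drizzle imports and table alias shown in the hunk, might look like this:

  // Condensed sketch of the insert-or-rewind branch above, assuming the repo's Drizzle
  // db instance and the messages table (aliased as messagesSchema) from src/db/schema.
  import { eq, gt } from 'drizzle-orm';
  import db from '../db';
  import { messages as messagesSchema } from '../db/schema';

  export const upsertHumanMessage = async (
    humanMessageId: string,
    chatId: string,
    content: string,
  ) => {
    const existing = await db.query.messages.findFirst({
      where: eq(messagesSchema.messageId, humanMessageId),
    });

    if (!existing) {
      // First time this message is seen: store it as a new user turn.
      await db
        .insert(messagesSchema)
        .values({
          content,
          chatId,
          messageId: humanMessageId,
          role: 'user',
          metadata: JSON.stringify({ createdAt: new Date() }),
        })
        .execute();
    } else {
      // The message is being resent: as in the hunk above, delete the rows inserted
      // after it (by autoincrement id) so the regenerated answer replaces the old tail.
      await db
        .delete(messagesSchema)
        .where(gt(messagesSchema.id, existing.id))
        .execute();
    }
  };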

View file

@@ -333,8 +333,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
}
}, [isMessagesLoaded, isWSReady]);
const sendMessage = async (message: string) => {
const sendMessage = async (message: string, messageId?: string) => {
if (loading) return;
setLoading(true);
setMessageAppeared(false);
@@ -342,12 +343,13 @@ const ChatWindow = ({ id }: { id?: string }) => {
let recievedMessage = '';
let added = false;
const messageId = crypto.randomBytes(7).toString('hex');
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
ws?.send(
JSON.stringify({
type: 'message',
message: {
messageId: messageId,
chatId: chatId!,
content: message,
},
@@ -474,7 +476,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
sendMessage(message.content);
sendMessage(message.content, message.messageId);
};
useEffect(() => {
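
On the frontend, sendMessage now accepts an optional messageId so that a rewritten message reuses its original ID (letting the backend's rewind logic find it), while brand-new messages still get a random one. A stripped-down sketch of that reuse-or-generate pattern, with the React state and WebSocket wiring omitted, could be:

  // Sketch of the payload built in sendMessage above; everything outside the ID
  // handling (loading state, chat history updates, the WebSocket itself) is omitted.
  import crypto from 'crypto';

  export const buildMessagePayload = (
    chatId: string,
    content: string,
    messageId?: string,
  ) => ({
    type: 'message',
    message: {
      // Reuse the original ID when rewriting a message; otherwise mint a fresh one.
      messageId: messageId ?? crypto.randomBytes(7).toString('hex'),
      chatId,
      content,
    },
  });

  // Usage (hypothetical): ws.send(JSON.stringify(buildMessagePayload(chatId, text, existingId)));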

View file

@@ -128,7 +128,9 @@ const SettingsDialog = ({
const chatModel =
localStorage.getItem('chatModel') ||
(data.chatModelProviders &&
data.chatModelProviders[chatModelProvider]?.[0].name) ||
data.chatModelProviders[chatModelProvider]?.length > 0
? data.chatModelProviders[chatModelProvider][0].name
: undefined) ||
'';
const embeddingModelProvider =
localStorage.getItem('embeddingModelProvider') ||
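
The settings fix guards the default chat-model lookup: previously [0].name could be read from an empty provider list, so the new code checks the length first and falls back to an empty string. The sketch below restates that guard as a small helper with hypothetical provider data; the type and function name are illustrative, not part of the component.

  // Hypothetical helper restating the guard from the hunk above:
  // only read [0].name when the provider actually has models.
  type ProviderModels = Record<string, { name: string; displayName?: string }[]>;

  export const pickDefaultChatModel = (
    providers: ProviderModels | undefined,
    provider: string,
    stored?: string | null,
  ): string => {
    if (stored) return stored;
    const models = providers?.[provider];
    return models && models.length > 0 ? models[0].name : '';
  };

  // pickDefaultChatModel({ openai: [{ name: 'gpt-4o-mini' }] }, 'openai') === 'gpt-4o-mini'
  // pickDefaultChatModel({ openai: [] }, 'openai') === ''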

View file

@@ -1,6 +1,6 @@
{
"name": "perplexica-frontend",
"version": "1.9.0",
"version": "1.9.1",
"license": "MIT",
"author": "ItzCrazyKns",
"scripts": {