Perplexica/src/websocket/connectionManager.ts

import { WebSocket } from 'ws';
import { handleMessage } from './messageHandler';
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '../lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { Embeddings } from '@langchain/core/embeddings';
import type { IncomingMessage } from 'http';
import logger from '../utils/logger';
import { ChatOpenAI } from '@langchain/openai';

export const handleConnection = async (
  ws: WebSocket,
  request: IncomingMessage,
) => {
  try {
    const searchParams = new URL(request.url, `http://${request.headers.host}`)
      .searchParams;
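
    // Load every configured chat and embedding provider in parallel.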
    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ]);
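
    // The client selects models via query parameters; fall back to the first
    // available provider and model when none is specified.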
    const chatModelProvider =
      searchParams.get('chatModelProvider') ||
      Object.keys(chatModelProviders)[0];
    const chatModel =
      searchParams.get('chatModel') ||
      Object.keys(chatModelProviders[chatModelProvider])[0];
    const embeddingModelProvider =
      searchParams.get('embeddingModelProvider') ||
      Object.keys(embeddingModelProviders)[0];
    const embeddingModel =
      searchParams.get('embeddingModel') ||
      Object.keys(embeddingModelProviders[embeddingModelProvider])[0];

    let llm: BaseChatModel | undefined;
    let embeddings: Embeddings | undefined;
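
    // Use the preloaded model instance, except for the custom OpenAI-compatible
    // provider, which is constructed per connection from the client-supplied
    // API key and base URL.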
    if (
      chatModelProviders[chatModelProvider] &&
      chatModelProviders[chatModelProvider][chatModel] &&
      chatModelProvider !== 'custom_openai'
    ) {
      llm = chatModelProviders[chatModelProvider][chatModel] as
        | BaseChatModel
        | undefined;
    } else if (chatModelProvider === 'custom_openai') {
      // Cast through unknown: the ChatOpenAI instance may be typed against a
      // different @langchain/core version than BaseChatModel here.
      llm = new ChatOpenAI({
        modelName: chatModel,
        openAIApiKey: searchParams.get('openAIApiKey') ?? undefined,
        temperature: 0.7,
        configuration: {
          baseURL: searchParams.get('openAIBaseURL'),
        },
      }) as unknown as BaseChatModel;
    }
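
    // Embeddings always come from the preloaded providers.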
    if (
      embeddingModelProviders[embeddingModelProvider] &&
      embeddingModelProviders[embeddingModelProvider][embeddingModel]
    ) {
      embeddings = embeddingModelProviders[embeddingModelProvider][
        embeddingModel
      ] as Embeddings | undefined;
    }
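
    // Reject the connection if either model failed to resolve.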
    if (!llm || !embeddings) {
      ws.send(
        JSON.stringify({
          type: 'error',
          data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
          key: 'INVALID_MODEL_SELECTED',
        }),
      );
      ws.close();
      // Return early so no message handlers are attached to the closed socket.
      return;
    }
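
    // Delegate each incoming frame to the shared message handler.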
    ws.on(
      'message',
      async (message) =>
        await handleMessage(message.toString(), ws, llm, embeddings),
    );

    ws.on('close', () => logger.debug('Connection closed'));
  } catch (err) {
    // Surface unexpected failures to the client before closing the socket.
    ws.send(
      JSON.stringify({
        type: 'error',
        data: 'Internal server error.',
        key: 'INTERNAL_SERVER_ERROR',
      }),
    );
    ws.close();
    logger.error(err);
  }
};
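
// Typical wiring (a sketch; this file does not show the server setup, so the
// WebSocketServer and port below are assumptions about the caller):
//
//   import { WebSocketServer } from 'ws';
//
//   const wss = new WebSocketServer({ port: 3001 });
//   wss.on('connection', (ws, request) => {
//     handleConnection(ws, request);
//   });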