feat(chatModels): load model from localstorage

ItzCrazyKns 2024-05-02 12:14:26 +05:30
parent ed9ff3c20f
commit f618b713af
16 changed files with 126 additions and 81 deletions
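
Note: the frontend half of this commit is not part of this excerpt. As a rough sketch of where the model selection now lives, assuming the UI stores it under the localStorage keys 'chatModelProvider' and 'chatModel' (names inferred from the query parameters parsed in src/websocket/connectionManager.ts below):

// Sketch only (frontend not shown in this diff); the localStorage key names
// are assumptions inferred from the WebSocket query parameters below.
const chatModelProvider = localStorage.getItem('chatModelProvider');
const chatModel = localStorage.getItem('chatModel');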

src/config.ts

@@ -8,8 +8,6 @@ interface Config {
   GENERAL: {
     PORT: number;
     SIMILARITY_MEASURE: string;
-    CHAT_MODEL_PROVIDER: string;
-    CHAT_MODEL: string;
   };
   API_KEYS: {
     OPENAI: string;
@@ -35,11 +33,6 @@ export const getPort = () => loadConfig().GENERAL.PORT;
 export const getSimilarityMeasure = () =>
   loadConfig().GENERAL.SIMILARITY_MEASURE;
 
-export const getChatModelProvider = () =>
-  loadConfig().GENERAL.CHAT_MODEL_PROVIDER;
-
-export const getChatModel = () => loadConfig().GENERAL.CHAT_MODEL;
-
 export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 
 export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
@@ -52,21 +45,19 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
 
   for (const key in currentConfig) {
-    /* if (currentConfig[key] && !config[key]) {
-      config[key] = currentConfig[key];
-    } */
+    if (!config[key]) config[key] = {};
 
-    if (currentConfig[key] && typeof currentConfig[key] === 'object') {
+    if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) {
       for (const nestedKey in currentConfig[key]) {
         if (
-          currentConfig[key][nestedKey] &&
           !config[key][nestedKey] &&
+          currentConfig[key][nestedKey] &&
           config[key][nestedKey] !== ''
         ) {
           config[key][nestedKey] = currentConfig[key][nestedKey];
         }
       }
-    } else if (currentConfig[key] && !config[key] && config[key] !== '') {
+    } else if (currentConfig[key] && config[key] !== '') {
       config[key] = currentConfig[key];
     }
   }
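
The rewritten merge loop changes what updateConfig preserves: fields missing from the partial update are back-filled from the current config, while an explicit empty string is kept, so a caller can clear a key. A standalone sketch of those semantics (sample values are invented):

// Illustration of the merge semantics above, not the module itself.
const currentConfig: Record<string, Record<string, string>> = {
  API_KEYS: { OPENAI: 'sk-old', GROQ: 'gq-old' },
};
const update: Record<string, Record<string, string>> = {
  API_KEYS: { OPENAI: 'sk-new' }, // GROQ omitted -> back-filled below
};

for (const key in currentConfig) {
  if (!update[key]) update[key] = {};
  for (const nestedKey in currentConfig[key]) {
    if (
      !update[key][nestedKey] &&
      currentConfig[key][nestedKey] &&
      update[key][nestedKey] !== ''
    ) {
      update[key][nestedKey] = currentConfig[key][nestedKey];
    }
  }
}

console.log(update); // { API_KEYS: { OPENAI: 'sk-new', GROQ: 'gq-old' } }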

src/routes/config.ts

@@ -1,8 +1,6 @@
 import express from 'express';
 import { getAvailableProviders } from '../lib/providers';
 import {
-  getChatModel,
-  getChatModelProvider,
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
@@ -26,9 +24,6 @@ router.get('/', async (_, res) => {
     config['providers'][provider] = Object.keys(providers[provider]);
   }
 
-  config['selectedProvider'] = getChatModelProvider();
-  config['selectedChatModel'] = getChatModel();
-
   config['openeaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
@@ -40,10 +35,6 @@ router.post('/', async (req, res) => {
   const config = req.body;
 
   const updatedConfig = {
-    GENERAL: {
-      CHAT_MODEL_PROVIDER: config.selectedProvider,
-      CHAT_MODEL: config.selectedChatModel,
-    },
     API_KEYS: {
       OPENAI: config.openeaiApiKey,
       GROQ: config.groqApiKey,

src/routes/images.ts

@@ -2,7 +2,6 @@ import express from 'express';
 import handleImageSearch from '../agents/imageSearchAgent';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { getAvailableProviders } from '../lib/providers';
-import { getChatModel, getChatModelProvider } from '../config';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 
@@ -10,7 +9,7 @@ const router = express.Router();
 
 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history } = req.body;
+    let { query, chat_history, chat_model_provider, chat_model } = req.body;
 
     chat_history = chat_history.map((msg: any) => {
       if (msg.role === 'user') {
@@ -20,14 +19,14 @@ router.post('/', async (req, res) => {
       }
     });
 
-    const models = await getAvailableProviders();
-    const provider = getChatModelProvider();
-    const chatModel = getChatModel();
+    const chatModels = await getAvailableProviders();
+    const provider = chat_model_provider || Object.keys(chatModels)[0];
+    const chatModel = chat_model || Object.keys(chatModels[provider])[0];
 
     let llm: BaseChatModel | undefined;
 
-    if (models[provider] && models[provider][chatModel]) {
-      llm = models[provider][chatModel] as BaseChatModel | undefined;
+    if (chatModels[provider] && chatModels[provider][chatModel]) {
+      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
     }
 
     if (!llm) {
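
The route now takes the model selection from the request body and only falls back to the first available provider and model when the fields are absent. A hypothetical call (base URL, port, and model name are assumptions; the body field names come from the destructuring above):

// Hypothetical client call; 'http://localhost:3001/api' is an assumed base URL.
const res = await fetch('http://localhost:3001/api/images', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'aurora borealis',
    chat_history: [],
    chat_model_provider: 'openai', // first provider is used if omitted
    chat_model: 'gpt-3.5-turbo',   // first model is used if omitted
  }),
});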

src/routes/index.ts

@@ -2,11 +2,13 @@ import express from 'express';
 import imagesRouter from './images';
 import videosRouter from './videos';
 import configRouter from './config';
+import modelsRouter from './models';
 
 const router = express.Router();
 
 router.use('/images', imagesRouter);
 router.use('/videos', videosRouter);
 router.use('/config', configRouter);
+router.use('/models', modelsRouter);
 
 export default router;

src/routes/models.ts (new file)

@@ -0,0 +1,18 @@
+import express from 'express';
+import logger from '../utils/logger';
+import { getAvailableProviders } from '../lib/providers';
+
+const router = express.Router();
+
+router.get('/', async (req, res) => {
+  try {
+    const providers = await getAvailableProviders();
+
+    res.status(200).json({ providers });
+  } catch (err) {
+    res.status(500).json({ message: 'An error has occurred.' });
+    logger.error(err.message);
+  }
+});
+
+export default router;
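
This endpoint is what lets the frontend populate its model picker before writing a choice to localStorage. A hedged usage sketch (the /api prefix and port are assumptions; which provider names appear depends on the configured API keys):

// Assumes the router is mounted under /api and the server listens on 3001.
const res = await fetch('http://localhost:3001/api/models');
const { providers } = await res.json();
// Provider names are the outer keys, model names the inner keys, e.g.:
// Object.keys(providers)           -> ['openai', 'groq', 'ollama']
// Object.keys(providers['openai']) -> names to offer in the model dropdown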

src/routes/videos.ts

@@ -1,7 +1,6 @@
 import express from 'express';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { getAvailableProviders } from '../lib/providers';
-import { getChatModel, getChatModelProvider } from '../config';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import handleVideoSearch from '../agents/videoSearchAgent';
@@ -10,7 +9,7 @@ const router = express.Router();
 
 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history } = req.body;
+    let { query, chat_history, chat_model_provider, chat_model } = req.body;
 
     chat_history = chat_history.map((msg: any) => {
       if (msg.role === 'user') {
@@ -20,14 +19,14 @@ router.post('/', async (req, res) => {
       }
     });
 
-    const models = await getAvailableProviders();
-    const provider = getChatModelProvider();
-    const chatModel = getChatModel();
+    const chatModels = await getAvailableProviders();
+    const provider = chat_model_provider || Object.keys(chatModels)[0];
+    const chatModel = chat_model || Object.keys(chatModels[provider])[0];
 
     let llm: BaseChatModel | undefined;
 
-    if (models[provider] && models[provider][chatModel]) {
-      llm = models[provider][chatModel] as BaseChatModel | undefined;
+    if (chatModels[provider] && chatModels[provider][chatModel]) {
+      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
     }
 
     if (!llm) {

src/websocket/connectionManager.ts

@@ -1,15 +1,23 @@
 import { WebSocket } from 'ws';
 import { handleMessage } from './messageHandler';
-import { getChatModel, getChatModelProvider } from '../config';
 import { getAvailableProviders } from '../lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import type { Embeddings } from '@langchain/core/embeddings';
+import type { IncomingMessage } from 'http';
 import logger from '../utils/logger';
 
-export const handleConnection = async (ws: WebSocket) => {
+export const handleConnection = async (
+  ws: WebSocket,
+  request: IncomingMessage,
+) => {
+  const searchParams = new URL(request.url, `http://${request.headers.host}`)
+    .searchParams;
+
   const models = await getAvailableProviders();
-  const provider = getChatModelProvider();
-  const chatModel = getChatModel();
+  const provider =
+    searchParams.get('chatModelProvider') || Object.keys(models)[0];
+  const chatModel =
+    searchParams.get('chatModel') || Object.keys(models[provider])[0];
 
   let llm: BaseChatModel | undefined;
   let embeddings: Embeddings | undefined;
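
On the client, the selection now has to be supplied at connection time. A sketch of the matching handshake (address and localStorage keys are assumptions; the parameter names are the ones parsed above):

// Client-side sketch; ws://localhost:3001 is an assumed address.
const provider = localStorage.getItem('chatModelProvider') ?? '';
const model = localStorage.getItem('chatModel') ?? '';
const ws = new WebSocket(
  `ws://localhost:3001?chatModelProvider=${encodeURIComponent(provider)}` +
    `&chatModel=${encodeURIComponent(model)}`,
);
// Empty parameters make the server fall back to the first available
// provider and model (see the || defaults above).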

src/websocket/websocketServer.ts

@@ -10,9 +10,7 @@ export const initServer = (
   const port = getPort();
   const wss = new WebSocketServer({ server });
 
-  wss.on('connection', (ws) => {
-    handleConnection(ws);
-  });
+  wss.on('connection', handleConnection);
 
   logger.info(`WebSocket server started on port ${port}`);
 };