Add Redis service to Docker setup and implement a response-caching mechanism
This commit is contained in:
parent
1fcd64ad42
commit
27a084b9f0
9 changed files with 23961 additions and 4654 deletions
|
@ -17,6 +17,7 @@ services:
|
||||||
- SEARXNG_API_URL=http://searxng:8080
|
- SEARXNG_API_URL=http://searxng:8080
|
||||||
depends_on:
|
depends_on:
|
||||||
- searxng
|
- searxng
|
||||||
|
- redis
|
||||||
ports:
|
ports:
|
||||||
- 3001:3001
|
- 3001:3001
|
||||||
volumes:
|
volumes:
|
||||||
|
@ -42,9 +43,18 @@ services:
|
||||||
networks:
|
networks:
|
||||||
- perplexica-network
|
- perplexica-network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
redis:
|
||||||
|
image: redis:alpine
|
||||||
|
ports:
|
||||||
|
- "6379:6379"
|
||||||
|
volumes:
|
||||||
|
- redis_data:/data
|
||||||
|
networks:
|
||||||
|
- perplexica-network
|
||||||
|
restart: unless-stopped
|
||||||
networks:
|
networks:
|
||||||
perplexica-network:
|
perplexica-network:
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
backend-dbstore:
|
backend-dbstore:
|
||||||
|
redis_data:
|
||||||
|
|
9172
package-lock.json
generated
Normal file
9172
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
|
@ -41,6 +41,7 @@
|
||||||
"html-to-text": "^9.0.5",
|
"html-to-text": "^9.0.5",
|
||||||
"langchain": "^0.1.30",
|
"langchain": "^0.1.30",
|
||||||
"pdf-parse": "^1.1.1",
|
"pdf-parse": "^1.1.1",
|
||||||
|
"redis": "^4.7.0",
|
||||||
"winston": "^3.13.0",
|
"winston": "^3.13.0",
|
||||||
"ws": "^8.17.1",
|
"ws": "^8.17.1",
|
||||||
"zod": "^3.22.4"
|
"zod": "^3.22.4"
|
||||||
|
|
40
src/app.ts
40
src/app.ts
|
@ -1,11 +1,12 @@
|
||||||
import { startWebSocketServer } from './websocket';
|
import { startWebSocketServer } from './websocket';
|
||||||
import express from 'express';
|
import express from 'express';
|
||||||
|
import { Request, Response, NextFunction } from 'express';
|
||||||
import cors from 'cors';
|
import cors from 'cors';
|
||||||
import http from 'http';
|
import http from 'http';
|
||||||
import routes from './routes';
|
import routes from './routes';
|
||||||
import { getPort } from './config';
|
import { getPort } from './config';
|
||||||
import logger from './utils/logger';
|
import logger from './utils/logger';
|
||||||
|
import redisClient from './utils/redisClient';
|
||||||
const port = getPort();
|
const port = getPort();
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
|
@ -18,6 +19,43 @@ const corsOptions = {
|
||||||
app.use(cors(corsOptions));
|
app.use(cors(corsOptions));
|
||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
|
|
||||||
|
app.use(async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
const cache = req.query.cache as string;
|
||||||
|
|
||||||
|
if (cache === '1') {
|
||||||
|
const cacheKey = req.originalUrl || req.url;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const cachedData = await redisClient.get(cacheKey);
|
||||||
|
|
||||||
|
if (cachedData) {
|
||||||
|
logger.info(`Cache hit for ${cacheKey}`);
|
||||||
|
const jsonData = JSON.parse(cachedData);
|
||||||
|
return res.json(JSON.parse(jsonData));
|
||||||
|
} else {
|
||||||
|
const originalSend = res.send.bind(res);
|
||||||
|
|
||||||
|
res.send = (body: any) => {
|
||||||
|
const result = originalSend(body);
|
||||||
|
|
||||||
|
redisClient
|
||||||
|
.setEx(cacheKey, 3600, JSON.stringify(body))
|
||||||
|
.then(() => logger.info(`Cache set for ${cacheKey}`))
|
||||||
|
.catch((err) => logger.error(`Redis setEx error: ${err}`));
|
||||||
|
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Unexpected error: ${error}`);
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
});
|
||||||
app.use('/api', routes);
|
app.use('/api', routes);
|
||||||
app.get('/api', (_, res) => {
|
app.get('/api', (_, res) => {
|
||||||
res.status(200).json({ status: 'ok' });
|
res.status(200).json({ status: 'ok' });
|
||||||
|
|
|
@ -18,6 +18,10 @@ interface Config {
|
||||||
SEARXNG: string;
|
SEARXNG: string;
|
||||||
OLLAMA: string;
|
OLLAMA: string;
|
||||||
};
|
};
|
||||||
|
REDIS: {
|
||||||
|
HOST: string;
|
||||||
|
PORT: number;
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
type RecursivePartial<T> = {
|
type RecursivePartial<T> = {
|
||||||
|
@ -44,7 +48,8 @@ export const getSearxngApiEndpoint = () =>
|
||||||
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
|
||||||
|
|
||||||
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
||||||
|
export const getRedisHost = () => loadConfig().REDIS.HOST;
|
||||||
|
export const getRedisPort = () => loadConfig().REDIS.PORT;
|
||||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||||
const currentConfig = loadConfig();
|
const currentConfig = loadConfig();
|
||||||
|
|
||||||
|
|
15
src/utils/redisClient.ts
Normal file
15
src/utils/redisClient.ts
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
import { createClient } from 'redis';
|
||||||
|
import { getRedisHost, getRedisPort } from '../config';
|
||||||
|
import logger from './logger';
|
||||||
|
|
||||||
|
const redisUrl = `redis://${getRedisHost()}:${getRedisPort()}`;
|
||||||
|
const client = createClient({ url: redisUrl });
|
||||||
|
|
||||||
|
client.on('error', (err) => {
|
||||||
|
logger.error(`Redis Client Error: ${err}`);
|
||||||
|
});
|
||||||
|
client
|
||||||
|
.connect()
|
||||||
|
.then(() => logger.info('Connected to Redis'))
|
||||||
|
.catch((err) => logger.error(`Redis connection error: ${err}`));
|
||||||
|
export default client;
|
10317
ui/package-lock.json
generated
Normal file
10317
ui/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
5039
ui/yarn.lock
5039
ui/yarn.lock
File diff suppressed because it is too large
Load diff
Loading…
Add table
Reference in a new issue