Merge pull request #1 from newspedia-crew/yucongj/setup
chore: setting up linter, prettier and PR gated pipeline
commit 78738c9282
83 changed files with 4176 additions and 2202 deletions
14 .editorconfig Normal file
@@ -0,0 +1,14 @@
# editorconfig.org
root = true

[*]
charset = utf-8
indent_size = 2
indent_style = space
insert_final_newline = true
max_line_length = 120
quote_type = double
trim_trailing_whitespace = true

[*.md]
trim_trailing_whitespace = false
2 .eslintignore Normal file
@@ -0,0 +1,2 @@
node_modules
dist
32 .eslintrc.json Normal file
@@ -0,0 +1,32 @@
{
  "root": true,
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:prettier/recommended",
    "plugin:unicorn/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "plugins": [
    "@typescript-eslint",
    "prettier",
    "unicorn"
  ],
  "rules": {
    "unicorn/filename-case": [
      "error",
      {
        "cases": {
          "camelCase": true,
          "pascalCase": true
        }
      }
    ],
    "unicorn/prevent-abbreviations": "warn",
    "unicorn/no-null": "off",
    "@typescript-eslint/no-unused-vars": "off"
  },
  "overrides": [

  ]
}
1 .github/FUNDING.yml vendored
@@ -1 +0,0 @@
patreon: itzcrazykns
27 .github/ISSUE_TEMPLATE/bug_report.md vendored
@@ -1,27 +0,0 @@
---
name: Bug report
about: Create an issue to help us fix bugs
title: ''
labels: bug
assignees: ''
---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
7 .github/ISSUE_TEMPLATE/custom.md vendored
@@ -1,7 +0,0 @@
---
name: Custom issue template
about: Describe this issue template's purpose here.
title: ''
labels: ''
assignees: ''
---
19 .github/ISSUE_TEMPLATE/feature_request.md vendored
@@ -1,19 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
39 .github/workflows/pr-gated.yml vendored Normal file
@@ -0,0 +1,39 @@
name: PR-gated

on:
  pull_request:
    branches: [main]
    types: [opened, synchronize, reopened, ready_for_review]

  workflow_dispatch:

jobs:
  build:
    if: github.event.pull_request.draft == false
    name: Test on node ${{ matrix.node_version }} and ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        node_version:
          - 20.x
          - 22.x
        os: [ubuntu-latest, windows-latest, macOS-latest]

    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node_version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node_version }}
      - name: yarn install
        run: yarn --frozen-lockfile
      - name: lint
        run: yarn lint
      - name: build
        run: yarn build
      - name: yarn install:ui
        run: yarn --frozen-lockfile
        working-directory: ./ui
      - name: build:ui
        run: yarn build
        working-directory: ./ui
5 .gitignore vendored
@@ -8,11 +8,6 @@ yarn-error.log
/out/
/dist/

# IDE/Editor specific
.vscode/
.idea/
*.iml

# Environment variables
.env
.env.local
5 .prettierrc Normal file
@@ -0,0 +1,5 @@
{
  "endOfLine": "auto",
  "trailingComma": "all",
  "arrowParens": "avoid"
}
14 .vscode/extensions.json vendored Normal file
@@ -0,0 +1,14 @@
{
  "recommendations": [
    "dbaeumer.vscode-eslint",
    "streetsidesoftware.code-spell-checker",
    "github.codespaces",
    "github.copilot",
    "github.copilot-chat",
    "github.vscode-pull-request-github",
    "eamodio.gitlens",
    "vincaslt.highlight-matching-tag",
    "orta.vscode-jest",
    "esbenp.prettier-vscode"
  ]
}
250 .vscode/settings.json vendored Normal file
|
@ -0,0 +1,250 @@
|
|||
{
|
||||
"files.exclude": {
|
||||
"**/.git": true,
|
||||
"**/.svn": true,
|
||||
"**/.hg": true,
|
||||
"**/CVS": true,
|
||||
"**/.DS_Store": true,
|
||||
"**/.vs": true,
|
||||
"**/.cache": true
|
||||
},
|
||||
"files.watcherExclude": {
|
||||
"**/.git": true,
|
||||
"**/.svn": true,
|
||||
"**/.hg": true,
|
||||
"**/CVS": true,
|
||||
"**/.DS_Store": true,
|
||||
"**/.vs": true,
|
||||
"**/.cache": true,
|
||||
"**/node_modules": true,
|
||||
"**/node_modules/**": true,
|
||||
"**/node_modules/*/**": true,
|
||||
"**/dist": true,
|
||||
"**/dist/**": true,
|
||||
"**/dist/*/**": true
|
||||
},
|
||||
"search.exclude": {
|
||||
"**/.cache": true,
|
||||
"**/build": true,
|
||||
"**/dist": true,
|
||||
"**/coverage": true,
|
||||
"**/yarn.lock": true,
|
||||
"**/en-us.*.json": true,
|
||||
"**/*.lcl": true
|
||||
},
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll.eslint": "explicit",
|
||||
"source.fixAll.stylelint": "explicit"
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"typescript.tsdk": "node_modules/typescript/lib",
|
||||
"typescript.tsserver.maxTsServerMemory": 4096,
|
||||
"eslint.workingDirectories": [
|
||||
{
|
||||
"mode": "auto"
|
||||
}
|
||||
],
|
||||
"eslint.execArgv": ["--max_old_space_size=8192"],
|
||||
"eslint.codeActionsOnSave.mode": "problems",
|
||||
"css.validate": false,
|
||||
"less.validate": false,
|
||||
"scss.validate": false,
|
||||
"files.associations": {
|
||||
"*.js.mustache": "javascript",
|
||||
"*.json.mustache": "json"
|
||||
},
|
||||
"cSpell.words": [
|
||||
"abortcontroller",
|
||||
"aistudio",
|
||||
"Algos",
|
||||
"amlcompute",
|
||||
"aoai",
|
||||
"appinsights",
|
||||
"automl",
|
||||
"aznb",
|
||||
"azureml",
|
||||
"bigint",
|
||||
"browserslist",
|
||||
"buildresult",
|
||||
"buildscripts",
|
||||
"callout",
|
||||
"clsx",
|
||||
"cobertura",
|
||||
"Conda",
|
||||
"continuationtoken",
|
||||
"cudatoolkit",
|
||||
"Customizer",
|
||||
"customneuralvoice",
|
||||
"customspeech",
|
||||
"cyclomatic",
|
||||
"Databricks",
|
||||
"dataprep",
|
||||
"Dataset",
|
||||
"Datasets",
|
||||
"Datastore",
|
||||
"Datastores",
|
||||
"dataview",
|
||||
"dcid",
|
||||
"debounced",
|
||||
"Detailskey",
|
||||
"devtools",
|
||||
"Dismissable",
|
||||
"Dont",
|
||||
"Dropdown",
|
||||
"eastus",
|
||||
"Edat",
|
||||
"endregion",
|
||||
"Ensembling",
|
||||
"esnext",
|
||||
"etag",
|
||||
"experimentrun",
|
||||
"explainability",
|
||||
"fairlearn",
|
||||
"fbprophet",
|
||||
"featurization",
|
||||
"fileexplorer",
|
||||
"Finetune",
|
||||
"finetuned",
|
||||
"Finetuning",
|
||||
"fluentui",
|
||||
"formik",
|
||||
"Ftaas",
|
||||
"generageresult",
|
||||
"generatebuildresult",
|
||||
"gettingstarted",
|
||||
"Groundedness",
|
||||
"Handleable",
|
||||
"hyperdrive",
|
||||
"Hyperparameters",
|
||||
"inferencing",
|
||||
"Interop",
|
||||
"ipynb",
|
||||
"IUJS",
|
||||
"jqueryui",
|
||||
"jsnext",
|
||||
"jszip",
|
||||
"junit",
|
||||
"Jupyter",
|
||||
"kubernetes",
|
||||
"kuende",
|
||||
"lcov",
|
||||
"leaderboard",
|
||||
"lerna",
|
||||
"lintfix",
|
||||
"livestamp",
|
||||
"locstrings",
|
||||
"machinelearningservices",
|
||||
"managedenv",
|
||||
"mcmf",
|
||||
"metastore",
|
||||
"metricsmetadata",
|
||||
"mfeclient",
|
||||
"mlchartlib",
|
||||
"mllifecycle",
|
||||
"mlworkspace",
|
||||
"mockdate",
|
||||
"modelmanagement",
|
||||
"Mooncake",
|
||||
"msal",
|
||||
"nameof",
|
||||
"npmrc",
|
||||
"numpy",
|
||||
"odata",
|
||||
"onnx",
|
||||
"onwarn",
|
||||
"openai",
|
||||
"packagejson",
|
||||
"papaparse",
|
||||
"parcoords",
|
||||
"paygo",
|
||||
"plotly",
|
||||
"polyfill",
|
||||
"Prefetcher",
|
||||
"projectcontent",
|
||||
"Projectless",
|
||||
"Promptflow",
|
||||
"pytorch",
|
||||
"quickprofile",
|
||||
"Rbac",
|
||||
"realtimespeechtotext",
|
||||
"recents",
|
||||
"Resizable",
|
||||
"resjson",
|
||||
"resourcegraph",
|
||||
"resourcegroups",
|
||||
"rollup",
|
||||
"roosterjs",
|
||||
"RTSTT",
|
||||
"runhistory",
|
||||
"salte",
|
||||
"scipy",
|
||||
"scriptrun",
|
||||
"scrollable",
|
||||
"semibold",
|
||||
"serializable",
|
||||
"serializer",
|
||||
"Serializers",
|
||||
"setuptools",
|
||||
"Signup",
|
||||
"skiptoken",
|
||||
"sklearn",
|
||||
"SKUs",
|
||||
"sourcemap",
|
||||
"spacy",
|
||||
"storyshots",
|
||||
"storysource",
|
||||
"studiocoreservices",
|
||||
"stylelint",
|
||||
"stylelintrc",
|
||||
"submodule",
|
||||
"Subsampling",
|
||||
"svgr",
|
||||
"tablist",
|
||||
"taskkill",
|
||||
"Templatized",
|
||||
"testid",
|
||||
"theming",
|
||||
"ticktext",
|
||||
"tickvals",
|
||||
"timeframe",
|
||||
"timeseries",
|
||||
"Timespan",
|
||||
"treeshake",
|
||||
"tslib",
|
||||
"uifabric",
|
||||
"Unauth",
|
||||
"uniquefy",
|
||||
"unmock",
|
||||
"unmount",
|
||||
"Unversioned",
|
||||
"viewmodel",
|
||||
"vsts",
|
||||
"webdriverio",
|
||||
"webendpoint",
|
||||
"websockets",
|
||||
"workspaces",
|
||||
"wsid",
|
||||
"xgboost",
|
||||
"xlarge",
|
||||
"xmlhttprequest",
|
||||
"xsmall",
|
||||
"YYYYMMDD"
|
||||
],
|
||||
"cSpell.ignoreWords": ["editor", "format", "on", "save"],
|
||||
"editor.tabSize": 2,
|
||||
"jest.jestCommandLine": "yarn jest --passWithNoTests",
|
||||
}
|
|
@ -1,10 +1,10 @@
|
|||
import { defineConfig } from 'drizzle-kit';
|
||||
import { defineConfig } from "drizzle-kit";
|
||||
|
||||
export default defineConfig({
|
||||
dialect: 'sqlite',
|
||||
schema: './src/db/schema.ts',
|
||||
out: './drizzle',
|
||||
dialect: "sqlite",
|
||||
schema: "./src/db/schema.ts",
|
||||
out: "./drizzle",
|
||||
dbCredentials: {
|
||||
url: './data/db.sqlite',
|
||||
url: "./data/db.sqlite",
|
||||
},
|
||||
});
|
||||
|
|
11 package.json
|
@ -8,15 +8,22 @@
|
|||
"build": "tsc",
|
||||
"dev": "nodemon src/app.ts",
|
||||
"db:push": "drizzle-kit push sqlite",
|
||||
"format": "prettier . --check",
|
||||
"format:write": "prettier . --write"
|
||||
"lint": "eslint ."
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.6.10",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/readable-stream": "^4.0.11",
|
||||
"@typescript-eslint/eslint-plugin": "^7.15.0",
|
||||
"@typescript-eslint/parser": "^7.15.0",
|
||||
"drizzle-kit": "^0.22.7",
|
||||
"eslint": "^8",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-react": "^7.34.3",
|
||||
"eslint-plugin-react-hooks": "^4.6.2",
|
||||
"eslint-plugin-unicorn": "^54.0.0",
|
||||
"nodemon": "^3.1.0",
|
||||
"prettier": "^3.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
|
|
|
@ -1,24 +1,16 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
PromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import eventEmitter from 'events';
|
||||
import computeSimilarity from '../utils/computeSimilarity';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import eventEmitter from "node:events";
|
||||
import computeSimilarity from "../utils/computeSimilarity";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const basicAcademicSearchRetrieverPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the LLM to search the web for information.
|
||||
|
@ -63,36 +55,18 @@ const basicAcademicSearchResponsePrompt = `
|
|||
Anything between the \`context\` is retrieved from a search engine and is not a part of the conversation with the user. Today's date is ${new Date().toISOString()}
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalSourceRetriever'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'sources', data: event.data.output }),
|
||||
);
|
||||
if (event.event === "on_chain_end" && event.name === "FinalSourceRetriever") {
|
||||
emitter.emit("data", JSON.stringify({ type: "sources", data: event.data.output }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -106,24 +80,19 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
return RunnableSequence.from([
|
||||
PromptTemplate.fromTemplate(basicAcademicSearchRetrieverPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
if (input === 'not_needed') {
|
||||
return { query: '', docs: [] };
|
||||
if (input === "not_needed") {
|
||||
return { query: "", docs: [] };
|
||||
}
|
||||
|
||||
const res = await searchSearxng(input, {
|
||||
language: 'en',
|
||||
engines: [
|
||||
'arxiv',
|
||||
'google scholar',
|
||||
'internetarchivescholar',
|
||||
'pubmed',
|
||||
],
|
||||
language: "en",
|
||||
engines: ["arxiv", "google scholar", "internetarchivescholar", "pubmed"],
|
||||
});
|
||||
|
||||
const documents = res.results.map(
|
||||
(result) =>
|
||||
result =>
|
||||
new Document({
|
||||
pageContent: result.content,
|
||||
metadata: {
|
||||
|
@ -139,44 +108,30 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const createBasicAcademicSearchAnsweringChain = (
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicAcademicSearchRetrieverChain =
|
||||
createBasicAcademicSearchRetrieverChain(llm);
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs.map((_, index) => `${index + 1}. ${docs[index].pageContent}`).join("\n");
|
||||
};
|
||||
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs
|
||||
.map((_, index) => `${index + 1}. ${docs[index].pageContent}`)
|
||||
.join('\n');
|
||||
};
|
||||
const createBasicAcademicSearchAnsweringChain = (llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const basicAcademicSearchRetrieverChain = createBasicAcademicSearchRetrieverChain(llm);
|
||||
|
||||
const rerankDocs = async ({
|
||||
query,
|
||||
docs,
|
||||
}: {
|
||||
query: string;
|
||||
docs: Document[];
|
||||
}) => {
|
||||
const rerankDocs = async ({ query, docs }: { query: string; docs: Document[] }) => {
|
||||
if (docs.length === 0) {
|
||||
return docs;
|
||||
}
|
||||
|
||||
const docsWithContent = docs.filter(
|
||||
(doc) => doc.pageContent && doc.pageContent.length > 0,
|
||||
);
|
||||
const docsWithContent = docs.filter(document => document.pageContent && document.pageContent.length > 0);
|
||||
|
||||
const [docEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
|
||||
const [documentEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map(document => document.pageContent)),
|
||||
embeddings.embedQuery(query),
|
||||
]);
|
||||
|
||||
const similarity = docEmbeddings.map((docEmbedding, i) => {
|
||||
const sim = computeSimilarity(queryEmbedding, docEmbedding);
|
||||
const similarity = documentEmbeddings.map((documentEmbedding, index) => {
|
||||
const sim = computeSimilarity(queryEmbedding, documentEmbedding);
|
||||
|
||||
return {
|
||||
index: i,
|
||||
index: index,
|
||||
similarity: sim,
|
||||
};
|
||||
});
|
||||
|
@ -184,7 +139,7 @@ const createBasicAcademicSearchAnsweringChain = (
|
|||
const sortedDocs = similarity
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.slice(0, 15)
|
||||
.map((sim) => docsWithContent[sim.index]);
|
||||
.map(sim => docsWithContent[sim.index]);
|
||||
|
||||
return sortedDocs;
|
||||
};
|
||||
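The reranking above calls computeSimilarity from ../utils/computeSimilarity, which this diff does not show. A minimal sketch of what such a helper could look like, assuming it computes cosine similarity between the query embedding and one document embedding (only the import name and call signature come from the diff; the body is illustrative):

// Hypothetical sketch of the computeSimilarity helper; the real module is not part of this diff.
// Assumes two equal-length embedding vectors and returns their cosine similarity.
const computeSimilarity = (x: number[], y: number[]): number => {
  let dot = 0;
  let normX = 0;
  let normY = 0;
  for (let index = 0; index < x.length; index++) {
    dot += x[index] * y[index];
    normX += x[index] * x[index];
    normY += y[index] * y[index];
  }
  // Guard against zero vectors so the caller never divides by zero.
  if (normX === 0 || normY === 0) {
    return 0;
  }
  return dot / (Math.sqrt(normX) * Math.sqrt(normY));
};

export default computeSimilarity;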
|
@ -194,41 +149,35 @@ const createBasicAcademicSearchAnsweringChain = (
|
|||
query: (input: BasicChainInput) => input.query,
|
||||
chat_history: (input: BasicChainInput) => input.chat_history,
|
||||
context: RunnableSequence.from([
|
||||
(input) => ({
|
||||
input => ({
|
||||
query: input.query,
|
||||
chat_history: formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
basicAcademicSearchRetrieverChain
|
||||
.pipe(rerankDocs)
|
||||
.withConfig({
|
||||
runName: 'FinalSourceRetriever',
|
||||
runName: "FinalSourceRetriever",
|
||||
})
|
||||
.pipe(processDocs),
|
||||
]),
|
||||
}),
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', basicAcademicSearchResponsePrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", basicAcademicSearchResponsePrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
const basicAcademicSearch = (
|
||||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicAcademicSearch = (query: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
try {
|
||||
const basicAcademicSearchAnsweringChain =
|
||||
createBasicAcademicSearchAnsweringChain(llm, embeddings);
|
||||
const basicAcademicSearchAnsweringChain = createBasicAcademicSearchAnsweringChain(llm, embeddings);
|
||||
|
||||
const stream = basicAcademicSearchAnsweringChain.streamEvents(
|
||||
{
|
||||
|
@ -236,28 +185,20 @@ const basicAcademicSearch = (
|
|||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in academic search: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in academic search: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
};
|
||||
|
||||
const handleAcademicSearch = (
|
||||
message: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const handleAcademicSearch = (message: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = basicAcademicSearch(message, history, llm, embeddings);
|
||||
return emitter;
|
||||
};
|
||||
|
|
|
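The handlers in this file return a Node EventEmitter rather than a promise; results are streamed as JSON strings over "data" events, with "end" and "error" signalling completion and failure. A minimal consumer sketch, assuming handleAcademicSearch is exported from this module and that llm, embeddings and history are constructed elsewhere (those values are placeholders, not part of this diff):

// Illustrative only: the "data" / "end" / "error" event names and the
// { type: "sources" | "response", data } payload shape come from the emitter calls above.
const emitter = handleAcademicSearch("What is retrieval-augmented generation?", history, llm, embeddings);

emitter.on("data", (raw: string) => {
  const parsed = JSON.parse(raw) as { type: "sources" | "response"; data: unknown };
  if (parsed.type === "sources") {
    console.log("sources:", parsed.data);
  } else {
    process.stdout.write(String(parsed.data));
  }
});

emitter.on("error", (raw: string) => {
  console.error("academic search failed:", JSON.parse(raw).data);
});

emitter.on("end", () => {
  console.log("\ndone");
});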
@ -1,14 +1,10 @@
|
|||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { PromptTemplate } from "@langchain/core/prompts";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
|
||||
const imageSearchChainPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
|
||||
|
@ -36,7 +32,7 @@ type ImageSearchChainInput = {
|
|||
query: string;
|
||||
};
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const createImageSearchChain = (llm: BaseChatModel) => {
|
||||
return RunnableSequence.from([
|
||||
|
@ -50,15 +46,15 @@ const createImageSearchChain = (llm: BaseChatModel) => {
|
|||
}),
|
||||
PromptTemplate.fromTemplate(imageSearchChainPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
const res = await searchSearxng(input, {
|
||||
engines: ['bing images', 'google images'],
|
||||
engines: ["bing images", "google images"],
|
||||
});
|
||||
|
||||
const images = [];
|
||||
|
||||
res.results.forEach((result) => {
|
||||
for (const result of res.results) {
|
||||
if (result.img_src && result.url && result.title) {
|
||||
images.push({
|
||||
img_src: result.img_src,
|
||||
|
@ -66,17 +62,14 @@ const createImageSearchChain = (llm: BaseChatModel) => {
|
|||
title: result.title,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return images.slice(0, 10);
|
||||
}),
|
||||
]);
|
||||
};
|
||||
|
||||
const handleImageSearch = (
|
||||
input: ImageSearchChainInput,
|
||||
llm: BaseChatModel,
|
||||
) => {
|
||||
const handleImageSearch = (input: ImageSearchChainInput, llm: BaseChatModel) => {
|
||||
const imageSearchChain = createImageSearchChain(llm);
|
||||
return imageSearchChain.invoke(input);
|
||||
};
|
||||
|
|
|
@ -1,24 +1,16 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
PromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import eventEmitter from 'events';
|
||||
import computeSimilarity from '../utils/computeSimilarity';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import eventEmitter from "node:events";
|
||||
import computeSimilarity from "../utils/computeSimilarity";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const basicRedditSearchRetrieverPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the LLM to search the web for information.
|
||||
|
@ -63,36 +55,18 @@ const basicRedditSearchResponsePrompt = `
|
|||
Anything between the \`context\` is retrieved from Reddit and is not a part of the conversation with the user. Today's date is ${new Date().toISOString()}
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalSourceRetriever'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'sources', data: event.data.output }),
|
||||
);
|
||||
if (event.event === "on_chain_end" && event.name === "FinalSourceRetriever") {
|
||||
emitter.emit("data", JSON.stringify({ type: "sources", data: event.data.output }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -106,21 +80,21 @@ const createBasicRedditSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
return RunnableSequence.from([
|
||||
PromptTemplate.fromTemplate(basicRedditSearchRetrieverPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
if (input === 'not_needed') {
|
||||
return { query: '', docs: [] };
|
||||
if (input === "not_needed") {
|
||||
return { query: "", docs: [] };
|
||||
}
|
||||
|
||||
const res = await searchSearxng(input, {
|
||||
language: 'en',
|
||||
engines: ['reddit'],
|
||||
language: "en",
|
||||
engines: ["reddit"],
|
||||
});
|
||||
|
||||
const documents = res.results.map(
|
||||
(result) =>
|
||||
result =>
|
||||
new Document({
|
||||
pageContent: result.content ? result.content : result.title,
|
||||
pageContent: result.content ?? result.title,
|
||||
metadata: {
|
||||
title: result.title,
|
||||
url: result.url,
|
||||
|
@ -134,44 +108,30 @@ const createBasicRedditSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const createBasicRedditSearchAnsweringChain = (
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicRedditSearchRetrieverChain =
|
||||
createBasicRedditSearchRetrieverChain(llm);
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs.map((_, index) => `${index + 1}. ${docs[index].pageContent}`).join("\n");
|
||||
};
|
||||
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs
|
||||
.map((_, index) => `${index + 1}. ${docs[index].pageContent}`)
|
||||
.join('\n');
|
||||
};
|
||||
const createBasicRedditSearchAnsweringChain = (llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const basicRedditSearchRetrieverChain = createBasicRedditSearchRetrieverChain(llm);
|
||||
|
||||
const rerankDocs = async ({
|
||||
query,
|
||||
docs,
|
||||
}: {
|
||||
query: string;
|
||||
docs: Document[];
|
||||
}) => {
|
||||
const rerankDocs = async ({ query, docs }: { query: string; docs: Document[] }) => {
|
||||
if (docs.length === 0) {
|
||||
return docs;
|
||||
}
|
||||
|
||||
const docsWithContent = docs.filter(
|
||||
(doc) => doc.pageContent && doc.pageContent.length > 0,
|
||||
);
|
||||
const docsWithContent = docs.filter(document => document.pageContent && document.pageContent.length > 0);
|
||||
|
||||
const [docEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
|
||||
const [documentEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map(document => document.pageContent)),
|
||||
embeddings.embedQuery(query),
|
||||
]);
|
||||
|
||||
const similarity = docEmbeddings.map((docEmbedding, i) => {
|
||||
const sim = computeSimilarity(queryEmbedding, docEmbedding);
|
||||
const similarity = documentEmbeddings.map((documentEmbedding, index) => {
|
||||
const sim = computeSimilarity(queryEmbedding, documentEmbedding);
|
||||
|
||||
return {
|
||||
index: i,
|
||||
index: index,
|
||||
similarity: sim,
|
||||
};
|
||||
});
|
||||
|
@ -179,8 +139,8 @@ const createBasicRedditSearchAnsweringChain = (
|
|||
const sortedDocs = similarity
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.slice(0, 15)
|
||||
.filter((sim) => sim.similarity > 0.3)
|
||||
.map((sim) => docsWithContent[sim.index]);
|
||||
.filter(sim => sim.similarity > 0.3)
|
||||
.map(sim => docsWithContent[sim.index]);
|
||||
|
||||
return sortedDocs;
|
||||
};
|
||||
|
@ -190,69 +150,55 @@ const createBasicRedditSearchAnsweringChain = (
|
|||
query: (input: BasicChainInput) => input.query,
|
||||
chat_history: (input: BasicChainInput) => input.chat_history,
|
||||
context: RunnableSequence.from([
|
||||
(input) => ({
|
||||
input => ({
|
||||
query: input.query,
|
||||
chat_history: formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
basicRedditSearchRetrieverChain
|
||||
.pipe(rerankDocs)
|
||||
.withConfig({
|
||||
runName: 'FinalSourceRetriever',
|
||||
runName: "FinalSourceRetriever",
|
||||
})
|
||||
.pipe(processDocs),
|
||||
]),
|
||||
}),
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', basicRedditSearchResponsePrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", basicRedditSearchResponsePrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
const basicRedditSearch = (
|
||||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicRedditSearch = (query: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
try {
|
||||
const basicRedditSearchAnsweringChain =
|
||||
createBasicRedditSearchAnsweringChain(llm, embeddings);
|
||||
const basicRedditSearchAnsweringChain = createBasicRedditSearchAnsweringChain(llm, embeddings);
|
||||
const stream = basicRedditSearchAnsweringChain.streamEvents(
|
||||
{
|
||||
chat_history: history,
|
||||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in RedditSearch: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in RedditSearch: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
};
|
||||
|
||||
const handleRedditSearch = (
|
||||
message: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const handleRedditSearch = (message: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = basicRedditSearch(message, history, llm, embeddings);
|
||||
return emitter;
|
||||
};
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
|
||||
import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser';
|
||||
import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { RunnableSequence, RunnableMap } from "@langchain/core/runnables";
|
||||
import ListLineOutputParser from "../lib/outputParsers/listLineOutputParser";
|
||||
import { PromptTemplate } from "@langchain/core/prompts";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import { ChatOpenAI } from "@langchain/openai";
|
||||
|
||||
const suggestionGeneratorPrompt = `
|
||||
You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
|
||||
|
@ -28,14 +28,13 @@ type SuggestionGeneratorInput = {
|
|||
};
|
||||
|
||||
const outputParser = new ListLineOutputParser({
|
||||
key: 'suggestions',
|
||||
key: "suggestions",
|
||||
});
|
||||
|
||||
const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
|
||||
return RunnableSequence.from([
|
||||
RunnableMap.from({
|
||||
chat_history: (input: SuggestionGeneratorInput) =>
|
||||
formatChatHistoryAsString(input.chat_history),
|
||||
chat_history: (input: SuggestionGeneratorInput) => formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
|
||||
llm,
|
||||
|
@ -43,10 +42,7 @@ const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const generateSuggestions = (
|
||||
input: SuggestionGeneratorInput,
|
||||
llm: BaseChatModel,
|
||||
) => {
|
||||
const generateSuggestions = (input: SuggestionGeneratorInput, llm: BaseChatModel) => {
|
||||
(llm as ChatOpenAI).temperature = 0;
|
||||
const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
|
||||
return suggestionGeneratorChain.invoke(input);
|
||||
|
|
|
@ -1,14 +1,10 @@
|
|||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { PromptTemplate } from "@langchain/core/prompts";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
|
||||
const VideoSearchChainPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
|
||||
|
@ -36,7 +32,7 @@ type VideoSearchChainInput = {
|
|||
query: string;
|
||||
};
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const createVideoSearchChain = (llm: BaseChatModel) => {
|
||||
return RunnableSequence.from([
|
||||
|
@ -50,21 +46,16 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
|
|||
}),
|
||||
PromptTemplate.fromTemplate(VideoSearchChainPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
const res = await searchSearxng(input, {
|
||||
engines: ['youtube'],
|
||||
engines: ["youtube"],
|
||||
});
|
||||
|
||||
const videos = [];
|
||||
|
||||
res.results.forEach((result) => {
|
||||
if (
|
||||
result.thumbnail &&
|
||||
result.url &&
|
||||
result.title &&
|
||||
result.iframe_src
|
||||
) {
|
||||
for (const result of res.results) {
|
||||
if (result.thumbnail && result.url && result.title && result.iframe_src) {
|
||||
videos.push({
|
||||
img_src: result.thumbnail,
|
||||
url: result.url,
|
||||
|
@ -72,17 +63,14 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
|
|||
iframe_src: result.iframe_src,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return videos.slice(0, 10);
|
||||
}),
|
||||
]);
|
||||
};
|
||||
|
||||
const handleVideoSearch = (
|
||||
input: VideoSearchChainInput,
|
||||
llm: BaseChatModel,
|
||||
) => {
|
||||
const handleVideoSearch = (input: VideoSearchChainInput, llm: BaseChatModel) => {
|
||||
const VideoSearchChain = createVideoSearchChain(llm);
|
||||
return VideoSearchChain.invoke(input);
|
||||
};
|
||||
|
|
|
@ -1,24 +1,16 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
PromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import eventEmitter from 'events';
|
||||
import computeSimilarity from '../utils/computeSimilarity';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import eventEmitter from "node:events";
|
||||
import computeSimilarity from "../utils/computeSimilarity";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const basicSearchRetrieverPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the LLM to search the web for information.
|
||||
|
@ -63,36 +55,18 @@ const basicWebSearchResponsePrompt = `
|
|||
Anything between the \`context\` is retrieved from a search engine and is not a part of the conversation with the user. Today's date is ${new Date().toISOString()}
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalSourceRetriever'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'sources', data: event.data.output }),
|
||||
);
|
||||
if (event.event === "on_chain_end" && event.name === "FinalSourceRetriever") {
|
||||
emitter.emit("data", JSON.stringify({ type: "sources", data: event.data.output }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -106,18 +80,18 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
return RunnableSequence.from([
|
||||
PromptTemplate.fromTemplate(basicSearchRetrieverPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
if (input === 'not_needed') {
|
||||
return { query: '', docs: [] };
|
||||
if (input === "not_needed") {
|
||||
return { query: "", docs: [] };
|
||||
}
|
||||
|
||||
const res = await searchSearxng(input, {
|
||||
language: 'en',
|
||||
language: "en",
|
||||
});
|
||||
|
||||
const documents = res.results.map(
|
||||
(result) =>
|
||||
result =>
|
||||
new Document({
|
||||
pageContent: result.content,
|
||||
metadata: {
|
||||
|
@ -133,52 +107,39 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const createBasicWebSearchAnsweringChain = (
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs.map((_, index) => `${index + 1}. ${docs[index].pageContent}`).join("\n");
|
||||
};
|
||||
|
||||
const createBasicWebSearchAnsweringChain = (llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const basicWebSearchRetrieverChain = createBasicWebSearchRetrieverChain(llm);
|
||||
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs
|
||||
.map((_, index) => `${index + 1}. ${docs[index].pageContent}`)
|
||||
.join('\n');
|
||||
};
|
||||
|
||||
const rerankDocs = async ({
|
||||
query,
|
||||
docs,
|
||||
}: {
|
||||
query: string;
|
||||
docs: Document[];
|
||||
}) => {
|
||||
const rerankDocs = async ({ query, docs }: { query: string; docs: Document[] }) => {
|
||||
if (docs.length === 0) {
|
||||
return docs;
|
||||
}
|
||||
|
||||
const docsWithContent = docs.filter(
|
||||
(doc) => doc.pageContent && doc.pageContent.length > 0,
|
||||
);
|
||||
const docsWithContent = docs.filter(document => document.pageContent && document.pageContent.length > 0);
|
||||
|
||||
const [docEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
|
||||
const [documentEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map(document => document.pageContent)),
|
||||
embeddings.embedQuery(query),
|
||||
]);
|
||||
|
||||
const similarity = docEmbeddings.map((docEmbedding, i) => {
|
||||
const sim = computeSimilarity(queryEmbedding, docEmbedding);
|
||||
const similarity = documentEmbeddings.map((documentEmbedding, index) => {
|
||||
const sim = computeSimilarity(queryEmbedding, documentEmbedding);
|
||||
|
||||
return {
|
||||
index: i,
|
||||
index: index,
|
||||
similarity: sim,
|
||||
};
|
||||
});
|
||||
|
||||
const sortedDocs = similarity
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.filter((sim) => sim.similarity > 0.5)
|
||||
.filter(sim => sim.similarity > 0.5)
|
||||
.slice(0, 15)
|
||||
.map((sim) => docsWithContent[sim.index]);
|
||||
.map(sim => docsWithContent[sim.index]);
|
||||
|
||||
return sortedDocs;
|
||||
};
|
||||
|
@ -188,43 +149,35 @@ const createBasicWebSearchAnsweringChain = (
|
|||
query: (input: BasicChainInput) => input.query,
|
||||
chat_history: (input: BasicChainInput) => input.chat_history,
|
||||
context: RunnableSequence.from([
|
||||
(input) => ({
|
||||
input => ({
|
||||
query: input.query,
|
||||
chat_history: formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
basicWebSearchRetrieverChain
|
||||
.pipe(rerankDocs)
|
||||
.withConfig({
|
||||
runName: 'FinalSourceRetriever',
|
||||
runName: "FinalSourceRetriever",
|
||||
})
|
||||
.pipe(processDocs),
|
||||
]),
|
||||
}),
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', basicWebSearchResponsePrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", basicWebSearchResponsePrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
const basicWebSearch = (
|
||||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicWebSearch = (query: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
try {
|
||||
const basicWebSearchAnsweringChain = createBasicWebSearchAnsweringChain(
|
||||
llm,
|
||||
embeddings,
|
||||
);
|
||||
const basicWebSearchAnsweringChain = createBasicWebSearchAnsweringChain(llm, embeddings);
|
||||
|
||||
const stream = basicWebSearchAnsweringChain.streamEvents(
|
||||
{
|
||||
|
@ -232,28 +185,20 @@ const basicWebSearch = (
|
|||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in websearch: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in websearch: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
};
|
||||
|
||||
const handleWebSearch = (
|
||||
message: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const handleWebSearch = (message: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = basicWebSearch(message, history, llm, embeddings);
|
||||
return emitter;
|
||||
};
|
||||
|
|
|
@ -1,23 +1,15 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
PromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import eventEmitter from 'events';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import eventEmitter from "node:events";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const basicWolframAlphaSearchRetrieverPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the LLM to search the web for information.
|
||||
|
@ -62,36 +54,18 @@ const basicWolframAlphaSearchResponsePrompt = `
|
|||
Anything between the \`context\` is retrieved from Wolfram Alpha and is not a part of the conversation with the user. Today's date is ${new Date().toISOString()}
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalSourceRetriever'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'sources', data: event.data.output }),
|
||||
);
|
||||
if (event.event === "on_chain_end" && event.name === "FinalSourceRetriever") {
|
||||
emitter.emit("data", JSON.stringify({ type: "sources", data: event.data.output }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -105,19 +79,19 @@ const createBasicWolframAlphaSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
return RunnableSequence.from([
|
||||
PromptTemplate.fromTemplate(basicWolframAlphaSearchRetrieverPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
if (input === 'not_needed') {
|
||||
return { query: '', docs: [] };
|
||||
if (input === "not_needed") {
|
||||
return { query: "", docs: [] };
|
||||
}
|
||||
|
||||
const res = await searchSearxng(input, {
|
||||
language: 'en',
|
||||
engines: ['wolframalpha'],
|
||||
language: "en",
|
||||
engines: ["wolframalpha"],
|
||||
});
|
||||
|
||||
const documents = res.results.map(
|
||||
(result) =>
|
||||
result =>
|
||||
new Document({
|
||||
pageContent: result.content,
|
||||
metadata: {
|
||||
|
@ -133,74 +107,63 @@ const createBasicWolframAlphaSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const createBasicWolframAlphaSearchAnsweringChain = (llm: BaseChatModel) => {
|
||||
const basicWolframAlphaSearchRetrieverChain =
|
||||
createBasicWolframAlphaSearchRetrieverChain(llm);
|
||||
const processDocs = (docs: Document[]) => {
|
||||
return docs.map((_, index) => `${index + 1}. ${docs[index].pageContent}`).join("\n");
|
||||
};
|
||||
|
||||
const processDocs = (docs: Document[]) => {
|
||||
return docs
|
||||
.map((_, index) => `${index + 1}. ${docs[index].pageContent}`)
|
||||
.join('\n');
|
||||
};
|
||||
const createBasicWolframAlphaSearchAnsweringChain = (llm: BaseChatModel) => {
|
||||
const basicWolframAlphaSearchRetrieverChain = createBasicWolframAlphaSearchRetrieverChain(llm);
|
||||
|
||||
return RunnableSequence.from([
|
||||
RunnableMap.from({
|
||||
query: (input: BasicChainInput) => input.query,
|
||||
chat_history: (input: BasicChainInput) => input.chat_history,
|
||||
context: RunnableSequence.from([
|
||||
(input) => ({
|
||||
input => ({
|
||||
query: input.query,
|
||||
chat_history: formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
basicWolframAlphaSearchRetrieverChain
|
||||
.pipe(({ query, docs }) => {
|
||||
.pipe(({ docs }) => {
|
||||
return docs;
|
||||
})
|
||||
.withConfig({
|
||||
runName: 'FinalSourceRetriever',
|
||||
runName: "FinalSourceRetriever",
|
||||
})
|
||||
.pipe(processDocs),
|
||||
]),
|
||||
}),
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', basicWolframAlphaSearchResponsePrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", basicWolframAlphaSearchResponsePrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
const basicWolframAlphaSearch = (
|
||||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
) => {
|
||||
const basicWolframAlphaSearch = (query: string, history: BaseMessage[], llm: BaseChatModel) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
try {
|
||||
const basicWolframAlphaSearchAnsweringChain =
|
||||
createBasicWolframAlphaSearchAnsweringChain(llm);
|
||||
const basicWolframAlphaSearchAnsweringChain = createBasicWolframAlphaSearchAnsweringChain(llm);
|
||||
const stream = basicWolframAlphaSearchAnsweringChain.streamEvents(
|
||||
{
|
||||
chat_history: history,
|
||||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in WolframAlphaSearch: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in WolframAlphaSearch: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
|
@@ -210,7 +173,8 @@ const handleWolframAlphaSearch = (
|
|||
message: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_embeddings: Embeddings,
|
||||
) => {
|
||||
const emitter = basicWolframAlphaSearch(message, history, llm);
|
||||
return emitter;
|
||||
|
|
|
@@ -1,42 +1,27 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import { RunnableSequence } from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import eventEmitter from 'events';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import eventEmitter from "node:events";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const writingAssistantPrompt = `
|
||||
You are Perplexica, an AI model who is expert at searching the web and answering user's queries. You are currently set on focus mode 'Writing Assistant', this means you will be helping the user write a response to a given query.
|
||||
Since you are a writing assistant, you would not perform web searches. If you think you lack information to answer the query, you can ask the user for more information or suggest them to switch to a different focus mode.
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -44,14 +29,14 @@ const handleStream = async (
|
|||
const createWritingAssistantChain = (llm: BaseChatModel) => {
|
||||
return RunnableSequence.from([
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', writingAssistantPrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", writingAssistantPrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
|
@@ -59,7 +44,8 @@ const handleWritingAssistant = (
|
|||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_embeddings: Embeddings,
|
||||
) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
|
@@ -71,17 +57,14 @@ const handleWritingAssistant = (
|
|||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in writing assistant: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in writing assistant: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
|
|
|
@@ -1,24 +1,16 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import {
|
||||
PromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
MessagesPlaceholder,
|
||||
} from '@langchain/core/prompts';
|
||||
import {
|
||||
RunnableSequence,
|
||||
RunnableMap,
|
||||
RunnableLambda,
|
||||
} from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import { searchSearxng } from '../lib/searxng';
|
||||
import type { StreamEvent } from '@langchain/core/tracers/log_stream';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import formatChatHistoryAsString from '../utils/formatHistory';
|
||||
import eventEmitter from 'events';
|
||||
import computeSimilarity from '../utils/computeSimilarity';
|
||||
import logger from '../utils/logger';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
import { PromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
|
||||
import { RunnableSequence, RunnableMap, RunnableLambda } from "@langchain/core/runnables";
|
||||
import { StringOutputParser } from "@langchain/core/output_parsers";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import { searchSearxng } from "../lib/searxng";
|
||||
import type { StreamEvent } from "@langchain/core/tracers/log_stream";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import formatChatHistoryAsString from "../utils/formatHistory";
|
||||
import eventEmitter from "node:events";
|
||||
import computeSimilarity from "../utils/computeSimilarity";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const basicYoutubeSearchRetrieverPrompt = `
|
||||
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the LLM to search the web for information.
|
||||
|
@@ -63,36 +55,18 @@ const basicYoutubeSearchResponsePrompt = `
|
|||
Anything between the \`context\` is retrieved from Youtube and is not a part of the conversation with the user. Today's date is ${new Date().toISOString()}
|
||||
`;
|
||||
|
||||
const strParser = new StringOutputParser();
|
||||
const stringParser = new StringOutputParser();
|
||||
|
||||
const handleStream = async (
|
||||
stream: AsyncGenerator<StreamEvent, any, unknown>,
|
||||
emitter: eventEmitter,
|
||||
) => {
|
||||
const handleStream = async (stream: AsyncGenerator<StreamEvent, unknown, unknown>, emitter: eventEmitter) => {
|
||||
for await (const event of stream) {
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalSourceRetriever'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'sources', data: event.data.output }),
|
||||
);
|
||||
if (event.event === "on_chain_end" && event.name === "FinalSourceRetriever") {
|
||||
emitter.emit("data", JSON.stringify({ type: "sources", data: event.data.output }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_stream' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit(
|
||||
'data',
|
||||
JSON.stringify({ type: 'response', data: event.data.chunk }),
|
||||
);
|
||||
if (event.event === "on_chain_stream" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("data", JSON.stringify({ type: "response", data: event.data.chunk }));
|
||||
}
|
||||
if (
|
||||
event.event === 'on_chain_end' &&
|
||||
event.name === 'FinalResponseGenerator'
|
||||
) {
|
||||
emitter.emit('end');
|
||||
if (event.event === "on_chain_end" && event.name === "FinalResponseGenerator") {
|
||||
emitter.emit("end");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -106,21 +80,21 @@ const createBasicYoutubeSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
return RunnableSequence.from([
|
||||
PromptTemplate.fromTemplate(basicYoutubeSearchRetrieverPrompt),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
RunnableLambda.from(async (input: string) => {
|
||||
if (input === 'not_needed') {
|
||||
return { query: '', docs: [] };
|
||||
if (input === "not_needed") {
|
||||
return { query: "", docs: [] };
|
||||
}
|
||||
|
||||
const res = await searchSearxng(input, {
|
||||
language: 'en',
|
||||
engines: ['youtube'],
|
||||
language: "en",
|
||||
engines: ["youtube"],
|
||||
});
|
||||
|
||||
const documents = res.results.map(
|
||||
(result) =>
|
||||
result =>
|
||||
new Document({
|
||||
pageContent: result.content ? result.content : result.title,
|
||||
pageContent: result.content ?? result.title,
|
||||
metadata: {
|
||||
title: result.title,
|
||||
url: result.url,
|
||||
|
@@ -134,44 +108,30 @@ const createBasicYoutubeSearchRetrieverChain = (llm: BaseChatModel) => {
|
|||
]);
|
||||
};
|
||||
|
||||
const createBasicYoutubeSearchAnsweringChain = (
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicYoutubeSearchRetrieverChain =
|
||||
createBasicYoutubeSearchRetrieverChain(llm);
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs.map((_, index) => `${index + 1}. ${docs[index].pageContent}`).join("\n");
|
||||
};
|
||||
|
||||
const processDocs = async (docs: Document[]) => {
|
||||
return docs
|
||||
.map((_, index) => `${index + 1}. ${docs[index].pageContent}`)
|
||||
.join('\n');
|
||||
};
|
||||
const createBasicYoutubeSearchAnsweringChain = (llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const basicYoutubeSearchRetrieverChain = createBasicYoutubeSearchRetrieverChain(llm);
|
||||
|
||||
const rerankDocs = async ({
|
||||
query,
|
||||
docs,
|
||||
}: {
|
||||
query: string;
|
||||
docs: Document[];
|
||||
}) => {
|
||||
const rerankDocs = async ({ query, docs }: { query: string; docs: Document[] }) => {
|
||||
if (docs.length === 0) {
|
||||
return docs;
|
||||
}
|
||||
|
||||
const docsWithContent = docs.filter(
|
||||
(doc) => doc.pageContent && doc.pageContent.length > 0,
|
||||
);
|
||||
const docsWithContent = docs.filter(document => document.pageContent && document.pageContent.length > 0);
|
||||
|
||||
const [docEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
|
||||
const [documentEmbeddings, queryEmbedding] = await Promise.all([
|
||||
embeddings.embedDocuments(docsWithContent.map(document => document.pageContent)),
|
||||
embeddings.embedQuery(query),
|
||||
]);
|
||||
|
||||
const similarity = docEmbeddings.map((docEmbedding, i) => {
|
||||
const sim = computeSimilarity(queryEmbedding, docEmbedding);
|
||||
const similarity = documentEmbeddings.map((documentEmbedding, index) => {
|
||||
const sim = computeSimilarity(queryEmbedding, documentEmbedding);
|
||||
|
||||
return {
|
||||
index: i,
|
||||
index: index,
|
||||
similarity: sim,
|
||||
};
|
||||
});
|
||||
|
@@ -179,8 +139,8 @@ const createBasicYoutubeSearchAnsweringChain = (
|
|||
const sortedDocs = similarity
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.slice(0, 15)
|
||||
.filter((sim) => sim.similarity > 0.3)
|
||||
.map((sim) => docsWithContent[sim.index]);
|
||||
.filter(sim => sim.similarity > 0.3)
|
||||
.map(sim => docsWithContent[sim.index]);
|
||||
|
||||
return sortedDocs;
|
||||
};
|
||||
|
@@ -190,41 +150,35 @@ const createBasicYoutubeSearchAnsweringChain = (
|
|||
query: (input: BasicChainInput) => input.query,
|
||||
chat_history: (input: BasicChainInput) => input.chat_history,
|
||||
context: RunnableSequence.from([
|
||||
(input) => ({
|
||||
input => ({
|
||||
query: input.query,
|
||||
chat_history: formatChatHistoryAsString(input.chat_history),
|
||||
}),
|
||||
basicYoutubeSearchRetrieverChain
|
||||
.pipe(rerankDocs)
|
||||
.withConfig({
|
||||
runName: 'FinalSourceRetriever',
|
||||
runName: "FinalSourceRetriever",
|
||||
})
|
||||
.pipe(processDocs),
|
||||
]),
|
||||
}),
|
||||
ChatPromptTemplate.fromMessages([
|
||||
['system', basicYoutubeSearchResponsePrompt],
|
||||
new MessagesPlaceholder('chat_history'),
|
||||
['user', '{query}'],
|
||||
["system", basicYoutubeSearchResponsePrompt],
|
||||
new MessagesPlaceholder("chat_history"),
|
||||
["user", "{query}"],
|
||||
]),
|
||||
llm,
|
||||
strParser,
|
||||
stringParser,
|
||||
]).withConfig({
|
||||
runName: 'FinalResponseGenerator',
|
||||
runName: "FinalResponseGenerator",
|
||||
});
|
||||
};
|
||||
|
||||
const basicYoutubeSearch = (
|
||||
query: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const basicYoutubeSearch = (query: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = new eventEmitter();
|
||||
|
||||
try {
|
||||
const basicYoutubeSearchAnsweringChain =
|
||||
createBasicYoutubeSearchAnsweringChain(llm, embeddings);
|
||||
const basicYoutubeSearchAnsweringChain = createBasicYoutubeSearchAnsweringChain(llm, embeddings);
|
||||
|
||||
const stream = basicYoutubeSearchAnsweringChain.streamEvents(
|
||||
{
|
||||
|
@@ -232,28 +186,20 @@ const basicYoutubeSearch = (
|
|||
query: query,
|
||||
},
|
||||
{
|
||||
version: 'v1',
|
||||
version: "v1",
|
||||
},
|
||||
);
|
||||
|
||||
handleStream(stream, emitter);
|
||||
} catch (err) {
|
||||
emitter.emit(
|
||||
'error',
|
||||
JSON.stringify({ data: 'An error has occurred please try again later' }),
|
||||
);
|
||||
logger.error(`Error in youtube search: ${err}`);
|
||||
} catch (error) {
|
||||
emitter.emit("error", JSON.stringify({ data: "An error has occurred please try again later" }));
|
||||
logger.error(`Error in youtube search: ${error}`);
|
||||
}
|
||||
|
||||
return emitter;
|
||||
};
|
||||
|
||||
const handleYoutubeSearch = (
|
||||
message: string,
|
||||
history: BaseMessage[],
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
const handleYoutubeSearch = (message: string, history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
const emitter = basicYoutubeSearch(message, history, llm, embeddings);
|
||||
return emitter;
|
||||
};
|
||||
|
|
22
src/app.ts
|
@@ -1,10 +1,10 @@
|
|||
import { startWebSocketServer } from './websocket';
|
||||
import express from 'express';
|
||||
import cors from 'cors';
|
||||
import http from 'http';
|
||||
import routes from './routes';
|
||||
import { getPort } from './config';
|
||||
import logger from './utils/logger';
|
||||
import { startWebSocketServer } from "./websocket";
|
||||
import express from "express";
|
||||
import cors from "cors";
|
||||
import http from "node:http";
|
||||
import routes from "./routes";
|
||||
import { getPort } from "./config";
|
||||
import logger from "./utils/logger";
|
||||
|
||||
const port = getPort();
|
||||
|
||||
|
@@ -12,15 +12,15 @@ const app = express();
|
|||
const server = http.createServer(app);
|
||||
|
||||
const corsOptions = {
|
||||
origin: '*',
|
||||
origin: "*",
|
||||
};
|
||||
|
||||
app.use(cors(corsOptions));
|
||||
app.use(express.json());
|
||||
|
||||
app.use('/api', routes);
|
||||
app.get('/api', (_, res) => {
|
||||
res.status(200).json({ status: 'ok' });
|
||||
app.use("/api", routes);
|
||||
app.get("/api", (_, res) => {
|
||||
res.status(200).json({ status: "ok" });
|
||||
});
|
||||
|
||||
server.listen(port, () => {
|
||||
|
|
|
@@ -1,8 +1,9 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import toml from '@iarna/toml';
|
||||
/* eslint-disable unicorn/prefer-module */
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import toml from "@iarna/toml";
|
||||
|
||||
const configFileName = 'config.toml';
|
||||
const configFileName = "config.toml";
|
||||
|
||||
interface Config {
|
||||
GENERAL: {
|
||||
|
@@ -24,14 +25,11 @@ type RecursivePartial<T> = {
|
|||
};
|
||||
|
||||
const loadConfig = () =>
|
||||
toml.parse(
|
||||
fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
|
||||
) as any as Config;
|
||||
toml.parse(fs.readFileSync(path.join(__dirname, `../${configFileName}`), "utf8")) as unknown as Config;
|
||||
|
||||
export const getPort = () => loadConfig().GENERAL.PORT;
|
||||
|
||||
export const getSimilarityMeasure = () =>
|
||||
loadConfig().GENERAL.SIMILARITY_MEASURE;
|
||||
export const getSimilarityMeasure = () => loadConfig().GENERAL.SIMILARITY_MEASURE;
|
||||
|
||||
export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
|
||||
|
||||
|
@@ -47,23 +45,16 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
|
|||
for (const key in currentConfig) {
|
||||
if (!config[key]) config[key] = {};
|
||||
|
||||
if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) {
|
||||
if (typeof currentConfig[key] === "object" && currentConfig[key] !== null) {
|
||||
for (const nestedKey in currentConfig[key]) {
|
||||
if (
|
||||
!config[key][nestedKey] &&
|
||||
currentConfig[key][nestedKey] &&
|
||||
config[key][nestedKey] !== ''
|
||||
) {
|
||||
if (!config[key][nestedKey] && currentConfig[key][nestedKey] && config[key][nestedKey] !== "") {
|
||||
config[key][nestedKey] = currentConfig[key][nestedKey];
|
||||
}
|
||||
}
|
||||
} else if (currentConfig[key] && config[key] !== '') {
|
||||
} else if (currentConfig[key] && config[key] !== "") {
|
||||
config[key] = currentConfig[key];
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, `../${configFileName}`),
|
||||
toml.stringify(config),
|
||||
);
|
||||
fs.writeFileSync(path.join(__dirname, `../${configFileName}`), toml.stringify(config));
|
||||
};
|
||||
|
|
|
@@ -1,10 +1,10 @@
|
|||
import { drizzle } from 'drizzle-orm/better-sqlite3';
|
||||
import Database from 'better-sqlite3';
|
||||
import * as schema from './schema';
|
||||
import { drizzle } from "drizzle-orm/better-sqlite3";
|
||||
import Database from "better-sqlite3";
|
||||
import * as schema from "./schema";
|
||||
|
||||
const sqlite = new Database('data/db.sqlite');
|
||||
const db = drizzle(sqlite, {
|
||||
const sqlite = new Database("data/db.sqlite");
|
||||
const database = drizzle(sqlite, {
|
||||
schema: schema,
|
||||
});
|
||||
|
||||
export default db;
|
||||
export default database;
|
||||
|
|
|
@@ -1,19 +1,19 @@
|
|||
import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core';
|
||||
import { text, integer, sqliteTable } from "drizzle-orm/sqlite-core";
|
||||
|
||||
export const messages = sqliteTable('messages', {
|
||||
id: integer('id').primaryKey(),
|
||||
content: text('content').notNull(),
|
||||
chatId: text('chatId').notNull(),
|
||||
messageId: text('messageId').notNull(),
|
||||
role: text('type', { enum: ['assistant', 'user'] }),
|
||||
metadata: text('metadata', {
|
||||
mode: 'json',
|
||||
export const messages = sqliteTable("messages", {
|
||||
id: integer("id").primaryKey(),
|
||||
content: text("content").notNull(),
|
||||
chatId: text("chatId").notNull(),
|
||||
messageId: text("messageId").notNull(),
|
||||
role: text("type", { enum: ["assistant", "user"] }),
|
||||
metadata: text("metadata", {
|
||||
mode: "json",
|
||||
}),
|
||||
});
|
||||
|
||||
export const chats = sqliteTable('chats', {
|
||||
id: text('id').primaryKey(),
|
||||
title: text('title').notNull(),
|
||||
createdAt: text('createdAt').notNull(),
|
||||
focusMode: text('focusMode').notNull(),
|
||||
export const chats = sqliteTable("chats", {
|
||||
id: text("id").primaryKey(),
|
||||
title: text("title").notNull(),
|
||||
createdAt: text("createdAt").notNull(),
|
||||
focusMode: text("focusMode").notNull(),
|
||||
});
|
||||
|
|
|
@@ -1,8 +1,7 @@
|
|||
import { Embeddings, type EmbeddingsParams } from '@langchain/core/embeddings';
|
||||
import { chunkArray } from '@langchain/core/utils/chunk_array';
|
||||
import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings";
|
||||
import { chunkArray } from "@langchain/core/utils/chunk_array";
|
||||
|
||||
export interface HuggingFaceTransformersEmbeddingsParams
|
||||
extends EmbeddingsParams {
|
||||
export interface HuggingFaceTransformersEmbeddingsParameters extends EmbeddingsParams {
|
||||
modelName: string;
|
||||
|
||||
model: string;
|
||||
|
@@ -16,11 +15,11 @@ export interface HuggingFaceTransformersEmbeddingsParams
|
|||
|
||||
export class HuggingFaceTransformersEmbeddings
|
||||
extends Embeddings
|
||||
implements HuggingFaceTransformersEmbeddingsParams
|
||||
implements HuggingFaceTransformersEmbeddingsParameters
|
||||
{
|
||||
modelName = 'Xenova/all-MiniLM-L6-v2';
|
||||
modelName = "Xenova/all-MiniLM-L6-v2";
|
||||
|
||||
model = 'Xenova/all-MiniLM-L6-v2';
|
||||
model = "Xenova/all-MiniLM-L6-v2";
|
||||
|
||||
batchSize = 512;
|
||||
|
||||
|
@@ -28,9 +27,10 @@ export class HuggingFaceTransformersEmbeddings
|
|||
|
||||
timeout?: number;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
private pipelinePromise: Promise<any>;
|
||||
|
||||
constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {
|
||||
constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParameters>) {
|
||||
super(fields ?? {});
|
||||
|
||||
this.modelName = fields?.model ?? fields?.modelName ?? this.model;
|
||||
|
@@ -40,19 +40,15 @@ export class HuggingFaceTransformersEmbeddings
|
|||
}
|
||||
|
||||
async embedDocuments(texts: string[]): Promise<number[][]> {
|
||||
const batches = chunkArray(
|
||||
this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
|
||||
this.batchSize,
|
||||
);
|
||||
const batches = chunkArray(this.stripNewLines ? texts.map(t => t.replaceAll("\n", " ")) : texts, this.batchSize);
|
||||
|
||||
const batchRequests = batches.map((batch) => this.runEmbedding(batch));
|
||||
const batchRequests = batches.map(batch => this.runEmbedding(batch));
|
||||
const batchResponses = await Promise.all(batchRequests);
|
||||
const embeddings: number[][] = [];
|
||||
|
||||
for (let i = 0; i < batchResponses.length; i += 1) {
|
||||
const batchResponse = batchResponses[i];
|
||||
for (let j = 0; j < batchResponse.length; j += 1) {
|
||||
embeddings.push(batchResponse[j]);
|
||||
for (const batchResponse of batchResponses) {
|
||||
for (const element of batchResponse) {
|
||||
embeddings.push(element);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -60,22 +56,17 @@ export class HuggingFaceTransformersEmbeddings
|
|||
}
|
||||
|
||||
async embedQuery(text: string): Promise<number[]> {
|
||||
const data = await this.runEmbedding([
|
||||
this.stripNewLines ? text.replace(/\n/g, ' ') : text,
|
||||
]);
|
||||
const data = await this.runEmbedding([this.stripNewLines ? text.replaceAll("\n", " ") : text]);
|
||||
return data[0];
|
||||
}
|
||||
|
||||
private async runEmbedding(texts: string[]) {
|
||||
const { pipeline } = await import('@xenova/transformers');
|
||||
const { pipeline } = await import("@xenova/transformers");
|
||||
|
||||
const pipe = await (this.pipelinePromise ??= pipeline(
|
||||
'feature-extraction',
|
||||
this.model,
|
||||
));
|
||||
const pipe = await (this.pipelinePromise ??= pipeline("feature-extraction", this.model));
|
||||
|
||||
return this.caller.call(async () => {
|
||||
const output = await pipe(texts, { pooling: 'mean', normalize: true });
|
||||
const output = await pipe(texts, { pooling: "mean", normalize: true });
|
||||
return output.tolist();
|
||||
});
|
||||
}
|
||||
|
|
|
@@ -1,42 +1,41 @@
|
|||
import { BaseOutputParser } from '@langchain/core/output_parsers';
|
||||
import { BaseOutputParser } from "@langchain/core/output_parsers";
|
||||
|
||||
interface LineListOutputParserArgs {
|
||||
interface LineListOutputParserArguments {
|
||||
key?: string;
|
||||
}
|
||||
|
||||
class LineListOutputParser extends BaseOutputParser<string[]> {
|
||||
private key = 'questions';
|
||||
private key = "questions";
|
||||
|
||||
constructor(args?: LineListOutputParserArgs) {
|
||||
constructor(arguments_?: LineListOutputParserArguments) {
|
||||
super();
|
||||
this.key = args.key ?? this.key;
|
||||
this.key = arguments_.key ?? this.key;
|
||||
}
|
||||
|
||||
static lc_name() {
|
||||
return 'LineListOutputParser';
|
||||
return "LineListOutputParser";
|
||||
}
|
||||
|
||||
lc_namespace = ['langchain', 'output_parsers', 'line_list_output_parser'];
|
||||
lc_namespace = ["langchain", "output_parsers", "line_list_output_parser"];
|
||||
|
||||
async parse(text: string): Promise<string[]> {
|
||||
const regex = /^(\s*(-|\*|\d+\.\s|\d+\)\s|\u2022)\s*)+/;
|
||||
const startKeyIndex = text.indexOf(`<${this.key}>`);
|
||||
const endKeyIndex = text.indexOf(`</${this.key}>`);
|
||||
const questionsStartIndex =
|
||||
startKeyIndex === -1 ? 0 : startKeyIndex + `<${this.key}>`.length;
|
||||
const questionsStartIndex = startKeyIndex === -1 ? 0 : startKeyIndex + `<${this.key}>`.length;
|
||||
const questionsEndIndex = endKeyIndex === -1 ? text.length : endKeyIndex;
|
||||
const lines = text
|
||||
.slice(questionsStartIndex, questionsEndIndex)
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.trim() !== '')
|
||||
.map((line) => line.replace(regex, ''));
|
||||
.split("\n")
|
||||
.filter(line => line.trim() !== "")
|
||||
.map(line => line.replace(regex, ""));
|
||||
|
||||
return lines;
|
||||
}
|
||||
|
||||
getFormatInstructions(): string {
|
||||
throw new Error('Not implemented.');
|
||||
throw new Error("Not implemented.");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,13 +1,9 @@
|
|||
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
|
||||
import { ChatOllama } from '@langchain/community/chat_models/ollama';
|
||||
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
|
||||
import { HuggingFaceTransformersEmbeddings } from './huggingfaceTransformer';
|
||||
import {
|
||||
getGroqApiKey,
|
||||
getOllamaApiEndpoint,
|
||||
getOpenaiApiKey,
|
||||
} from '../config';
|
||||
import logger from '../utils/logger';
|
||||
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
|
||||
import { ChatOllama } from "@langchain/community/chat_models/ollama";
|
||||
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
|
||||
import { HuggingFaceTransformersEmbeddings } from "./huggingfaceTransformer";
|
||||
import { getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey } from "../config";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
export const getAvailableChatModelProviders = async () => {
|
||||
const openAIApiKey = getOpenaiApiKey();
|
||||
|
@@ -18,79 +14,79 @@ export const getAvailableChatModelProviders = async () => {
|
|||
|
||||
if (openAIApiKey) {
|
||||
try {
|
||||
models['openai'] = {
|
||||
'GPT-3.5 turbo': new ChatOpenAI({
|
||||
models["openai"] = {
|
||||
"GPT-3.5 turbo": new ChatOpenAI({
|
||||
openAIApiKey,
|
||||
modelName: 'gpt-3.5-turbo',
|
||||
modelName: "gpt-3.5-turbo",
|
||||
temperature: 0.7,
|
||||
}),
|
||||
'GPT-4': new ChatOpenAI({
|
||||
"GPT-4": new ChatOpenAI({
|
||||
openAIApiKey,
|
||||
modelName: 'gpt-4',
|
||||
modelName: "gpt-4",
|
||||
temperature: 0.7,
|
||||
}),
|
||||
'GPT-4 turbo': new ChatOpenAI({
|
||||
"GPT-4 turbo": new ChatOpenAI({
|
||||
openAIApiKey,
|
||||
modelName: 'gpt-4-turbo',
|
||||
modelName: "gpt-4-turbo",
|
||||
temperature: 0.7,
|
||||
}),
|
||||
'GPT-4 omni': new ChatOpenAI({
|
||||
"GPT-4 omni": new ChatOpenAI({
|
||||
openAIApiKey,
|
||||
modelName: 'gpt-4o',
|
||||
modelName: "gpt-4o",
|
||||
temperature: 0.7,
|
||||
}),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error(`Error loading OpenAI models: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading OpenAI models: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (groqApiKey) {
|
||||
try {
|
||||
models['groq'] = {
|
||||
'LLaMA3 8b': new ChatOpenAI(
|
||||
models["groq"] = {
|
||||
"LLaMA3 8b": new ChatOpenAI(
|
||||
{
|
||||
openAIApiKey: groqApiKey,
|
||||
modelName: 'llama3-8b-8192',
|
||||
modelName: "llama3-8b-8192",
|
||||
temperature: 0.7,
|
||||
},
|
||||
{
|
||||
baseURL: 'https://api.groq.com/openai/v1',
|
||||
baseURL: "https://api.groq.com/openai/v1",
|
||||
},
|
||||
),
|
||||
'LLaMA3 70b': new ChatOpenAI(
|
||||
"LLaMA3 70b": new ChatOpenAI(
|
||||
{
|
||||
openAIApiKey: groqApiKey,
|
||||
modelName: 'llama3-70b-8192',
|
||||
modelName: "llama3-70b-8192",
|
||||
temperature: 0.7,
|
||||
},
|
||||
{
|
||||
baseURL: 'https://api.groq.com/openai/v1',
|
||||
baseURL: "https://api.groq.com/openai/v1",
|
||||
},
|
||||
),
|
||||
'Mixtral 8x7b': new ChatOpenAI(
|
||||
"Mixtral 8x7b": new ChatOpenAI(
|
||||
{
|
||||
openAIApiKey: groqApiKey,
|
||||
modelName: 'mixtral-8x7b-32768',
|
||||
modelName: "mixtral-8x7b-32768",
|
||||
temperature: 0.7,
|
||||
},
|
||||
{
|
||||
baseURL: 'https://api.groq.com/openai/v1',
|
||||
baseURL: "https://api.groq.com/openai/v1",
|
||||
},
|
||||
),
|
||||
'Gemma 7b': new ChatOpenAI(
|
||||
"Gemma 7b": new ChatOpenAI(
|
||||
{
|
||||
openAIApiKey: groqApiKey,
|
||||
modelName: 'gemma-7b-it',
|
||||
modelName: "gemma-7b-it",
|
||||
temperature: 0.7,
|
||||
},
|
||||
{
|
||||
baseURL: 'https://api.groq.com/openai/v1',
|
||||
baseURL: "https://api.groq.com/openai/v1",
|
||||
},
|
||||
),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error(`Error loading Groq models: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading Groq models: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -98,26 +94,28 @@ export const getAvailableChatModelProviders = async () => {
|
|||
try {
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const { models: ollamaModels } = (await response.json()) as any;
|
||||
|
||||
models['ollama'] = ollamaModels.reduce((acc, model) => {
|
||||
acc[model.model] = new ChatOllama({
|
||||
// eslint-disable-next-line unicorn/no-array-reduce
|
||||
models["ollama"] = ollamaModels.reduce((accumulator, model) => {
|
||||
accumulator[model.model] = new ChatOllama({
|
||||
baseUrl: ollamaEndpoint,
|
||||
model: model.model,
|
||||
temperature: 0.7,
|
||||
});
|
||||
return acc;
|
||||
return accumulator;
|
||||
}, {});
|
||||
} catch (err) {
|
||||
logger.error(`Error loading Ollama models: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading Ollama models: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
models['custom_openai'] = {};
|
||||
models["custom_openai"] = {};
|
||||
|
||||
return models;
|
||||
};
|
||||
|
@@ -130,18 +128,18 @@ export const getAvailableEmbeddingModelProviders = async () => {
|
|||
|
||||
if (openAIApiKey) {
|
||||
try {
|
||||
models['openai'] = {
|
||||
'Text embedding 3 small': new OpenAIEmbeddings({
|
||||
models["openai"] = {
|
||||
"Text embedding 3 small": new OpenAIEmbeddings({
|
||||
openAIApiKey,
|
||||
modelName: 'text-embedding-3-small',
|
||||
modelName: "text-embedding-3-small",
|
||||
}),
|
||||
'Text embedding 3 large': new OpenAIEmbeddings({
|
||||
"Text embedding 3 large": new OpenAIEmbeddings({
|
||||
openAIApiKey,
|
||||
modelName: 'text-embedding-3-large',
|
||||
modelName: "text-embedding-3-large",
|
||||
}),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error(`Error loading OpenAI embeddings: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading OpenAI embeddings: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -149,38 +147,40 @@ export const getAvailableEmbeddingModelProviders = async () => {
|
|||
try {
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const { models: ollamaModels } = (await response.json()) as any;
|
||||
|
||||
models['ollama'] = ollamaModels.reduce((acc, model) => {
|
||||
acc[model.model] = new OllamaEmbeddings({
|
||||
// eslint-disable-next-line unicorn/no-array-reduce
|
||||
models["ollama"] = ollamaModels.reduce((accumulator, model) => {
|
||||
accumulator[model.model] = new OllamaEmbeddings({
|
||||
baseUrl: ollamaEndpoint,
|
||||
model: model.model,
|
||||
});
|
||||
return acc;
|
||||
return accumulator;
|
||||
}, {});
|
||||
} catch (err) {
|
||||
logger.error(`Error loading Ollama embeddings: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading Ollama embeddings: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
models['local'] = {
|
||||
'BGE Small': new HuggingFaceTransformersEmbeddings({
|
||||
modelName: 'Xenova/bge-small-en-v1.5',
|
||||
models["local"] = {
|
||||
"BGE Small": new HuggingFaceTransformersEmbeddings({
|
||||
modelName: "Xenova/bge-small-en-v1.5",
|
||||
}),
|
||||
'GTE Small': new HuggingFaceTransformersEmbeddings({
|
||||
modelName: 'Xenova/gte-small',
|
||||
"GTE Small": new HuggingFaceTransformersEmbeddings({
|
||||
modelName: "Xenova/gte-small",
|
||||
}),
|
||||
'Bert Multilingual': new HuggingFaceTransformersEmbeddings({
|
||||
modelName: 'Xenova/bert-base-multilingual-uncased',
|
||||
"Bert Multilingual": new HuggingFaceTransformersEmbeddings({
|
||||
modelName: "Xenova/bert-base-multilingual-uncased",
|
||||
}),
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error(`Error loading local embeddings: ${err}`);
|
||||
} catch (error) {
|
||||
logger.error(`Error loading local embeddings: ${error}`);
|
||||
}
|
||||
|
||||
return models;
|
||||
|
|
|
@@ -1,5 +1,5 @@
|
|||
import axios from 'axios';
|
||||
import { getSearxngApiEndpoint } from '../config';
|
||||
import axios from "axios";
|
||||
import { getSearxngApiEndpoint } from "../config";
|
||||
|
||||
interface SearxngSearchOptions {
|
||||
categories?: string[];
|
||||
|
@@ -19,23 +19,20 @@ interface SearxngSearchResult {
|
|||
iframe_src?: string;
|
||||
}
|
||||
|
||||
export const searchSearxng = async (
|
||||
query: string,
|
||||
opts?: SearxngSearchOptions,
|
||||
) => {
|
||||
export const searchSearxng = async (query: string, options?: SearxngSearchOptions) => {
|
||||
const searxngURL = getSearxngApiEndpoint();
|
||||
|
||||
const url = new URL(`${searxngURL}/search?format=json`);
|
||||
url.searchParams.append('q', query);
|
||||
url.searchParams.append("q", query);
|
||||
|
||||
if (opts) {
|
||||
Object.keys(opts).forEach((key) => {
|
||||
if (Array.isArray(opts[key])) {
|
||||
url.searchParams.append(key, opts[key].join(','));
|
||||
return;
|
||||
if (options) {
|
||||
for (const key of Object.keys(options)) {
|
||||
if (Array.isArray(options[key])) {
|
||||
url.searchParams.append(key, options[key].join(","));
|
||||
continue;
|
||||
}
|
||||
url.searchParams.append(key, opts[key]);
|
||||
});
|
||||
url.searchParams.append(key, options[key]);
|
||||
}
|
||||
}
|
||||
|
||||
const res = await axios.get(url.toString());
|
||||
|
|
|
@@ -1,65 +1,62 @@
|
|||
import express from 'express';
|
||||
import logger from '../utils/logger';
|
||||
import db from '../db/index';
|
||||
import { eq } from 'drizzle-orm';
|
||||
import { chats, messages } from '../db/schema';
|
||||
import express from "express";
|
||||
import logger from "../utils/logger";
|
||||
import database from "../db/index";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { chats, messages } from "../db/schema";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/', async (_, res) => {
|
||||
router.get("/", async (_, res) => {
|
||||
try {
|
||||
let chats = await db.query.chats.findMany();
|
||||
let chats = await database.query.chats.findMany();
|
||||
|
||||
chats = chats.reverse();
|
||||
|
||||
return res.status(200).json({ chats: chats });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in getting chats: ${err.message}`);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in getting chats: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/:id', async (req, res) => {
|
||||
router.get("/:id", async (request, res) => {
|
||||
try {
|
||||
const chatExists = await db.query.chats.findFirst({
|
||||
where: eq(chats.id, req.params.id),
|
||||
const chatExists = await database.query.chats.findFirst({
|
||||
where: eq(chats.id, request.params.id),
|
||||
});
|
||||
|
||||
if (!chatExists) {
|
||||
return res.status(404).json({ message: 'Chat not found' });
|
||||
return res.status(404).json({ message: "Chat not found" });
|
||||
}
|
||||
|
||||
const chatMessages = await db.query.messages.findMany({
|
||||
where: eq(messages.chatId, req.params.id),
|
||||
const chatMessages = await database.query.messages.findMany({
|
||||
where: eq(messages.chatId, request.params.id),
|
||||
});
|
||||
|
||||
return res.status(200).json({ chat: chatExists, messages: chatMessages });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in getting chat: ${err.message}`);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in getting chat: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete(`/:id`, async (req, res) => {
|
||||
router.delete(`/:id`, async (request, res) => {
|
||||
try {
|
||||
const chatExists = await db.query.chats.findFirst({
|
||||
where: eq(chats.id, req.params.id),
|
||||
const chatExists = await database.query.chats.findFirst({
|
||||
where: eq(chats.id, request.params.id),
|
||||
});
|
||||
|
||||
if (!chatExists) {
|
||||
return res.status(404).json({ message: 'Chat not found' });
|
||||
return res.status(404).json({ message: "Chat not found" });
|
||||
}
|
||||
|
||||
await db.delete(chats).where(eq(chats.id, req.params.id)).execute();
|
||||
await db
|
||||
.delete(messages)
|
||||
.where(eq(messages.chatId, req.params.id))
|
||||
.execute();
|
||||
await database.delete(chats).where(eq(chats.id, request.params.id)).execute();
|
||||
await database.delete(messages).where(eq(messages.chatId, request.params.id)).execute();
|
||||
|
||||
return res.status(200).json({ message: 'Chat deleted successfully' });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in deleting chat: ${err.message}`);
|
||||
return res.status(200).json({ message: "Chat deleted successfully" });
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in deleting chat: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,18 +1,10 @@
|
|||
import express from 'express';
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '../lib/providers';
|
||||
import {
|
||||
getGroqApiKey,
|
||||
getOllamaApiEndpoint,
|
||||
getOpenaiApiKey,
|
||||
updateConfig,
|
||||
} from '../config';
|
||||
import express from "express";
|
||||
import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "../lib/providers";
|
||||
import { getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey, updateConfig } from "../config";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/', async (_, res) => {
|
||||
router.get("/", async (_, res) => {
|
||||
const config = {};
|
||||
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
|
@@ -20,30 +12,26 @@ router.get('/', async (_, res) => {
|
|||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
config['chatModelProviders'] = {};
|
||||
config['embeddingModelProviders'] = {};
|
||||
config["chatModelProviders"] = {};
|
||||
config["embeddingModelProviders"] = {};
|
||||
|
||||
for (const provider in chatModelProviders) {
|
||||
config['chatModelProviders'][provider] = Object.keys(
|
||||
chatModelProviders[provider],
|
||||
);
|
||||
config["chatModelProviders"][provider] = Object.keys(chatModelProviders[provider]);
|
||||
}
|
||||
|
||||
for (const provider in embeddingModelProviders) {
|
||||
config['embeddingModelProviders'][provider] = Object.keys(
|
||||
embeddingModelProviders[provider],
|
||||
);
|
||||
config["embeddingModelProviders"][provider] = Object.keys(embeddingModelProviders[provider]);
|
||||
}
|
||||
|
||||
config['openaiApiKey'] = getOpenaiApiKey();
|
||||
config['ollamaApiUrl'] = getOllamaApiEndpoint();
|
||||
config['groqApiKey'] = getGroqApiKey();
|
||||
config["openaiApiKey"] = getOpenaiApiKey();
|
||||
config["ollamaApiUrl"] = getOllamaApiEndpoint();
|
||||
config["groqApiKey"] = getGroqApiKey();
|
||||
|
||||
res.status(200).json(config);
|
||||
});
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
const config = req.body;
|
||||
router.post("/", async (request, res) => {
|
||||
const config = request.body;
|
||||
|
||||
const updatedConfig = {
|
||||
API_KEYS: {
|
||||
|
@@ -57,7 +45,7 @@ router.post('/', async (req, res) => {
|
|||
|
||||
updateConfig(updatedConfig);
|
||||
|
||||
res.status(200).json({ message: 'Config updated' });
|
||||
res.status(200).json({ message: "Config updated" });
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
|
|
@@ -1,21 +1,22 @@
|
|||
import express from 'express';
|
||||
import handleImageSearch from '../agents/imageSearchAgent';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { getAvailableChatModelProviders } from '../lib/providers';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import express from "express";
|
||||
import handleImageSearch from "../agents/imageSearchAgent";
|
||||
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import { getAvailableChatModelProviders } from "../lib/providers";
|
||||
import { HumanMessage, AIMessage } from "@langchain/core/messages";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
router.post("/", async (request, res) => {
|
||||
try {
|
||||
let { query, chat_history, chat_model_provider, chat_model } = req.body;
|
||||
const { query, chat_history: raw_chat_history, chat_model_provider, chat_model } = request.body;
|
||||
|
||||
chat_history = chat_history.map((msg: any) => {
|
||||
if (msg.role === 'user') {
|
||||
return new HumanMessage(msg.content);
|
||||
} else if (msg.role === 'assistant') {
|
||||
return new AIMessage(msg.content);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const chat_history = raw_chat_history.map((message: any) => {
|
||||
if (message.role === "user") {
|
||||
return new HumanMessage(message.content);
|
||||
} else if (message.role === "assistant") {
|
||||
return new AIMessage(message.content);
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -30,16 +31,16 @@ router.post('/', async (req, res) => {
|
|||
}
|
||||
|
||||
if (!llm) {
|
||||
res.status(500).json({ message: 'Invalid LLM model selected' });
|
||||
res.status(500).json({ message: "Invalid LLM model selected" });
|
||||
return;
|
||||
}
|
||||
|
||||
const images = await handleImageSearch({ query, chat_history }, llm);
|
||||
|
||||
res.status(200).json({ images });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in image search: ${err.message}`);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in image search: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,18 +1,18 @@
|
|||
import express from 'express';
|
||||
import imagesRouter from './images';
|
||||
import videosRouter from './videos';
|
||||
import configRouter from './config';
|
||||
import modelsRouter from './models';
|
||||
import suggestionsRouter from './suggestions';
|
||||
import chatsRouter from './chats';
|
||||
import express from "express";
|
||||
import imagesRouter from "./images";
|
||||
import videosRouter from "./videos";
|
||||
import configRouter from "./config";
|
||||
import modelsRouter from "./models";
|
||||
import suggestionsRouter from "./suggestions";
|
||||
import chatsRouter from "./chats";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.use('/images', imagesRouter);
|
||||
router.use('/videos', videosRouter);
|
||||
router.use('/config', configRouter);
|
||||
router.use('/models', modelsRouter);
|
||||
router.use('/suggestions', suggestionsRouter);
|
||||
router.use('/chats', chatsRouter);
|
||||
router.use("/images", imagesRouter);
|
||||
router.use("/videos", videosRouter);
|
||||
router.use("/config", configRouter);
|
||||
router.use("/models", modelsRouter);
|
||||
router.use("/suggestions", suggestionsRouter);
|
||||
router.use("/chats", chatsRouter);
|
||||
|
||||
export default router;
|
||||
|
|
|
@@ -1,13 +1,10 @@
|
|||
import express from 'express';
|
||||
import logger from '../utils/logger';
|
||||
import {
|
||||
getAvailableChatModelProviders,
|
||||
getAvailableEmbeddingModelProviders,
|
||||
} from '../lib/providers';
|
||||
import express from "express";
|
||||
import logger from "../utils/logger";
|
||||
import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "../lib/providers";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/', async (req, res) => {
|
||||
router.get("/", async (_request, res) => {
|
||||
try {
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
|
@@ -15,9 +12,9 @@ router.get('/', async (req, res) => {
|
|||
]);
|
||||
|
||||
res.status(200).json({ chatModelProviders, embeddingModelProviders });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(err.message);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(error.message);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,21 +1,22 @@
|
|||
import express from 'express';
|
||||
import generateSuggestions from '../agents/suggestionGeneratorAgent';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { getAvailableChatModelProviders } from '../lib/providers';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import express from "express";
|
||||
import generateSuggestions from "../agents/suggestionGeneratorAgent";
|
||||
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import { getAvailableChatModelProviders } from "../lib/providers";
|
||||
import { HumanMessage, AIMessage } from "@langchain/core/messages";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
router.post("/", async (request, res) => {
|
||||
try {
|
||||
let { chat_history, chat_model, chat_model_provider } = req.body;
|
||||
const { chat_history: raw_chat_history, chat_model, chat_model_provider } = request.body;
|
||||
|
||||
chat_history = chat_history.map((msg: any) => {
|
||||
if (msg.role === 'user') {
|
||||
return new HumanMessage(msg.content);
|
||||
} else if (msg.role === 'assistant') {
|
||||
return new AIMessage(msg.content);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const chat_history = raw_chat_history.map((message: any) => {
|
||||
if (message.role === "user") {
|
||||
return new HumanMessage(message.content);
|
||||
} else if (message.role === "assistant") {
|
||||
return new AIMessage(message.content);
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -30,16 +31,16 @@ router.post('/', async (req, res) => {
|
|||
}
|
||||
|
||||
if (!llm) {
|
||||
res.status(500).json({ message: 'Invalid LLM model selected' });
|
||||
res.status(500).json({ message: "Invalid LLM model selected" });
|
||||
return;
|
||||
}
|
||||
|
||||
const suggestions = await generateSuggestions({ chat_history }, llm);
|
||||
|
||||
res.status(200).json({ suggestions: suggestions });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in generating suggestions: ${err.message}`);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in generating suggestions: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,21 +1,22 @@
|
|||
import express from 'express';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { getAvailableChatModelProviders } from '../lib/providers';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import logger from '../utils/logger';
|
||||
import handleVideoSearch from '../agents/videoSearchAgent';
|
||||
import express from "express";
|
||||
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import { getAvailableChatModelProviders } from "../lib/providers";
|
||||
import { HumanMessage, AIMessage } from "@langchain/core/messages";
|
||||
import logger from "../utils/logger";
|
||||
import handleVideoSearch from "../agents/videoSearchAgent";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
router.post("/", async (request, res) => {
|
||||
try {
|
||||
let { query, chat_history, chat_model_provider, chat_model } = req.body;
|
||||
const { query, chat_history: raw_chat_history, chat_model_provider, chat_model } = request.body;
|
||||
|
||||
chat_history = chat_history.map((msg: any) => {
|
||||
if (msg.role === 'user') {
|
||||
return new HumanMessage(msg.content);
|
||||
} else if (msg.role === 'assistant') {
|
||||
return new AIMessage(msg.content);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const chat_history = raw_chat_history.map((message: any) => {
|
||||
if (message.role === "user") {
|
||||
return new HumanMessage(message.content);
|
||||
} else if (message.role === "assistant") {
|
||||
return new AIMessage(message.content);
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -30,16 +31,16 @@ router.post('/', async (req, res) => {
|
|||
}
|
||||
|
||||
if (!llm) {
|
||||
res.status(500).json({ message: 'Invalid LLM model selected' });
|
||||
res.status(500).json({ message: "Invalid LLM model selected" });
|
||||
return;
|
||||
}
|
||||
|
||||
const videos = await handleVideoSearch({ chat_history, query }, llm);
|
||||
|
||||
res.status(200).json({ videos });
|
||||
} catch (err) {
|
||||
res.status(500).json({ message: 'An error has occurred.' });
|
||||
logger.error(`Error in video search: ${err.message}`);
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: "An error has occurred." });
|
||||
logger.error(`Error in video search: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
@@ -1,17 +1,17 @@
|
|||
import dot from 'compute-dot';
|
||||
import cosineSimilarity from 'compute-cosine-similarity';
|
||||
import { getSimilarityMeasure } from '../config';
|
||||
import dot from "compute-dot";
|
||||
import cosineSimilarity from "compute-cosine-similarity";
|
||||
import { getSimilarityMeasure } from "../config";
|
||||
|
||||
const computeSimilarity = (x: number[], y: number[]): number => {
|
||||
const similarityMeasure = getSimilarityMeasure();
|
||||
|
||||
if (similarityMeasure === 'cosine') {
|
||||
if (similarityMeasure === "cosine") {
|
||||
return cosineSimilarity(x, y);
|
||||
} else if (similarityMeasure === 'dot') {
|
||||
} else if (similarityMeasure === "dot") {
|
||||
return dot(x, y);
|
||||
}
|
||||
|
||||
throw new Error('Invalid similarity measure');
|
||||
throw new Error("Invalid similarity measure");
|
||||
};
|
||||
|
||||
export default computeSimilarity;
|
||||
|
|
|
@@ -1,9 +1,7 @@
|
|||
import { BaseMessage } from '@langchain/core/messages';
|
||||
import { BaseMessage } from "@langchain/core/messages";
|
||||
|
||||
const formatChatHistoryAsString = (history: BaseMessage[]) => {
|
||||
return history
|
||||
.map((message) => `${message._getType()}: ${message.content}`)
|
||||
.join('\n');
|
||||
return history.map(message => `${message._getType()}: ${message.content}`).join("\n");
|
||||
};
|
||||
|
||||
export default formatChatHistoryAsString;
|
||||
|
|
|
@@ -1,20 +1,14 @@
|
|||
import winston from 'winston';
|
||||
import winston from "winston";
|
||||
|
||||
const logger = winston.createLogger({
|
||||
level: 'info',
|
||||
level: "info",
|
||||
transports: [
|
||||
new winston.transports.Console({
|
||||
format: winston.format.combine(
|
||||
winston.format.colorize(),
|
||||
winston.format.simple(),
|
||||
),
|
||||
format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
|
||||
}),
|
||||
new winston.transports.File({
|
||||
filename: 'app.log',
|
||||
format: winston.format.combine(
|
||||
winston.format.timestamp(),
|
||||
winston.format.json(),
|
||||
),
|
||||
filename: "app.log",
|
||||
format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
|
|
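A small usage sketch of the logger configured above; per the two transports, each call is written as colorized text to the console and as a timestamped JSON line to app.log. The messages are illustrative.

import logger from "./utils/logger"; // hypothetical path

logger.info("WebSocket server started on port 3001");
logger.error(`Error in video search: ${new Error("example").message}`);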
@ -1,41 +1,28 @@
|
|||
import { WebSocket } from 'ws';
|
||||
import { handleMessage } from './messageHandler';
|
||||
import {
|
||||
getAvailableEmbeddingModelProviders,
|
||||
getAvailableChatModelProviders,
|
||||
} from '../lib/providers';
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import type { IncomingMessage } from 'http';
|
||||
import logger from '../utils/logger';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
import { WebSocket } from "ws";
|
||||
import { handleMessage } from "./messageHandler";
|
||||
import { getAvailableEmbeddingModelProviders, getAvailableChatModelProviders } from "../lib/providers";
|
||||
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import type { IncomingMessage } from "node:http";
|
||||
import logger from "../utils/logger";
|
||||
import { ChatOpenAI } from "@langchain/openai";
|
||||
|
||||
export const handleConnection = async (
|
||||
ws: WebSocket,
|
||||
request: IncomingMessage,
|
||||
) => {
|
||||
export const handleConnection = async (ws: WebSocket, request: IncomingMessage) => {
|
||||
try {
|
||||
const searchParams = new URL(request.url, `http://${request.headers.host}`)
|
||||
.searchParams;
|
||||
const searchParameters = new URL(request.url, `http://${request.headers.host}`).searchParams;
|
||||
|
||||
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
|
||||
getAvailableChatModelProviders(),
|
||||
getAvailableEmbeddingModelProviders(),
|
||||
]);
|
||||
|
||||
const chatModelProvider =
|
||||
searchParams.get('chatModelProvider') ||
|
||||
Object.keys(chatModelProviders)[0];
|
||||
const chatModel =
|
||||
searchParams.get('chatModel') ||
|
||||
Object.keys(chatModelProviders[chatModelProvider])[0];
|
||||
const chatModelProvider = searchParameters.get("chatModelProvider") || Object.keys(chatModelProviders)[0];
|
||||
const chatModel = searchParameters.get("chatModel") || Object.keys(chatModelProviders[chatModelProvider])[0];
|
||||
|
||||
const embeddingModelProvider =
|
||||
searchParams.get('embeddingModelProvider') ||
|
||||
Object.keys(embeddingModelProviders)[0];
|
||||
searchParameters.get("embeddingModelProvider") || Object.keys(embeddingModelProviders)[0];
|
||||
const embeddingModel =
|
||||
searchParams.get('embeddingModel') ||
|
||||
Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
|
||||
searchParameters.get("embeddingModel") || Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
|
||||
|
||||
let llm: BaseChatModel | undefined;
|
||||
let embeddings: Embeddings | undefined;
|
||||
|
@ -43,18 +30,16 @@ export const handleConnection = async (
|
|||
if (
|
||||
chatModelProviders[chatModelProvider] &&
|
||||
chatModelProviders[chatModelProvider][chatModel] &&
|
||||
chatModelProvider != 'custom_openai'
|
||||
chatModelProvider != "custom_openai"
|
||||
) {
|
||||
llm = chatModelProviders[chatModelProvider][chatModel] as
|
||||
| BaseChatModel
|
||||
| undefined;
|
||||
} else if (chatModelProvider == 'custom_openai') {
|
||||
llm = chatModelProviders[chatModelProvider][chatModel] as BaseChatModel | undefined;
|
||||
} else if (chatModelProvider == "custom_openai") {
|
||||
llm = new ChatOpenAI({
|
||||
modelName: chatModel,
|
||||
openAIApiKey: searchParams.get('openAIApiKey'),
|
||||
openAIApiKey: searchParameters.get("openAIApiKey"),
|
||||
temperature: 0.7,
|
||||
configuration: {
|
||||
baseURL: searchParams.get('openAIBaseURL'),
|
||||
baseURL: searchParameters.get("openAIBaseURL"),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
@ -63,38 +48,32 @@ export const handleConnection = async (
|
|||
embeddingModelProviders[embeddingModelProvider] &&
|
||||
embeddingModelProviders[embeddingModelProvider][embeddingModel]
|
||||
) {
|
||||
embeddings = embeddingModelProviders[embeddingModelProvider][
|
||||
embeddingModel
|
||||
] as Embeddings | undefined;
|
||||
embeddings = embeddingModelProviders[embeddingModelProvider][embeddingModel] as Embeddings | undefined;
|
||||
}
|
||||
|
||||
if (!llm || !embeddings) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
|
||||
key: 'INVALID_MODEL_SELECTED',
|
||||
type: "error",
|
||||
data: "Invalid LLM or embeddings model selected, please refresh the page and try again.",
|
||||
key: "INVALID_MODEL_SELECTED",
|
||||
}),
|
||||
);
|
||||
ws.close();
|
||||
}
|
||||
|
||||
ws.on(
|
||||
'message',
|
||||
async (message) =>
|
||||
await handleMessage(message.toString(), ws, llm, embeddings),
|
||||
);
|
||||
ws.on("message", async message => await handleMessage(message.toString(), ws, llm, embeddings));
|
||||
|
||||
ws.on('close', () => logger.debug('Connection closed'));
|
||||
} catch (err) {
|
||||
ws.on("close", () => logger.debug("Connection closed"));
|
||||
} catch (error) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
data: 'Internal server error.',
|
||||
key: 'INTERNAL_SERVER_ERROR',
|
||||
type: "error",
|
||||
data: "Internal server error.",
|
||||
key: "INTERNAL_SERVER_ERROR",
|
||||
}),
|
||||
);
|
||||
ws.close();
|
||||
logger.error(err);
|
||||
logger.error(error);
|
||||
}
|
||||
};
|
||||
|
|
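To make the query-string contract concrete, a hedged sketch of how a client might build the URL that handleConnection parses; the parameter names match what the handler reads, while the host and model values are placeholders.

// Hypothetical client-side sketch; only the parameter names are taken from the handler.
const wsUrl = new URL("ws://localhost:3001");
wsUrl.search = new URLSearchParams({
  chatModelProvider: "openai", // placeholder provider
  chatModel: "gpt-4o-mini", // placeholder model
  embeddingModelProvider: "openai", // placeholder provider
  embeddingModel: "text-embedding-3-small", // placeholder model
}).toString();
const socket = new WebSocket(wsUrl.toString());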
|
@ -1,8 +1,6 @@
|
|||
import { initServer } from './websocketServer';
|
||||
import http from 'http';
|
||||
import { initServer } from "./websocketServer";
|
||||
import http from "node:http";
|
||||
|
||||
export const startWebSocketServer = (
|
||||
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,
|
||||
) => {
|
||||
export const startWebSocketServer = (server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>) => {
|
||||
initServer(server);
|
||||
};
|
||||
|
|
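A hedged sketch of how this entry point is typically wired to the HTTP server; the Express app, the import path, and the port are assumptions, since the actual call site is not part of this diff.

import express from "express"; // assumed; the caller is not shown here
import http from "node:http";
import { startWebSocketServer } from "./websocket"; // hypothetical path

const app = express();
const server = http.createServer(app);

startWebSocketServer(server); // attaches the WebSocketServer to the same HTTP server
server.listen(3001); // placeholder port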
|
@ -1,18 +1,18 @@
|
|||
import { EventEmitter, WebSocket } from 'ws';
|
||||
import { BaseMessage, AIMessage, HumanMessage } from '@langchain/core/messages';
|
||||
import handleWebSearch from '../agents/webSearchAgent';
|
||||
import handleAcademicSearch from '../agents/academicSearchAgent';
|
||||
import handleWritingAssistant from '../agents/writingAssistant';
|
||||
import handleWolframAlphaSearch from '../agents/wolframAlphaSearchAgent';
|
||||
import handleYoutubeSearch from '../agents/youtubeSearchAgent';
|
||||
import handleRedditSearch from '../agents/redditSearchAgent';
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { Embeddings } from '@langchain/core/embeddings';
|
||||
import logger from '../utils/logger';
|
||||
import db from '../db';
|
||||
import { chats, messages } from '../db/schema';
|
||||
import { eq } from 'drizzle-orm';
|
||||
import crypto from 'crypto';
|
||||
import { EventEmitter, WebSocket } from "ws";
|
||||
import { BaseMessage, AIMessage, HumanMessage } from "@langchain/core/messages";
|
||||
import handleWebSearch from "../agents/webSearchAgent";
|
||||
import handleAcademicSearch from "../agents/academicSearchAgent";
|
||||
import handleWritingAssistant from "../agents/writingAssistant";
|
||||
import handleWolframAlphaSearch from "../agents/wolframAlphaSearchAgent";
|
||||
import handleYoutubeSearch from "../agents/youtubeSearchAgent";
|
||||
import handleRedditSearch from "../agents/redditSearchAgent";
|
||||
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";
|
||||
import type { Embeddings } from "@langchain/core/embeddings";
|
||||
import logger from "../utils/logger";
|
||||
import database from "../db";
|
||||
import { chats, messages } from "../db/schema";
|
||||
import { eq } from "drizzle-orm";
|
||||
import crypto from "node:crypto";
|
||||
|
||||
type Message = {
|
||||
messageId: string;
|
||||
|
@ -37,30 +37,25 @@ const searchHandlers = {
|
|||
redditSearch: handleRedditSearch,
|
||||
};
|
||||
|
||||
const handleEmitterEvents = (
|
||||
emitter: EventEmitter,
|
||||
ws: WebSocket,
|
||||
messageId: string,
|
||||
chatId: string,
|
||||
) => {
|
||||
let recievedMessage = '';
|
||||
const handleEmitterEvents = (emitter: EventEmitter, ws: WebSocket, messageId: string, chatId: string) => {
|
||||
let recievedMessage = "";
|
||||
let sources = [];
|
||||
|
||||
emitter.on('data', (data) => {
|
||||
emitter.on("data", data => {
|
||||
const parsedData = JSON.parse(data);
|
||||
if (parsedData.type === 'response') {
|
||||
if (parsedData.type === "response") {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'message',
|
||||
type: "message",
|
||||
data: parsedData.data,
|
||||
messageId: messageId,
|
||||
}),
|
||||
);
|
||||
recievedMessage += parsedData.data;
|
||||
} else if (parsedData.type === 'sources') {
|
||||
} else if (parsedData.type === "sources") {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'sources',
|
||||
type: "sources",
|
||||
data: parsedData.data,
|
||||
messageId: messageId,
|
||||
}),
|
||||
|
@ -68,15 +63,16 @@ const handleEmitterEvents = (
|
|||
sources = parsedData.data;
|
||||
}
|
||||
});
|
||||
emitter.on('end', () => {
|
||||
ws.send(JSON.stringify({ type: 'messageEnd', messageId: messageId }));
|
||||
emitter.on("end", () => {
|
||||
ws.send(JSON.stringify({ type: "messageEnd", messageId: messageId }));
|
||||
|
||||
db.insert(messages)
|
||||
database
|
||||
.insert(messages)
|
||||
.values({
|
||||
content: recievedMessage,
|
||||
chatId: chatId,
|
||||
messageId: messageId,
|
||||
role: 'assistant',
|
||||
role: "assistant",
|
||||
metadata: JSON.stringify({
|
||||
createdAt: new Date(),
|
||||
...(sources && sources.length > 0 && { sources }),
|
||||
|
@ -84,70 +80,58 @@ const handleEmitterEvents = (
|
|||
})
|
||||
.execute();
|
||||
});
|
||||
emitter.on('error', (data) => {
|
||||
emitter.on("error", data => {
|
||||
const parsedData = JSON.parse(data);
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
type: "error",
|
||||
data: parsedData.data,
|
||||
key: 'CHAIN_ERROR',
|
||||
key: "CHAIN_ERROR",
|
||||
}),
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
export const handleMessage = async (
|
||||
message: string,
|
||||
ws: WebSocket,
|
||||
llm: BaseChatModel,
|
||||
embeddings: Embeddings,
|
||||
) => {
|
||||
export const handleMessage = async (message: string, ws: WebSocket, llm: BaseChatModel, embeddings: Embeddings) => {
|
||||
try {
|
||||
const parsedWSMessage = JSON.parse(message) as WSMessage;
|
||||
const parsedMessage = parsedWSMessage.message;
|
||||
|
||||
const id = crypto.randomBytes(7).toString('hex');
|
||||
const id = crypto.randomBytes(7).toString("hex");
|
||||
|
||||
if (!parsedMessage.content)
|
||||
return ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
data: 'Invalid message format',
|
||||
key: 'INVALID_FORMAT',
|
||||
type: "error",
|
||||
data: "Invalid message format",
|
||||
key: "INVALID_FORMAT",
|
||||
}),
|
||||
);
|
||||
|
||||
const history: BaseMessage[] = parsedWSMessage.history.map((msg) => {
|
||||
if (msg[0] === 'human') {
|
||||
return new HumanMessage({
|
||||
content: msg[1],
|
||||
});
|
||||
} else {
|
||||
return new AIMessage({
|
||||
content: msg[1],
|
||||
});
|
||||
}
|
||||
const history: BaseMessage[] = parsedWSMessage.history.map(message_ => {
|
||||
return message_[0] === "human"
|
||||
? new HumanMessage({
|
||||
content: message_[1],
|
||||
})
|
||||
: new AIMessage({
|
||||
content: message_[1],
|
||||
});
|
||||
});
|
||||
|
||||
if (parsedWSMessage.type === 'message') {
|
||||
if (parsedWSMessage.type === "message") {
|
||||
const handler = searchHandlers[parsedWSMessage.focusMode];
|
||||
|
||||
if (handler) {
|
||||
const emitter = handler(
|
||||
parsedMessage.content,
|
||||
history,
|
||||
llm,
|
||||
embeddings,
|
||||
);
|
||||
const emitter = handler(parsedMessage.content, history, llm, embeddings);
|
||||
|
||||
handleEmitterEvents(emitter, ws, id, parsedMessage.chatId);
|
||||
|
||||
const chat = await db.query.chats.findFirst({
|
||||
const chat = await database.query.chats.findFirst({
|
||||
where: eq(chats.id, parsedMessage.chatId),
|
||||
});
|
||||
|
||||
if (!chat) {
|
||||
await db
|
||||
await database
|
||||
.insert(chats)
|
||||
.values({
|
||||
id: parsedMessage.chatId,
|
||||
|
@ -158,13 +142,13 @@ export const handleMessage = async (
|
|||
.execute();
|
||||
}
|
||||
|
||||
await db
|
||||
await database
|
||||
.insert(messages)
|
||||
.values({
|
||||
content: parsedMessage.content,
|
||||
chatId: parsedMessage.chatId,
|
||||
messageId: id,
|
||||
role: 'user',
|
||||
role: "user",
|
||||
metadata: JSON.stringify({
|
||||
createdAt: new Date(),
|
||||
}),
|
||||
|
@ -173,21 +157,21 @@ export const handleMessage = async (
|
|||
} else {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
data: 'Invalid focus mode',
|
||||
key: 'INVALID_FOCUS_MODE',
|
||||
type: "error",
|
||||
data: "Invalid focus mode",
|
||||
key: "INVALID_FOCUS_MODE",
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (error) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'error',
|
||||
data: 'Invalid message format',
|
||||
key: 'INVALID_FORMAT',
|
||||
type: "error",
|
||||
data: "Invalid message format",
|
||||
key: "INVALID_FORMAT",
|
||||
}),
|
||||
);
|
||||
logger.error(`Failed to handle message: ${err}`);
|
||||
logger.error(`Failed to handle message: ${error}`);
|
||||
}
|
||||
};
|
||||
|
|
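For orientation, an inferred sketch of the payload handleMessage expects, based on the fields it reads here and on the JSON the UI sends later in this PR; it is not an authoritative type from the codebase.

// Inferred shape, not copied from the source.
type IncomingWSMessage = {
  type: "message";
  message: {
    chatId: string;
    content: string;
  };
  focusMode: string; // e.g. "webSearch"; the full set of modes is not shown in this hunk
  history: [string, string][]; // e.g. [["human", "hi"], ["assistant", "hello"]]
};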
|
@ -1,16 +1,14 @@
|
|||
import { WebSocketServer } from 'ws';
|
||||
import { handleConnection } from './connectionManager';
|
||||
import http from 'http';
|
||||
import { getPort } from '../config';
|
||||
import logger from '../utils/logger';
|
||||
import { WebSocketServer } from "ws";
|
||||
import { handleConnection } from "./connectionManager";
|
||||
import http from "node:http";
|
||||
import { getPort } from "../config";
|
||||
import logger from "../utils/logger";
|
||||
|
||||
export const initServer = (
|
||||
server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>,
|
||||
) => {
|
||||
export const initServer = (server: http.Server<typeof http.IncomingMessage, typeof http.ServerResponse>) => {
|
||||
const port = getPort();
|
||||
const wss = new WebSocketServer({ server });
|
||||
|
||||
wss.on('connection', handleConnection);
|
||||
wss.on("connection", handleConnection);
|
||||
|
||||
logger.info(`WebSocket server started on port ${port}`);
|
||||
};
|
||||
|
|
|
@ -11,7 +11,9 @@
|
|||
"emitDecoratorMetadata": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"skipLibCheck": true,
|
||||
"skipDefaultLibCheck": true
|
||||
"skipDefaultLibCheck": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true
|
||||
},
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules", "**/*.spec.ts"]
|
||||
|
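The two new compiler options make the backend build reject dead code; a tiny illustrative snippet of what they now flag (this example is intentionally non-compiling).

// With "noUnusedLocals" and "noUnusedParameters" enabled, both `extra`
// and `unused` below become compile-time errors.
function greet(name: string, extra: number) {
  const unused = "never read";
  return `Hello, ${name}`;
}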
|
2
ui/.eslintignore
Normal file
|
@ -0,0 +1,2 @@
|
|||
node_modules
|
||||
dist
|
|
@ -1,3 +1,23 @@
|
|||
{
|
||||
"extends": "next/core-web-vitals"
|
||||
"extends": ["../.eslintrc.json"],
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["*.ts", "*.tsx"],
|
||||
"plugins": ["react", "react-hooks"],
|
||||
"extends": ["plugin:react/recommended", "plugin:react-hooks/recommended", "plugin:react/jsx-runtime"]
|
||||
},
|
||||
{
|
||||
"files": [
|
||||
"postcss.config.js",
|
||||
"tailwind.config.js",
|
||||
"tailwind.config.ts"
|
||||
],
|
||||
"rules": {
|
||||
"unicorn/prefer-module": "off"
|
||||
},
|
||||
"env": {
|
||||
"node": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
5
ui/.prettierrc
Normal file
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"endOfLine": "auto",
|
||||
"trailingComma": "all",
|
||||
"arrowParens": "avoid"
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
/** @type {import("prettier").Config} */
|
||||
|
||||
const config = {
|
||||
printWidth: 80,
|
||||
trailingComma: 'all',
|
||||
endOfLine: 'auto',
|
||||
singleQuote: true,
|
||||
tabWidth: 2,
|
||||
};
|
||||
|
||||
module.exports = config;
|
|
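The shared Prettier settings explain much of the mechanical churn in this diff: with singleQuote no longer set, Prettier's default double quotes apply, and arrowParens: "avoid" drops the parentheses around single-argument arrows. A before/after sketch with made-up code:

// Old per-package config would print: const names = items.map((item) => item.name);
// New shared config formats the same line as:
const items = [{ name: "a" }, { name: "b" }];
const names = items.map(item => item.name);
console.log(names.join(", "));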
@ -1,4 +1,4 @@
|
|||
import ChatWindow from '@/components/ChatWindow';
|
||||
import ChatWindow from "@/components/ChatWindow";
|
||||
|
||||
const Page = ({ params }: { params: { chatId: string } }) => {
|
||||
return <ChatWindow id={params.chatId} />;
|
||||
|
|
|
@ -1,22 +1,21 @@
|
|||
import type { Metadata } from 'next';
|
||||
import { Montserrat } from 'next/font/google';
|
||||
import './globals.css';
|
||||
import { cn } from '@/lib/utils';
|
||||
import Sidebar from '@/components/Sidebar';
|
||||
import { Toaster } from 'sonner';
|
||||
import ThemeProvider from '@/components/theme/Provider';
|
||||
import type { Metadata } from "next";
|
||||
import { Montserrat } from "next/font/google";
|
||||
import "./globals.css";
|
||||
import { cn } from "@/lib/utils";
|
||||
import Sidebar from "@/components/Sidebar";
|
||||
import { Toaster } from "sonner";
|
||||
import ThemeProvider from "@/components/theme/Provider";
|
||||
|
||||
const montserrat = Montserrat({
|
||||
weight: ['300', '400', '500', '700'],
|
||||
subsets: ['latin'],
|
||||
display: 'swap',
|
||||
fallback: ['Arial', 'sans-serif'],
|
||||
weight: ["300", "400", "500", "700"],
|
||||
subsets: ["latin"],
|
||||
display: "swap",
|
||||
fallback: ["Arial", "sans-serif"],
|
||||
});
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: 'Perplexica - Chat with the internet',
|
||||
description:
|
||||
'Perplexica is an AI powered chatbot that is connected to the internet.',
|
||||
title: "Perplexica - Chat with the internet",
|
||||
description: "Perplexica is an AI powered chatbot that is connected to the internet.",
|
||||
};
|
||||
|
||||
export default function RootLayout({
|
||||
|
@ -26,7 +25,7 @@ export default function RootLayout({
|
|||
}>) {
|
||||
return (
|
||||
<html className="h-full" lang="en" suppressHydrationWarning>
|
||||
<body className={cn('h-full', montserrat.className)}>
|
||||
<body className={cn("h-full", montserrat.className)}>
|
||||
<ThemeProvider>
|
||||
<Sidebar>{children}</Sidebar>
|
||||
<Toaster
|
||||
|
@ -34,7 +33,7 @@ export default function RootLayout({
|
|||
unstyled: true,
|
||||
classNames: {
|
||||
toast:
|
||||
'bg-light-primary dark:bg-dark-primary text-white rounded-lg p-4 flex flex-row items-center space-x-2',
|
||||
"bg-light-primary dark:bg-dark-primary text-white rounded-lg p-4 flex flex-row items-center space-x-2",
|
||||
},
|
||||
}}
|
||||
/>
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { Metadata } from 'next';
|
||||
import React from 'react';
|
||||
import { Metadata } from "next";
|
||||
import React from "react";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: 'Library - Perplexica',
|
||||
title: "Library - Perplexica",
|
||||
};
|
||||
|
||||
const Layout = ({ children }: { children: React.ReactNode }) => {
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
'use client';
|
||||
"use client";
|
||||
|
||||
import DeleteChat from '@/components/DeleteChat';
|
||||
import { formatTimeDifference } from '@/lib/utils';
|
||||
import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
|
||||
import Link from 'next/link';
|
||||
import { useEffect, useState } from 'react';
|
||||
import DeleteChat from "@/components/DeleteChat";
|
||||
import { formatTimeDifference } from "@/lib/utils";
|
||||
import { BookOpenText, ClockIcon } from "lucide-react";
|
||||
import Link from "next/link";
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
export interface Chat {
|
||||
id: string;
|
||||
|
@ -22,9 +22,9 @@ const Page = () => {
|
|||
setLoading(true);
|
||||
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats`, {
|
||||
method: 'GET',
|
||||
method: "GET",
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
|
@ -61,24 +61,20 @@ const Page = () => {
|
|||
<div className="fixed z-40 top-0 left-0 right-0 lg:pl-[104px] lg:pr-6 lg:px-8 px-4 py-4 lg:py-6 border-b border-light-200 dark:border-dark-200">
|
||||
<div className="flex flex-row items-center space-x-2 max-w-screen-lg lg:mx-auto">
|
||||
<BookOpenText />
|
||||
<h2 className="text-black dark:text-white lg:text-3xl lg:font-medium">
|
||||
Library
|
||||
</h2>
|
||||
<h2 className="text-black dark:text-white lg:text-3xl lg:font-medium">Library</h2>
|
||||
</div>
|
||||
</div>
|
||||
{chats.length === 0 && (
|
||||
<div className="flex flex-row items-center justify-center min-h-screen">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
No chats found.
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">No chats found.</p>
|
||||
</div>
|
||||
)}
|
||||
{chats.length > 0 && (
|
||||
<div className="flex flex-col pt-16 lg:pt-24">
|
||||
{chats.map((chat, i) => (
|
||||
{chats.map((chat, index) => (
|
||||
<div
|
||||
className="flex flex-col space-y-4 border-b border-white-200 dark:border-dark-200 py-6 lg:mx-4"
|
||||
key={i}
|
||||
key={index}
|
||||
>
|
||||
<Link
|
||||
href={`/c/${chat.id}`}
|
||||
|
@ -89,15 +85,9 @@ const Page = () => {
|
|||
<div className="flex flex-row items-center justify-between w-full">
|
||||
<div className="flex flex-row items-center space-x-1 lg:space-x-1.5 text-black/70 dark:text-white/70">
|
||||
<ClockIcon size={15} />
|
||||
<p className="text-xs">
|
||||
{formatTimeDifference(new Date(), chat.createdAt)} Ago
|
||||
</p>
|
||||
<p className="text-xs">{formatTimeDifference(new Date(), chat.createdAt)} Ago</p>
|
||||
</div>
|
||||
<DeleteChat
|
||||
chatId={chat.id}
|
||||
chats={chats}
|
||||
setChats={setChats}
|
||||
/>
|
||||
<DeleteChat chatId={chat.id} chats={chats} setChats={setChats} />
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import ChatWindow from '@/components/ChatWindow';
|
||||
import { Metadata } from 'next';
|
||||
import { Suspense } from 'react';
|
||||
import ChatWindow from "@/components/ChatWindow";
|
||||
import { Metadata } from "next";
|
||||
import { Suspense } from "react";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: 'Chat - Perplexica',
|
||||
description: 'Chat with the internet, chat with Perplexica.',
|
||||
title: "Chat - Perplexica",
|
||||
description: "Chat with the internet, chat with Perplexica.",
|
||||
};
|
||||
|
||||
const Home = () => {
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
'use client';
|
||||
"use client";
|
||||
|
||||
import { Fragment, useEffect, useRef, useState } from 'react';
|
||||
import MessageInput from './MessageInput';
|
||||
import { Message } from './ChatWindow';
|
||||
import MessageBox from './MessageBox';
|
||||
import MessageBoxLoading from './MessageBoxLoading';
|
||||
import { Fragment, useEffect, useRef, useState } from "react";
|
||||
import MessageInput from "./MessageInput";
|
||||
import { Message } from "./ChatWindow";
|
||||
import MessageBox from "./MessageBox";
|
||||
import MessageBoxLoading from "./MessageBoxLoading";
|
||||
|
||||
const Chat = ({
|
||||
loading,
|
||||
|
@ -20,52 +20,52 @@ const Chat = ({
|
|||
rewrite: (messageId: string) => void;
|
||||
}) => {
|
||||
const [dividerWidth, setDividerWidth] = useState(0);
|
||||
const dividerRef = useRef<HTMLDivElement | null>(null);
|
||||
const dividerReference = useRef<HTMLDivElement | null>(null);
|
||||
const messageEnd = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const updateDividerWidth = () => {
|
||||
if (dividerRef.current) {
|
||||
setDividerWidth(dividerRef.current.scrollWidth);
|
||||
if (dividerReference.current) {
|
||||
setDividerWidth(dividerReference.current.scrollWidth);
|
||||
}
|
||||
};
|
||||
|
||||
updateDividerWidth();
|
||||
|
||||
window.addEventListener('resize', updateDividerWidth);
|
||||
window.addEventListener("resize", updateDividerWidth);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', updateDividerWidth);
|
||||
window.removeEventListener("resize", updateDividerWidth);
|
||||
};
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
messageEnd.current?.scrollIntoView({ behavior: 'smooth' });
|
||||
messageEnd.current?.scrollIntoView({ behavior: "smooth" });
|
||||
|
||||
if (messages.length === 1) {
|
||||
document.title = `${messages[0].content.substring(0, 30)} - Perplexica`;
|
||||
document.title = `${messages[0].content.slice(0, 30)} - Perplexica`;
|
||||
}
|
||||
}, [messages]);
|
||||
|
||||
return (
|
||||
<div className="flex flex-col space-y-6 pt-8 pb-44 lg:pb-32 sm:mx-4 md:mx-8">
|
||||
{messages.map((msg, i) => {
|
||||
const isLast = i === messages.length - 1;
|
||||
{messages.map((message, index) => {
|
||||
const isLast = index === messages.length - 1;
|
||||
|
||||
return (
|
||||
<Fragment key={msg.messageId}>
|
||||
<Fragment key={message.messageId}>
|
||||
<MessageBox
|
||||
key={i}
|
||||
message={msg}
|
||||
messageIndex={i}
|
||||
key={index}
|
||||
message={message}
|
||||
messageIndex={index}
|
||||
history={messages}
|
||||
loading={loading}
|
||||
dividerRef={isLast ? dividerRef : undefined}
|
||||
dividerRef={isLast ? dividerReference : undefined}
|
||||
isLast={isLast}
|
||||
rewrite={rewrite}
|
||||
sendMessage={sendMessage}
|
||||
/>
|
||||
{!isLast && msg.role === 'assistant' && (
|
||||
{!isLast && message.role === "assistant" && (
|
||||
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
|
||||
)}
|
||||
</Fragment>
|
||||
|
@ -74,10 +74,7 @@ const Chat = ({
|
|||
{loading && !messageAppeared && <MessageBoxLoading />}
|
||||
<div ref={messageEnd} className="h-0" />
|
||||
{dividerWidth > 0 && (
|
||||
<div
|
||||
className="bottom-24 lg:bottom-10 fixed z-40"
|
||||
style={{ width: dividerWidth }}
|
||||
>
|
||||
<div className="bottom-24 lg:bottom-10 fixed z-40" style={{ width: dividerWidth }}>
|
||||
<MessageInput loading={loading} sendMessage={sendMessage} />
|
||||
</div>
|
||||
)}
|
||||
|
|
|
@ -1,111 +1,81 @@
|
|||
'use client';
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { Document } from '@langchain/core/documents';
|
||||
import Navbar from './Navbar';
|
||||
import Chat from './Chat';
|
||||
import EmptyChat from './EmptyChat';
|
||||
import crypto from 'crypto';
|
||||
import { toast } from 'sonner';
|
||||
import { useSearchParams } from 'next/navigation';
|
||||
import { getSuggestions } from '@/lib/actions';
|
||||
import Error from 'next/error';
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { Document } from "@langchain/core/documents";
|
||||
import Navbar from "./Navbar";
|
||||
import Chat from "./Chat";
|
||||
import EmptyChat from "./EmptyChat";
|
||||
import crypto from "node:crypto";
|
||||
import { toast } from "sonner";
|
||||
import { useSearchParams } from "next/navigation";
|
||||
import { getSuggestions } from "@/lib/actions";
|
||||
import Error from "next/error";
|
||||
|
||||
export type Message = {
|
||||
messageId: string;
|
||||
chatId: string;
|
||||
createdAt: Date;
|
||||
content: string;
|
||||
role: 'user' | 'assistant';
|
||||
role: "user" | "assistant";
|
||||
suggestions?: string[];
|
||||
sources?: Document[];
|
||||
};
|
||||
|
||||
const useSocket = (
|
||||
url: string,
|
||||
setIsWSReady: (ready: boolean) => void,
|
||||
setError: (error: boolean) => void,
|
||||
) => {
|
||||
const useSocket = (url: string, setIsWSReady: (ready: boolean) => void, setError: (error: boolean) => void) => {
|
||||
const [ws, setWs] = useState<WebSocket | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!ws) {
|
||||
const connectWs = async () => {
|
||||
let chatModel = localStorage.getItem('chatModel');
|
||||
let chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||
let embeddingModel = localStorage.getItem('embeddingModel');
|
||||
let embeddingModelProvider = localStorage.getItem(
|
||||
'embeddingModelProvider',
|
||||
);
|
||||
let chatModel = localStorage.getItem("chatModel");
|
||||
let chatModelProvider = localStorage.getItem("chatModelProvider");
|
||||
let embeddingModel = localStorage.getItem("embeddingModel");
|
||||
let embeddingModelProvider = localStorage.getItem("embeddingModelProvider");
|
||||
|
||||
if (
|
||||
!chatModel ||
|
||||
!chatModelProvider ||
|
||||
!embeddingModel ||
|
||||
!embeddingModelProvider
|
||||
) {
|
||||
const providers = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/models`,
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
if (!chatModel || !chatModelProvider || !embeddingModel || !embeddingModelProvider) {
|
||||
const providers = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/models`, {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
).then(async (res) => await res.json());
|
||||
}).then(async res => await res.json());
|
||||
|
||||
const chatModelProviders = providers.chatModelProviders;
|
||||
const embeddingModelProviders = providers.embeddingModelProviders;
|
||||
|
||||
if (
|
||||
!chatModelProviders ||
|
||||
Object.keys(chatModelProviders).length === 0
|
||||
)
|
||||
return toast.error('No chat models available');
|
||||
if (!chatModelProviders || Object.keys(chatModelProviders).length === 0)
|
||||
return toast.error("No chat models available");
|
||||
|
||||
if (
|
||||
!embeddingModelProviders ||
|
||||
Object.keys(embeddingModelProviders).length === 0
|
||||
)
|
||||
return toast.error('No embedding models available');
|
||||
if (!embeddingModelProviders || Object.keys(embeddingModelProviders).length === 0)
|
||||
return toast.error("No embedding models available");
|
||||
|
||||
chatModelProvider = Object.keys(chatModelProviders)[0];
|
||||
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
|
||||
|
||||
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
|
||||
embeddingModel = Object.keys(
|
||||
embeddingModelProviders[embeddingModelProvider],
|
||||
)[0];
|
||||
embeddingModel = Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
|
||||
|
||||
localStorage.setItem('chatModel', chatModel!);
|
||||
localStorage.setItem('chatModelProvider', chatModelProvider);
|
||||
localStorage.setItem('embeddingModel', embeddingModel!);
|
||||
localStorage.setItem(
|
||||
'embeddingModelProvider',
|
||||
embeddingModelProvider,
|
||||
);
|
||||
localStorage.setItem("chatModel", chatModel!);
|
||||
localStorage.setItem("chatModelProvider", chatModelProvider);
|
||||
localStorage.setItem("embeddingModel", embeddingModel!);
|
||||
localStorage.setItem("embeddingModelProvider", embeddingModelProvider);
|
||||
}
|
||||
|
||||
const wsURL = new URL(url);
|
||||
const searchParams = new URLSearchParams({});
|
||||
const searchParameters = new URLSearchParams({});
|
||||
|
||||
searchParams.append('chatModel', chatModel!);
|
||||
searchParams.append('chatModelProvider', chatModelProvider);
|
||||
searchParameters.append("chatModel", chatModel!);
|
||||
searchParameters.append("chatModelProvider", chatModelProvider);
|
||||
|
||||
if (chatModelProvider === 'custom_openai') {
|
||||
searchParams.append(
|
||||
'openAIApiKey',
|
||||
localStorage.getItem('openAIApiKey')!,
|
||||
);
|
||||
searchParams.append(
|
||||
'openAIBaseURL',
|
||||
localStorage.getItem('openAIBaseURL')!,
|
||||
);
|
||||
if (chatModelProvider === "custom_openai") {
|
||||
searchParameters.append("openAIApiKey", localStorage.getItem("openAIApiKey")!);
|
||||
searchParameters.append("openAIBaseURL", localStorage.getItem("openAIBaseURL")!);
|
||||
}
|
||||
|
||||
searchParams.append('embeddingModel', embeddingModel!);
|
||||
searchParams.append('embeddingModelProvider', embeddingModelProvider);
|
||||
searchParameters.append("embeddingModel", embeddingModel!);
|
||||
searchParameters.append("embeddingModelProvider", embeddingModelProvider);
|
||||
|
||||
wsURL.search = searchParams.toString();
|
||||
wsURL.search = searchParameters.toString();
|
||||
|
||||
const ws = new WebSocket(wsURL.toString());
|
||||
|
||||
|
@ -113,30 +83,29 @@ const useSocket = (
|
|||
if (ws.readyState !== 1) {
|
||||
ws.close();
|
||||
setError(true);
|
||||
toast.error(
|
||||
'Failed to connect to the server. Please try again later.',
|
||||
);
|
||||
toast.error("Failed to connect to the server. Please try again later.");
|
||||
}
|
||||
}, 10000);
|
||||
}, 10_000);
|
||||
|
||||
ws.onopen = () => {
|
||||
console.log('[DEBUG] open');
|
||||
ws.addEventListener("open", () => {
|
||||
console.log("[DEBUG] open");
|
||||
clearTimeout(timeoutId);
|
||||
setError(false);
|
||||
setIsWSReady(true);
|
||||
};
|
||||
});
|
||||
|
||||
// eslint-disable-next-line unicorn/prefer-add-event-listener
|
||||
ws.onerror = () => {
|
||||
clearTimeout(timeoutId);
|
||||
setError(true);
|
||||
toast.error('WebSocket connection error.');
|
||||
toast.error("WebSocket connection error.");
|
||||
};
|
||||
|
||||
ws.onclose = () => {
|
||||
ws.addEventListener("close", () => {
|
||||
clearTimeout(timeoutId);
|
||||
setError(true);
|
||||
console.log('[DEBUG] closed');
|
||||
};
|
||||
console.log("[DEBUG] closed");
|
||||
});
|
||||
|
||||
setWs(ws);
|
||||
};
|
||||
|
@ -146,7 +115,7 @@ const useSocket = (
|
|||
|
||||
return () => {
|
||||
ws?.close();
|
||||
console.log('[DEBUG] closed');
|
||||
console.log("[DEBUG] closed");
|
||||
};
|
||||
}, [ws, url, setIsWSReady, setError]);
|
||||
|
||||
|
@ -161,15 +130,12 @@ const loadMessages = async (
|
|||
setFocusMode: (mode: string) => void,
|
||||
setNotFound: (notFound: boolean) => void,
|
||||
) => {
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`, {
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
if (res.status === 404) {
|
||||
setNotFound(true);
|
||||
|
@ -179,20 +145,20 @@ const loadMessages = async (
|
|||
|
||||
const data = await res.json();
|
||||
|
||||
const messages = data.messages.map((msg: any) => {
|
||||
const messages = data.messages.map((message: any) => {
|
||||
return {
|
||||
...msg,
|
||||
...JSON.parse(msg.metadata),
|
||||
...message,
|
||||
...JSON.parse(message.metadata),
|
||||
};
|
||||
}) as Message[];
|
||||
|
||||
setMessages(messages);
|
||||
|
||||
const history = messages.map((msg) => {
|
||||
return [msg.role, msg.content];
|
||||
const history = messages.map(message => {
|
||||
return [message.role, message.content];
|
||||
}) as [string, string][];
|
||||
|
||||
console.log('[DEBUG] messages loaded');
|
||||
console.log("[DEBUG] messages loaded");
|
||||
|
||||
document.title = messages[0].content;
|
||||
|
||||
|
@ -202,8 +168,8 @@ const loadMessages = async (
|
|||
};
|
||||
|
||||
const ChatWindow = ({ id }: { id?: string }) => {
|
||||
const searchParams = useSearchParams();
|
||||
const initialMessage = searchParams.get('q');
|
||||
const searchParameters = useSearchParams();
|
||||
const initialMessage = searchParameters.get("q");
|
||||
|
||||
const [chatId, setChatId] = useState<string | undefined>(id);
|
||||
const [newChatCreated, setNewChatCreated] = useState(false);
|
||||
|
@ -212,11 +178,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
const [isReady, setIsReady] = useState(false);
|
||||
|
||||
const [isWSReady, setIsWSReady] = useState(false);
|
||||
const ws = useSocket(
|
||||
process.env.NEXT_PUBLIC_WS_URL!,
|
||||
setIsWSReady,
|
||||
setHasError,
|
||||
);
|
||||
const ws = useSocket(process.env.NEXT_PUBLIC_WS_URL!, setIsWSReady, setHasError);
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [messageAppeared, setMessageAppeared] = useState(false);
|
||||
|
@ -224,39 +186,27 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
|
||||
const [messages, setMessages] = useState<Message[]>([]);
|
||||
|
||||
const [focusMode, setFocusMode] = useState('webSearch');
|
||||
const [focusMode, setFocusMode] = useState("webSearch");
|
||||
|
||||
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
|
||||
|
||||
const [notFound, setNotFound] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
chatId &&
|
||||
!newChatCreated &&
|
||||
!isMessagesLoaded &&
|
||||
messages.length === 0
|
||||
) {
|
||||
loadMessages(
|
||||
chatId,
|
||||
setMessages,
|
||||
setIsMessagesLoaded,
|
||||
setChatHistory,
|
||||
setFocusMode,
|
||||
setNotFound,
|
||||
);
|
||||
if (chatId && !newChatCreated && !isMessagesLoaded && messages.length === 0) {
|
||||
loadMessages(chatId, setMessages, setIsMessagesLoaded, setChatHistory, setFocusMode, setNotFound);
|
||||
} else if (!chatId) {
|
||||
setNewChatCreated(true);
|
||||
setIsMessagesLoaded(true);
|
||||
setChatId(crypto.randomBytes(20).toString('hex'));
|
||||
setChatId(crypto.randomBytes(20).toString("hex"));
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const messagesRef = useRef<Message[]>([]);
|
||||
const messagesReference = useRef<Message[]>([]);
|
||||
|
||||
useEffect(() => {
|
||||
messagesRef.current = messages;
|
||||
messagesReference.current = messages;
|
||||
}, [messages]);
|
||||
|
||||
useEffect(() => {
|
||||
|
@ -270,31 +220,31 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
setLoading(true);
|
||||
setMessageAppeared(false);
|
||||
|
||||
let sources: Document[] | undefined = undefined;
|
||||
let recievedMessage = '';
|
||||
let sources: Document[] | undefined;
|
||||
let recievedMessage = "";
|
||||
let added = false;
|
||||
|
||||
const messageId = crypto.randomBytes(7).toString('hex');
|
||||
const messageId = crypto.randomBytes(7).toString("hex");
|
||||
|
||||
ws?.send(
|
||||
JSON.stringify({
|
||||
type: 'message',
|
||||
type: "message",
|
||||
message: {
|
||||
chatId: chatId!,
|
||||
content: message,
|
||||
},
|
||||
focusMode: focusMode,
|
||||
history: [...chatHistory, ['human', message]],
|
||||
history: [...chatHistory, ["human", message]],
|
||||
}),
|
||||
);
|
||||
|
||||
setMessages((prevMessages) => [
|
||||
...prevMessages,
|
||||
setMessages(previousMessages => [
|
||||
...previousMessages,
|
||||
{
|
||||
content: message,
|
||||
messageId: messageId,
|
||||
chatId: chatId!,
|
||||
role: 'user',
|
||||
role: "user",
|
||||
createdAt: new Date(),
|
||||
},
|
||||
]);
|
||||
|
@ -302,22 +252,22 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
const messageHandler = async (e: MessageEvent) => {
|
||||
const data = JSON.parse(e.data);
|
||||
|
||||
if (data.type === 'error') {
|
||||
if (data.type === "error") {
|
||||
toast.error(data.data);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (data.type === 'sources') {
|
||||
if (data.type === "sources") {
|
||||
sources = data.data;
|
||||
if (!added) {
|
||||
setMessages((prevMessages) => [
|
||||
...prevMessages,
|
||||
setMessages(previousMessages => [
|
||||
...previousMessages,
|
||||
{
|
||||
content: '',
|
||||
content: "",
|
||||
messageId: data.messageId,
|
||||
chatId: chatId!,
|
||||
role: 'assistant',
|
||||
role: "assistant",
|
||||
sources: sources,
|
||||
createdAt: new Date(),
|
||||
},
|
||||
|
@ -327,15 +277,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
setMessageAppeared(true);
|
||||
}
|
||||
|
||||
if (data.type === 'message') {
|
||||
if (data.type === "message") {
|
||||
if (!added) {
|
||||
setMessages((prevMessages) => [
|
||||
...prevMessages,
|
||||
setMessages(previousMessages => [
|
||||
...previousMessages,
|
||||
{
|
||||
content: data.data,
|
||||
messageId: data.messageId,
|
||||
chatId: chatId!,
|
||||
role: 'assistant',
|
||||
role: "assistant",
|
||||
sources: sources,
|
||||
createdAt: new Date(),
|
||||
},
|
||||
|
@ -343,8 +293,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
added = true;
|
||||
}
|
||||
|
||||
setMessages((prev) =>
|
||||
prev.map((message) => {
|
||||
setMessages(previous =>
|
||||
previous.map(message => {
|
||||
if (message.messageId === data.messageId) {
|
||||
return { ...message, content: message.content + data.data };
|
||||
}
|
||||
|
@ -357,52 +307,51 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
setMessageAppeared(true);
|
||||
}
|
||||
|
||||
if (data.type === 'messageEnd') {
|
||||
setChatHistory((prevHistory) => [
|
||||
...prevHistory,
|
||||
['human', message],
|
||||
['assistant', recievedMessage],
|
||||
]);
|
||||
if (data.type === "messageEnd") {
|
||||
setChatHistory(previousHistory => [...previousHistory, ["human", message], ["assistant", recievedMessage]]);
|
||||
|
||||
ws?.removeEventListener('message', messageHandler);
|
||||
ws?.removeEventListener("message", messageHandler);
|
||||
setLoading(false);
|
||||
|
||||
const lastMsg = messagesRef.current[messagesRef.current.length - 1];
|
||||
const lastMessage = messagesReference.current.at(-1);
|
||||
|
||||
if (
|
||||
lastMsg.role === 'assistant' &&
|
||||
lastMsg.sources &&
|
||||
lastMsg.sources.length > 0 &&
|
||||
!lastMsg.suggestions
|
||||
lastMessage &&
|
||||
lastMessage.role === "assistant" &&
|
||||
lastMessage.sources &&
|
||||
lastMessage.sources.length > 0 &&
|
||||
!lastMessage.suggestions
|
||||
) {
|
||||
const suggestions = await getSuggestions(messagesRef.current);
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) => {
|
||||
if (msg.messageId === lastMsg.messageId) {
|
||||
return { ...msg, suggestions: suggestions };
|
||||
const suggestions = await getSuggestions(messagesReference.current);
|
||||
setMessages(previous =>
|
||||
previous.map(message_ => {
|
||||
if (message_.messageId === lastMessage.messageId) {
|
||||
return { ...message_, suggestions: suggestions };
|
||||
}
|
||||
return msg;
|
||||
return message_;
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ws?.addEventListener('message', messageHandler);
|
||||
ws?.addEventListener("message", messageHandler);
|
||||
};
|
||||
|
||||
const rewrite = (messageId: string) => {
|
||||
const index = messages.findIndex((msg) => msg.messageId === messageId);
|
||||
const index = messages.findIndex(message_ => message_.messageId === messageId);
|
||||
|
||||
if (index === -1) return;
|
||||
|
||||
const message = messages[index - 1];
|
||||
|
||||
setMessages((prev) => {
|
||||
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
|
||||
setMessages(previous => {
|
||||
// eslint-disable-next-line unicorn/no-useless-spread
|
||||
return [...previous.slice(0, messages.length > 2 ? index - 1 : 0)];
|
||||
});
|
||||
setChatHistory((prev) => {
|
||||
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
|
||||
setChatHistory(previous => {
|
||||
// eslint-disable-next-line unicorn/no-useless-spread
|
||||
return [...previous.slice(0, messages.length > 2 ? index - 1 : 0)];
|
||||
});
|
||||
|
||||
sendMessage(message.content);
|
||||
|
@ -442,11 +391,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
/>
|
||||
</>
|
||||
) : (
|
||||
<EmptyChat
|
||||
sendMessage={sendMessage}
|
||||
focusMode={focusMode}
|
||||
setFocusMode={setFocusMode}
|
||||
/>
|
||||
<EmptyChat sendMessage={sendMessage} focusMode={focusMode} setFocusMode={setFocusMode} />
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { Delete, Trash } from 'lucide-react';
|
||||
import { Dialog, Transition } from '@headlessui/react';
|
||||
import { Fragment, useState } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
import { Chat } from '@/app/library/page';
|
||||
import { Delete, Trash } from "lucide-react";
|
||||
import { Dialog, Transition } from "@headlessui/react";
|
||||
import { Fragment, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { Chat } from "@/app/library/page";
|
||||
|
||||
const DeleteChat = ({
|
||||
chatId,
|
||||
|
@ -19,25 +19,23 @@ const DeleteChat = ({
|
|||
const handleDelete = async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`, {
|
||||
method: "DELETE",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
if (res.status != 200) {
|
||||
throw new Error('Failed to delete chat');
|
||||
throw new Error("Failed to delete chat");
|
||||
}
|
||||
|
||||
const newChats = chats.filter((chat) => chat.id !== chatId);
|
||||
const newChats = chats.filter(chat => chat.id !== chatId);
|
||||
|
||||
setChats(newChats);
|
||||
} catch (err: any) {
|
||||
toast.error(err.message);
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (error: any) {
|
||||
toast.error(error.message);
|
||||
} finally {
|
||||
setConfirmationDialogOpen(false);
|
||||
setLoading(false);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import EmptyChatMessageInput from './EmptyChatMessageInput';
|
||||
import EmptyChatMessageInput from "./EmptyChatMessageInput";
|
||||
|
||||
const EmptyChat = ({
|
||||
sendMessage,
|
||||
|
@ -12,14 +12,8 @@ const EmptyChat = ({
|
|||
return (
|
||||
<div className="relative">
|
||||
<div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
|
||||
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
|
||||
Research begins here.
|
||||
</h2>
|
||||
<EmptyChatMessageInput
|
||||
sendMessage={sendMessage}
|
||||
focusMode={focusMode}
|
||||
setFocusMode={setFocusMode}
|
||||
/>
|
||||
<h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">Research begins here.</h2>
|
||||
<EmptyChatMessageInput sendMessage={sendMessage} focusMode={focusMode} setFocusMode={setFocusMode} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { ArrowRight } from 'lucide-react';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import TextareaAutosize from 'react-textarea-autosize';
|
||||
import CopilotToggle from './MessageInputActions/Copilot';
|
||||
import Focus from './MessageInputActions/Focus';
|
||||
import { ArrowRight } from "lucide-react";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import TextareaAutosize from "react-textarea-autosize";
|
||||
import CopilotToggle from "./MessageInputActions/Copilot";
|
||||
import Focus from "./MessageInputActions/Focus";
|
||||
|
||||
const EmptyChatMessageInput = ({
|
||||
sendMessage,
|
||||
|
@ -14,46 +14,46 @@ const EmptyChatMessageInput = ({
|
|||
setFocusMode: (mode: string) => void;
|
||||
}) => {
|
||||
const [copilotEnabled, setCopilotEnabled] = useState(false);
|
||||
const [message, setMessage] = useState('');
|
||||
const [message, setMessage] = useState("");
|
||||
|
||||
const inputRef = useRef<HTMLTextAreaElement | null>(null);
|
||||
const inputReference = useRef<HTMLTextAreaElement | null>(null);
|
||||
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
if (e.key === '/') {
|
||||
if (e.key === "/") {
|
||||
e.preventDefault();
|
||||
inputRef.current?.focus();
|
||||
inputReference.current?.focus();
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
document.addEventListener('keydown', handleKeyDown);
|
||||
document.addEventListener("keydown", handleKeyDown);
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('keydown', handleKeyDown);
|
||||
document.removeEventListener("keydown", handleKeyDown);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<form
|
||||
onSubmit={(e) => {
|
||||
onSubmit={e => {
|
||||
e.preventDefault();
|
||||
sendMessage(message);
|
||||
setMessage('');
|
||||
setMessage("");
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
onKeyDown={e => {
|
||||
if (e.key === "Enter" && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
sendMessage(message);
|
||||
setMessage('');
|
||||
setMessage("");
|
||||
}
|
||||
}}
|
||||
className="w-full"
|
||||
>
|
||||
<div className="flex flex-col bg-light-secondary dark:bg-dark-secondary px-5 pt-5 pb-2 rounded-lg w-full border border-light-200 dark:border-dark-200">
|
||||
<TextareaAutosize
|
||||
ref={inputRef}
|
||||
ref={inputReference}
|
||||
value={message}
|
||||
onChange={(e) => setMessage(e.target.value)}
|
||||
onChange={e => setMessage(e.target.value)}
|
||||
minRows={2}
|
||||
className="bg-transparent placeholder:text-black/50 dark:placeholder:text-white/50 text-sm text-black dark:text-white resize-none focus:outline-none w-full max-h-24 lg:max-h-36 xl:max-h-48"
|
||||
placeholder="Ask anything..."
|
||||
|
@ -64,10 +64,7 @@ const EmptyChatMessageInput = ({
|
|||
{/* <Attach /> */}
|
||||
</div>
|
||||
<div className="flex flex-row items-center space-x-4 -mx-2">
|
||||
<CopilotToggle
|
||||
copilotEnabled={copilotEnabled}
|
||||
setCopilotEnabled={setCopilotEnabled}
|
||||
/>
|
||||
<CopilotToggle copilotEnabled={copilotEnabled} setCopilotEnabled={setCopilotEnabled} />
|
||||
<button
|
||||
disabled={message.trim().length === 0}
|
||||
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 disabled:bg-[#e0e0dc] dark:disabled:bg-[#ececec21] hover:bg-opacity-85 transition duration-100 rounded-full p-2"
|
||||
|
|
|
@ -1,20 +1,15 @@
|
|||
import { Check, ClipboardList } from 'lucide-react';
|
||||
import { Message } from '../ChatWindow';
|
||||
import { useState } from 'react';
|
||||
import { Check, ClipboardList } from "lucide-react";
|
||||
import { Message } from "../ChatWindow";
|
||||
import { useState } from "react";
|
||||
|
||||
const Copy = ({
|
||||
message,
|
||||
initialMessage,
|
||||
}: {
|
||||
message: Message;
|
||||
initialMessage: string;
|
||||
}) => {
|
||||
const Copy = ({ message, initialMessage }: { message: Message; initialMessage: string }) => {
|
||||
const [copied, setCopied] = useState(false);
|
||||
|
||||
return (
|
||||
<button
|
||||
onClick={() => {
|
||||
const contentToCopy = `${initialMessage}${message.sources && message.sources.length > 0 && `\n\nCitations:\n${message.sources?.map((source: any, i: any) => `[${i + 1}] ${source.metadata.url}`).join(`\n`)}`}`;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const contentToCopy = `${initialMessage}${message.sources && message.sources.length > 0 && `\n\nCitations:\n${message.sources?.map((source: any, index: any) => `[${index + 1}] ${source.metadata.url}`).join(`\n`)}`}`;
|
||||
navigator.clipboard.writeText(contentToCopy);
|
||||
setCopied(true);
|
||||
setTimeout(() => setCopied(false), 1000);
|
||||
|
|
|
@ -1,12 +1,6 @@
|
|||
import { ArrowLeftRight } from 'lucide-react';
|
||||
import { ArrowLeftRight } from "lucide-react";
|
||||
|
||||
const Rewrite = ({
|
||||
rewrite,
|
||||
messageId,
|
||||
}: {
|
||||
rewrite: (messageId: string) => void;
|
||||
messageId: string;
|
||||
}) => {
|
||||
const Rewrite = ({ rewrite, messageId }: { rewrite: (messageId: string) => void; messageId: string }) => {
|
||||
return (
|
||||
<button
|
||||
onClick={() => rewrite(messageId)}
|
||||
|
|
|
@ -1,24 +1,16 @@
|
|||
'use client';
|
||||
"use client";
|
||||
|
||||
/* eslint-disable @next/next/no-img-element */
|
||||
import React, { MutableRefObject, useEffect, useState } from 'react';
|
||||
import { Message } from './ChatWindow';
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
BookCopy,
|
||||
Disc3,
|
||||
Volume2,
|
||||
StopCircle,
|
||||
Layers3,
|
||||
Plus,
|
||||
} from 'lucide-react';
|
||||
import Markdown from 'markdown-to-jsx';
|
||||
import Copy from './MessageActions/Copy';
|
||||
import Rewrite from './MessageActions/Rewrite';
|
||||
import MessageSources from './MessageSources';
|
||||
import SearchImages from './SearchImages';
|
||||
import SearchVideos from './SearchVideos';
|
||||
import { useSpeech } from 'react-text-to-speech';
|
||||
import React, { MutableRefObject, useEffect, useState } from "react";
|
||||
import { Message } from "./ChatWindow";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { BookCopy, Disc3, Volume2, StopCircle, Layers3, Plus } from "lucide-react";
|
||||
import Markdown from "markdown-to-jsx";
|
||||
import Copy from "./MessageActions/Copy";
|
||||
import Rewrite from "./MessageActions/Rewrite";
|
||||
import MessageSources from "./MessageSources";
|
||||
import SearchImages from "./SearchImages";
|
||||
import SearchVideos from "./SearchVideos";
|
||||
import { useSpeech } from "react-text-to-speech";
|
||||
|
||||
const MessageBox = ({
|
||||
message,
|
||||
|
@ -43,15 +35,11 @@ const MessageBox = ({
|
|||
const [speechMessage, setSpeechMessage] = useState(message.content);
|
||||
|
||||
useEffect(() => {
|
||||
const regex = /\[(\d+)\]/g;
|
||||
const regex = /\[(\d+)]/g;
|
||||
|
||||
if (
|
||||
message.role === 'assistant' &&
|
||||
message?.sources &&
|
||||
message.sources.length > 0
|
||||
) {
|
||||
if (message.role === "assistant" && message?.sources && message.sources.length > 0) {
|
||||
return setParsedMessage(
|
||||
message.content.replace(
|
||||
message.content.replaceAll(
|
||||
regex,
|
||||
(_, number) =>
|
||||
`<a href="${message.sources?.[number - 1]?.metadata?.url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
|
||||
|
@ -59,7 +47,7 @@ const MessageBox = ({
|
|||
);
|
||||
}
|
||||
|
||||
setSpeechMessage(message.content.replace(regex, ''));
|
||||
setSpeechMessage(message.content.replaceAll(regex, ""));
|
||||
setParsedMessage(message.content);
|
||||
}, [message.content, message.sources, message.role]);
|
||||
|
||||
|
@ -67,27 +55,20 @@ const MessageBox = ({
|
|||
|
||||
return (
|
||||
<div>
|
||||
{message.role === 'user' && (
|
||||
<div className={cn('w-full', messageIndex === 0 ? 'pt-16' : 'pt-8')}>
|
||||
<h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
|
||||
{message.content}
|
||||
</h2>
|
||||
{message.role === "user" && (
|
||||
<div className={cn("w-full", messageIndex === 0 ? "pt-16" : "pt-8")}>
|
||||
<h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">{message.content}</h2>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{message.role === 'assistant' && (
|
||||
{message.role === "assistant" && (
|
||||
<div className="flex flex-col space-y-9 lg:space-y-0 lg:flex-row lg:justify-between lg:space-x-9">
|
||||
<div
|
||||
ref={dividerRef}
|
||||
className="flex flex-col space-y-6 w-full lg:w-9/12"
|
||||
>
|
||||
<div ref={dividerRef} className="flex flex-col space-y-6 w-full lg:w-9/12">
|
||||
{message.sources && message.sources.length > 0 && (
|
||||
<div className="flex flex-col space-y-2">
|
||||
<div className="flex flex-row items-center space-x-2">
|
||||
<BookCopy className="text-black dark:text-white" size={20} />
|
||||
<h3 className="text-black dark:text-white font-medium text-xl">
|
||||
Sources
|
||||
</h3>
|
||||
<h3 className="text-black dark:text-white font-medium text-xl">Sources</h3>
|
||||
</div>
|
||||
<MessageSources sources={message.sources} />
|
||||
</div>
|
||||
|
@ -95,20 +76,15 @@ const MessageBox = ({
|
|||
<div className="flex flex-col space-y-2">
|
||||
<div className="flex flex-row items-center space-x-2">
|
||||
<Disc3
|
||||
className={cn(
|
||||
'text-black dark:text-white',
|
||||
isLast && loading ? 'animate-spin' : 'animate-none',
|
||||
)}
|
||||
className={cn("text-black dark:text-white", isLast && loading ? "animate-spin" : "animate-none")}
|
||||
size={20}
|
||||
/>
|
||||
<h3 className="text-black dark:text-white font-medium text-xl">
|
||||
Answer
|
||||
</h3>
|
||||
<h3 className="text-black dark:text-white font-medium text-xl">Answer</h3>
|
||||
</div>
|
||||
<Markdown
|
||||
className={cn(
|
||||
'prose dark:prose-invert prose-p:leading-relaxed prose-pre:p-0',
|
||||
'max-w-none break-words text-black dark:text-white text-sm md:text-base font-medium',
|
||||
"prose dark:prose-invert prose-p:leading-relaxed prose-pre:p-0",
|
||||
"max-w-none break-words text-black dark:text-white text-sm md:text-base font-medium",
|
||||
)}
|
||||
>
|
||||
{parsedMessage}
|
||||
|
@ -125,7 +101,7 @@ const MessageBox = ({
|
|||
<Copy initialMessage={message.content} message={message} />
|
||||
<button
|
||||
onClick={() => {
|
||||
if (speechStatus === 'started') {
|
||||
if (speechStatus === "started") {
|
||||
stop();
|
||||
} else {
|
||||
start();
|
||||
|
@ -133,11 +109,7 @@ const MessageBox = ({
|
|||
}}
|
||||
className="p-2 text-black/70 dark:text-white/70 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary transition duration-200 hover:text-black dark:hover:text-white"
|
||||
>
|
||||
{speechStatus === 'started' ? (
|
||||
<StopCircle size={18} />
|
||||
) : (
|
||||
<Volume2 size={18} />
|
||||
)}
|
||||
{speechStatus === "started" ? <StopCircle size={18} /> : <Volume2 size={18} />}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -145,7 +117,7 @@ const MessageBox = ({
|
|||
{isLast &&
|
||||
message.suggestions &&
|
||||
message.suggestions.length > 0 &&
|
||||
message.role === 'assistant' &&
|
||||
message.role === "assistant" &&
|
||||
!loading && (
|
||||
<>
|
||||
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
|
||||
|
@ -155,11 +127,8 @@ const MessageBox = ({
|
|||
<h3 className="text-xl font-medium">Related</h3>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-3">
|
||||
{message.suggestions.map((suggestion, i) => (
|
||||
<div
|
||||
className="flex flex-col space-y-3 text-sm"
|
||||
key={i}
|
||||
>
|
||||
{message.suggestions.map((suggestion, index) => (
|
||||
<div className="flex flex-col space-y-3 text-sm" key={index}>
|
||||
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
|
||||
<div
|
||||
onClick={() => {
|
||||
|
@ -167,13 +136,8 @@ const MessageBox = ({
|
|||
}}
|
||||
className="cursor-pointer flex flex-row justify-between font-medium space-x-2 items-center"
|
||||
>
|
||||
<p className="transition duration-200 hover:text-[#24A0ED]">
|
||||
{suggestion}
|
||||
</p>
|
||||
<Plus
|
||||
size={20}
|
||||
className="text-[#24A0ED] flex-shrink-0"
|
||||
/>
|
||||
<p className="transition duration-200 hover:text-[#24A0ED]">{suggestion}</p>
|
||||
<Plus size={20} className="text-[#24A0ED] flex-shrink-0" />
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
@ -184,14 +148,8 @@ const MessageBox = ({
|
|||
</div>
|
||||
</div>
|
||||
<div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
|
||||
<SearchImages
|
||||
query={history[messageIndex - 1].content}
|
||||
chat_history={history.slice(0, messageIndex - 1)}
|
||||
/>
|
||||
<SearchVideos
|
||||
chat_history={history.slice(0, messageIndex - 1)}
|
||||
query={history[messageIndex - 1].content}
|
||||
/>
|
||||
<SearchImages query={history[messageIndex - 1].content} chat_history={history.slice(0, messageIndex - 1)} />
|
||||
<SearchVideos chat_history={history.slice(0, messageIndex - 1)} query={history[messageIndex - 1].content} />
|
||||
</div>
|
||||
</div>
|
||||
)}

@@ -1,84 +1,75 @@
import { cn } from '@/lib/utils';
import { ArrowUp } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import Attach from './MessageInputActions/Attach';
import CopilotToggle from './MessageInputActions/Copilot';
import { cn } from "@/lib/utils";
import { ArrowUp } from "lucide-react";
import { useEffect, useRef, useState } from "react";
import TextareaAutosize from "react-textarea-autosize";
import Attach from "./MessageInputActions/Attach";
import CopilotToggle from "./MessageInputActions/Copilot";

const MessageInput = ({
sendMessage,
loading,
}: {
sendMessage: (message: string) => void;
loading: boolean;
}) => {
const MessageInput = ({ sendMessage, loading }: { sendMessage: (message: string) => void; loading: boolean }) => {
const [copilotEnabled, setCopilotEnabled] = useState(false);
const [message, setMessage] = useState('');
const [message, setMessage] = useState("");
const [textareaRows, setTextareaRows] = useState(1);
const [mode, setMode] = useState<'multi' | 'single'>('single');
const [mode, setMode] = useState<"multi" | "single">("single");

useEffect(() => {
|
||||
if (textareaRows >= 2 && message && mode === 'single') {
|
||||
setMode('multi');
|
||||
} else if (!message && mode === 'multi') {
|
||||
setMode('single');
|
||||
if (textareaRows >= 2 && message && mode === "single") {
|
||||
setMode("multi");
|
||||
} else if (!message && mode === "multi") {
|
||||
setMode("single");
|
||||
}
|
||||
}, [textareaRows, mode, message]);

const inputRef = useRef<HTMLTextAreaElement | null>(null);
const inputReference = useRef<HTMLTextAreaElement | null>(null);

const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === '/') {
if (e.key === "/") {
e.preventDefault();
inputRef.current?.focus();
inputReference.current?.focus();
}
};

useEffect(() => {
document.addEventListener('keydown', handleKeyDown);
document.addEventListener("keydown", handleKeyDown);

return () => {
document.removeEventListener('keydown', handleKeyDown);
document.removeEventListener("keydown", handleKeyDown);
};
}, []);

return (
|
||||
<form
|
||||
onSubmit={(e) => {
|
||||
onSubmit={e => {
|
||||
if (loading) return;
|
||||
e.preventDefault();
|
||||
sendMessage(message);
|
||||
setMessage('');
|
||||
setMessage("");
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey && !loading) {
|
||||
onKeyDown={e => {
|
||||
if (e.key === "Enter" && !e.shiftKey && !loading) {
|
||||
e.preventDefault();
|
||||
sendMessage(message);
|
||||
setMessage('');
|
||||
setMessage("");
|
||||
}
|
||||
}}
|
||||
className={cn(
|
||||
'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-hidden border border-light-200 dark:border-dark-200',
|
||||
mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full',
|
||||
"bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-hidden border border-light-200 dark:border-dark-200",
|
||||
mode === "multi" ? "flex-col rounded-lg" : "flex-row rounded-full",
|
||||
)}
|
||||
>
|
||||
{mode === 'single' && <Attach />}
|
||||
{mode === "single" && <Attach />}
|
||||
<TextareaAutosize
|
||||
ref={inputRef}
|
||||
ref={inputReference}
|
||||
value={message}
|
||||
onChange={(e) => setMessage(e.target.value)}
|
||||
onHeightChange={(height, props) => {
|
||||
setTextareaRows(Math.ceil(height / props.rowHeight));
|
||||
onChange={e => setMessage(e.target.value)}
|
||||
onHeightChange={(height, properties) => {
|
||||
setTextareaRows(Math.ceil(height / properties.rowHeight));
|
||||
}}
|
||||
className="transition bg-transparent dark:placeholder:text-white/50 placeholder:text-sm text-sm dark:text-white resize-none focus:outline-none w-full px-2 max-h-24 lg:max-h-36 xl:max-h-48 flex-grow flex-shrink"
|
||||
placeholder="Ask a follow-up"
|
||||
/>
|
||||
{mode === 'single' && (
|
||||
{mode === "single" && (
|
||||
<div className="flex flex-row items-center space-x-4">
|
||||
<CopilotToggle
|
||||
copilotEnabled={copilotEnabled}
|
||||
setCopilotEnabled={setCopilotEnabled}
|
||||
/>
|
||||
<CopilotToggle copilotEnabled={copilotEnabled} setCopilotEnabled={setCopilotEnabled} />
|
||||
<button
|
||||
disabled={message.trim().length === 0 || loading}
|
||||
className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
|
||||
|
@ -87,14 +78,11 @@ const MessageInput = ({
|
|||
</button>
|
||||
</div>
|
||||
)}
|
||||
{mode === 'multi' && (
|
||||
{mode === "multi" && (
|
||||
<div className="flex flex-row items-center justify-between w-full pt-2">
|
||||
<Attach />
|
||||
<div className="flex flex-row items-center space-x-4">
|
||||
<CopilotToggle
|
||||
copilotEnabled={copilotEnabled}
|
||||
setCopilotEnabled={setCopilotEnabled}
|
||||
/>
|
||||
<CopilotToggle copilotEnabled={copilotEnabled} setCopilotEnabled={setCopilotEnabled} />
|
||||
<button
|
||||
disabled={message.trim().length === 0 || loading}
|
||||
className="bg-[#24A0ED] text-white text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"

@@ -1,4 +1,4 @@
import { CopyPlus } from 'lucide-react';
import { CopyPlus } from "lucide-react";

const Attach = () => {
return (

@@ -1,5 +1,5 @@
import { cn } from '@/lib/utils';
import { Switch } from '@headlessui/react';
import { cn } from "@/lib/utils";
import { Switch } from "@headlessui/react";

const CopilotToggle = ({
copilotEnabled,

@@ -18,20 +18,18 @@ const CopilotToggle = ({
<span className="sr-only">Copilot</span>
|
||||
<span
|
||||
className={cn(
|
||||
copilotEnabled
|
||||
? 'translate-x-6 bg-[#24A0ED]'
|
||||
: 'translate-x-1 bg-black/50 dark:bg-white/50',
|
||||
'inline-block h-3 w-3 sm:h-4 sm:w-4 transform rounded-full transition-all duration-200',
|
||||
copilotEnabled ? "translate-x-6 bg-[#24A0ED]" : "translate-x-1 bg-black/50 dark:bg-white/50",
|
||||
"inline-block h-3 w-3 sm:h-4 sm:w-4 transform rounded-full transition-all duration-200",
|
||||
)}
|
||||
/>
|
||||
</Switch>
|
||||
<p
|
||||
onClick={() => setCopilotEnabled(!copilotEnabled)}
|
||||
className={cn(
|
||||
'text-xs font-medium transition-colors duration-150 ease-in-out',
|
||||
"text-xs font-medium transition-colors duration-150 ease-in-out",
|
||||
copilotEnabled
|
||||
? 'text-[#24A0ED]'
|
||||
: 'text-black/50 dark:text-white/50 group-hover:text-black dark:group-hover:text-white',
|
||||
? "text-[#24A0ED]"
|
||||
: "text-black/50 dark:text-white/50 group-hover:text-black dark:group-hover:text-white",
|
||||
)}
|
||||
>
|
||||
Copilot

@@ -1,90 +1,67 @@
import {
BadgePercent,
ChevronDown,
Globe,
Pencil,
ScanEye,
SwatchBook,
} from 'lucide-react';
import { cn } from '@/lib/utils';
import { Popover, Transition } from '@headlessui/react';
import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
import { Fragment } from 'react';
import { BadgePercent, ChevronDown, Globe, Pencil, ScanEye, SwatchBook } from "lucide-react";
import { cn } from "@/lib/utils";
import { Popover, Transition } from "@headlessui/react";
import { SiReddit, SiYoutube } from "@icons-pack/react-simple-icons";
import { Fragment } from "react";

const focusModes = [
|
||||
{
|
||||
key: 'webSearch',
|
||||
title: 'All',
|
||||
description: 'Searches across all of the internet',
|
||||
key: "webSearch",
|
||||
title: "All",
|
||||
description: "Searches across all of the internet",
|
||||
icon: <Globe size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'academicSearch',
|
||||
title: 'Academic',
|
||||
description: 'Search in published academic papers',
|
||||
key: "academicSearch",
|
||||
title: "Academic",
|
||||
description: "Search in published academic papers",
|
||||
icon: <SwatchBook size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'writingAssistant',
|
||||
title: 'Writing',
|
||||
description: 'Chat without searching the web',
|
||||
key: "writingAssistant",
|
||||
title: "Writing",
|
||||
description: "Chat without searching the web",
|
||||
icon: <Pencil size={16} />,
|
||||
},
|
||||
{
|
||||
key: 'wolframAlphaSearch',
|
||||
title: 'Wolfram Alpha',
|
||||
description: 'Computational knowledge engine',
|
||||
key: "wolframAlphaSearch",
|
||||
title: "Wolfram Alpha",
|
||||
description: "Computational knowledge engine",
|
||||
icon: <BadgePercent size={20} />,
|
||||
},
|
||||
{
|
||||
key: 'youtubeSearch',
|
||||
title: 'Youtube',
|
||||
description: 'Search and watch videos',
|
||||
key: "youtubeSearch",
|
||||
title: "Youtube",
|
||||
description: "Search and watch videos",
|
||||
icon: (
|
||||
<SiYoutube
|
||||
className="h-5 w-auto mr-0.5"
|
||||
onPointerEnterCapture={undefined}
|
||||
onPointerLeaveCapture={undefined}
|
||||
/>
|
||||
<SiYoutube className="h-5 w-auto mr-0.5" onPointerEnterCapture={undefined} onPointerLeaveCapture={undefined} />
|
||||
),
|
||||
},
|
||||
{
|
||||
key: 'redditSearch',
|
||||
title: 'Reddit',
|
||||
description: 'Search for discussions and opinions',
|
||||
key: "redditSearch",
|
||||
title: "Reddit",
|
||||
description: "Search for discussions and opinions",
|
||||
icon: (
|
||||
<SiReddit
|
||||
className="h-5 w-auto mr-0.5"
|
||||
onPointerEnterCapture={undefined}
|
||||
onPointerLeaveCapture={undefined}
|
||||
/>
|
||||
<SiReddit className="h-5 w-auto mr-0.5" onPointerEnterCapture={undefined} onPointerLeaveCapture={undefined} />
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
const Focus = ({
|
||||
focusMode,
|
||||
setFocusMode,
|
||||
}: {
|
||||
focusMode: string;
|
||||
setFocusMode: (mode: string) => void;
|
||||
}) => {
|
||||
const Focus = ({ focusMode, setFocusMode }: { focusMode: string; setFocusMode: (mode: string) => void }) => {
|
||||
return (
|
||||
<Popover className="fixed w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
|
||||
<Popover.Button
|
||||
type="button"
|
||||
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
|
||||
>
|
||||
{focusMode !== 'webSearch' ? (
|
||||
{focusMode === "webSearch" ? (
|
||||
<ScanEye />
|
||||
) : (
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
{focusModes.find((mode) => mode.key === focusMode)?.icon}
|
||||
<p className="text-xs font-medium">
|
||||
{focusModes.find((mode) => mode.key === focusMode)?.title}
|
||||
</p>
|
||||
{focusModes.find(mode => mode.key === focusMode)?.icon}
|
||||
<p className="text-xs font-medium">{focusModes.find(mode => mode.key === focusMode)?.title}</p>
|
||||
<ChevronDown size={20} />
|
||||
</div>
|
||||
) : (
|
||||
<ScanEye />
|
||||
)}
|
||||
</Popover.Button>
|
||||
<Transition
|
||||
|
@ -98,31 +75,27 @@ const Focus = ({
|
|||
>
|
||||
<Popover.Panel className="absolute z-10 w-full">
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-1 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-2 max-h-[200px] md:max-h-none overflow-y-auto">
|
||||
{focusModes.map((mode, i) => (
|
||||
{focusModes.map((mode, index) => (
|
||||
<Popover.Button
|
||||
onClick={() => setFocusMode(mode.key)}
|
||||
key={i}
|
||||
key={index}
|
||||
className={cn(
|
||||
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition',
|
||||
"p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-2 duration-200 cursor-pointer transition",
|
||||
focusMode === mode.key
|
||||
? 'bg-light-secondary dark:bg-dark-secondary'
|
||||
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
|
||||
? "bg-light-secondary dark:bg-dark-secondary"
|
||||
: "hover:bg-light-secondary dark:hover:bg-dark-secondary",
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'flex flex-row items-center space-x-1',
|
||||
focusMode === mode.key
|
||||
? 'text-[#24A0ED]'
|
||||
: 'text-black dark:text-white',
|
||||
"flex flex-row items-center space-x-1",
|
||||
focusMode === mode.key ? "text-[#24A0ED]" : "text-black dark:text-white",
|
||||
)}
|
||||
>
|
||||
{mode.icon}
|
||||
<p className="text-sm font-medium">{mode.title}</p>
|
||||
</div>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">
|
||||
{mode.description}
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">{mode.description}</p>
|
||||
</Popover.Button>
|
||||
))}
|
||||
</div>

@@ -1,29 +1,29 @@
/* eslint-disable @next/next/no-img-element */
import { Dialog, Transition } from '@headlessui/react';
import { Document } from '@langchain/core/documents';
import { Fragment, useState } from 'react';
import { Dialog, Transition } from "@headlessui/react";
import { Document } from "@langchain/core/documents";
import { Fragment, useState } from "react";

const MessageSources = ({ sources }: { sources: Document[] }) => {
|
||||
const [isDialogOpen, setIsDialogOpen] = useState(false);
|
||||
|
||||
const closeModal = () => {
|
||||
setIsDialogOpen(false);
|
||||
document.body.classList.remove('overflow-hidden-scrollable');
|
||||
document.body.classList.remove("overflow-hidden-scrollable");
|
||||
};
|
||||
|
||||
const openModal = () => {
|
||||
setIsDialogOpen(true);
|
||||
document.body.classList.add('overflow-hidden-scrollable');
|
||||
document.body.classList.add("overflow-hidden-scrollable");
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="grid grid-cols-2 lg:grid-cols-4 gap-2">
|
||||
{sources.slice(0, 3).map((source, i) => (
|
||||
{sources.slice(0, 3).map((source, index) => (
|
||||
<a
|
||||
className="bg-light-100 hover:bg-light-200 dark:bg-dark-100 dark:hover:bg-dark-200 transition duration-200 rounded-lg p-3 flex flex-col space-y-2 font-medium"
|
||||
key={i}
|
||||
key={index}
|
||||
href={source.metadata.url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
<p className="dark:text-white text-xs overflow-hidden whitespace-nowrap text-ellipsis">
|
||||
{source.metadata.title}
|
||||
|
@ -38,12 +38,12 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
|
|||
className="rounded-lg h-4 w-4"
|
||||
/>
|
||||
<p className="text-xs text-black/50 dark:text-white/50 overflow-hidden whitespace-nowrap text-ellipsis">
|
||||
{source.metadata.url.replace(/.+\/\/|www.|\..+/g, '')}
|
||||
{source.metadata.url.replaceAll(/.+\/\/|www.|\..+/g, "")}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex flex-row items-center space-x-1 text-black/50 dark:text-white/50 text-xs">
|
||||
<div className="bg-black/50 dark:bg-white/50 h-[4px] w-[4px] rounded-full" />
|
||||
<span>{i + 1}</span>
|
||||
<span>{index + 1}</span>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
|
@ -54,20 +54,18 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
|
|||
className="bg-light-100 hover:bg-light-200 dark:bg-dark-100 dark:hover:bg-dark-200 transition duration-200 rounded-lg p-3 flex flex-col space-y-2 font-medium"
|
||||
>
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
{sources.slice(3, 6).map((source, i) => (
|
||||
{sources.slice(3, 6).map((source, index) => (
|
||||
<img
|
||||
src={`https://s2.googleusercontent.com/s2/favicons?domain_url=${source.metadata.url}`}
|
||||
width={16}
|
||||
height={16}
|
||||
alt="favicon"
|
||||
className="rounded-lg h-4 w-4"
|
||||
key={i}
|
||||
key={index}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-xs text-black/50 dark:text-white/50">
|
||||
View {sources.length - 3} more
|
||||
</p>
|
||||
<p className="text-xs text-black/50 dark:text-white/50">View {sources.length - 3} more</p>
|
||||
</button>
|
||||
)}
|
||||
<Transition appear show={isDialogOpen} as={Fragment}>
|
||||
|
@ -84,16 +82,15 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
|
|||
leaveTo="opacity-0 scale-95"
|
||||
>
|
||||
<Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
|
||||
<Dialog.Title className="text-lg font-medium leading-6 dark:text-white">
|
||||
Sources
|
||||
</Dialog.Title>
|
||||
<Dialog.Title className="text-lg font-medium leading-6 dark:text-white">Sources</Dialog.Title>
|
||||
<div className="grid grid-cols-2 gap-2 overflow-auto max-h-[300px] mt-2 pr-2">
|
||||
{sources.map((source, i) => (
|
||||
{sources.map((source, index) => (
|
||||
<a
|
||||
className="bg-light-secondary hover:bg-light-200 dark:bg-dark-secondary dark:hover:bg-dark-200 border border-light-200 dark:border-dark-200 transition duration-200 rounded-lg p-3 flex flex-col space-y-2 font-medium"
|
||||
key={i}
|
||||
key={index}
|
||||
href={source.metadata.url}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
<p className="dark:text-white text-xs overflow-hidden whitespace-nowrap text-ellipsis">
|
||||
{source.metadata.title}
|
||||
|
@ -108,15 +105,12 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
|
|||
className="rounded-lg h-4 w-4"
|
||||
/>
|
||||
<p className="text-xs text-black/50 dark:text-white/50 overflow-hidden whitespace-nowrap text-ellipsis">
|
||||
{source.metadata.url.replace(
|
||||
/.+\/\/|www.|\..+/g,
|
||||
'',
|
||||
)}
|
||||
{source.metadata.url.replaceAll(/.+\/\/|www.|\..+/g, "")}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex flex-row items-center space-x-1 text-black/50 dark:text-white/50 text-xs">
|
||||
<div className="bg-black/50 dark:bg-white/50 h-[4px] w-[4px] rounded-full" />
|
||||
<span>{i + 1}</span>
|
||||
<span>{index + 1}</span>
|
||||
</div>
|
||||
</div>
|
||||
</a>

@@ -1,23 +1,18 @@
import { Clock, Edit, Share, Trash } from 'lucide-react';
import { Message } from './ChatWindow';
import { useEffect, useState } from 'react';
import { formatTimeDifference } from '@/lib/utils';
import { Clock, Edit, Share, Trash } from "lucide-react";
import { Message } from "./ChatWindow";
import { useEffect, useState } from "react";
import { formatTimeDifference } from "@/lib/utils";

const Navbar = ({ messages }: { messages: Message[] }) => {
|
||||
const [title, setTitle] = useState<string>('');
|
||||
const [timeAgo, setTimeAgo] = useState<string>('');
|
||||
const [title, setTitle] = useState<string>("");
|
||||
const [timeAgo, setTimeAgo] = useState<string>("");
|
||||
|
||||
useEffect(() => {
|
||||
if (messages.length > 0) {
|
||||
const newTitle =
|
||||
messages[0].content.length > 20
|
||||
? `${messages[0].content.substring(0, 20).trim()}...`
|
||||
: messages[0].content;
|
||||
messages[0].content.length > 20 ? `${messages[0].content.slice(0, 20).trim()}...` : messages[0].content;
|
||||
setTitle(newTitle);
|
||||
const newTimeAgo = formatTimeDifference(
|
||||
new Date(),
|
||||
messages[0].createdAt,
|
||||
);
|
||||
const newTimeAgo = formatTimeDifference(new Date(), messages[0].createdAt);
|
||||
setTimeAgo(newTimeAgo);
|
||||
}
|
||||
}, [messages]);
|
||||
|
@ -25,10 +20,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => {
|
|||
useEffect(() => {
|
||||
const intervalId = setInterval(() => {
|
||||
if (messages.length > 0) {
|
||||
const newTimeAgo = formatTimeDifference(
|
||||
new Date(),
|
||||
messages[0].createdAt,
|
||||
);
|
||||
const newTimeAgo = formatTimeDifference(new Date(), messages[0].createdAt);
|
||||
setTimeAgo(newTimeAgo);
|
||||
}
|
||||
}, 1000);
|
||||
|
@ -39,10 +31,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => {
|
|||
|
||||
return (
|
||||
<div className="fixed z-40 top-0 left-0 right-0 px-4 lg:pl-[104px] lg:pr-6 lg:px-8 flex flex-row items-center justify-between w-full py-4 text-sm text-black dark:text-white/70 border-b bg-light-primary dark:bg-dark-primary border-light-100 dark:border-dark-200">
|
||||
<Edit
|
||||
size={17}
|
||||
className="active:scale-95 transition duration-100 cursor-pointer lg:hidden"
|
||||
/>
|
||||
<Edit size={17} className="active:scale-95 transition duration-100 cursor-pointer lg:hidden" />
|
||||
<div className="hidden lg:flex flex-row items-center justify-center space-x-2">
|
||||
<Clock size={17} />
|
||||
<p className="text-xs">{timeAgo} ago</p>
|
||||
|
@ -50,14 +39,8 @@ const Navbar = ({ messages }: { messages: Message[] }) => {
|
|||
<p className="hidden lg:flex">{title}</p>
|
||||
|
||||
<div className="flex flex-row items-center space-x-4">
|
||||
<Share
|
||||
size={17}
|
||||
className="active:scale-95 transition duration-100 cursor-pointer"
|
||||
/>
|
||||
<Trash
|
||||
size={17}
|
||||
className="text-red-400 active:scale-95 transition duration-100 cursor-pointer"
|
||||
/>
|
||||
<Share size={17} className="active:scale-95 transition duration-100 cursor-pointer" />
|
||||
<Trash size={17} className="text-red-400 active:scale-95 transition duration-100 cursor-pointer" />
|
||||
</div>
|
||||
</div>
|
||||
);

@@ -1,9 +1,8 @@
/* eslint-disable @next/next/no-img-element */
import { ImagesIcon, PlusIcon } from 'lucide-react';
import { useState } from 'react';
import Lightbox from 'yet-another-react-lightbox';
import 'yet-another-react-lightbox/styles.css';
import { Message } from './ChatWindow';
import { ImagesIcon, PlusIcon } from "lucide-react";
import { useState } from "react";
import Lightbox from "yet-another-react-lightbox";
import "yet-another-react-lightbox/styles.css";
import { Message } from "./ChatWindow";

type Image = {
url: string;

@@ -11,16 +10,11 @@ type Image = {
title: string;
};

const SearchImages = ({
query,
chat_history,
}: {
query: string;
chat_history: Message[];
}) => {
const SearchImages = ({ query, chat_history }: { query: string; chat_history: Message[] }) => {
const [images, setImages] = useState<Image[] | null>(null);
const [loading, setLoading] = useState(false);
const [open, setOpen] = useState(false);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const [slides, setSlides] = useState<any[]>([]);

return (
|
||||
|
@ -30,24 +24,21 @@ const SearchImages = ({
|
|||
onClick={async () => {
setLoading(true);

const chatModelProvider = localStorage.getItem('chatModelProvider');
const chatModel = localStorage.getItem('chatModel');
const chatModelProvider = localStorage.getItem("chatModelProvider");
const chatModel = localStorage.getItem("chatModel");

const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/images`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: query,
chat_history: chat_history,
chat_model_provider: chatModelProvider,
chat_model: chatModel,
}),
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/images`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
);
body: JSON.stringify({
query: query,
chat_history: chat_history,
chat_model_provider: chatModelProvider,
chat_model: chatModel,
}),
});

const data = await res.json();
|
||||
|
||||
|
@ -73,9 +64,9 @@ const SearchImages = ({
|
|||
)}
|
||||
{loading && (
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{[...Array(4)].map((_, i) => (
|
||||
{Array.from({ length: 4 }).map((_, index) => (
|
||||
<div
|
||||
key={i}
|
||||
key={index}
|
||||
className="bg-light-secondary dark:bg-dark-secondary h-32 w-full rounded-lg animate-pulse aspect-video object-cover"
|
||||
/>
|
||||
))}
|
||||
|
@ -85,33 +76,25 @@ const SearchImages = ({
|
|||
<>
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{images.length > 4
|
||||
? images.slice(0, 3).map((image, i) => (
|
||||
? images.slice(0, 3).map((image, index) => (
|
||||
<img
|
||||
onClick={() => {
|
||||
setOpen(true);
|
||||
setSlides([
|
||||
slides[i],
|
||||
...slides.slice(0, i),
|
||||
...slides.slice(i + 1),
|
||||
]);
|
||||
setSlides([slides[index], ...slides.slice(0, index), ...slides.slice(index + 1)]);
|
||||
}}
|
||||
key={i}
|
||||
key={index}
|
||||
src={image.img_src}
|
||||
alt={image.title}
|
||||
className="h-full w-full aspect-video object-cover rounded-lg transition duration-200 active:scale-95 hover:scale-[1.02] cursor-zoom-in"
|
||||
/>
|
||||
))
|
||||
: images.map((image, i) => (
|
||||
: images.map((image, index) => (
|
||||
<img
|
||||
onClick={() => {
|
||||
setOpen(true);
|
||||
setSlides([
|
||||
slides[i],
|
||||
...slides.slice(0, i),
|
||||
...slides.slice(i + 1),
|
||||
]);
|
||||
setSlides([slides[index], ...slides.slice(0, index), ...slides.slice(index + 1)]);
|
||||
}}
|
||||
key={i}
|
||||
key={index}
|
||||
src={image.img_src}
|
||||
alt={image.title}
|
||||
className="h-full w-full aspect-video object-cover rounded-lg transition duration-200 active:scale-95 hover:scale-[1.02] cursor-zoom-in"
|
||||
|
@ -123,18 +106,16 @@ const SearchImages = ({
|
|||
className="bg-light-100 hover:bg-light-200 dark:bg-dark-100 dark:hover:bg-dark-200 transition duration-200 active:scale-95 hover:scale-[1.02] h-auto w-full rounded-lg flex flex-col justify-between text-white p-2"
|
||||
>
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
{images.slice(3, 6).map((image, i) => (
|
||||
{images.slice(3, 6).map((image, index) => (
|
||||
<img
|
||||
key={i}
|
||||
key={index}
|
||||
src={image.img_src}
|
||||
alt={image.title}
|
||||
className="h-6 w-12 rounded-md lg:h-3 lg:w-6 lg:rounded-sm aspect-video object-cover"
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">
|
||||
View {images.length - 3} more
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">View {images.length - 3} more</p>
|
||||
</button>
|
||||
)}
|
||||
</div>

@@ -1,9 +1,8 @@
/* eslint-disable @next/next/no-img-element */
import { PlayCircle, PlayIcon, PlusIcon, VideoIcon } from 'lucide-react';
import { useState } from 'react';
import Lightbox, { GenericSlide, VideoSlide } from 'yet-another-react-lightbox';
import 'yet-another-react-lightbox/styles.css';
import { Message } from './ChatWindow';
import { PlayCircle, PlayIcon, PlusIcon, VideoIcon } from "lucide-react";
import { useState } from "react";
import Lightbox, { GenericSlide, VideoSlide } from "yet-another-react-lightbox";
import "yet-another-react-lightbox/styles.css";
import { Message } from "./ChatWindow";

type Video = {
url: string;

@@ -12,25 +11,19 @@ type Video = {
iframe_src: string;
};

declare module 'yet-another-react-lightbox' {
declare module "yet-another-react-lightbox" {
export interface VideoSlide extends GenericSlide {
type: 'video-slide';
type: "video-slide";
src: string;
iframe_src: string;
}

interface SlideTypes {
'video-slide': VideoSlide;
"video-slide": VideoSlide;
}
}

const Searchvideos = ({
query,
chat_history,
}: {
query: string;
chat_history: Message[];
}) => {
const Searchvideos = ({ query, chat_history }: { query: string; chat_history: Message[] }) => {
const [videos, setVideos] = useState<Video[] | null>(null);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [open, setOpen] = useState(false);
|
||||
|
@ -43,24 +36,21 @@ const Searchvideos = ({
|
|||
onClick={async () => {
|
||||
setLoading(true);
|
||||
|
||||
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||
const chatModel = localStorage.getItem('chatModel');
|
||||
const chatModelProvider = localStorage.getItem("chatModelProvider");
|
||||
const chatModel = localStorage.getItem("chatModel");
|
||||
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/videos`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: query,
|
||||
chat_history: chat_history,
|
||||
chat_model_provider: chatModelProvider,
|
||||
chat_model: chatModel,
|
||||
}),
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/videos`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
);
|
||||
body: JSON.stringify({
|
||||
query: query,
|
||||
chat_history: chat_history,
|
||||
chat_model_provider: chatModelProvider,
|
||||
chat_model: chatModel,
|
||||
}),
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
|
||||
|
@ -69,7 +59,7 @@ const Searchvideos = ({
|
|||
setSlides(
|
||||
videos.map((video: Video) => {
|
||||
return {
|
||||
type: 'video-slide',
|
||||
type: "video-slide",
|
||||
iframe_src: video.iframe_src,
|
||||
src: video.img_src,
|
||||
};
|
||||
|
@ -88,9 +78,9 @@ const Searchvideos = ({
|
|||
)}
|
||||
{loading && (
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{[...Array(4)].map((_, i) => (
|
||||
{Array.from({ length: 4 }).map((_, index) => (
|
||||
<div
|
||||
key={i}
|
||||
key={index}
|
||||
className="bg-light-secondary dark:bg-dark-secondary h-32 w-full rounded-lg animate-pulse aspect-video object-cover"
|
||||
/>
|
||||
))}
|
||||
|
@ -100,18 +90,14 @@ const Searchvideos = ({
|
|||
<>
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{videos.length > 4
|
||||
? videos.slice(0, 3).map((video, i) => (
|
||||
? videos.slice(0, 3).map((video, index) => (
|
||||
<div
|
||||
onClick={() => {
|
||||
setOpen(true);
|
||||
setSlides([
|
||||
slides[i],
|
||||
...slides.slice(0, i),
|
||||
...slides.slice(i + 1),
|
||||
]);
|
||||
setSlides([slides[index], ...slides.slice(0, index), ...slides.slice(index + 1)]);
|
||||
}}
|
||||
className="relative transition duration-200 active:scale-95 hover:scale-[1.02] cursor-pointer"
|
||||
key={i}
|
||||
key={index}
|
||||
>
|
||||
<img
|
||||
src={video.img_src}
|
||||
|
@ -124,18 +110,14 @@ const Searchvideos = ({
|
|||
</div>
|
||||
</div>
|
||||
))
|
||||
: videos.map((video, i) => (
|
||||
: videos.map((video, index) => (
|
||||
<div
|
||||
onClick={() => {
|
||||
setOpen(true);
|
||||
setSlides([
|
||||
slides[i],
|
||||
...slides.slice(0, i),
|
||||
...slides.slice(i + 1),
|
||||
]);
|
||||
setSlides([slides[index], ...slides.slice(0, index), ...slides.slice(index + 1)]);
|
||||
}}
|
||||
className="relative transition duration-200 active:scale-95 hover:scale-[1.02] cursor-pointer"
|
||||
key={i}
|
||||
key={index}
|
||||
>
|
||||
<img
|
||||
src={video.img_src}
|
||||
|
@ -154,18 +136,16 @@ const Searchvideos = ({
|
|||
className="bg-light-100 hover:bg-light-200 dark:bg-dark-100 dark:hover:bg-dark-200 transition duration-200 active:scale-95 hover:scale-[1.02] h-auto w-full rounded-lg flex flex-col justify-between text-white p-2"
|
||||
>
|
||||
<div className="flex flex-row items-center space-x-1">
|
||||
{videos.slice(3, 6).map((video, i) => (
|
||||
{videos.slice(3, 6).map((video, index) => (
|
||||
<img
|
||||
key={i}
|
||||
key={index}
|
||||
src={video.img_src}
|
||||
alt={video.title}
|
||||
className="h-6 w-12 rounded-md lg:h-3 lg:w-6 lg:rounded-sm aspect-video object-cover"
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">
|
||||
View {videos.length - 3} more
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-xs">View {videos.length - 3} more</p>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
@ -175,7 +155,7 @@ const Searchvideos = ({
|
|||
slides={slides}
|
||||
render={{
|
||||
slide: ({ slide }) =>
|
||||
slide.type === 'video-slide' ? (
|
||||
slide.type === "video-slide" ? (
|
||||
<div className="h-full w-full flex flex-row items-center justify-center">
|
||||
<iframe
|
||||
src={slide.iframe_src}

@@ -1,38 +1,34 @@
import { cn } from '@/lib/utils';
import { Dialog, Transition } from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, {
Fragment,
useEffect,
useState,
type SelectHTMLAttributes,
} from 'react';
import ThemeSwitcher from './theme/Switcher';
/* eslint-disable unicorn/no-nested-ternary */
import { cn } from "@/lib/utils";
import { Dialog, Transition } from "@headlessui/react";
import { CloudUpload, RefreshCcw, RefreshCw } from "lucide-react";
import React, { Fragment, useEffect, useState, type SelectHTMLAttributes } from "react";
import ThemeSwitcher from "./theme/Switcher";

interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
|
||||
interface InputProperties extends React.InputHTMLAttributes<HTMLInputElement> {}
|
||||
|
||||
const Input = ({ className, ...restProps }: InputProps) => {
|
||||
const Input = ({ className, ...restProperties }: InputProperties) => {
|
||||
return (
|
||||
<input
|
||||
{...restProps}
|
||||
{...restProperties}
|
||||
className={cn(
|
||||
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
|
||||
"bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm",
|
||||
className,
|
||||
)}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
|
||||
interface SelectProperties extends SelectHTMLAttributes<HTMLSelectElement> {
|
||||
options: { value: string; label: string; disabled?: boolean }[];
|
||||
}
|
||||
|
||||
export const Select = ({ className, options, ...restProps }: SelectProps) => {
|
||||
export const Select = ({ className, options, ...restProperties }: SelectProperties) => {
|
||||
return (
|
||||
<select
|
||||
{...restProps}
|
||||
{...restProperties}
|
||||
className={cn(
|
||||
'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
|
||||
"bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm",
|
||||
className,
|
||||
)}
|
||||
>
|
||||
|
@ -59,27 +55,14 @@ interface SettingsType {
|
|||
ollamaApiUrl: string;
|
||||
}
|
||||
|
||||
const SettingsDialog = ({
|
||||
isOpen,
|
||||
setIsOpen,
|
||||
}: {
|
||||
isOpen: boolean;
|
||||
setIsOpen: (isOpen: boolean) => void;
|
||||
}) => {
|
||||
const SettingsDialog = ({ isOpen, setIsOpen }: { isOpen: boolean; setIsOpen: (isOpen: boolean) => void }) => {
|
||||
const [config, setConfig] = useState<SettingsType | null>(null);
|
||||
const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
|
||||
string | null
|
||||
>(null);
|
||||
const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
|
||||
null,
|
||||
);
|
||||
const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
|
||||
useState<string | null>(null);
|
||||
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
|
||||
string | null
|
||||
>(null);
|
||||
const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>('');
|
||||
const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>('');
|
||||
const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<string | null>(null);
|
||||
const [selectedChatModel, setSelectedChatModel] = useState<string | null>(null);
|
||||
const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] = useState<string | null>(null);
|
||||
const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<string | null>(null);
|
||||
const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState<string>("");
|
||||
const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState<string>("");
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
|
||||
|
@ -89,52 +72,38 @@ const SettingsDialog = ({
|
|||
setIsLoading(true);
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
const data = (await res.json()) as SettingsType;
|
||||
setConfig(data);
|
||||
|
||||
const chatModelProvidersKeys = Object.keys(
|
||||
data.chatModelProviders || {},
|
||||
);
|
||||
const embeddingModelProvidersKeys = Object.keys(
|
||||
data.embeddingModelProviders || {},
|
||||
);
|
||||
const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
|
||||
const embeddingModelProvidersKeys = Object.keys(data.embeddingModelProviders || {});
|
||||
|
||||
const defaultChatModelProvider =
|
||||
chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
|
||||
const defaultChatModelProvider = chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : "";
|
||||
const defaultEmbeddingModelProvider =
|
||||
embeddingModelProvidersKeys.length > 0
|
||||
? embeddingModelProvidersKeys[0]
|
||||
: '';
|
||||
embeddingModelProvidersKeys.length > 0 ? embeddingModelProvidersKeys[0] : "";
|
||||
|
||||
const chatModelProvider =
|
||||
localStorage.getItem('chatModelProvider') ||
|
||||
defaultChatModelProvider ||
|
||||
'';
|
||||
const chatModelProvider = localStorage.getItem("chatModelProvider") || defaultChatModelProvider || "";
|
||||
const chatModel =
|
||||
localStorage.getItem('chatModel') ||
|
||||
(data.chatModelProviders &&
|
||||
data.chatModelProviders[chatModelProvider]?.[0]) ||
|
||||
'';
|
||||
localStorage.getItem("chatModel") ||
|
||||
(data.chatModelProviders && data.chatModelProviders[chatModelProvider]?.[0]) ||
|
||||
"";
|
||||
const embeddingModelProvider =
|
||||
localStorage.getItem('embeddingModelProvider') ||
|
||||
defaultEmbeddingModelProvider ||
|
||||
'';
|
||||
localStorage.getItem("embeddingModelProvider") || defaultEmbeddingModelProvider || "";
|
||||
const embeddingModel =
|
||||
localStorage.getItem('embeddingModel') ||
|
||||
(data.embeddingModelProviders &&
|
||||
data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
|
||||
'';
|
||||
localStorage.getItem("embeddingModel") ||
|
||||
(data.embeddingModelProviders && data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
|
||||
"";
|
||||
|
||||
setSelectedChatModelProvider(chatModelProvider);
|
||||
setSelectedChatModel(chatModel);
|
||||
setSelectedEmbeddingModelProvider(embeddingModelProvider);
|
||||
setSelectedEmbeddingModel(embeddingModel);
|
||||
setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
|
||||
setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
|
||||
setCustomOpenAIApiKey(localStorage.getItem("openAIApiKey") || "");
|
||||
setCustomOpenAIBaseURL(localStorage.getItem("openAIBaseURL") || "");
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
|
@ -148,24 +117,21 @@ const SettingsDialog = ({
|
|||
|
||||
try {
|
||||
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
|
||||
method: 'POST',
|
||||
method: "POST",
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(config),
|
||||
});
|
||||
|
||||
localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
|
||||
localStorage.setItem('chatModel', selectedChatModel!);
|
||||
localStorage.setItem(
|
||||
'embeddingModelProvider',
|
||||
selectedEmbeddingModelProvider!,
|
||||
);
|
||||
localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
|
||||
localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
|
||||
localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
localStorage.setItem("chatModelProvider", selectedChatModelProvider!);
|
||||
localStorage.setItem("chatModel", selectedChatModel!);
|
||||
localStorage.setItem("embeddingModelProvider", selectedEmbeddingModelProvider!);
|
||||
localStorage.setItem("embeddingModel", selectedEmbeddingModel!);
|
||||
localStorage.setItem("openAIApiKey", customOpenAIApiKey!);
|
||||
localStorage.setItem("openAIBaseURL", customOpenAIBaseURL!);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
} finally {
|
||||
setIsUpdating(false);
|
||||
setIsOpen(false);
|
||||
|
@ -176,11 +142,7 @@ const SettingsDialog = ({
|
|||
|
||||
return (
|
||||
<Transition appear show={isOpen} as={Fragment}>
|
||||
<Dialog
|
||||
as="div"
|
||||
className="relative z-50"
|
||||
onClose={() => setIsOpen(false)}
|
||||
>
|
||||
<Dialog as="div" className="relative z-50" onClose={() => setIsOpen(false)}>
|
||||
<Transition.Child
|
||||
as={Fragment}
|
||||
enter="ease-out duration-300"
|
||||
|
@ -204,147 +166,105 @@ const SettingsDialog = ({
|
|||
leaveTo="opacity-0 scale-95"
|
||||
>
|
||||
<Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
|
||||
<Dialog.Title className="text-xl font-medium leading-6 dark:text-white">
|
||||
Settings
|
||||
</Dialog.Title>
|
||||
<Dialog.Title className="text-xl font-medium leading-6 dark:text-white">Settings</Dialog.Title>
|
||||
{config && !isLoading && (
|
||||
<div className="flex flex-col space-y-4 mt-6">
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Theme
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Theme</p>
|
||||
<ThemeSwitcher />
|
||||
</div>
|
||||
{config.chatModelProviders && (
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Chat model Provider
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Chat model Provider</p>
|
||||
<Select
|
||||
value={selectedChatModelProvider ?? undefined}
|
||||
onChange={(e) => {
|
||||
onChange={e => {
|
||||
setSelectedChatModelProvider(e.target.value);
|
||||
setSelectedChatModel(
|
||||
config.chatModelProviders[e.target.value][0],
|
||||
);
|
||||
setSelectedChatModel(config.chatModelProviders[e.target.value][0]);
|
||||
}}
|
||||
options={Object.keys(config.chatModelProviders).map(
|
||||
(provider) => ({
|
||||
value: provider,
|
||||
label:
|
||||
provider.charAt(0).toUpperCase() +
|
||||
provider.slice(1),
|
||||
}),
|
||||
)}
|
||||
options={Object.keys(config.chatModelProviders).map(provider => ({
|
||||
value: provider,
|
||||
label: provider.charAt(0).toUpperCase() + provider.slice(1),
|
||||
}))}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{selectedChatModelProvider &&
|
||||
selectedChatModelProvider != 'custom_openai' && (
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Chat Model
|
||||
</p>
|
||||
<Select
|
||||
value={selectedChatModel ?? undefined}
|
||||
onChange={(e) =>
|
||||
setSelectedChatModel(e.target.value)
|
||||
}
|
||||
options={(() => {
|
||||
const chatModelProvider =
|
||||
config.chatModelProviders[
|
||||
selectedChatModelProvider
|
||||
];
|
||||
{selectedChatModelProvider && selectedChatModelProvider != "custom_openai" && (
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Chat Model</p>
|
||||
<Select
|
||||
value={selectedChatModel ?? undefined}
|
||||
onChange={e => setSelectedChatModel(e.target.value)}
|
||||
options={(() => {
|
||||
const chatModelProvider = config.chatModelProviders[selectedChatModelProvider];
|
||||
|
||||
return chatModelProvider
|
||||
? chatModelProvider.length > 0
|
||||
? chatModelProvider.map((model) => ({
|
||||
value: model,
|
||||
label: model,
|
||||
}))
|
||||
: [
|
||||
{
|
||||
value: '',
|
||||
label: 'No models available',
|
||||
disabled: true,
|
||||
},
|
||||
]
|
||||
return chatModelProvider
|
||||
? chatModelProvider.length > 0
|
||||
? chatModelProvider.map(model => ({
|
||||
value: model,
|
||||
label: model,
|
||||
}))
|
||||
: [
|
||||
{
|
||||
value: '',
|
||||
label:
|
||||
'Invalid provider, please check backend logs',
|
||||
value: "",
|
||||
label: "No models available",
|
||||
disabled: true,
|
||||
},
|
||||
];
|
||||
})()}
|
||||
]
|
||||
: [
|
||||
{
|
||||
value: "",
|
||||
label: "Invalid provider, please check backend logs",
|
||||
disabled: true,
|
||||
},
|
||||
];
|
||||
})()}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{selectedChatModelProvider && selectedChatModelProvider === "custom_openai" && (
|
||||
<>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Model name</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Model name"
|
||||
defaultValue={selectedChatModel!}
|
||||
onChange={e => setSelectedChatModel(e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{selectedChatModelProvider &&
|
||||
selectedChatModelProvider === 'custom_openai' && (
|
||||
<>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Model name
|
||||
</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Model name"
|
||||
defaultValue={selectedChatModel!}
|
||||
onChange={(e) =>
|
||||
setSelectedChatModel(e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Custom OpenAI API Key
|
||||
</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Custom OpenAI API Key"
|
||||
defaultValue={customOpenAIApiKey!}
|
||||
onChange={(e) =>
|
||||
setCustomOpenAIApiKey(e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Custom OpenAI Base URL
|
||||
</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Custom OpenAI Base URL"
|
||||
defaultValue={customOpenAIBaseURL!}
|
||||
onChange={(e) =>
|
||||
setCustomOpenAIBaseURL(e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Custom OpenAI API Key</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Custom OpenAI API Key"
|
||||
defaultValue={customOpenAIApiKey!}
|
||||
onChange={e => setCustomOpenAIApiKey(e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Custom OpenAI Base URL</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Custom OpenAI Base URL"
|
||||
defaultValue={customOpenAIBaseURL!}
|
||||
onChange={e => setCustomOpenAIBaseURL(e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{/* Embedding models */}
|
||||
{config.embeddingModelProviders && (
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Embedding model Provider
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Embedding model Provider</p>
|
||||
<Select
|
||||
value={selectedEmbeddingModelProvider ?? undefined}
|
||||
onChange={(e) => {
|
||||
onChange={e => {
|
||||
setSelectedEmbeddingModelProvider(e.target.value);
|
||||
setSelectedEmbeddingModel(
|
||||
config.embeddingModelProviders[e.target.value][0],
|
||||
);
|
||||
setSelectedEmbeddingModel(config.embeddingModelProviders[e.target.value][0]);
|
||||
}}
|
||||
options={Object.keys(
|
||||
config.embeddingModelProviders,
|
||||
).map((provider) => ({
|
||||
label:
|
||||
provider.charAt(0).toUpperCase() +
|
||||
provider.slice(1),
|
||||
options={Object.keys(config.embeddingModelProviders).map(provider => ({
|
||||
label: provider.charAt(0).toUpperCase() + provider.slice(1),
|
||||
value: provider,
|
||||
}))}
|
||||
/>
|
||||
|
@ -352,38 +272,31 @@ const SettingsDialog = ({
|
|||
)}
|
||||
{selectedEmbeddingModelProvider && (
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Embedding Model
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Embedding Model</p>
|
||||
<Select
|
||||
value={selectedEmbeddingModel ?? undefined}
|
||||
onChange={(e) =>
|
||||
setSelectedEmbeddingModel(e.target.value)
|
||||
}
|
||||
onChange={e => setSelectedEmbeddingModel(e.target.value)}
|
||||
options={(() => {
|
||||
const embeddingModelProvider =
|
||||
config.embeddingModelProviders[
|
||||
selectedEmbeddingModelProvider
|
||||
];
|
||||
config.embeddingModelProviders[selectedEmbeddingModelProvider];
|
||||
|
||||
return embeddingModelProvider
|
||||
? embeddingModelProvider.length > 0
|
||||
? embeddingModelProvider.map((model) => ({
|
||||
? embeddingModelProvider.map(model => ({
|
||||
label: model,
|
||||
value: model,
|
||||
}))
|
||||
: [
|
||||
{
|
||||
label: 'No embedding models available',
|
||||
value: '',
|
||||
label: "No embedding models available",
|
||||
value: "",
|
||||
disabled: true,
|
||||
},
|
||||
]
|
||||
: [
|
||||
{
|
||||
label:
|
||||
'Invalid provider, please check backend logs',
|
||||
value: '',
|
||||
label: "Invalid provider, please check backend logs",
|
||||
value: "",
|
||||
disabled: true,
|
||||
},
|
||||
];
|
||||
|
@ -392,14 +305,12 @@ const SettingsDialog = ({
|
|||
</div>
|
||||
)}
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
OpenAI API Key
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">OpenAI API Key</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="OpenAI API Key"
|
||||
defaultValue={config.openaiApiKey}
|
||||
onChange={(e) =>
|
||||
onChange={e =>
|
||||
setConfig({
|
||||
...config,
|
||||
openaiApiKey: e.target.value,
|
||||
|
@ -408,14 +319,12 @@ const SettingsDialog = ({
|
|||
/>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
Ollama API URL
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">Ollama API URL</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="Ollama API URL"
|
||||
defaultValue={config.ollamaApiUrl}
|
||||
onChange={(e) =>
|
||||
onChange={e =>
|
||||
setConfig({
|
||||
...config,
|
||||
ollamaApiUrl: e.target.value,
|
||||
|
@ -424,14 +333,12 @@ const SettingsDialog = ({
|
|||
/>
|
||||
</div>
|
||||
<div className="flex flex-col space-y-1">
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">
|
||||
GROQ API Key
|
||||
</p>
|
||||
<p className="text-black/70 dark:text-white/70 text-sm">GROQ API Key</p>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder="GROQ API Key"
|
||||
defaultValue={config.groqApiKey}
|
||||
onChange={(e) =>
|
||||
onChange={e =>
|
||||
setConfig({
|
||||
...config,
|
||||
groqApiKey: e.target.value,
|
||||
|
@ -455,11 +362,7 @@ const SettingsDialog = ({
|
|||
className="bg-[#24A0ED] flex flex-row items-center space-x-2 text-white disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#ececec21] rounded-full px-4 py-2"
|
||||
disabled={isLoading || isUpdating}
|
||||
>
|
||||
{isUpdating ? (
|
||||
<RefreshCw size={20} className="animate-spin" />
|
||||
) : (
|
||||
<CloudUpload size={20} />
|
||||
)}
|
||||
{isUpdating ? <RefreshCw size={20} className="animate-spin" /> : <CloudUpload size={20} />}
|
||||
</button>
|
||||
</div>
|
||||
</Dialog.Panel>

@@ -1,17 +1,15 @@
'use client';
"use client";

import { cn } from '@/lib/utils';
import { BookOpenText, Home, Search, SquarePen, Settings } from 'lucide-react';
import Link from 'next/link';
import { useSelectedLayoutSegments } from 'next/navigation';
import React, { useState, type ReactNode } from 'react';
import Layout from './Layout';
import SettingsDialog from './SettingsDialog';
import { cn } from "@/lib/utils";
import { BookOpenText, Home, Search, SquarePen, Settings } from "lucide-react";
import Link from "next/link";
import { useSelectedLayoutSegments } from "next/navigation";
import React, { useState, type ReactNode } from "react";
import Layout from "./Layout";
import SettingsDialog from "./SettingsDialog";

const VerticalIconContainer = ({ children }: { children: ReactNode }) => {
|
||||
return (
|
||||
<div className="flex flex-col items-center gap-y-3 w-full">{children}</div>
|
||||
);
|
||||
return <div className="flex flex-col items-center gap-y-3 w-full">{children}</div>;
|
||||
};
|
||||
|
||||
const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
||||
|
@ -22,21 +20,21 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
|||
const navLinks = [
|
||||
{
|
||||
icon: Home,
|
||||
href: '/',
|
||||
active: segments.length === 0 || segments.includes('c'),
|
||||
label: 'Home',
|
||||
href: "/",
|
||||
active: segments.length === 0 || segments.includes("c"),
|
||||
label: "Home",
|
||||
},
|
||||
{
|
||||
icon: Search,
|
||||
href: '/discover',
|
||||
active: segments.includes('discover'),
|
||||
label: 'Discover',
|
||||
href: "/discover",
|
||||
active: segments.includes("discover"),
|
||||
label: "Discover",
|
||||
},
|
||||
{
|
||||
icon: BookOpenText,
|
||||
href: '/library',
|
||||
active: segments.includes('library'),
|
||||
label: 'Library',
|
||||
href: "/library",
|
||||
active: segments.includes("library"),
|
||||
label: "Library",
|
||||
},
|
||||
];
|
||||
|
||||
|
@ -48,15 +46,13 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
|||
<SquarePen className="cursor-pointer" />
|
||||
</a>
|
||||
<VerticalIconContainer>
|
||||
{navLinks.map((link, i) => (
|
||||
{navLinks.map((link, index) => (
|
||||
<Link
|
||||
key={i}
|
||||
key={index}
|
||||
href={link.href}
|
||||
className={cn(
|
||||
'relative flex flex-row items-center justify-center cursor-pointer hover:bg-black/10 dark:hover:bg-white/10 duration-150 transition w-full py-2 rounded-lg',
|
||||
link.active
|
||||
? 'text-black dark:text-white'
|
||||
: 'text-black/70 dark:text-white/70',
|
||||
"relative flex flex-row items-center justify-center cursor-pointer hover:bg-black/10 dark:hover:bg-white/10 duration-150 transition w-full py-2 rounded-lg",
|
||||
link.active ? "text-black dark:text-white" : "text-black/70 dark:text-white/70",
|
||||
)}
|
||||
>
|
||||
<link.icon />
|
||||
|
@ -67,33 +63,23 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => {
|
|||
))}
|
||||
</VerticalIconContainer>
|
||||
|
||||
<Settings
|
||||
onClick={() => setIsSettingsOpen(!isSettingsOpen)}
|
||||
className="cursor-pointer"
|
||||
/>
|
||||
<Settings onClick={() => setIsSettingsOpen(!isSettingsOpen)} className="cursor-pointer" />
|
||||
|
||||
<SettingsDialog
|
||||
isOpen={isSettingsOpen}
|
||||
setIsOpen={setIsSettingsOpen}
|
||||
/>
|
||||
<SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="fixed bottom-0 w-full z-50 flex flex-row items-center gap-x-6 bg-light-primary dark:bg-dark-primary px-4 py-4 shadow-sm lg:hidden">
|
||||
{navLinks.map((link, i) => (
|
||||
{navLinks.map((link, index) => (
|
||||
<Link
|
||||
href={link.href}
|
||||
key={i}
|
||||
key={index}
|
||||
className={cn(
|
||||
'relative flex flex-col items-center space-y-1 text-center w-full',
|
||||
link.active
|
||||
? 'text-black dark:text-white'
|
||||
: 'text-black dark:text-white/70',
|
||||
"relative flex flex-col items-center space-y-1 text-center w-full",
|
||||
link.active ? "text-black dark:text-white" : "text-black dark:text-white/70",
|
||||
)}
|
||||
>
|
||||
{link.active && (
|
||||
<div className="absolute top-0 -mt-4 h-1 w-full rounded-b-lg bg-black dark:bg-white" />
|
||||
)}
|
||||
{link.active && <div className="absolute top-0 -mt-4 h-1 w-full rounded-b-lg bg-black dark:bg-white" />}
|
||||
<link.icon />
|
||||
<p className="text-xs">{link.label}</p>
|
||||
</Link>
|
||||
|
|
|
@@ -1,11 +1,7 @@
'use client';
import { ThemeProvider } from 'next-themes';
"use client";
import { ThemeProvider } from "next-themes";

const ThemeProviderComponent = ({
children,
}: {
children: React.ReactNode;
}) => {
const ThemeProviderComponent = ({ children }: { children: React.ReactNode }) => {
return (
<ThemeProvider attribute="class" enableSystem={false} defaultTheme="dark">
{children}
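For context, a minimal sketch of how this provider would typically be mounted in a Next.js app-router layout; the file path and layout below are assumptions for illustration, not part of this diff.

// Hypothetical app/layout.tsx — assumed usage, not included in this commit.
import type { ReactNode } from "react";
import ThemeProviderComponent from "@/components/theme/Provider";

export default function RootLayout({ children }: { children: ReactNode }) {
  return (
    <html lang="en" suppressHydrationWarning>
      <body>
        <ThemeProviderComponent>{children}</ThemeProviderComponent>
      </body>
    </html>
  );
}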
@ -1,10 +1,10 @@
|
|||
'use client';
|
||||
import { useTheme } from 'next-themes';
|
||||
import { SunIcon, MoonIcon, MonitorIcon } from 'lucide-react';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { Select } from '../SettingsDialog';
|
||||
"use client";
|
||||
import { useTheme } from "next-themes";
|
||||
import { SunIcon, MoonIcon, MonitorIcon } from "lucide-react";
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { Select } from "../SettingsDialog";
|
||||
|
||||
type Theme = 'dark' | 'light' | 'system';
|
||||
type Theme = "dark" | "light" | "system";
|
||||
|
||||
const ThemeSwitcher = ({ className }: { className?: string }) => {
|
||||
const [mounted, setMounted] = useState(false);
|
||||
|
@ -22,20 +22,18 @@ const ThemeSwitcher = ({ className }: { className?: string }) => {
|
|||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (isTheme('system')) {
|
||||
const preferDarkScheme = window.matchMedia(
|
||||
'(prefers-color-scheme: dark)',
|
||||
);
|
||||
if (isTheme("system")) {
|
||||
const preferDarkScheme = window.matchMedia("(prefers-color-scheme: dark)");
|
||||
|
||||
const detectThemeChange = (event: MediaQueryListEvent) => {
|
||||
const theme: Theme = event.matches ? 'dark' : 'light';
|
||||
const theme: Theme = event.matches ? "dark" : "light";
|
||||
setTheme(theme);
|
||||
};
|
||||
|
||||
preferDarkScheme.addEventListener('change', detectThemeChange);
|
||||
preferDarkScheme.addEventListener("change", detectThemeChange);
|
||||
|
||||
return () => {
|
||||
preferDarkScheme.removeEventListener('change', detectThemeChange);
|
||||
preferDarkScheme.removeEventListener("change", detectThemeChange);
|
||||
};
|
||||
}
|
||||
}, [isTheme, setTheme, theme]);
|
||||
|
@ -49,10 +47,10 @@ const ThemeSwitcher = ({ className }: { className?: string }) => {
|
|||
<Select
|
||||
className={className}
|
||||
value={theme}
|
||||
onChange={(e) => handleThemeSwitch(e.target.value as Theme)}
|
||||
onChange={e => handleThemeSwitch(e.target.value as Theme)}
|
||||
options={[
|
||||
{ value: 'light', label: 'Light' },
|
||||
{ value: 'dark', label: 'Dark' },
|
||||
{ value: "light", label: "Light" },
|
||||
{ value: "dark", label: "Dark" },
|
||||
]}
|
||||
/>
|
||||
);
|
||||
|
|
|
@@ -1,13 +1,13 @@
import { Message } from '@/components/ChatWindow';
import { Message } from "@/components/ChatWindow";

export const getSuggestions = async (chatHisory: Message[]) => {
const chatModel = localStorage.getItem('chatModel');
const chatModelProvider = localStorage.getItem('chatModelProvider');
const chatModel = localStorage.getItem("chatModel");
const chatModelProvider = localStorage.getItem("chatModelProvider");

const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
method: 'POST',
method: "POST",
headers: {
'Content-Type': 'application/json',
"Content-Type": "application/json",
},
body: JSON.stringify({
chat_history: chatHisory,
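A small usage sketch, assuming the helper is exported from a lib/actions module (the import path and call site are illustrative, not part of this diff): the function reads the selected chat model and provider from localStorage and POSTs the chat history to the backend's /suggestions endpoint.

// Hypothetical call site — illustrative only.
import type { Message } from "@/components/ChatWindow";
import { getSuggestions } from "@/lib/actions";

const loadSuggestions = async (history: Message[]) => {
  // chatModel / chatModelProvider are read from localStorage inside the helper.
  const suggestions = await getSuggestions(history);
  return suggestions;
};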
@@ -1,27 +1,21 @@
import clsx, { ClassValue } from 'clsx';
import { twMerge } from 'tailwind-merge';
import clsx, { ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";

export const cn = (...classes: ClassValue[]) => twMerge(clsx(...classes));

export const formatTimeDifference = (
date1: Date | string,
date2: Date | string,
): string => {
export const formatTimeDifference = (date1: Date | string, date2: Date | string): string => {
date1 = new Date(date1);
date2 = new Date(date2);

const diffInSeconds = Math.floor(
Math.abs(date2.getTime() - date1.getTime()) / 1000,
);
const diffInSeconds = Math.floor(Math.abs(date2.getTime() - date1.getTime()) / 1000);

if (diffInSeconds < 60)
return `${diffInSeconds} second${diffInSeconds !== 1 ? 's' : ''}`;
if (diffInSeconds < 60) return `${diffInSeconds} second${diffInSeconds === 1 ? "" : "s"}`;
else if (diffInSeconds < 3600)
return `${Math.floor(diffInSeconds / 60)} minute${Math.floor(diffInSeconds / 60) !== 1 ? 's' : ''}`;
else if (diffInSeconds < 86400)
return `${Math.floor(diffInSeconds / 3600)} hour${Math.floor(diffInSeconds / 3600) !== 1 ? 's' : ''}`;
else if (diffInSeconds < 31536000)
return `${Math.floor(diffInSeconds / 86400)} day${Math.floor(diffInSeconds / 86400) !== 1 ? 's' : ''}`;
return `${Math.floor(diffInSeconds / 60)} minute${Math.floor(diffInSeconds / 60) === 1 ? "" : "s"}`;
else if (diffInSeconds < 86_400)
return `${Math.floor(diffInSeconds / 3600)} hour${Math.floor(diffInSeconds / 3600) === 1 ? "" : "s"}`;
else if (diffInSeconds < 31_536_000)
return `${Math.floor(diffInSeconds / 86_400)} day${Math.floor(diffInSeconds / 86_400) === 1 ? "" : "s"}`;
else
return `${Math.floor(diffInSeconds / 31536000)} year${Math.floor(diffInSeconds / 31536000) !== 1 ? 's' : ''}`;
return `${Math.floor(diffInSeconds / 31_536_000)} year${Math.floor(diffInSeconds / 31_536_000) === 1 ? "" : "s"}`;
};
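A quick sketch of what these two helpers do (example values only): cn merges conditional class names through clsx and resolves conflicting Tailwind utilities via tailwind-merge, while formatTimeDifference buckets the absolute gap between two dates into seconds, minutes, hours, days, or years.

// Usage sketch for the helpers above — example values only.
import { cn, formatTimeDifference } from "@/lib/utils";

const classes = cn("px-2 text-sm", false && "hidden", "px-4"); // "text-sm px-4"

formatTimeDifference("2024-04-01T00:00:00Z", "2024-04-01T00:00:45Z"); // "45 seconds"
formatTimeDifference("2024-04-01", "2024-04-03"); // "2 days"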
39
ui/next.config.js
Normal file
@@ -0,0 +1,39 @@
/* eslint-disable unicorn/prefer-module */
// eslint-disable-next-line no-undef, @typescript-eslint/no-var-requires
const webpack = require("webpack");

/** @type {import('next').NextConfig} */
const nextConfig = {
images: {
remotePatterns: [
{
hostname: "s2.googleusercontent.com",
},
],
},

webpack: (config, { isServer }) => {
if (!isServer) {
config.resolve.fallback = {
...config.resolve.fallback,
// eslint-disable-next-line no-undef
stream: require.resolve("stream-browserify"),
// eslint-disable-next-line no-undef
crypto: require.resolve("crypto-browserify"),
};

config.plugins.push(
new webpack.ProvidePlugin({
process: "process/browser",
}),
new webpack.NormalModuleReplacementPlugin(/node:crypto/, resource => {
resource.request = resource.request.replace(/^node:/, "");
}),
);
}
return config;
},
};

// eslint-disable-next-line no-undef
module.exports = nextConfig;
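The webpack block above exists because some of the UI's dependencies reach for Node built-ins (stream, crypto, process) that browsers don't provide: resolve.fallback swaps in the browserify polyfills added to package.json, ProvidePlugin injects a process shim, and the NormalModuleReplacementPlugin strips the node: prefix so node:crypto imports hit the same fallback. Below is a minimal sketch of client code that keeps working thanks to this setup; the module is hypothetical and not part of the diff.

// Hypothetical client-side helper — with the fallbacks above, the "crypto" import
// resolves to crypto-browserify in the browser bundle instead of breaking the build.
import { createHash } from "crypto";

export const sha256Hex = (text: string): string =>
  createHash("sha256").update(text).digest("hex");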
@@ -1,12 +0,0 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
images: {
remotePatterns: [
{
hostname: 's2.googleusercontent.com',
},
],
},
};

export default nextConfig;
@@ -7,7 +7,7 @@
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint",
"lint": "eslint .",
"format:write": "prettier . --write"
},
"dependencies": {
@@ -16,16 +19,19 @@
"@langchain/openai": "^0.0.25",
"@tailwindcss/typography": "^0.5.12",
"clsx": "^2.1.0",
"crypto-browserify": "^3.12.0",
"langchain": "^0.1.30",
"lucide-react": "^0.363.0",
"markdown-to-jsx": "^7.4.5",
"next": "14.1.4",
"next-themes": "^0.3.0",
"process": "^0.11.10",
"react": "^18",
"react-dom": "^18",
"react-text-to-speech": "^0.14.5",
"react-textarea-autosize": "^8.5.3",
"sonner": "^1.4.41",
"stream-browserify": "^3.0.0",
"tailwind-merge": "^2.2.2",
"yet-another-react-lightbox": "^3.17.2",
"zod": "^3.22.4"
@ -1,25 +1,25 @@
|
|||
import type { Config } from 'tailwindcss';
|
||||
import type { DefaultColors } from 'tailwindcss/types/generated/colors';
|
||||
import type { Config } from "tailwindcss";
|
||||
import type { DefaultColors } from "tailwindcss/types/generated/colors";
|
||||
|
||||
const themeDark = (colors: DefaultColors) => ({
|
||||
50: '#0a0a0a',
|
||||
100: '#111111',
|
||||
200: '#1c1c1c',
|
||||
50: "#0a0a0a",
|
||||
100: "#111111",
|
||||
200: "#1c1c1c",
|
||||
});
|
||||
|
||||
const themeLight = (colors: DefaultColors) => ({
|
||||
50: '#fcfcf9',
|
||||
100: '#f3f3ee',
|
||||
200: '#e8e8e3',
|
||||
50: "#fcfcf9",
|
||||
100: "#f3f3ee",
|
||||
200: "#e8e8e3",
|
||||
});
|
||||
|
||||
const config: Config = {
|
||||
content: [
|
||||
'./pages/**/*.{js,ts,jsx,tsx,mdx}',
|
||||
'./components/**/*.{js,ts,jsx,tsx,mdx}',
|
||||
'./app/**/*.{js,ts,jsx,tsx,mdx}',
|
||||
"./pages/**/*.{js,ts,jsx,tsx,mdx}",
|
||||
"./components/**/*.{js,ts,jsx,tsx,mdx}",
|
||||
"./app/**/*.{js,ts,jsx,tsx,mdx}",
|
||||
],
|
||||
darkMode: 'class',
|
||||
darkMode: "class",
|
||||
theme: {
|
||||
extend: {
|
||||
borderColor: ({ colors }) => {
|
||||
|
@ -47,6 +47,6 @@ const config: Config = {
|
|||
},
|
||||
},
|
||||
},
|
||||
plugins: [require('@tailwindcss/typography')],
|
||||
plugins: [require("@tailwindcss/typography")],
|
||||
};
|
||||
export default config;
|
||||
|
|
383
ui/yarn.lock
@ -615,6 +615,15 @@ arraybuffer.prototype.slice@^1.0.3:
|
|||
is-array-buffer "^3.0.4"
|
||||
is-shared-array-buffer "^1.0.2"
|
||||
|
||||
asn1.js@^4.10.1:
|
||||
version "4.10.1"
|
||||
resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0"
|
||||
integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==
|
||||
dependencies:
|
||||
bn.js "^4.0.0"
|
||||
inherits "^2.0.1"
|
||||
minimalistic-assert "^1.0.0"
|
||||
|
||||
ast-types-flow@^0.0.8:
|
||||
version "0.0.8"
|
||||
resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.8.tgz#0a85e1c92695769ac13a428bb653e7538bea27d6"
|
||||
|
@ -681,6 +690,16 @@ binary-search@^1.3.5:
|
|||
resolved "https://registry.yarnpkg.com/binary-search/-/binary-search-1.3.6.tgz#e32426016a0c5092f0f3598836a1c7da3560565c"
|
||||
integrity sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==
|
||||
|
||||
bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9:
|
||||
version "4.12.0"
|
||||
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"
|
||||
integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==
|
||||
|
||||
bn.js@^5.0.0, bn.js@^5.2.1:
|
||||
version "5.2.1"
|
||||
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70"
|
||||
integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==
|
||||
|
||||
brace-expansion@^1.1.7:
|
||||
version "1.1.11"
|
||||
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
|
||||
|
@ -703,6 +722,66 @@ braces@^3.0.2, braces@~3.0.2:
|
|||
dependencies:
|
||||
fill-range "^7.0.1"
|
||||
|
||||
brorand@^1.0.1, brorand@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
|
||||
integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==
|
||||
|
||||
browserify-aes@^1.0.4, browserify-aes@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48"
|
||||
integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==
|
||||
dependencies:
|
||||
buffer-xor "^1.0.3"
|
||||
cipher-base "^1.0.0"
|
||||
create-hash "^1.1.0"
|
||||
evp_bytestokey "^1.0.3"
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
browserify-cipher@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0"
|
||||
integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==
|
||||
dependencies:
|
||||
browserify-aes "^1.0.4"
|
||||
browserify-des "^1.0.0"
|
||||
evp_bytestokey "^1.0.0"
|
||||
|
||||
browserify-des@^1.0.0:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c"
|
||||
integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==
|
||||
dependencies:
|
||||
cipher-base "^1.0.1"
|
||||
des.js "^1.0.0"
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.1.2"
|
||||
|
||||
browserify-rsa@^4.0.0, browserify-rsa@^4.1.0:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d"
|
||||
integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==
|
||||
dependencies:
|
||||
bn.js "^5.0.0"
|
||||
randombytes "^2.0.1"
|
||||
|
||||
browserify-sign@^4.0.0:
|
||||
version "4.2.3"
|
||||
resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.3.tgz#7afe4c01ec7ee59a89a558a4b75bd85ae62d4208"
|
||||
integrity sha512-JWCZW6SKhfhjJxO8Tyiiy+XYB7cqd2S5/+WeYHsKdNKFlCBhKbblba1A/HN/90YwtxKc8tCErjffZl++UNmGiw==
|
||||
dependencies:
|
||||
bn.js "^5.2.1"
|
||||
browserify-rsa "^4.1.0"
|
||||
create-hash "^1.2.0"
|
||||
create-hmac "^1.1.7"
|
||||
elliptic "^6.5.5"
|
||||
hash-base "~3.0"
|
||||
inherits "^2.0.4"
|
||||
parse-asn1 "^5.1.7"
|
||||
readable-stream "^2.3.8"
|
||||
safe-buffer "^5.2.1"
|
||||
|
||||
browserslist@^4.23.0:
|
||||
version "4.23.0"
|
||||
resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.0.tgz#8f3acc2bbe73af7213399430890f86c63a5674ab"
|
||||
|
@ -713,6 +792,11 @@ browserslist@^4.23.0:
|
|||
node-releases "^2.0.14"
|
||||
update-browserslist-db "^1.0.13"
|
||||
|
||||
buffer-xor@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
|
||||
integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==
|
||||
|
||||
busboy@1.6.0:
|
||||
version "1.6.0"
|
||||
resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893"
|
||||
|
@ -779,6 +863,14 @@ chokidar@^3.5.3:
|
|||
optionalDependencies:
|
||||
fsevents "~2.3.2"
|
||||
|
||||
cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de"
|
||||
integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==
|
||||
dependencies:
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
client-only@0.0.1, client-only@^0.0.1:
|
||||
version "0.0.1"
|
||||
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
|
||||
|
@ -823,6 +915,42 @@ concat-map@0.0.1:
|
|||
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
|
||||
integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
|
||||
|
||||
core-util-is@~1.0.0:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
|
||||
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
|
||||
|
||||
create-ecdh@^4.0.0:
|
||||
version "4.0.4"
|
||||
resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e"
|
||||
integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==
|
||||
dependencies:
|
||||
bn.js "^4.1.0"
|
||||
elliptic "^6.5.3"
|
||||
|
||||
create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196"
|
||||
integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==
|
||||
dependencies:
|
||||
cipher-base "^1.0.1"
|
||||
inherits "^2.0.1"
|
||||
md5.js "^1.3.4"
|
||||
ripemd160 "^2.0.1"
|
||||
sha.js "^2.4.0"
|
||||
|
||||
create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7:
|
||||
version "1.1.7"
|
||||
resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff"
|
||||
integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==
|
||||
dependencies:
|
||||
cipher-base "^1.0.3"
|
||||
create-hash "^1.1.0"
|
||||
inherits "^2.0.1"
|
||||
ripemd160 "^2.0.0"
|
||||
safe-buffer "^5.0.1"
|
||||
sha.js "^2.4.8"
|
||||
|
||||
cross-spawn@^7.0.0, cross-spawn@^7.0.2:
|
||||
version "7.0.3"
|
||||
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
|
||||
|
@ -837,6 +965,23 @@ crypt@0.0.2:
|
|||
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
|
||||
integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
|
||||
|
||||
crypto-browserify@^3.12.0:
|
||||
version "3.12.0"
|
||||
resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
|
||||
integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==
|
||||
dependencies:
|
||||
browserify-cipher "^1.0.0"
|
||||
browserify-sign "^4.0.0"
|
||||
create-ecdh "^4.0.0"
|
||||
create-hash "^1.1.0"
|
||||
create-hmac "^1.1.0"
|
||||
diffie-hellman "^5.0.0"
|
||||
inherits "^2.0.1"
|
||||
pbkdf2 "^3.0.3"
|
||||
public-encrypt "^4.0.0"
|
||||
randombytes "^2.0.0"
|
||||
randomfill "^1.0.3"
|
||||
|
||||
cssesc@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
|
||||
|
@ -931,11 +1076,28 @@ dequal@^2.0.3:
|
|||
resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be"
|
||||
integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==
|
||||
|
||||
des.js@^1.0.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.1.0.tgz#1d37f5766f3bbff4ee9638e871a8768c173b81da"
|
||||
integrity sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==
|
||||
dependencies:
|
||||
inherits "^2.0.1"
|
||||
minimalistic-assert "^1.0.0"
|
||||
|
||||
didyoumean@^1.2.2:
|
||||
version "1.2.2"
|
||||
resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
|
||||
integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==
|
||||
|
||||
diffie-hellman@^5.0.0:
|
||||
version "5.0.3"
|
||||
resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875"
|
||||
integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==
|
||||
dependencies:
|
||||
bn.js "^4.1.0"
|
||||
miller-rabin "^4.0.0"
|
||||
randombytes "^2.0.0"
|
||||
|
||||
digest-fetch@^1.3.0:
|
||||
version "1.3.0"
|
||||
resolved "https://registry.yarnpkg.com/digest-fetch/-/digest-fetch-1.3.0.tgz#898e69264d00012a23cf26e8a3e40320143fc661"
|
||||
|
@ -980,6 +1142,19 @@ electron-to-chromium@^1.4.668:
|
|||
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.729.tgz#8477d21e2a50993781950885b2731d92ad532c00"
|
||||
integrity sha512-bx7+5Saea/qu14kmPTDHQxkp2UnziG3iajUQu3BxFvCOnpAJdDbMV4rSl+EqFDkkpNNVUFlR1kDfpL59xfy1HA==
|
||||
|
||||
elliptic@^6.5.3, elliptic@^6.5.5:
|
||||
version "6.5.5"
|
||||
resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.5.tgz#c715e09f78b6923977610d4c2346d6ce22e6dded"
|
||||
integrity sha512-7EjbcmUm17NQFu4Pmgmq2olYMj8nwMnpcddByChSUjArp8F5DQWcIcpriwO4ZToLNAJig0yiyjswfyGNje/ixw==
|
||||
dependencies:
|
||||
bn.js "^4.11.9"
|
||||
brorand "^1.1.0"
|
||||
hash.js "^1.0.0"
|
||||
hmac-drbg "^1.0.1"
|
||||
inherits "^2.0.4"
|
||||
minimalistic-assert "^1.0.1"
|
||||
minimalistic-crypto-utils "^1.0.1"
|
||||
|
||||
emoji-regex@^8.0.0:
|
||||
version "8.0.0"
|
||||
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
|
||||
|
@ -1342,6 +1517,14 @@ eventemitter3@^4.0.4:
|
|||
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
|
||||
integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
|
||||
|
||||
evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02"
|
||||
integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==
|
||||
dependencies:
|
||||
md5.js "^1.3.4"
|
||||
safe-buffer "^5.1.1"
|
||||
|
||||
expr-eval@^2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/expr-eval/-/expr-eval-2.0.2.tgz#fa6f044a7b0c93fde830954eb9c5b0f7fbc7e201"
|
||||
|
@ -1645,6 +1828,31 @@ has-tostringtag@^1.0.0, has-tostringtag@^1.0.2:
|
|||
dependencies:
|
||||
has-symbols "^1.0.3"
|
||||
|
||||
hash-base@^3.0.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33"
|
||||
integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==
|
||||
dependencies:
|
||||
inherits "^2.0.4"
|
||||
readable-stream "^3.6.0"
|
||||
safe-buffer "^5.2.0"
|
||||
|
||||
hash-base@~3.0:
|
||||
version "3.0.4"
|
||||
resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918"
|
||||
integrity sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==
|
||||
dependencies:
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
hash.js@^1.0.0, hash.js@^1.0.3:
|
||||
version "1.1.7"
|
||||
resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42"
|
||||
integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==
|
||||
dependencies:
|
||||
inherits "^2.0.3"
|
||||
minimalistic-assert "^1.0.1"
|
||||
|
||||
hasown@^2.0.0, hasown@^2.0.1, hasown@^2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
|
||||
|
@ -1652,6 +1860,15 @@ hasown@^2.0.0, hasown@^2.0.1, hasown@^2.0.2:
|
|||
dependencies:
|
||||
function-bind "^1.1.2"
|
||||
|
||||
hmac-drbg@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
|
||||
integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==
|
||||
dependencies:
|
||||
hash.js "^1.0.3"
|
||||
minimalistic-assert "^1.0.0"
|
||||
minimalistic-crypto-utils "^1.0.1"
|
||||
|
||||
humanize-ms@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
|
||||
|
@ -1685,7 +1902,7 @@ inflight@^1.0.4:
|
|||
once "^1.3.0"
|
||||
wrappy "1"
|
||||
|
||||
inherits@2:
|
||||
inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4:
|
||||
version "2.0.4"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
|
||||
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
|
||||
|
@ -1896,6 +2113,11 @@ isarray@^2.0.5:
|
|||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723"
|
||||
integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==
|
||||
|
||||
isarray@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
|
||||
integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
|
||||
|
||||
isexe@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||
|
@ -2114,6 +2336,15 @@ markdown-to-jsx@^7.4.5:
|
|||
resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.4.6.tgz#1ea0018c549bf00c9ce35e8f4ea57e48028d9cf7"
|
||||
integrity sha512-3cyNxI/PwotvYkjg6KmFaN1uyN/7NqETteD2DobBB8ro/FR9jsHIh4Fi7ywAz0s9QHRKCmGlOUggs5GxSWACKA==
|
||||
|
||||
md5.js@^1.3.4:
|
||||
version "1.3.5"
|
||||
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
|
||||
integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==
|
||||
dependencies:
|
||||
hash-base "^3.0.0"
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.1.2"
|
||||
|
||||
md5@^2.3.0:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
|
||||
|
@ -2136,6 +2367,14 @@ micromatch@^4.0.4, micromatch@^4.0.5:
|
|||
braces "^3.0.2"
|
||||
picomatch "^2.3.1"
|
||||
|
||||
miller-rabin@^4.0.0:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d"
|
||||
integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==
|
||||
dependencies:
|
||||
bn.js "^4.0.0"
|
||||
brorand "^1.0.1"
|
||||
|
||||
mime-db@1.52.0:
|
||||
version "1.52.0"
|
||||
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
|
||||
|
@ -2148,6 +2387,16 @@ mime-types@^2.1.12:
|
|||
dependencies:
|
||||
mime-db "1.52.0"
|
||||
|
||||
minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7"
|
||||
integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==
|
||||
|
||||
minimalistic-crypto-utils@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
|
||||
integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==
|
||||
|
||||
minimatch@9.0.3:
|
||||
version "9.0.3"
|
||||
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825"
|
||||
|
@ -2467,6 +2716,18 @@ parent-module@^1.0.0:
|
|||
dependencies:
|
||||
callsites "^3.0.0"
|
||||
|
||||
parse-asn1@^5.0.0, parse-asn1@^5.1.7:
|
||||
version "5.1.7"
|
||||
resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.7.tgz#73cdaaa822125f9647165625eb45f8a051d2df06"
|
||||
integrity sha512-CTM5kuWR3sx9IFamcl5ErfPl6ea/N8IYwiJ+vpeB2g+1iknv7zBl5uPwbMbRVznRVbrNY6lGuDoE5b30grmbqg==
|
||||
dependencies:
|
||||
asn1.js "^4.10.1"
|
||||
browserify-aes "^1.2.0"
|
||||
evp_bytestokey "^1.0.3"
|
||||
hash-base "~3.0"
|
||||
pbkdf2 "^3.1.2"
|
||||
safe-buffer "^5.2.1"
|
||||
|
||||
path-exists@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
|
||||
|
@ -2500,6 +2761,17 @@ path-type@^4.0.0:
|
|||
resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
|
||||
integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
|
||||
|
||||
pbkdf2@^3.0.3, pbkdf2@^3.1.2:
|
||||
version "3.1.2"
|
||||
resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075"
|
||||
integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==
|
||||
dependencies:
|
||||
create-hash "^1.1.2"
|
||||
create-hmac "^1.1.4"
|
||||
ripemd160 "^2.0.1"
|
||||
safe-buffer "^5.0.1"
|
||||
sha.js "^2.4.8"
|
||||
|
||||
picocolors@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
|
||||
|
@ -2605,6 +2877,16 @@ prettier@^3.2.5:
|
|||
resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.2.5.tgz#e52bc3090586e824964a8813b09aba6233b28368"
|
||||
integrity sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==
|
||||
|
||||
process-nextick-args@~2.0.0:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
|
||||
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
|
||||
|
||||
process@^0.11.10:
|
||||
version "0.11.10"
|
||||
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
|
||||
integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==
|
||||
|
||||
prop-types@^15.8.1:
|
||||
version "15.8.1"
|
||||
resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
|
||||
|
@ -2614,6 +2896,18 @@ prop-types@^15.8.1:
|
|||
object-assign "^4.1.1"
|
||||
react-is "^16.13.1"
|
||||
|
||||
public-encrypt@^4.0.0:
|
||||
version "4.0.3"
|
||||
resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0"
|
||||
integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==
|
||||
dependencies:
|
||||
bn.js "^4.1.0"
|
||||
browserify-rsa "^4.0.0"
|
||||
create-hash "^1.1.0"
|
||||
parse-asn1 "^5.0.0"
|
||||
randombytes "^2.0.1"
|
||||
safe-buffer "^5.1.2"
|
||||
|
||||
punycode@^2.1.0:
|
||||
version "2.3.1"
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5"
|
||||
|
@ -2624,6 +2918,21 @@ queue-microtask@^1.2.2:
|
|||
resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
|
||||
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
|
||||
|
||||
randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a"
|
||||
integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==
|
||||
dependencies:
|
||||
safe-buffer "^5.1.0"
|
||||
|
||||
randomfill@^1.0.3:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458"
|
||||
integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==
|
||||
dependencies:
|
||||
randombytes "^2.0.5"
|
||||
safe-buffer "^5.1.0"
|
||||
|
||||
react-dom@^18:
|
||||
version "18.2.0"
|
||||
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d"
|
||||
|
@ -2665,6 +2974,28 @@ read-cache@^1.0.0:
|
|||
dependencies:
|
||||
pify "^2.3.0"
|
||||
|
||||
readable-stream@^2.3.8:
|
||||
version "2.3.8"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
|
||||
integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
|
||||
dependencies:
|
||||
core-util-is "~1.0.0"
|
||||
inherits "~2.0.3"
|
||||
isarray "~1.0.0"
|
||||
process-nextick-args "~2.0.0"
|
||||
safe-buffer "~5.1.1"
|
||||
string_decoder "~1.1.1"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
readable-stream@^3.5.0, readable-stream@^3.6.0:
|
||||
version "3.6.2"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
|
||||
integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
|
||||
dependencies:
|
||||
inherits "^2.0.3"
|
||||
string_decoder "^1.1.1"
|
||||
util-deprecate "^1.0.1"
|
||||
|
||||
readdirp@~3.6.0:
|
||||
version "3.6.0"
|
||||
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
|
||||
|
@ -2745,6 +3076,14 @@ rimraf@^3.0.2:
|
|||
dependencies:
|
||||
glob "^7.1.3"
|
||||
|
||||
ripemd160@^2.0.0, ripemd160@^2.0.1:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c"
|
||||
integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==
|
||||
dependencies:
|
||||
hash-base "^3.0.0"
|
||||
inherits "^2.0.1"
|
||||
|
||||
run-parallel@^1.1.9:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
|
||||
|
@ -2762,6 +3101,16 @@ safe-array-concat@^1.1.2:
|
|||
has-symbols "^1.0.3"
|
||||
isarray "^2.0.5"
|
||||
|
||||
safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@^5.2.1, safe-buffer@~5.2.0:
|
||||
version "5.2.1"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
|
||||
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
|
||||
|
||||
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
|
||||
version "5.1.2"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
|
||||
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
|
||||
|
||||
safe-regex-test@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.3.tgz#a5b4c0f06e0ab50ea2c395c14d8371232924c377"
|
||||
|
@ -2812,6 +3161,14 @@ set-function-name@^2.0.1, set-function-name@^2.0.2:
|
|||
functions-have-names "^1.2.3"
|
||||
has-property-descriptors "^1.0.2"
|
||||
|
||||
sha.js@^2.4.0, sha.js@^2.4.8:
|
||||
version "2.4.11"
|
||||
resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7"
|
||||
integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==
|
||||
dependencies:
|
||||
inherits "^2.0.1"
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
shebang-command@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
|
||||
|
@ -2854,6 +3211,14 @@ source-map-js@^1.0.2, source-map-js@^1.2.0:
|
|||
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
|
||||
integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==
|
||||
|
||||
stream-browserify@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-3.0.0.tgz#22b0a2850cdf6503e73085da1fc7b7d0c2122f2f"
|
||||
integrity sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==
|
||||
dependencies:
|
||||
inherits "~2.0.4"
|
||||
readable-stream "^3.5.0"
|
||||
|
||||
streamsearch@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764"
|
||||
|
@ -2932,6 +3297,20 @@ string.prototype.trimstart@^1.0.8:
|
|||
define-properties "^1.2.1"
|
||||
es-object-atoms "^1.0.0"
|
||||
|
||||
string_decoder@^1.1.1:
|
||||
version "1.3.0"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
|
||||
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
|
||||
dependencies:
|
||||
safe-buffer "~5.2.0"
|
||||
|
||||
string_decoder@~1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
|
||||
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
|
||||
|
@ -3199,7 +3578,7 @@ use-latest@^1.2.1:
|
|||
dependencies:
|
||||
use-isomorphic-layout-effect "^1.1.1"
|
||||
|
||||
util-deprecate@^1.0.2:
|
||||
util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
|
||||
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
|
||||
|
|