chore: update dependency

fix: fix typescript errors
This commit is contained in:
Justin Luoma 2024-05-24 15:39:00 -04:00
parent 62910b5879
commit d788ca8eba
6 changed files with 292 additions and 296 deletions

View file

@@ -1,10 +1,10 @@
import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
import {RunnableMap, RunnableSequence} from '@langchain/core/runnables';
import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser';
import { PromptTemplate } from '@langchain/core/prompts';
import {PromptTemplate} from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory';
import { BaseMessage } from '@langchain/core/messages';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';
import {BaseMessage} from '@langchain/core/messages';
import {BaseChatModel} from '@langchain/core/language_models/chat_models';
import {ChatOpenAI} from '@langchain/openai';
const suggestionGeneratorPrompt = `
You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
@@ -45,10 +45,10 @@ const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
const generateSuggestions = (
input: SuggestionGeneratorInput,
llm: BaseChatModel,
llm: ChatOpenAI,
) => {
(llm as ChatOpenAI).temperature = 0;
const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
llm.temperature = 0;
const suggestionGeneratorChain = createSuggestionGeneratorChain(llm as unknown as BaseChatModel);
return suggestionGeneratorChain.invoke(input);
};

View file

@@ -1,8 +1,8 @@
import express from 'express';
import generateSuggestions from '../agents/suggestionGeneratorAgent';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { getAvailableChatModelProviders } from '../lib/providers';
import { HumanMessage, AIMessage } from '@langchain/core/messages';
import {BaseChatModel} from '@langchain/core/language_models/chat_models';
import {getAvailableChatModelProviders} from '../lib/providers';
import {AIMessage, HumanMessage} from '@langchain/core/messages';
import logger from '../utils/logger';
const router = express.Router();
@@ -34,6 +34,7 @@ router.post('/', async (req, res) => {
return;
}
// @ts-ignore
const suggestions = await generateSuggestions({ chat_history }, llm);
res.status(200).json({ suggestions: suggestions });

View file

@@ -1,14 +1,11 @@
import { WebSocket } from 'ws';
import { handleMessage } from './messageHandler';
import {
getAvailableEmbeddingModelProviders,
getAvailableChatModelProviders,
} from '../lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { Embeddings } from '@langchain/core/embeddings';
import type { IncomingMessage } from 'http';
import {WebSocket} from 'ws';
import {handleMessage} from './messageHandler';
import {getAvailableChatModelProviders, getAvailableEmbeddingModelProviders,} from '../lib/providers';
import {BaseChatModel} from '@langchain/core/language_models/chat_models';
import type {Embeddings} from '@langchain/core/embeddings';
import type {IncomingMessage} from 'http';
import logger from '../utils/logger';
import { ChatOpenAI } from '@langchain/openai';
import {ChatOpenAI} from '@langchain/openai';
export const handleConnection = async (
ws: WebSocket,
@@ -49,7 +46,7 @@ export const handleConnection = async (
| BaseChatModel
| undefined;
} else if (chatModelProvider == 'custom_openai') {
llm = new ChatOpenAI({
(llm as unknown as ChatOpenAI) = new ChatOpenAI({
modelName: chatModel,
openAIApiKey: searchParams.get('openAIApiKey'),
temperature: 0.7,

View file

@@ -1,16 +1,8 @@
import {
BadgePercent,
ChevronDown,
CopyPlus,
Globe,
Pencil,
ScanEye,
SwatchBook,
} from 'lucide-react';
import { cn } from '@/lib/utils';
import { Popover, Switch, Transition } from '@headlessui/react';
import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
import { Fragment } from 'react';
import {BadgePercent, ChevronDown, CopyPlus, Globe, Pencil, ScanEye, SwatchBook,} from 'lucide-react';
import {cn} from '@/lib/utils';
import {Popover, Switch, Transition} from '@headlessui/react';
import {SiReddit, SiYoutube} from '@icons-pack/react-simple-icons';
import {Fragment} from 'react';
export const Attach = () => {
return (
@@ -55,8 +47,8 @@ const focusModes = [
icon: (
<SiYoutube
className="h-5 w-auto mr-0.5"
onPointerEnterCapture={undefined}
onPointerLeaveCapture={undefined}
onPointerEnter={undefined}
onPointerLeave={undefined}
/>
),
},
@@ -67,8 +59,8 @@ const focusModes = [
icon: (
<SiReddit
className="h-5 w-auto mr-0.5"
onPointerEnterCapture={undefined}
onPointerLeaveCapture={undefined}
onPointerEnter={undefined}
onPointerLeave={undefined}
/>
),
},

View file

@@ -12,7 +12,7 @@
},
"dependencies": {
"@headlessui/react": "^1.7.18",
"@icons-pack/react-simple-icons": "^9.4.0",
"@icons-pack/react-simple-icons": "^9.5.0",
"@langchain/openai": "^0.0.25",
"@tailwindcss/typography": "^0.5.12",
"clsx": "^2.1.0",

File diff suppressed because it is too large Load diff