Fix query appearing twice in chat history

The initial query appears twice in the prompt. OpenAI models ignore the
duplicate, but it breaks Gemini models, which fail with an error stating
that AI and User prompts must alternate.

Tested all search modes with both OpenAI GPT-3.5 Turbo and Vertex Gemini
1.0; with this change, both now appear to function correctly.
Author: Hristo
Date: 2024-05-17 14:10:11 -04:00
Parent: 2e58dab30a
Commit: 8b9b4085ea
6 changed files with 0 additions and 6 deletions
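
For context, a minimal sketch of the corrected prompt assembly, assuming LangChain JS (@langchain/core) and assuming, as the commit message implies, that the chat_history passed into each chain already ends with the latest user query. The responsePrompt constant is a hypothetical stand-in for the per-chain system prompts shown in the diffs below; only the removal of the trailing ['user', '{query}'] entry reflects the actual change.

import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts';

// Hypothetical stand-in for the per-chain system prompts
// (basicWebSearchResponsePrompt, writingAssistantPrompt, etc.).
const responsePrompt = 'You are a helpful search assistant.';

// The template now ends with the chat_history placeholder. The history
// already carries the latest user turn, so the removed ['user', '{query}']
// entry duplicated it and produced two consecutive user messages, which
// Gemini rejects.
const answeringPrompt = ChatPromptTemplate.fromMessages([
  ['system', responsePrompt],
  new MessagesPlaceholder('chat_history'),
]);

// Example usage: the last message in chat_history is the query itself.
// const messages = await answeringPrompt.formatMessages({ chat_history });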


@@ -209,7 +209,6 @@ const createBasicAcademicSearchAnsweringChain = (
ChatPromptTemplate.fromMessages([
['system', basicAcademicSearchResponsePrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,


@@ -205,7 +205,6 @@ const createBasicRedditSearchAnsweringChain = (
ChatPromptTemplate.fromMessages([
['system', basicRedditSearchResponsePrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,


@@ -203,7 +203,6 @@ const createBasicWebSearchAnsweringChain = (
ChatPromptTemplate.fromMessages([
['system', basicWebSearchResponsePrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,


@@ -165,7 +165,6 @@ const createBasicWolframAlphaSearchAnsweringChain = (llm: BaseChatModel) => {
ChatPromptTemplate.fromMessages([
['system', basicWolframAlphaSearchResponsePrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,


@@ -46,7 +46,6 @@ const createWritingAssistantChain = (llm: BaseChatModel) => {
ChatPromptTemplate.fromMessages([
['system', writingAssistantPrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,


@@ -205,7 +205,6 @@ const createBasicYoutubeSearchAnsweringChain = (
ChatPromptTemplate.fromMessages([
['system', basicYoutubeSearchResponsePrompt],
new MessagesPlaceholder('chat_history'),
- ['user', '{query}'],
]),
llm,
strParser,