Added automatic docker build on merge to master

This PR includes:
- Automatic build & push to docker hub when pushing to master, or when triggering a workflow dispatch on master
- Automatic docker build on pull requests to validate changes
- Updated the docker-compose.yaml to use the container images, rather than building locally
- Added support for defining backend settings using the container environment 
  -> All options defined in config.toml can now be loaded instead from the environment
  -> Order of precedence is environment definition, config.toml, and finally default inline configuration defined in config.ts
- Added support for defining frontend settings using the container environment
  -> Added a dynamic api route to load the container environment definitions on the server, and provide them to the client
  -> Added a library function to fetch from the newly created API, caching the response in the session storage
  -> Modified existing calls to `process.env` to use the new library function
  -> Left the initial statically compiled environment definitions in place as a backup definition, if no environment definitions are provided

Remaining tasks to complete before this can be merged to [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica):
- Add secret definitions for `DOCKER_USERNAME` and `DOCKER_PASSWORD` to [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica) to ensure push to dockerhub works on base branch
- Update documentation with information about changes
This commit is contained in:
Andrew Pennington 2024-08-21 00:40:34 +01:00 committed by GitHub
parent 9c1936ec2c
commit ec158c0cdf
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 176 additions and 43 deletions

48
.github/workflows/docker-build.yml vendored Normal file
View file

@ -0,0 +1,48 @@
# Builds the frontend and backend Docker images.
# - On pull requests targeting master: build only (validation, no push).
# - On push to master or manual workflow_dispatch: build and push to Docker Hub.
name: Build and Push Docker images
on:
  workflow_dispatch:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        # v2 runs on the removed Node 12 runtime; v4 is the supported release.
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to Docker Hub
        # Skip login on PRs: secrets are unavailable to fork PRs and no push happens.
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build frontend Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: app.dockerfile
          # Push only for non-PR events on the master branch.
          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-frontend:latest
      - name: Build backend Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: backend.dockerfile
          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-backend:latest
      - name: Log out from Docker Hub
        if: github.event_name != 'pull_request'
        run: docker logout

View file

@ -1,7 +1,7 @@
FROM node:alpine FROM node:alpine
ARG NEXT_PUBLIC_WS_URL ARG NEXT_PUBLIC_WS_URL='ws://127.0.0.1:3001'
ARG NEXT_PUBLIC_API_URL ARG NEXT_PUBLIC_API_URL='http://127.0.0.1:3001/api'
ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL} ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}

View file

@ -6,13 +6,10 @@ WORKDIR /home/perplexica
COPY src /home/perplexica/src COPY src /home/perplexica/src
COPY tsconfig.json /home/perplexica/ COPY tsconfig.json /home/perplexica/
COPY config.toml /home/perplexica/
COPY drizzle.config.ts /home/perplexica/ COPY drizzle.config.ts /home/perplexica/
COPY package.json /home/perplexica/ COPY package.json /home/perplexica/
COPY yarn.lock /home/perplexica/ COPY yarn.lock /home/perplexica/
RUN sed -i "s|SEARXNG = \".*\"|SEARXNG = \"${SEARXNG_API_URL}\"|g" /home/perplexica/config.toml
RUN mkdir /home/perplexica/data RUN mkdir /home/perplexica/data
RUN yarn install RUN yarn install

View file

@ -10,17 +10,16 @@ services:
restart: unless-stopped restart: unless-stopped
perplexica-backend: perplexica-backend:
build: image: docker.io/andypenno/perplexica-backend:latest
context: .
dockerfile: backend.dockerfile
args:
- SEARXNG_API_URL=http://searxng:8080
depends_on: depends_on:
- searxng - searxng
ports: ports:
- 3001:3001 - 3001:3001
volumes: volumes:
- backend-dbstore:/home/perplexica/data - backend-dbstore:/home/perplexica/data
- ./config.toml:/home/perplexica/config.toml
environment:
- SEARXNG_API_URL=http://searxng:4000
extra_hosts: extra_hosts:
- 'host.docker.internal:host-gateway' - 'host.docker.internal:host-gateway'
networks: networks:
@ -28,16 +27,14 @@ services:
restart: unless-stopped restart: unless-stopped
perplexica-frontend: perplexica-frontend:
build: image: docker.io/andypenno/perplexica-frontend:latest
context: .
dockerfile: app.dockerfile
args:
- NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
- NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
depends_on: depends_on:
- perplexica-backend - perplexica-backend
ports: ports:
- 3000:3000 - 3000:3000
environment:
- BACKEND_API_URL=http://127.0.0.1:3001/api
- BACKEND_WS_URL=ws://127.0.0.1:3001
networks: networks:
- perplexica-network - perplexica-network
restart: unless-stopped restart: unless-stopped

View file

@ -8,5 +8,5 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
[API_ENDPOINTS] [API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL SEARXNG = "" # SearxNG API URL - http://localhost:32768
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434

View file

@ -1,6 +1,7 @@
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import toml from '@iarna/toml'; import toml from '@iarna/toml';
import process from 'process';
const configFileName = 'config.toml'; const configFileName = 'config.toml';
@ -24,25 +25,52 @@ type RecursivePartial<T> = {
[P in keyof T]?: RecursivePartial<T[P]>; [P in keyof T]?: RecursivePartial<T[P]>;
}; };
const loadConfig = () => const configFilePath = path.join(__dirname, `../${configFileName}`);
toml.parse(
fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
) as any as Config;
export const getPort = () => loadConfig().GENERAL.PORT; const defaultConfig: Config = {
GENERAL: {
PORT: 3001,
SIMILARITY_MEASURE: "cosine"
},
API_KEYS: {
OPENAI: "",
GROQ: "",
ANTHROPIC: ""
},
API_ENDPOINTS: {
SEARXNG: "http://localhost:32768",
OLLAMA: ""
}
}
const loadConfig = () => {
if (fs.existsSync(configFilePath)) {
return toml.parse(fs.readFileSync(configFilePath, 'utf-8')) as any as Config;
} else {
return defaultConfig;
}
}
export const getPort = () =>
process.env.PORT ?? loadConfig().GENERAL.PORT;
export const getSimilarityMeasure = () => export const getSimilarityMeasure = () =>
loadConfig().GENERAL.SIMILARITY_MEASURE; process.env.SIMILARITY_MEASURE ?? loadConfig().GENERAL.SIMILARITY_MEASURE;
export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI; export const getOpenaiApiKey = () =>
process.env.OPENAI_API_KEY ?? loadConfig().API_KEYS.OPENAI;
export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ; export const getGroqApiKey = () =>
process.env.GROQ_API_KEY ?? loadConfig().API_KEYS.GROQ;
export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC; export const getAnthropicApiKey = () =>
process.env.ANTHROPIC_API_KEY ?? loadConfig().API_KEYS.ANTHROPIC;
export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG; export const getSearxngApiEndpoint = () =>
process.env.SEARXNG_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.SEARXNG;
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA; export const getOllamaApiEndpoint = () =>
process.env.OLLAMA_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.OLLAMA;
export const updateConfig = (config: RecursivePartial<Config>) => { export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig(); const currentConfig = loadConfig();
@ -65,8 +93,5 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
} }
} }
fs.writeFileSync( fs.writeFileSync(configFilePath, toml.stringify(config));
path.join(__dirname, `../${configFileName}`),
toml.stringify(config),
);
}; };

16
ui/app/api/env/route.ts vendored Normal file
View file

@ -0,0 +1,16 @@
import process from 'process';
import { NextResponse } from 'next/server';

// Serve this route from the Edge runtime.
export const runtime = "edge"

/**
 * GET /api/env
 *
 * Exposes the backend connection settings from the server's environment to
 * the client. Each value falls back to the statically compiled
 * NEXT_PUBLIC_* definition when no runtime override is provided.
 */
export async function GET(_request: Request) {
  // Resolve each setting: runtime container environment first,
  // build-time public variable as the fallback.
  const backendApiUrl =
    process.env.BACKEND_API_URL ?? process.env.NEXT_PUBLIC_API_URL;
  const backendWsUrl =
    process.env.BACKEND_WS_URL ?? process.env.NEXT_PUBLIC_WS_URL;

  // Respond with the resolved settings as JSON.
  return NextResponse.json({
    'BACKEND_API_URL': backendApiUrl,
    'BACKEND_WS_URL': backendWsUrl,
  });
}

View file

@ -5,6 +5,7 @@ import { formatTimeDifference } from '@/lib/utils';
import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react'; import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
import Link from 'next/link'; import Link from 'next/link';
import { useEffect, useState } from 'react'; import { useEffect, useState } from 'react';
import { getServerEnv } from '@/lib/serverEnvironment';
export interface Chat { export interface Chat {
id: string; id: string;
@ -21,7 +22,7 @@ const Page = () => {
const fetchChats = async () => { const fetchChats = async () => {
setLoading(true); setLoading(true);
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats`, { const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/chats`, {
method: 'GET', method: 'GET',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',

View file

@ -10,6 +10,7 @@ import { toast } from 'sonner';
import { useSearchParams } from 'next/navigation'; import { useSearchParams } from 'next/navigation';
import { getSuggestions } from '@/lib/actions'; import { getSuggestions } from '@/lib/actions';
import Error from 'next/error'; import Error from 'next/error';
import { getServerEnv } from '@/lib/serverEnvironment';
export type Message = { export type Message = {
messageId: string; messageId: string;
@ -22,13 +23,16 @@ export type Message = {
}; };
const useSocket = ( const useSocket = (
url: string, url: string | null,
setIsWSReady: (ready: boolean) => void, setIsWSReady: (ready: boolean) => void,
setError: (error: boolean) => void, setError: (error: boolean) => void,
) => { ) => {
const [ws, setWs] = useState<WebSocket | null>(null); const [ws, setWs] = useState<WebSocket | null>(null);
useEffect(() => { useEffect(() => {
if (!url) {
return;
}
if (!ws) { if (!ws) {
const connectWs = async () => { const connectWs = async () => {
let chatModel = localStorage.getItem('chatModel'); let chatModel = localStorage.getItem('chatModel');
@ -39,7 +43,7 @@ const useSocket = (
); );
const providers = await fetch( const providers = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/models`, `${await getServerEnv("BACKEND_API_URL")}/models`,
{ {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -220,7 +224,7 @@ const loadMessages = async (
setNotFound: (notFound: boolean) => void, setNotFound: (notFound: boolean) => void,
) => { ) => {
const res = await fetch( const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`, `${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
{ {
method: 'GET', method: 'GET',
headers: { headers: {
@ -260,6 +264,8 @@ const loadMessages = async (
}; };
const ChatWindow = ({ id }: { id?: string }) => { const ChatWindow = ({ id }: { id?: string }) => {
const [wsServerUrl, setWsServerUrl] = useState<string | null>(null);
const searchParams = useSearchParams(); const searchParams = useSearchParams();
const initialMessage = searchParams.get('q'); const initialMessage = searchParams.get('q');
@ -271,7 +277,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
const [isWSReady, setIsWSReady] = useState(false); const [isWSReady, setIsWSReady] = useState(false);
const ws = useSocket( const ws = useSocket(
process.env.NEXT_PUBLIC_WS_URL!, wsServerUrl,
setIsWSReady, setIsWSReady,
setHasError, setHasError,
); );
@ -323,6 +329,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
} }
}, [isMessagesLoaded, isWSReady]); }, [isMessagesLoaded, isWSReady]);
useEffect(() => {
const fetchWsServerUrl = async () => {
const url = await getServerEnv("BACKEND_WS_URL");
setWsServerUrl(url);
};
fetchWsServerUrl();
}, []);
const sendMessage = async (message: string) => { const sendMessage = async (message: string) => {
if (loading) return; if (loading) return;
setLoading(true); setLoading(true);

View file

@ -3,6 +3,7 @@ import { Dialog, Transition } from '@headlessui/react';
import { Fragment, useState } from 'react'; import { Fragment, useState } from 'react';
import { toast } from 'sonner'; import { toast } from 'sonner';
import { Chat } from '@/app/library/page'; import { Chat } from '@/app/library/page';
import { getServerEnv } from '@/lib/serverEnvironment';
const DeleteChat = ({ const DeleteChat = ({
chatId, chatId,
@ -20,7 +21,7 @@ const DeleteChat = ({
setLoading(true); setLoading(true);
try { try {
const res = await fetch( const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`, `${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
{ {
method: 'DELETE', method: 'DELETE',
headers: { headers: {

View file

@ -4,6 +4,7 @@ import { useState } from 'react';
import Lightbox from 'yet-another-react-lightbox'; import Lightbox from 'yet-another-react-lightbox';
import 'yet-another-react-lightbox/styles.css'; import 'yet-another-react-lightbox/styles.css';
import { Message } from './ChatWindow'; import { Message } from './ChatWindow';
import { getServerEnv } from '@/lib/serverEnvironment';
type Image = { type Image = {
url: string; url: string;
@ -34,7 +35,7 @@ const SearchImages = ({
const chatModel = localStorage.getItem('chatModel'); const chatModel = localStorage.getItem('chatModel');
const res = await fetch( const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/images`, `${await getServerEnv("BACKEND_API_URL")}/images`,
{ {
method: 'POST', method: 'POST',
headers: { headers: {

View file

@ -4,6 +4,7 @@ import { useState } from 'react';
import Lightbox, { GenericSlide, VideoSlide } from 'yet-another-react-lightbox'; import Lightbox, { GenericSlide, VideoSlide } from 'yet-another-react-lightbox';
import 'yet-another-react-lightbox/styles.css'; import 'yet-another-react-lightbox/styles.css';
import { Message } from './ChatWindow'; import { Message } from './ChatWindow';
import { getServerEnv } from '@/lib/serverEnvironment';
type Video = { type Video = {
url: string; url: string;
@ -47,7 +48,7 @@ const Searchvideos = ({
const chatModel = localStorage.getItem('chatModel'); const chatModel = localStorage.getItem('chatModel');
const res = await fetch( const res = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/videos`, `${await getServerEnv("BACKEND_API_URL")}/videos`,
{ {
method: 'POST', method: 'POST',
headers: { headers: {

View file

@ -8,6 +8,7 @@ import React, {
type SelectHTMLAttributes, type SelectHTMLAttributes,
} from 'react'; } from 'react';
import ThemeSwitcher from './theme/Switcher'; import ThemeSwitcher from './theme/Switcher';
import { getServerEnv } from '@/lib/serverEnvironment';
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {} interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
@ -88,7 +89,7 @@ const SettingsDialog = ({
if (isOpen) { if (isOpen) {
const fetchConfig = async () => { const fetchConfig = async () => {
setIsLoading(true); setIsLoading(true);
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, { const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
@ -148,7 +149,7 @@ const SettingsDialog = ({
setIsUpdating(true); setIsUpdating(true);
try { try {
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, { await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',

View file

@ -1,10 +1,11 @@
import { Message } from '@/components/ChatWindow'; import { Message } from '@/components/ChatWindow';
import { getServerEnv } from '@/lib/serverEnvironment';
export const getSuggestions = async (chatHisory: Message[]) => { export const getSuggestions = async (chatHisory: Message[]) => {
const chatModel = localStorage.getItem('chatModel'); const chatModel = localStorage.getItem('chatModel');
const chatModelProvider = localStorage.getItem('chatModelProvider'); const chatModelProvider = localStorage.getItem('chatModelProvider');
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, { const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/suggestions`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',

View file

@ -0,0 +1,29 @@
/**
 * Fetch the server-provided environment settings from the /api/env route and
 * cache the result in sessionStorage under 'cachedConfig'.
 *
 * Best-effort: returns the parsed settings object on success, or null on any
 * failure so callers can fall back to their compiled-in defaults. Failures
 * are logged (the original silently swallowed them) to aid debugging.
 */
async function fetchConfig(): Promise<Record<string, string> | null> {
  try {
    const response = await fetch('/api/env');
    if (!response.ok) {
      throw new Error(`Failed to fetch config (status ${response.status})`);
    }
    const data = await response.json();
    // Cache for the rest of the browser session to avoid repeat round trips.
    sessionStorage.setItem('cachedConfig', JSON.stringify(data));
    return data;
  } catch (error) {
    // Keep the best-effort contract (null), but don't hide the cause.
    console.error('Unable to load server environment config:', error);
    return null;
  }
}
/**
 * Resolve a server-provided environment variable by name.
 *
 * Reads the sessionStorage cache first and only falls back to fetching
 * /api/env (via fetchConfig) on a cache miss. Always resolves to a string:
 * a missing variable — whether the cache/fetched data lacks the key or the
 * fetch failed — yields "" rather than undefined, honoring the declared
 * Promise<string> return type (the original could leak undefined).
 */
export async function getServerEnv(envVar: string): Promise<string> {
  const cachedConfig = JSON.parse(sessionStorage.getItem('cachedConfig') || 'null');
  if (cachedConfig) {
    // Coalesce: the key may be absent even when a cached config exists.
    return cachedConfig[envVar] ?? "";
  }
  const data = await fetchConfig();
  return data?.[envVar] ?? "";
}