From ec158c0cdf6dc51a0f0b44f4f2485e2bda266476 Mon Sep 17 00:00:00 2001
From: Andrew Pennington <50312474+andypenno@users.noreply.github.com>
Date: Wed, 21 Aug 2024 00:40:34 +0100
Subject: [PATCH] Added automatic docker build on merge to master

This PR includes:
- Automatic build & push to Docker Hub when pushing to master, or when triggering a workflow dispatch on master
- Automatic Docker build on pull requests to validate changes
- Updated docker-compose.yaml to use the published container images rather than building locally
- Added support for defining backend settings through the container environment
  -> Every option defined in config.toml can now be loaded from the environment instead
  -> The order of precedence is the environment definition, then config.toml, and finally the default inline configuration defined in config.ts
- Added support for defining frontend settings through the container environment
  -> Added a dynamic API route that loads the container environment definitions on the server and provides them to the client
  -> Added a library function that fetches from the new API route and caches the response in session storage
  -> Modified existing calls to `process.env` to use the new library function
  -> Left the statically compiled environment definitions in place as a fallback when no environment definitions are provided

Remaining tasks before this can be merged into [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica):
- Add secret definitions for `DOCKER_USERNAME` and `DOCKER_PASSWORD` to [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica) so that the push to Docker Hub works on the base branch
- Update the documentation to describe these changes
---
 .github/workflows/docker-build.yml | 48 ++++++++++++++++++++++++++
 app.dockerfile                     |  4 +--
 backend.dockerfile                 |  3 --
 docker-compose.yaml                | 19 +++++------
 sample.config.toml                 |  2 +-
 src/config.ts                      | 55 ++++++++++++++++++++++--------
 ui/app/api/env/route.ts            | 16 +++++++++
 ui/app/library/page.tsx            |  3 +-
 ui/components/ChatWindow.tsx       | 23 ++++++++++---
 ui/components/DeleteChat.tsx       |  3 +-
 ui/components/SearchImages.tsx     |  3 +-
 ui/components/SearchVideos.tsx     |  3 +-
 ui/components/SettingsDialog.tsx   |  5 +--
 ui/lib/actions.ts                  |  3 +-
 ui/lib/serverEnvironment.ts        | 29 ++++++++++++++++
 15 files changed, 176 insertions(+), 43 deletions(-)
 create mode 100644 .github/workflows/docker-build.yml
 create mode 100644 ui/app/api/env/route.ts
 create mode 100644 ui/lib/serverEnvironment.ts

diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
new file mode 100644
index 0000000..91ee8cd
--- /dev/null
+++ b/.github/workflows/docker-build.yml
@@ -0,0 +1,48 @@
+name: Build and Push Docker images
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Log in to Docker Hub
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Build frontend Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: app.dockerfile
+          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
+          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-frontend:latest
+
+      - name: Build backend Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: backend.dockerfile
+          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
+          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-backend:latest
+
+      - name: Log out from Docker Hub
+        if: github.event_name != 'pull_request'
+        run: docker logout
diff --git a/app.dockerfile b/app.dockerfile
index 105cf86..c3d3594 100644
--- a/app.dockerfile
+++ b/app.dockerfile
@@ -1,7 +1,7 @@
 FROM node:alpine
 
-ARG NEXT_PUBLIC_WS_URL
-ARG NEXT_PUBLIC_API_URL
+ARG NEXT_PUBLIC_WS_URL='ws://127.0.0.1:3001'
+ARG NEXT_PUBLIC_API_URL='http://127.0.0.1:3001/api'
 ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
 ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
 
diff --git a/backend.dockerfile b/backend.dockerfile
index 4886573..8297263 100644
--- a/backend.dockerfile
+++ b/backend.dockerfile
@@ -6,13 +6,10 @@ WORKDIR /home/perplexica
 
 COPY src /home/perplexica/src
 COPY tsconfig.json /home/perplexica/
-COPY config.toml /home/perplexica/
 COPY drizzle.config.ts /home/perplexica/
 COPY package.json /home/perplexica/
 COPY yarn.lock /home/perplexica/
 
-RUN sed -i "s|SEARXNG = \".*\"|SEARXNG = \"${SEARXNG_API_URL}\"|g" /home/perplexica/config.toml
-
 RUN mkdir /home/perplexica/data
 
 RUN yarn install
diff --git a/docker-compose.yaml b/docker-compose.yaml
index d6f9203..1166990 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -10,17 +10,16 @@ services:
     restart: unless-stopped
 
   perplexica-backend:
-    build:
-      context: .
-      dockerfile: backend.dockerfile
-      args:
-        - SEARXNG_API_URL=http://searxng:8080
+    image: docker.io/andypenno/perplexica-backend:latest
     depends_on:
      - searxng
     ports:
      - 3001:3001
     volumes:
      - backend-dbstore:/home/perplexica/data
+      - ./config.toml:/home/perplexica/config.toml
+    environment:
+      - SEARXNG_API_ENDPOINT=http://searxng:8080
     extra_hosts:
      - 'host.docker.internal:host-gateway'
     networks:
@@ -28,16 +27,14 @@ services:
     restart: unless-stopped
 
   perplexica-frontend:
-    build:
-      context: .
-      dockerfile: app.dockerfile
-      args:
-        - NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
-        - NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
+    image: docker.io/andypenno/perplexica-frontend:latest
     depends_on:
      - perplexica-backend
     ports:
      - 3000:3000
+    environment:
+      - BACKEND_API_URL=http://127.0.0.1:3001/api
+      - BACKEND_WS_URL=ws://127.0.0.1:3001
     networks:
      - perplexica-network
     restart: unless-stopped
diff --git a/sample.config.toml b/sample.config.toml
index f6c6943..3a19b7e 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -8,5 +8,5 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
-SEARXNG = "http://localhost:32768" # SearxNG API URL
+SEARXNG = "" # SearxNG API URL - http://localhost:32768
 OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
\ No newline at end of file
diff --git a/src/config.ts b/src/config.ts
index 9ebc182..627288e 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import toml from '@iarna/toml';
+import process from 'process';
 
 const configFileName = 'config.toml';
 
@@ -24,25 +25,52 @@ type RecursivePartial<T> = {
   [P in keyof T]?: RecursivePartial<T[P]>;
 };
 
-const loadConfig = () =>
-  toml.parse(
-    fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
-  ) as any as Config;
+const configFilePath = path.join(__dirname, `../${configFileName}`);
 
-export const getPort = () => loadConfig().GENERAL.PORT;
+const defaultConfig: Config = {
+  GENERAL: {
+    PORT: 3001,
+    SIMILARITY_MEASURE: "cosine"
+  },
+  API_KEYS: {
+    OPENAI: "",
+    GROQ: "",
+    ANTHROPIC: ""
+  },
+  API_ENDPOINTS: {
+    SEARXNG: "http://localhost:32768",
+    OLLAMA: ""
+  }
+}
+
+const loadConfig = () => {
+  if (fs.existsSync(configFilePath)) {
+    return toml.parse(fs.readFileSync(configFilePath, 'utf-8')) as any as Config;
+  } else {
+    return defaultConfig;
+  }
+}
+
+export const getPort = () =>
+  process.env.PORT ?? loadConfig().GENERAL.PORT;
 
 export const getSimilarityMeasure = () =>
-  loadConfig().GENERAL.SIMILARITY_MEASURE;
+  process.env.SIMILARITY_MEASURE ?? loadConfig().GENERAL.SIMILARITY_MEASURE;
 
-export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
+export const getOpenaiApiKey = () =>
+  process.env.OPENAI_API_KEY ?? loadConfig().API_KEYS.OPENAI;
 
-export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
+export const getGroqApiKey = () =>
+  process.env.GROQ_API_KEY ?? loadConfig().API_KEYS.GROQ;
 
-export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
+export const getAnthropicApiKey = () =>
+  process.env.ANTHROPIC_API_KEY ?? loadConfig().API_KEYS.ANTHROPIC;
 
-export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
+export const getSearxngApiEndpoint = () =>
+  process.env.SEARXNG_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.SEARXNG;
 
-export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
+export const getOllamaApiEndpoint = () =>
+  process.env.OLLAMA_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.OLLAMA;
 
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
@@ -65,8 +93,5 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
     }
   }
 
-  fs.writeFileSync(
-    path.join(__dirname, `../${configFileName}`),
-    toml.stringify(config),
-  );
+  fs.writeFileSync(configFilePath, toml.stringify(config));
 };
diff --git a/ui/app/api/env/route.ts b/ui/app/api/env/route.ts
new file mode 100644
index 0000000..01cf6fb
--- /dev/null
+++ b/ui/app/api/env/route.ts
@@ -0,0 +1,16 @@
+import process from 'process';
+import { NextResponse } from 'next/server';
+
+// Enable the Edge Runtime
+export const runtime = "edge"
+
+export async function GET(_request: Request) {
+  // Access environment variables
+  const envVars = {
+    'BACKEND_API_URL': process.env.BACKEND_API_URL ?? process.env.NEXT_PUBLIC_API_URL,
+    'BACKEND_WS_URL': process.env.BACKEND_WS_URL ?? process.env.NEXT_PUBLIC_WS_URL
+  }
+
+  // Return the environment variables as a JSON response
+  return NextResponse.json(envVars);
+}
diff --git a/ui/app/library/page.tsx b/ui/app/library/page.tsx
index 8294fc1..fc17b19 100644
--- a/ui/app/library/page.tsx
+++ b/ui/app/library/page.tsx
@@ -5,6 +5,7 @@ import { formatTimeDifference } from '@/lib/utils';
 import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
 import Link from 'next/link';
 import { useEffect, useState } from 'react';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 export interface Chat {
   id: string;
@@ -21,7 +22,7 @@ const Page = () => {
     const fetchChats = async () => {
       setLoading(true);
 
-      const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats`, {
+      const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/chats`, {
         method: 'GET',
         headers: {
           'Content-Type': 'application/json',
diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index 5e6d382..d365db6 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -10,6 +10,7 @@ import { toast } from 'sonner';
 import { useSearchParams } from 'next/navigation';
 import { getSuggestions } from '@/lib/actions';
 import Error from 'next/error';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 export type Message = {
   messageId: string;
@@ -22,13 +23,16 @@ export type Message = {
 };
 
 const useSocket = (
-  url: string,
+  url: string | null,
   setIsWSReady: (ready: boolean) => void,
   setError: (error: boolean) => void,
 ) => {
   const [ws, setWs] = useState<WebSocket | null>(null);
 
   useEffect(() => {
+    if (!url) {
+      return;
+    }
     if (!ws) {
       const connectWs = async () => {
         let chatModel = localStorage.getItem('chatModel');
@@ -39,7 +43,7 @@ const useSocket = (
         );
 
         const providers = await fetch(
-          `${process.env.NEXT_PUBLIC_API_URL}/models`,
+          `${await getServerEnv("BACKEND_API_URL")}/models`,
           {
             headers: {
               'Content-Type': 'application/json',
@@ -220,7 +224,7 @@ const loadMessages = async (
   setNotFound: (notFound: boolean) => void,
 ) => {
   const res = await fetch(
-    `${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
+    `${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
     {
       method: 'GET',
       headers: {
@@ -260,6 +264,8 @@ const loadMessages = async (
 };
 
 const ChatWindow = ({ id }: { id?: string }) => {
+  const [wsServerUrl, setWsServerUrl] = useState<string | null>(null);
+
   const searchParams = useSearchParams();
   const initialMessage = searchParams.get('q');
 
@@ -271,7 +277,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
   const [isWSReady, setIsWSReady] = useState(false);
 
   const ws = useSocket(
-    process.env.NEXT_PUBLIC_WS_URL!,
+    wsServerUrl,
     setIsWSReady,
     setHasError,
   );
@@ -323,6 +329,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
     }
   }, [isMessagesLoaded, isWSReady]);
 
+  useEffect(() => {
+    const fetchWsServerUrl = async () => {
+      const url = await getServerEnv("BACKEND_WS_URL");
+      setWsServerUrl(url);
+    };
+
+    fetchWsServerUrl();
+  }, []);
+
   const sendMessage = async (message: string) => {
     if (loading) return;
     setLoading(true);
diff --git a/ui/components/DeleteChat.tsx b/ui/components/DeleteChat.tsx
index 165f86e..2ccc809 100644
--- a/ui/components/DeleteChat.tsx
+++ b/ui/components/DeleteChat.tsx
@@ -3,6 +3,7 @@ import { Dialog, Transition } from '@headlessui/react';
 import { Fragment, useState } from 'react';
 import { toast } from 'sonner';
 import { Chat } from '@/app/library/page';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 const DeleteChat = ({
   chatId,
@@ -20,7 +21,7 @@ const DeleteChat = ({
     setLoading(true);
     try {
       const res = await fetch(
-        `${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
+        `${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
         {
           method: 'DELETE',
           headers: {
diff --git a/ui/components/SearchImages.tsx b/ui/components/SearchImages.tsx
index b53b8b0..92eca83 100644
--- a/ui/components/SearchImages.tsx
+++ b/ui/components/SearchImages.tsx
@@ -4,6 +4,7 @@ import { useState } from 'react';
 import Lightbox from 'yet-another-react-lightbox';
 import 'yet-another-react-lightbox/styles.css';
 import { Message } from './ChatWindow';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 type Image = {
   url: string;
@@ -34,7 +35,7 @@ const SearchImages = ({
             const chatModel = localStorage.getItem('chatModel');
 
             const res = await fetch(
-              `${process.env.NEXT_PUBLIC_API_URL}/images`,
+              `${await getServerEnv("BACKEND_API_URL")}/images`,
               {
                 method: 'POST',
                 headers: {
diff --git a/ui/components/SearchVideos.tsx b/ui/components/SearchVideos.tsx
index 2646322..703fe28 100644
--- a/ui/components/SearchVideos.tsx
+++ b/ui/components/SearchVideos.tsx
@@ -4,6 +4,7 @@ import { useState } from 'react';
 import Lightbox, { GenericSlide, VideoSlide } from 'yet-another-react-lightbox';
 import 'yet-another-react-lightbox/styles.css';
 import { Message } from './ChatWindow';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 type Video = {
   url: string;
@@ -47,7 +48,7 @@ const Searchvideos = ({
             const chatModel = localStorage.getItem('chatModel');
 
             const res = await fetch(
-              `${process.env.NEXT_PUBLIC_API_URL}/videos`,
+              `${await getServerEnv("BACKEND_API_URL")}/videos`,
               {
                 method: 'POST',
                 headers: {
diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index 171e812..a13ea31 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -8,6 +8,7 @@ import React, {
   type SelectHTMLAttributes,
 } from 'react';
 import ThemeSwitcher from './theme/Switcher';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
 
@@ -88,7 +89,7 @@ const SettingsDialog = ({
     if (isOpen) {
       const fetchConfig = async () => {
         setIsLoading(true);
-        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+        const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
          headers: {
            'Content-Type': 'application/json',
          },
@@ -148,7 +149,7 @@ const SettingsDialog = ({
     setIsUpdating(true);
 
     try {
-      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+      await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
diff --git a/ui/lib/actions.ts b/ui/lib/actions.ts
index d7eb71f..9c03339 100644
--- a/ui/lib/actions.ts
+++ b/ui/lib/actions.ts
@@ -1,10 +1,11 @@
 import { Message } from '@/components/ChatWindow';
+import { getServerEnv } from '@/lib/serverEnvironment';
 
 export const getSuggestions = async (chatHisory: Message[]) => {
   const chatModel = localStorage.getItem('chatModel');
   const chatModelProvider = localStorage.getItem('chatModelProvider');
 
-  const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
+  const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/suggestions`, {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
diff --git a/ui/lib/serverEnvironment.ts b/ui/lib/serverEnvironment.ts
new file mode 100644
index 0000000..1179fd6
--- /dev/null
+++ b/ui/lib/serverEnvironment.ts
@@ -0,0 +1,29 @@
+async function fetchConfig() {
+  try {
+    const response = await fetch('/api/env');
+    if (response.ok) {
+      const data = await response.json();
+      sessionStorage.setItem('cachedConfig', JSON.stringify(data));
+      return data;
+    } else {
+      throw new Error('Failed to fetch config');
+    }
+  } catch (error) {
+    return null;
+  }
+}
+
+export async function getServerEnv(envVar: string): Promise<string> {
+  const cachedConfig = JSON.parse(sessionStorage.getItem('cachedConfig') || 'null');
+
+  if (cachedConfig) {
+    return cachedConfig[envVar];
+  }
+
+  const data = await fetchConfig();
+  if (!data) {
+    return "";
+  }
+
+  return data[envVar];
+}
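
Note for reviewers (not part of the patch): the sketch below illustrates how a client component would consume the runtime configuration introduced here. `getServerEnv` and the `/api/env` route are the ones added above; the `BackendStatus` component and its `/config` health check are hypothetical, purely for illustration.

```tsx
'use client';

import { useEffect, useState } from 'react';
import { getServerEnv } from '@/lib/serverEnvironment';

// Hypothetical component: resolves the backend URL at runtime via /api/env
// (cached in sessionStorage by getServerEnv) instead of a build-time env var.
const BackendStatus = () => {
  const [status, setStatus] = useState<string>('checking');

  useEffect(() => {
    const check = async () => {
      const apiUrl = await getServerEnv('BACKEND_API_URL');
      const res = await fetch(`${apiUrl}/config`, {
        headers: { 'Content-Type': 'application/json' },
      });
      setStatus(res.ok ? 'online' : 'offline');
    };

    check();
  }, []);

  return <p>Backend: {status}</p>;
};

export default BackendStatus;
```

Because the URLs are resolved per browser session rather than baked in at build time, the same frontend image can be pointed at a different backend purely by changing `BACKEND_API_URL` and `BACKEND_WS_URL` in the container environment.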