Added automatic docker build on merge to master
This PR includes: - Automatic build & push to docker hub when pushing to master, or when triggering a workflow dispatch on master - Automatic docker build on pull requests to validate changes - Updated the docker-compose.yaml to use the container images, rather than building locally - Added support for defining backend settings using the container environment -> All options defined in config.toml can now instead be loaded from the environment -> Order of precedence is environment definition, config.toml, and finally default inline configuration defined in config.ts - Added support for defining frontend settings using the container environment -> Added a dynamic api route to load the container environment definitions on the server, and provide them to the client -> Added a library function to fetch from the newly created API, caching the response in the session storage -> Modified existing calls to `process.env` to use the new library function -> Left the initial statically compiled environment definitions in place as a backup definition, if no environment definitions are provided Remaining tasks to complete before merging to [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica): - Add secret definitions for `DOCKER_USERNAME` and `DOCKER_PASSWORD` to [ItzCrazyKns/Perplexica](https://github.com/ItzCrazyKns/Perplexica) to ensure push to dockerhub works on base branch - Update documentation with information about changes
This commit is contained in:
parent
9c1936ec2c
commit
ec158c0cdf
15 changed files with 176 additions and 43 deletions
48
.github/workflows/docker-build.yml
vendored
Normal file
48
.github/workflows/docker-build.yml
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
name: Build and Push Docker images

# Build on every PR targeting master (validation only), and build + push
# to Docker Hub on pushes to master or a manual workflow dispatch.
on:
  workflow_dispatch:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # v2 of these actions runs on a deprecated Node.js runtime; use the
      # current major versions instead.
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Credentials are only needed (and only available) outside PR builds.
      - name: Log in to Docker Hub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      # Push only for non-PR events on master; PR builds just validate.
      - name: Build frontend Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: app.dockerfile
          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-frontend:latest

      - name: Build backend Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: backend.dockerfile
          push: ${{ (github.event_name != 'pull_request') && (github.ref == 'refs/heads/master') }}
          tags: ${{ secrets.DOCKER_USERNAME }}/perplexica-backend:latest

      - name: Log out from Docker Hub
        if: github.event_name != 'pull_request'
        run: docker logout
|
|
@ -1,7 +1,7 @@
|
|||
FROM node:alpine
|
||||
|
||||
ARG NEXT_PUBLIC_WS_URL
|
||||
ARG NEXT_PUBLIC_API_URL
|
||||
ARG NEXT_PUBLIC_WS_URL='ws://127.0.0.1:3001'
|
||||
ARG NEXT_PUBLIC_API_URL='http://127.0.0.1:3001/api'
|
||||
ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
|
||||
ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
|
||||
|
||||
|
|
|
@ -6,13 +6,10 @@ WORKDIR /home/perplexica
|
|||
|
||||
COPY src /home/perplexica/src
|
||||
COPY tsconfig.json /home/perplexica/
|
||||
COPY config.toml /home/perplexica/
|
||||
COPY drizzle.config.ts /home/perplexica/
|
||||
COPY package.json /home/perplexica/
|
||||
COPY yarn.lock /home/perplexica/
|
||||
|
||||
RUN sed -i "s|SEARXNG = \".*\"|SEARXNG = \"${SEARXNG_API_URL}\"|g" /home/perplexica/config.toml
|
||||
|
||||
RUN mkdir /home/perplexica/data
|
||||
|
||||
RUN yarn install
|
||||
|
|
|
@ -10,17 +10,16 @@ services:
|
|||
restart: unless-stopped
|
||||
|
||||
perplexica-backend:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: backend.dockerfile
|
||||
args:
|
||||
- SEARXNG_API_URL=http://searxng:8080
|
||||
image: docker.io/andypenno/perplexica-backend:latest
|
||||
depends_on:
|
||||
- searxng
|
||||
ports:
|
||||
- 3001:3001
|
||||
volumes:
|
||||
- backend-dbstore:/home/perplexica/data
|
||||
- ./config.toml:/home/perplexica/config.toml
|
||||
environment:
|
||||
- SEARXNG_API_URL=http://searxng:4000
|
||||
extra_hosts:
|
||||
- 'host.docker.internal:host-gateway'
|
||||
networks:
|
||||
|
@ -28,16 +27,14 @@ services:
|
|||
restart: unless-stopped
|
||||
|
||||
perplexica-frontend:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app.dockerfile
|
||||
args:
|
||||
- NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
|
||||
- NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
|
||||
image: docker.io/andypenno/perplexica-frontend:latest
|
||||
depends_on:
|
||||
- perplexica-backend
|
||||
ports:
|
||||
- 3000:3000
|
||||
environment:
|
||||
- BACKEND_API_URL=http://127.0.0.1:3001/api
|
||||
- BACKEND_WS_URL=ws://127.0.0.1:3001
|
||||
networks:
|
||||
- perplexica-network
|
||||
restart: unless-stopped
|
||||
|
|
|
@ -8,5 +8,5 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
|
|||
ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
|
||||
|
||||
[API_ENDPOINTS]
|
||||
SEARXNG = "http://localhost:32768" # SearxNG API URL
|
||||
SEARXNG = "" # SearxNG API URL - http://localhost:32768
|
||||
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
|
|
@ -1,6 +1,7 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import toml from '@iarna/toml';
|
||||
import process from 'process';
|
||||
|
||||
const configFileName = 'config.toml';
|
||||
|
||||
|
@ -24,25 +25,52 @@ type RecursivePartial<T> = {
|
|||
[P in keyof T]?: RecursivePartial<T[P]>;
|
||||
};
|
||||
|
||||
const loadConfig = () =>
|
||||
toml.parse(
|
||||
fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
|
||||
) as any as Config;
|
||||
const configFilePath = path.join(__dirname, `../${configFileName}`);
|
||||
|
||||
export const getPort = () => loadConfig().GENERAL.PORT;
|
||||
const defaultConfig: Config = {
|
||||
GENERAL: {
|
||||
PORT: 3001,
|
||||
SIMILARITY_MEASURE: "cosine"
|
||||
},
|
||||
API_KEYS: {
|
||||
OPENAI: "",
|
||||
GROQ: "",
|
||||
ANTHROPIC: ""
|
||||
},
|
||||
API_ENDPOINTS: {
|
||||
SEARXNG: "http://localhost:32768",
|
||||
OLLAMA: ""
|
||||
}
|
||||
}
|
||||
|
||||
const loadConfig = () => {
|
||||
if (fs.existsSync(configFilePath)) {
|
||||
return toml.parse(fs.readFileSync(configFilePath, 'utf-8')) as any as Config;
|
||||
} else {
|
||||
return defaultConfig;
|
||||
}
|
||||
}
|
||||
|
||||
export const getPort = () =>
|
||||
process.env.PORT ?? loadConfig().GENERAL.PORT;
|
||||
|
||||
export const getSimilarityMeasure = () =>
|
||||
loadConfig().GENERAL.SIMILARITY_MEASURE;
|
||||
process.env.SIMILARITY_MEASURE ?? loadConfig().GENERAL.SIMILARITY_MEASURE;
|
||||
|
||||
export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
|
||||
export const getOpenaiApiKey = () =>
|
||||
process.env.OPENAI_API_KEY ?? loadConfig().API_KEYS.OPENAI;
|
||||
|
||||
export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
|
||||
export const getGroqApiKey = () =>
|
||||
process.env.GROQ_API_KEY ?? loadConfig().API_KEYS.GROQ;
|
||||
|
||||
export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
|
||||
export const getAnthropicApiKey = () =>
|
||||
process.env.ANTHROPIC_API_KEY ?? loadConfig().API_KEYS.ANTHROPIC;
|
||||
|
||||
export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
|
||||
export const getSearxngApiEndpoint = () =>
|
||||
process.env.SEARXNG_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.SEARXNG;
|
||||
|
||||
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
||||
export const getOllamaApiEndpoint = () =>
|
||||
process.env.OLLAMA_API_ENDPOINT ?? loadConfig().API_ENDPOINTS.OLLAMA;
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
const currentConfig = loadConfig();
|
||||
|
@ -65,8 +93,5 @@ export const updateConfig = (config: RecursivePartial<Config>) => {
|
|||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, `../${configFileName}`),
|
||||
toml.stringify(config),
|
||||
);
|
||||
fs.writeFileSync(configFilePath, toml.stringify(config));
|
||||
};
|
||||
|
|
16
ui/app/api/env/route.ts
vendored
Normal file
16
ui/app/api/env/route.ts
vendored
Normal file
|
@ -0,0 +1,16 @@
|
|||
import process from 'process';
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
// Enable the Runtime
|
||||
export const runtime = "edge"
|
||||
|
||||
export async function GET(_request: Request) {
|
||||
// Access environment variables
|
||||
const envVars = {
|
||||
'BACKEND_API_URL': process.env.BACKEND_API_URL ?? process.env.NEXT_PUBLIC_API_URL,
|
||||
'BACKEND_WS_URL': process.env.BACKEND_WS_URL ?? process.env.NEXT_PUBLIC_WS_URL
|
||||
}
|
||||
|
||||
// Return the environment variables as a JSON response
|
||||
return NextResponse.json(envVars);
|
||||
}
|
|
@ -5,6 +5,7 @@ import { formatTimeDifference } from '@/lib/utils';
|
|||
import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
|
||||
import Link from 'next/link';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
export interface Chat {
|
||||
id: string;
|
||||
|
@ -21,7 +22,7 @@ const Page = () => {
|
|||
const fetchChats = async () => {
|
||||
setLoading(true);
|
||||
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/chats`, {
|
||||
const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/chats`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
|
|
|
@ -10,6 +10,7 @@ import { toast } from 'sonner';
|
|||
import { useSearchParams } from 'next/navigation';
|
||||
import { getSuggestions } from '@/lib/actions';
|
||||
import Error from 'next/error';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
export type Message = {
|
||||
messageId: string;
|
||||
|
@ -22,13 +23,16 @@ export type Message = {
|
|||
};
|
||||
|
||||
const useSocket = (
|
||||
url: string,
|
||||
url: string | null,
|
||||
setIsWSReady: (ready: boolean) => void,
|
||||
setError: (error: boolean) => void,
|
||||
) => {
|
||||
const [ws, setWs] = useState<WebSocket | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!url) {
|
||||
return;
|
||||
}
|
||||
if (!ws) {
|
||||
const connectWs = async () => {
|
||||
let chatModel = localStorage.getItem('chatModel');
|
||||
|
@ -39,7 +43,7 @@ const useSocket = (
|
|||
);
|
||||
|
||||
const providers = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/models`,
|
||||
`${await getServerEnv("BACKEND_API_URL")}/models`,
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
|
@ -220,7 +224,7 @@ const loadMessages = async (
|
|||
setNotFound: (notFound: boolean) => void,
|
||||
) => {
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
|
||||
`${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
|
@ -260,6 +264,8 @@ const loadMessages = async (
|
|||
};
|
||||
|
||||
const ChatWindow = ({ id }: { id?: string }) => {
|
||||
const [wsServerUrl, setWsServerUrl] = useState<string | null>(null);
|
||||
|
||||
const searchParams = useSearchParams();
|
||||
const initialMessage = searchParams.get('q');
|
||||
|
||||
|
@ -271,7 +277,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
|
||||
const [isWSReady, setIsWSReady] = useState(false);
|
||||
const ws = useSocket(
|
||||
process.env.NEXT_PUBLIC_WS_URL!,
|
||||
wsServerUrl,
|
||||
setIsWSReady,
|
||||
setHasError,
|
||||
);
|
||||
|
@ -323,6 +329,15 @@ const ChatWindow = ({ id }: { id?: string }) => {
|
|||
}
|
||||
}, [isMessagesLoaded, isWSReady]);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchWsServerUrl = async () => {
|
||||
const url = await getServerEnv("BACKEND_WS_URL");
|
||||
setWsServerUrl(url);
|
||||
};
|
||||
|
||||
fetchWsServerUrl();
|
||||
}, []);
|
||||
|
||||
const sendMessage = async (message: string) => {
|
||||
if (loading) return;
|
||||
setLoading(true);
|
||||
|
|
|
@ -3,6 +3,7 @@ import { Dialog, Transition } from '@headlessui/react';
|
|||
import { Fragment, useState } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
import { Chat } from '@/app/library/page';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
const DeleteChat = ({
|
||||
chatId,
|
||||
|
@ -20,7 +21,7 @@ const DeleteChat = ({
|
|||
setLoading(true);
|
||||
try {
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/chats/${chatId}`,
|
||||
`${await getServerEnv("BACKEND_API_URL")}/chats/${chatId}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
|
|
|
@ -4,6 +4,7 @@ import { useState } from 'react';
|
|||
import Lightbox from 'yet-another-react-lightbox';
|
||||
import 'yet-another-react-lightbox/styles.css';
|
||||
import { Message } from './ChatWindow';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
type Image = {
|
||||
url: string;
|
||||
|
@ -34,7 +35,7 @@ const SearchImages = ({
|
|||
const chatModel = localStorage.getItem('chatModel');
|
||||
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/images`,
|
||||
`${await getServerEnv("BACKEND_API_URL")}/images`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
|
|
@ -4,6 +4,7 @@ import { useState } from 'react';
|
|||
import Lightbox, { GenericSlide, VideoSlide } from 'yet-another-react-lightbox';
|
||||
import 'yet-another-react-lightbox/styles.css';
|
||||
import { Message } from './ChatWindow';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
type Video = {
|
||||
url: string;
|
||||
|
@ -47,7 +48,7 @@ const Searchvideos = ({
|
|||
const chatModel = localStorage.getItem('chatModel');
|
||||
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_API_URL}/videos`,
|
||||
`${await getServerEnv("BACKEND_API_URL")}/videos`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
|
|
@ -8,6 +8,7 @@ import React, {
|
|||
type SelectHTMLAttributes,
|
||||
} from 'react';
|
||||
import ThemeSwitcher from './theme/Switcher';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
|
||||
|
||||
|
@ -88,7 +89,7 @@ const SettingsDialog = ({
|
|||
if (isOpen) {
|
||||
const fetchConfig = async () => {
|
||||
setIsLoading(true);
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
|
||||
const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
|
@ -148,7 +149,7 @@ const SettingsDialog = ({
|
|||
setIsUpdating(true);
|
||||
|
||||
try {
|
||||
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
|
||||
await fetch(`${await getServerEnv("BACKEND_API_URL")}/config`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
import { Message } from '@/components/ChatWindow';
|
||||
import { getServerEnv } from '@/lib/serverEnvironment';
|
||||
|
||||
export const getSuggestions = async (chatHisory: Message[]) => {
|
||||
const chatModel = localStorage.getItem('chatModel');
|
||||
const chatModelProvider = localStorage.getItem('chatModelProvider');
|
||||
|
||||
const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
|
||||
const res = await fetch(`${await getServerEnv("BACKEND_API_URL")}/suggestions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
|
|
29
ui/lib/serverEnvironment.ts
Normal file
29
ui/lib/serverEnvironment.ts
Normal file
|
@ -0,0 +1,29 @@
|
|||
async function fetchConfig() {
|
||||
try {
|
||||
const response = await fetch('/api/env');
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
sessionStorage.setItem('cachedConfig', JSON.stringify(data));
|
||||
return data;
|
||||
} else {
|
||||
throw new Error('Failed to fetch config');
|
||||
}
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getServerEnv(envVar: string): Promise<string> {
|
||||
const cachedConfig = JSON.parse(sessionStorage.getItem('cachedConfig') || 'null');
|
||||
|
||||
if (cachedConfig) {
|
||||
return cachedConfig[envVar];
|
||||
}
|
||||
|
||||
const data = await fetchConfig();
|
||||
if (!data) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return data[envVar];
|
||||
}
|
Loading…
Add table
Reference in a new issue