Move the front-end port to 3010 to avoid a collision with apps that commonly use it, such as open-webui for Ollama.

Fix an issue where the server was unable to reach SearxNG.
Added config.toml that works locally.
Fixed sample.config.toml
This commit is contained in:
chris depalma 2024-07-30 11:26:44 -04:00
parent c62e7f091e
commit 77de9b776a

12
config.toml Normal file
View file

@ -0,0 +1,12 @@
[GENERAL]
PORT = 3001 # Port the backend server listens on
SIMILARITY_MEASURE = "cosine" # Similarity metric: "cosine" or "dot"

[API_KEYS]
OPENAI = "" # OpenAI API key, e.g. sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key, e.g. gsk_1234567890abcdef1234567890abcdef
ANTHROPIC = "" # Anthropic API key, e.g. sk-ant-1234567890abcdef1234567890abcdef

[API_ENDPOINTS]
SEARXNG = "http://SearxNG:8080" # SearxNG API URL (Docker service name — NOTE(review): confirm the container resolves with this exact casing)
OLLAMA = "" # Ollama API URL, e.g. http://host.docker.internal:11434