Move front end port to 3010 to avoid collision with apps which commonly use it, like open-webui for ollama.
Fix issue with server not able to reach searxng. Added config.toml that works locally. Fixed sample.config.toml
This commit is contained in:
parent
c62e7f091e
commit
77de9b776a
1 changed file with 12 additions and 0 deletions
12
config.toml
Normal file
12
config.toml
Normal file
|
@ -0,0 +1,12 @@
|
|||
# Perplexica server configuration.
# Copy to config.toml and fill in the API keys you intend to use;
# empty values disable the corresponding provider.

[GENERAL]
PORT = 3001 # Port to run the server on
SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"

[API_KEYS]
OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef

[API_ENDPOINTS]
# NOTE(review): hostname is case-sensitive for Docker service resolution —
# confirm "SearxNG" matches the service name in docker-compose.yaml.
SEARXNG = "http://SearxNG:8080" # SearxNG API URL
OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
|
Loading…
Add table
Reference in a new issue