From 3bb4e63f3eec0064a2aa6a7b0be53896c7ef6add Mon Sep 17 00:00:00 2001 From: Debanjum Date: Fri, 27 Jun 2025 15:18:07 -0700 Subject: [PATCH] Add ability to set default chat model via env var in docker-compose.yml --- docker-compose.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 71fe37b6..46451fda 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -79,8 +79,9 @@ services: # Default URL of SearxNG, the default web search engine used by Khoj. Its container is specified above - KHOJ_SEARXNG_URL=http://search:8080 # Uncomment line below to use with Ollama running on your local machine at localhost:11434. - # Change URL to use with other OpenAI API compatible providers like VLLM, LMStudio etc. + # Change URL to use with other OpenAI API compatible providers like vLLM, LM Studio, DeepInfra, DeepSeek, etc. # - OPENAI_BASE_URL=http://host.docker.internal:11434/v1/ + # - KHOJ_DEFAULT_CHAT_MODEL=qwen3 # # Uncomment appropriate lines below to use chat models by OpenAI, Anthropic, Google. # Ensure you set your provider specific API keys.