jira-webhook-llm/config/application.yml

# Default application configuration
llm:
  # The mode to run the application in.
  # Can be 'openai' or 'ollama'.
  # This can be overridden by the LLM_MODE environment variable.
  mode: ollama

  # Settings for OpenAI-compatible APIs (like OpenRouter)
  openai:
    # It's HIGHLY recommended to set this via an environment variable
    # instead of saving it in this file.
    # Can be overridden by OPENAI_API_KEY
    api_key: "sk-or-v1-..."
    # Can be overridden by OPENAI_API_BASE_URL
    api_base_url: "https://openrouter.ai/api/v1"
    # Can be overridden by OPENAI_MODEL
    model: "deepseek/deepseek-chat:free"

  # Settings for Ollama
  ollama:
    # Can be overridden by OLLAMA_BASE_URL
    base_url: "http://192.168.0.140:11434"
    # base_url: "https://api-amer-sandbox-gbl-mdm-hub.pfizer.com/ollama"
    # Can be overridden by OLLAMA_MODEL
    model: "phi4-mini:latest"
    # model: "qwen3:1.7b"
    # model: "smollm:360m"
    # model: "qwen3:0.6b"
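
The comments above describe a file-then-environment precedence: values in application.yml are defaults, and each listed environment variable wins when set. Below is a minimal sketch of how such a loader could work, assuming PyYAML and the env-var names given in the comments; the function name `load_llm_config` and its placement are hypothetical and not taken from the project's actual code.

import os
import yaml  # pip install pyyaml


def load_llm_config(path: str = "config/application.yml") -> dict:
    """Read application.yml, then apply the documented env-var overrides."""
    with open(path, "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)["llm"]

    # Each (env var -> config path) pair mirrors a "Can be overridden by ..."
    # comment in application.yml.
    overrides = {
        "LLM_MODE": ("mode",),
        "OPENAI_API_KEY": ("openai", "api_key"),
        "OPENAI_API_BASE_URL": ("openai", "api_base_url"),
        "OPENAI_MODEL": ("openai", "model"),
        "OLLAMA_BASE_URL": ("ollama", "base_url"),
        "OLLAMA_MODEL": ("ollama", "model"),
    }
    for env_var, keys in overrides.items():
        value = os.environ.get(env_var)
        if value is not None:
            node = cfg
            for key in keys[:-1]:
                node = node.setdefault(key, {})
            node[keys[-1]] = value
    return cfg


if __name__ == "__main__":
    config = load_llm_config()
    mode = config["mode"]
    print(f"mode={mode}, model={config[mode]['model']}")

With this layering, a one-off switch of backends needs no file edit; something like `LLM_MODE=openai OPENAI_API_KEY=sk-or-v1-... python app.py` (entry-point name assumed) would select the OpenRouter settings while the committed file keeps its Ollama defaults and no secret is stored on disk.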