name: jira-webhook-stack

services:
  ollama-jira:
    image: artifactory.pfizer.com/mdmhub-docker-dev/mdmtools/ollama/ollama-preloaded:0.0.2
    ports:
      - "11434:11434"
    restart: unless-stopped
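
    # A sketch only (not verified against this image): a healthcheck lets
    # dependents wait for Ollama to be ready rather than merely started.
    # Ollama answers plain HTTP on "/", but curl being available inside the
    # preloaded image is an assumption; adjust or drop this if it is not.
    # healthcheck:
    #   test: ["CMD", "curl", "-fsS", "http://localhost:11434/"]
    #   interval: 10s
    #   timeout: 5s
    #   retries: 5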

  # Service for your FastAPI application
  jira-webhook-llm:
    image: artifactory.pfizer.com/mdmhub-docker-dev/mdmtools/ollama/jira-webhook-llm:0.2.5
    ports:
      - "8000:8000"
    environment:
      # Set the LLM mode to 'ollama' or 'openai'
      LLM_MODE: ollama

      # Point to the Ollama service within the Docker Compose network;
      # 'ollama-jira' is the service name, which acts as a hostname on that network
      # OLLAMA_BASE_URL: "https://api-amer-sandbox-gbl-mdm-hub.pfizer.com/ollama"
      OLLAMA_BASE_URL: "http://ollama-jira:11434"

      # Specify the model to use
      # OLLAMA_MODEL: phi4-mini:latest
      OLLAMA_MODEL: qwen3:4b
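
      # A sketch of .env-driven overrides (assumes a .env file next to this
      # compose file; the defaults after ":-" match the values above):
      # LLM_MODE: ${LLM_MODE:-ollama}
      # OLLAMA_MODEL: ${OLLAMA_MODEL:-qwen3:4b}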

    # Start the Ollama service before the app; note that this short form only
    # waits for the container to start, not for it to be healthy
    depends_on:
      - ollama-jira
    restart: unless-stopped
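
    # A sketch of health-gated startup (assumes the commented healthcheck on
    # ollama-jira above is enabled); it replaces the list form of depends_on:
    # depends_on:
    #   ollama-jira:
    #     condition: service_healthy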
|
# Command to run your FastAPI application using Uvicorn
|
|
# --host 0.0.0.0 is crucial for the app to be accessible from outside the container
|
|
# --reload is good for development; remove for production
|
|
command: uvicorn main:app --host 0.0.0.0 --port 8000 |
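
    # Development variant (hot reload on code changes); use it instead of,
    # not alongside, the command above:
    # command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload

# Quick smoke test once the stack is up (the /docs path assumes the app keeps
# FastAPI's default interactive docs enabled):
#   curl http://localhost:11434/       -> Ollama should report it is running
#   curl http://localhost:8000/docs    -> FastAPI interactive API docs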