# jira-webhook-llm/docker-compose.yml
name: jira-webhook-stack

services:
  ollama-jira:
    image: artifactory.pfizer.com/mdmhub-docker-dev/mdmtools/ollama/ollama-preloaded:0.0.1
    ports:
      - "11434:11434"
    restart: unless-stopped
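    # Optional healthcheck sketch, so dependents can gate on Ollama being
    # ready rather than merely started. Assumption (not verified against the
    # preloaded image): the ollama CLI is on PATH, as in upstream ollama images.
    # healthcheck:
    #   test: ["CMD", "ollama", "list"]
    #   interval: 10s
    #   timeout: 5s
    #   retries: 5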

  # Service for the FastAPI application
  jira-webhook-llm:
    image: artifactory.pfizer.com/mdmhub-docker-dev/mdmtools/ollama/jira-webhook-llm:0.2.0
    ports:
      - "8000:8000"
    environment:
      # Set the LLM mode to 'ollama' or 'openai'
      LLM_MODE: ollama
      # Sandbox configuration: this URL points at the external sandbox
      # gateway, not at the ollama-jira service on the compose network
      OLLAMA_BASE_URL: "https://api-amer-sandbox-gbl-mdm-hub.pfizer.com/ollama"
      # Specify the model to use
      OLLAMA_MODEL: phi4-mini:latest
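      # Alternative sketch (assumption: you want the bundled ollama-jira
      # service instead of the sandbox gateway). The service name resolves
      # as a hostname on the compose network:
      # OLLAMA_BASE_URL: "http://ollama-jira:11434"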
    # Start the Ollama container before the app. Note: this short list form
    # only orders startup; it does not wait for Ollama to be healthy.
    depends_on:
      - ollama-jira
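    # To actually wait for health, use the long form of depends_on together
    # with a healthcheck on ollama-jira (e.g. the one sketched above):
    # depends_on:
    #   ollama-jira:
    #     condition: service_healthy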
    restart: unless-stopped
    # Run the FastAPI application with Uvicorn.
    # --host 0.0.0.0 is required so the app is reachable from outside the container.
    # Add --reload during development; omit it in production.
    command: uvicorn main:app --host 0.0.0.0 --port 8000
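
# Usage sketch: bring the stack up, then check FastAPI's auto-generated
# docs UI (served at /docs by default, if not disabled in the app):
#   docker compose up -d
#   curl http://localhost:8000/docs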