jira-webhook-llm/config.py

import os
import sys
from typing import Optional

import yaml
from loguru import logger


# Define a custom exception for configuration errors
class AppConfigError(Exception):
    pass


class Settings:
    def __init__(self, config_path: str = "config/application.yml"):
        """
        Loads configuration from a YAML file and overrides with environment variables.
        """
        # --- Load from YAML file ---
        try:
            with open(config_path, 'r') as f:
                # safe_load returns None for an empty file; fall back to an
                # empty dict so the .get() lookups below do not raise.
                config = yaml.safe_load(f) or {}
        except FileNotFoundError:
            raise AppConfigError(f"Configuration file not found at '{config_path}'.")
        except yaml.YAMLError as e:
            raise AppConfigError(f"Error parsing YAML file: {e}")
        # --- Read and Combine Settings (Environment variables take precedence) ---
        llm_config = config.get('llm', {})

        # General settings
        self.llm_mode: str = os.getenv("LLM_MODE", llm_config.get('mode', 'openai')).lower()

        # OpenAI settings
        openai_config = llm_config.get('openai', {})
        self.openai_api_key: Optional[str] = os.getenv("OPENAI_API_KEY", openai_config.get('api_key'))
        self.openai_api_base_url: Optional[str] = os.getenv("OPENAI_API_BASE_URL", openai_config.get('api_base_url'))
        self.openai_model: Optional[str] = os.getenv("OPENAI_MODEL", openai_config.get('model'))

        # Ollama settings
        ollama_config = llm_config.get('ollama', {})
        self.ollama_base_url: Optional[str] = os.getenv("OLLAMA_BASE_URL", ollama_config.get('base_url'))
        self.ollama_model: Optional[str] = os.getenv("OLLAMA_MODEL", ollama_config.get('model'))
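
        # Precedence example (illustrative): with OLLAMA_MODEL=mistral set in
        # the environment and 'model: llama3' in application.yml,
        # self.ollama_model resolves to 'mistral'; os.getenv only falls back
        # to the YAML value when the environment variable is unset.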
        self._validate()

    def _validate(self):
        """
        Validates that required configuration variables are set.
        """
        logger.info(f"LLM mode set to: '{self.llm_mode}'")

        if self.llm_mode == 'openai':
            if not self.openai_api_key:
                raise AppConfigError("LLM mode is 'openai', but OPENAI_API_KEY is not set.")
            if not self.openai_api_base_url:
                raise AppConfigError("LLM mode is 'openai', but OPENAI_API_BASE_URL is not set.")
            if not self.openai_model:
                raise AppConfigError("LLM mode is 'openai', but OPENAI_MODEL is not set.")
        elif self.llm_mode == 'ollama':
            if not self.ollama_base_url:
                raise AppConfigError("LLM mode is 'ollama', but OLLAMA_BASE_URL is not set.")
            if not self.ollama_model:
                raise AppConfigError("LLM mode is 'ollama', but OLLAMA_MODEL is not set.")
        else:
            raise AppConfigError(f"Invalid LLM_MODE: '{self.llm_mode}'. Must be 'openai' or 'ollama'.")

        logger.info("Configuration validated successfully.")


# Create a single, validated instance of the settings to be imported by other modules.
try:
    settings = Settings()
except AppConfigError as e:
    logger.error(f"FATAL: {e}")
    logger.error("Application shutting down due to configuration error.")
    sys.exit(1)  # Exit the application if configuration is invalid
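
# Example of how another module would consume this singleton (a sketch; the
# attribute names match those defined above, but the importing module and its
# log line are hypothetical):
#
#   from config import settings
#
#   active_model = settings.openai_model if settings.llm_mode == "openai" else settings.ollama_model
#   logger.info(f"LLM backend: {settings.llm_mode}, model: {active_model}")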