# jira-webhook-llm/config.py
# Snapshot metadata: 2025-07-13 20:14:09 +02:00 — 186 lines, 7.3 KiB, Python
import os
import sys
from typing import Optional
from pydantic_settings import BaseSettings
from pydantic import validator, ConfigDict
from loguru import logger
from watchfiles import watch, Change
from threading import Thread
from langfuse import Langfuse
from langfuse.langchain import CallbackHandler
class LangfuseConfig(BaseSettings):
    """Configuration for the Langfuse observability client.

    Populated from ``LANGFUSE_``-prefixed environment variables and the
    ``.env`` file (see ``model_config`` below).
    """

    # Master switch; Settings._init_langfuse flips this off when credentials
    # are missing or client construction fails.
    enabled: bool = True
    public_key: Optional[str] = None
    secret_key: Optional[str] = None
    host: Optional[str] = None

    @validator('host')
    def validate_host(cls, v):
        """Reject hosts that are not absolute http(s) URLs."""
        if v and not v.startswith(('http://', 'https://')):
            raise ValueError("Langfuse host must start with http:// or https://")
        return v

    def __init__(self, **data):
        """Initialize from env/.env, logging diagnostic context.

        Raises whatever pydantic raises on invalid settings, after logging
        which relevant environment variables were present.
        """
        # BUGFIX: loguru interpolates with str.format-style ``{}``
        # placeholders, not logging's lazy ``%s`` — the original calls
        # printed a literal "%s" and never logged the values. Use f-strings.
        # SECURITY: never log credential values; report presence only.
        def present(name: str) -> str:
            return 'set' if os.getenv(name) else 'not set'

        try:
            logger.info(f"Initializing LangfuseConfig with data keys: {sorted(data)}")
            logger.info("Environment variables:")
            logger.info(f"LANGFUSE_PUBLIC_KEY: {present('LANGFUSE_PUBLIC_KEY')}")
            logger.info(f"LANGFUSE_SECRET_KEY: {present('LANGFUSE_SECRET_KEY')}")
            logger.info(f"LANGFUSE_HOST: {os.getenv('LANGFUSE_HOST')}")
            super().__init__(**data)
            logger.info("LangfuseConfig initialized successfully")
            logger.info(f"Public Key: {'set' if self.public_key else 'not set'}")
            logger.info(f"Secret Key: {'set' if self.secret_key else 'not set'}")
            logger.info(f"Host: {self.host}")
        except Exception as e:
            logger.error(f"Failed to initialize LangfuseConfig: {e}")
            logger.error("Current environment variables:")
            logger.error(f"LANGFUSE_PUBLIC_KEY: {present('LANGFUSE_PUBLIC_KEY')}")
            logger.error(f"LANGFUSE_SECRET_KEY: {present('LANGFUSE_SECRET_KEY')}")
            logger.error(f"LANGFUSE_HOST: {os.getenv('LANGFUSE_HOST')}")
            raise

    model_config = ConfigDict(
        env_prefix='LANGFUSE_',
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore',
        env_nested_delimiter='__',
        case_sensitive=True
    )
class LogConfig(BaseSettings):
    """Logging settings, read from ``LOG_``-prefixed environment variables."""

    # Log verbosity; defaults to the conventional INFO level.
    level: str = 'INFO'

    model_config = ConfigDict(env_prefix='LOG_', extra='ignore')
class LLMConfig(BaseSettings):
    """LLM backend selection plus per-backend connection settings.

    Read from ``LLM_``-prefixed environment variables and ``.env``.
    """

    # Which backend to use; must be one of the two values checked below.
    mode: str = 'ollama'

    # OpenAI-compatible endpoint settings (required when mode == 'openai').
    openai_api_key: Optional[str] = None
    openai_api_base_url: Optional[str] = None
    openai_model: Optional[str] = None

    # Ollama endpoint settings (required when mode == 'ollama').
    ollama_base_url: Optional[str] = None
    ollama_model: Optional[str] = None

    @validator('mode')
    def validate_mode(cls, v):
        """Accept only the two supported backends."""
        if v not in ('openai', 'ollama'):
            raise ValueError("LLM mode must be either 'openai' or 'ollama'")
        return v

    model_config = ConfigDict(
        env_prefix='LLM_',
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore'
    )
class Settings:
    """Aggregate application configuration and supporting clients.

    Construction loads LogConfig, LLMConfig and LangfuseConfig from the
    environment, validates the active LLM backend's settings, starts a
    background config watcher thread, and (if enabled) initializes the
    Langfuse client and callback handler.
    """

    def __init__(self):
        try:
            # BUGFIX: loguru formats with str.format-style ``{}``, not
            # logging's ``%s`` — the original "%s" calls printed a literal
            # "%s" and never interpolated. Use f-strings throughout.
            logger.info("Initializing LogConfig")
            self.log = LogConfig()
            logger.info(f"LogConfig initialized: {self.log.model_dump()}")
            logger.info("Initializing LLMConfig")
            self.llm = LLMConfig()
            logger.info(f"LLMConfig initialized: {self._safe_dump(self.llm)}")
            logger.info("Initializing LangfuseConfig")
            self.langfuse = LangfuseConfig()
            logger.info(f"LangfuseConfig initialized: {self._safe_dump(self.langfuse)}")
            logger.info("Validating configuration")
            self._validate()
            logger.info("Starting config watcher")
            self._start_watcher()
            logger.info("Initializing Langfuse")
            self._init_langfuse()
            logger.info("Configuration initialized successfully")
        except Exception as e:
            logger.error(f"Configuration initialization failed: {e}")
            logger.error("Current configuration state:")
            logger.error(f"LogConfig: {self.log.model_dump() if hasattr(self, 'log') else 'Not initialized'}")
            logger.error(f"LLMConfig: {self._safe_dump(self.llm) if hasattr(self, 'llm') else 'Not initialized'}")
            logger.error(f"LangfuseConfig: {self._safe_dump(self.langfuse) if hasattr(self, 'langfuse') else 'Not initialized'}")
            raise

    @staticmethod
    def _safe_dump(cfg):
        """Return model_dump() with credential-like fields redacted.

        SECURITY: the original logged the Langfuse secret key and OpenAI
        API key verbatim in error dumps.
        """
        dumped = cfg.model_dump()
        for key, value in dumped.items():
            if value and ('secret' in key or 'api_key' in key):
                dumped[key] = '***'
        return dumped

    def _validate(self):
        """Ensure the selected LLM backend has all required settings.

        Raises ValueError naming the first missing setting.
        """
        logger.info(f"LLM mode set to: '{self.llm.mode}'")
        if self.llm.mode == 'openai':
            if not self.llm.openai_api_key:
                raise ValueError("LLM mode is 'openai', but OPENAI_API_KEY is not set.")
            if not self.llm.openai_api_base_url:
                raise ValueError("LLM mode is 'openai', but OPENAI_API_BASE_URL is not set.")
            if not self.llm.openai_model:
                raise ValueError("LLM mode is 'openai', but OPENAI_MODEL is not set.")
        elif self.llm.mode == 'ollama':
            if not self.llm.ollama_base_url:
                raise ValueError("LLM mode is 'ollama', but OLLAMA_BASE_URL is not set.")
            if not self.llm.ollama_model:
                raise ValueError("LLM mode is 'ollama', but OLLAMA_MODEL is not set.")
        logger.info("Configuration validated successfully.")

    def _init_langfuse(self):
        """Create the Langfuse client and callback handler, if enabled.

        Any failure (missing credentials or client error) disables Langfuse
        rather than aborting startup — tracing is best-effort.
        """
        if self.langfuse.enabled:
            try:
                # Verify all required credentials are present
                if not all([self.langfuse.public_key, self.langfuse.secret_key, self.langfuse.host]):
                    raise ValueError("Missing required Langfuse credentials")
                # Initialize Langfuse client
                self.langfuse_client = Langfuse(
                    public_key=self.langfuse.public_key,
                    secret_key=self.langfuse.secret_key,
                    host=self.langfuse.host
                )
                # Initialize CallbackHandler
                self.langfuse_handler = CallbackHandler(
                    public_key=self.langfuse.public_key,
                    secret_key=self.langfuse.secret_key,
                    host=self.langfuse.host
                )
                logger.info("Langfuse client and handler initialized successfully")
            except ValueError as e:
                logger.warning(f"Langfuse configuration error: {e}. Disabling Langfuse.")
                self.langfuse.enabled = False
            except Exception as e:
                logger.error(f"Failed to initialize Langfuse: {e}")
                self.langfuse.enabled = False

    def _start_watcher(self):
        """Hot-reload LLM settings when config/application.yml changes.

        NOTE(review): settings are actually loaded from the environment and
        ``.env``, yet the watcher observes ``config/application.yml`` —
        confirm this path is the intended reload trigger.
        """
        def watch_config():
            try:
                for changes in watch('config/application.yml'):
                    for change in changes:
                        if change[0] == Change.modified:
                            logger.info("Configuration file modified, reloading settings...")
                            try:
                                self.llm = LLMConfig()
                                self._validate()
                                logger.info("Configuration reloaded successfully")
                            except Exception as e:
                                logger.error(f"Error reloading configuration: {e}")
            except Exception as e:
                # BUGFIX: watchfiles raises (e.g. FileNotFoundError) when the
                # watched path does not exist, which previously killed this
                # daemon thread silently. Log instead of dying unnoticed.
                logger.error(f"Config watcher stopped: {e}")

        Thread(target=watch_config, daemon=True).start()
# Build the single shared Settings instance imported by the rest of the app.
# A configuration ValueError is fatal: log it and terminate the process.
try:
    settings = Settings()
except ValueError as config_error:
    logger.error(f"FATAL: {config_error}")
    logger.error("Application shutting down due to configuration error.")
    sys.exit(1)