jira-webhook-llm/config.py

import logging
import sys
from pathlib import Path
from typing import Optional

import yaml
from langfuse import Langfuse
from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

_logger = logging.getLogger(__name__)


class LangfuseConfig(BaseSettings):
    """Langfuse observability settings, read from LANGFUSE_* variables (or .env)."""

    enabled: bool = False
    secret_key: Optional[str] = None
    public_key: Optional[str] = None
    host: Optional[str] = None

    model_config = SettingsConfigDict(
        env_prefix='LANGFUSE_',
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore'
    )


class LLMConfig(BaseSettings):
    """LLM backend selection and credentials, read from LLM_* variables (or .env)."""

    mode: str = 'ollama'

    # OpenAI settings
    openai_api_key: Optional[str] = None
    openai_api_base_url: Optional[str] = None
    openai_model: Optional[str] = None

    # Ollama settings
    ollama_base_url: Optional[str] = None
    ollama_model: Optional[str] = None

    # Gemini settings
    gemini_api_key: Optional[str] = None
    gemini_model: Optional[str] = None
    gemini_api_base_url: Optional[str] = None

    @field_validator('mode')
    @classmethod
    def validate_mode(cls, v):
        if v not in ['openai', 'ollama', 'gemini']:
            raise ValueError("LLM mode must be 'openai', 'ollama', or 'gemini'")
        return v

    model_config = SettingsConfigDict(
        env_prefix='LLM_',
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore'
    )


class ProcessorConfig(BaseSettings):
    """Queue processor tuning, read from PROCESSOR_* variables (or .env)."""

    poll_interval_seconds: int = 10
    max_retries: int = 5
    initial_retry_delay_seconds: int = 60

    model_config = SettingsConfigDict(
        env_prefix='PROCESSOR_',
        env_file='.env',
        env_file_encoding='utf-8',
        extra='ignore'
    )
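
# Example .env entries (an illustrative sketch; variable names follow the env_prefix
# mappings above, values are placeholders and not taken from the project):
#
#   LLM_MODE=ollama
#   LLM_OLLAMA_BASE_URL=http://localhost:11434
#   LLM_OLLAMA_MODEL=llama3
#   PROCESSOR_POLL_INTERVAL_SECONDS=10
#   LANGFUSE_ENABLED=true
#   LANGFUSE_PUBLIC_KEY=pk-...
#   LANGFUSE_SECRET_KEY=sk-...
#   LANGFUSE_HOST=https://cloud.langfuse.com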


class Settings:
    """Aggregates all configuration: environment/.env takes precedence, YAML is the fallback."""

    def __init__(self):
        try:
            # Load settings from YAML file as a fallback
            yaml_config = self._load_yaml_config()

            # Load settings from environment and .env file first
            self.llm = LLMConfig()
            self.processor = ProcessorConfig()
            self.langfuse = LangfuseConfig()

            # Apply YAML configuration for any values not set by the environment
            self._apply_yaml_fallback(yaml_config)

            # Initialize Langfuse client if enabled
            self.langfuse_client: Optional[Langfuse] = None
            if self.langfuse.enabled:
                if self.langfuse.secret_key and self.langfuse.public_key and self.langfuse.host:
                    self.langfuse_client = Langfuse(
                        public_key=self.langfuse.public_key,
                        secret_key=self.langfuse.secret_key,
                        host=self.langfuse.host
                    )
                else:
                    _logger.warning(
                        "Langfuse is enabled but missing one or more of LANGFUSE_SECRET_KEY, "
                        "LANGFUSE_PUBLIC_KEY, or LANGFUSE_HOST. "
                        "Langfuse client will not be initialized."
                    )

            self._validate()
        except Exception as e:
            print(f"Configuration initialization failed: {e}")
            sys.exit(1)

    def _apply_yaml_fallback(self, yaml_config: dict):
        """Applies YAML config values as a fallback to settings not set by the environment."""
        # --- LLM Configuration ---
        llm_yaml_config = yaml_config.get('llm', {})
        if llm_yaml_config:
            # Flatten nested YAML structure to match LLMConfig fields
            flat_llm_yaml = {
                'mode': llm_yaml_config.get('mode'),
                **{f'openai_{k}': v for k, v in (llm_yaml_config.get('openai') or {}).items()},
                **{f'ollama_{k}': v for k, v in (llm_yaml_config.get('ollama') or {}).items()},
                **{f'gemini_{k}': v for k, v in (llm_yaml_config.get('gemini') or {}).items()}
            }
            for field_name in self.llm.model_fields:
                if field_name not in self.llm.model_fields_set:
                    yaml_value = flat_llm_yaml.get(field_name)
                    if yaml_value is not None:
                        setattr(self.llm, field_name, yaml_value)

        # --- Processor Configuration ---
        processor_yaml_config = yaml_config.get('processor', {})
        if processor_yaml_config:
            for field_name in self.processor.model_fields:
                if field_name not in self.processor.model_fields_set:
                    yaml_value = processor_yaml_config.get(field_name)
                    if yaml_value is not None:
                        setattr(self.processor, field_name, yaml_value)

        # --- Langfuse Configuration ---
        langfuse_yaml_config = yaml_config.get('langfuse', {})
        if langfuse_yaml_config:
            for field_name in self.langfuse.model_fields:
                if field_name not in self.langfuse.model_fields_set:
                    yaml_value = langfuse_yaml_config.get(field_name)
                    if yaml_value is not None:
                        setattr(self.langfuse, field_name, yaml_value)
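
    # Example config/application.yml understood by _load_yaml_config and the
    # flattening above (an illustrative sketch; keys mirror the field names,
    # values are placeholders and not taken from the project):
    #
    #   llm:
    #     mode: ollama
    #     ollama:
    #       base_url: http://localhost:11434
    #       model: llama3
    #   processor:
    #     poll_interval_seconds: 10
    #   langfuse:
    #     enabled: false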

    def _load_yaml_config(self):
        """Loads config/application.yml if it exists; returns an empty dict on any failure."""
        config_path = Path('config/application.yml')
        if not config_path.exists():
            return {}
        try:
            with open(config_path, 'r') as f:
                return yaml.safe_load(f) or {}
        except Exception as e:
            _logger.warning(f"Failed to read YAML config at {config_path}: {e}")
            return {}

    def _validate(self):
        """Ensures all settings required by the selected LLM mode are present."""
        if self.llm.mode == 'openai':
            if not self.llm.openai_api_key:
                raise ValueError("OPENAI_API_KEY is not set.")
            if not self.llm.openai_api_base_url:
                raise ValueError("OPENAI_API_BASE_URL is not set.")
            if not self.llm.openai_model:
                raise ValueError("OPENAI_MODEL is not set.")
        elif self.llm.mode == 'ollama':
            if not self.llm.ollama_base_url:
                raise ValueError("OLLAMA_BASE_URL is not set.")
            if not self.llm.ollama_model:
                raise ValueError("OLLAMA_MODEL is not set.")
        elif self.llm.mode == 'gemini':
            if not self.llm.gemini_api_key:
                raise ValueError("GEMINI_API_KEY is not set.")
            if not self.llm.gemini_model:
                raise ValueError("GEMINI_MODEL is not set.")

# Create settings instance
settings = Settings()
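

# Illustrative usage (a minimal sketch; assumes this module is importable as `config`):
#
#     from config import settings
#     if settings.llm.mode == 'ollama':
#         base_url = settings.llm.ollama_base_url
#
if __name__ == "__main__":
    # Quick sanity check when this file is run directly; prints no secrets.
    print(f"LLM mode: {settings.llm.mode}")
    print(f"Processor poll interval: {settings.processor.poll_interval_seconds}s")
    print(f"Langfuse enabled: {settings.langfuse.enabled}")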