Another synchro
Some checks failed: CI/CD Pipeline / test (push) was cancelled

Author: Ireneusz Bachanowicz, 2025-07-28 09:40:04 +02:00
Parent: 6a57d91b7e
Commit: 6f5e817011
2 changed files with 49 additions and 20 deletions

Dockerfile

@@ -40,10 +40,14 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
 # Copy the configuration directory first.
 # If only code changes, this layer remains cached.
 COPY config ./config
+COPY llm ./llm
+COPY app ./app
 # Copy your application source code.
 COPY main.py .
+COPY config.py .
+COPY shared_store.py .
 # Expose the port your application listens on.
 EXPOSE 8000
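
For orientation, the COPY instructions above imply roughly the following build-context layout. The directory and file names come straight from the Dockerfile; the one-line descriptions are assumptions:

    .
    ├── config/            # YAML configuration, copied first so code-only changes keep this layer cached
    ├── llm/               # LLM-related package
    ├── app/               # application package
    ├── main.py
    ├── config.py          # the settings module changed below
    └── shared_store.py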

config.py

@@ -69,29 +69,16 @@ class ProcessorConfig(BaseSettings):
 class Settings:
     def __init__(self):
         try:
-            # Load configuration from YAML file
+            # Load settings from YAML file as a fallback
             yaml_config = self._load_yaml_config()
-            # Initialize configurations
-            llm_config_data = yaml_config.get('llm', {})
-            # Extract and flatten nested LLM configurations
-            mode = llm_config_data.get('mode', 'ollama')
-            openai_settings = llm_config_data.get('openai') or {}
-            ollama_settings = llm_config_data.get('ollama') or {}
-            gemini_settings = llm_config_data.get('gemini') or {}  # New: Get Gemini settings
+            # Load settings from environment and .env file first
+            self.llm = LLMConfig()
+            self.processor = ProcessorConfig()
+            self.langfuse = LangfuseConfig()
-            # Combine all LLM settings, prioritizing top-level 'mode'
-            combined_llm_settings = {
-                'mode': mode,
-                **{f'openai_{k}': v for k, v in openai_settings.items()},
-                **{f'ollama_{k}': v for k, v in ollama_settings.items()},
-                **{f'gemini_{k}': v for k, v in gemini_settings.items()}  # New: Add Gemini settings
-            }
-            self.llm = LLMConfig(**combined_llm_settings)
-            self.processor = ProcessorConfig(**yaml_config.get('processor', {}))
-            self.langfuse = LangfuseConfig(**yaml_config.get('langfuse', {}))
+            # Apply YAML configuration for any values not set by the environment
+            self._apply_yaml_fallback(yaml_config)
             # Initialize Langfuse client if enabled
             self.langfuse_client: Optional[Langfuse] = None
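
The hunk above inverts the precedence: the pydantic-settings models are now populated from the environment (and any .env file) first, and YAML values are applied afterwards only where the environment provided nothing. A minimal, runnable sketch of the mechanism this relies on is shown below; DemoConfig, its fields, and all values are invented for illustration, and only the model_fields / model_fields_set pattern is taken from the diff:

    import os
    from pydantic_settings import BaseSettings

    class DemoConfig(BaseSettings):
        # Hypothetical fields standing in for LLMConfig's real ones.
        mode: str = "ollama"
        ollama_model: str = "llama3"

    os.environ["MODE"] = "openai"    # pretend this was exported before startup
    cfg = DemoConfig()               # pydantic-settings reads MODE from the environment

    print(cfg.model_fields_set)      # {'mode'}: only explicitly provided fields appear here

    # YAML fallback, applied the same way _apply_yaml_fallback does for each section:
    yaml_fallback = {"mode": "gemini", "ollama_model": "mistral"}
    for field_name in cfg.model_fields:
        if field_name not in cfg.model_fields_set:
            yaml_value = yaml_fallback.get(field_name)
            if yaml_value is not None:
                setattr(cfg, field_name, yaml_value)

    print(cfg.mode)          # 'openai'  -> the environment wins
    print(cfg.ollama_model)  # 'mistral' -> YAML fills the gap
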
@@ -111,6 +98,44 @@ class Settings:
             print(f"Configuration initialization failed: {e}")
             sys.exit(1)
+    def _apply_yaml_fallback(self, yaml_config: dict):
+        """Applies YAML config values as a fallback to settings not set by environment."""
+        # --- LLM Configuration ---
+        llm_yaml_config = yaml_config.get('llm', {})
+        if llm_yaml_config:
+            # Flatten nested YAML structure to match LLMConfig fields
+            flat_llm_yaml = {
+                'mode': llm_yaml_config.get('mode'),
+                **{f'openai_{k}': v for k, v in (llm_yaml_config.get('openai') or {}).items()},
+                **{f'ollama_{k}': v for k, v in (llm_yaml_config.get('ollama') or {}).items()},
+                **{f'gemini_{k}': v for k, v in (llm_yaml_config.get('gemini') or {}).items()}
+            }
+            for field_name in self.llm.model_fields:
+                if field_name not in self.llm.model_fields_set:
+                    yaml_value = flat_llm_yaml.get(field_name)
+                    if yaml_value is not None:
+                        setattr(self.llm, field_name, yaml_value)
+        # --- Processor Configuration ---
+        processor_yaml_config = yaml_config.get('processor', {})
+        if processor_yaml_config:
+            for field_name in self.processor.model_fields:
+                if field_name not in self.processor.model_fields_set:
+                    yaml_value = processor_yaml_config.get(field_name)
+                    if yaml_value is not None:
+                        setattr(self.processor, field_name, yaml_value)
+        # --- Langfuse Configuration ---
+        langfuse_yaml_config = yaml_config.get('langfuse', {})
+        if langfuse_yaml_config:
+            for field_name in self.langfuse.model_fields:
+                if field_name not in self.langfuse.model_fields_set:
+                    yaml_value = langfuse_yaml_config.get(field_name)
+                    if yaml_value is not None:
+                        setattr(self.langfuse, field_name, yaml_value)
     def _load_yaml_config(self):
         config_path = Path('config/application.yml')
         if not config_path.exists():
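
_load_yaml_config (truncated above) parses config/application.yml and returns a plain dict that _apply_yaml_fallback consumes. Pieced together from the code above, its expected shape is roughly the following; the section names come from the diff, while every key inside the sections is a placeholder:

    yaml_config = {
        "llm": {
            "mode": "ollama",              # copied to LLMConfig.mode
            "openai": {"model": "..."},    # flattened to openai_* fields
            "ollama": {"model": "..."},    # flattened to ollama_* fields
            "gemini": {"model": "..."},    # flattened to gemini_* fields
        },
        "processor": {},                   # applied field-by-field to ProcessorConfig
        "langfuse": {},                    # applied field-by-field to LangfuseConfig
    }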