from fastapi import HTTPException
from loguru import logger
import json
import traceback
from typing import Optional, List, Union
from pydantic import BaseModel, ConfigDict, field_validator
from datetime import datetime, timezone

from config import settings
from llm.models import JiraWebhookPayload, AnalysisFlags
from llm.chains import analysis_chain

# Safe fallback returned whenever the LLM output is missing or malformed.
# Copied (never shared) on each use so callers can't mutate the module state.
_NEUTRAL_ANALYSIS = {
    "hasMultipleEscalations": False,
    "customerSentiment": "neutral",
}


class BadRequestError(HTTPException):
    """400 — the webhook payload is missing a required field."""

    def __init__(self, detail: str):
        super().__init__(status_code=400, detail=detail)


class RateLimitError(HTTPException):
    """429 — the caller exceeded its request quota."""

    def __init__(self, detail: str):
        super().__init__(status_code=429, detail=detail)


class ValidationError(HTTPException):
    """422 — the payload parsed but failed semantic validation."""

    def __init__(self, detail: str):
        super().__init__(status_code=422, detail=detail)


def validate_response(result) -> bool:
    """Return True if *result* is a well-formed analysis dict.

    BUG FIX: the original code called ``validate_response`` but never defined
    or imported it, so every webhook hit a NameError (which the inner
    ``except Exception`` then silently reported as an LLM failure).  The
    expected shape is inferred from the fallback value used on failure:
    a dict with a boolean ``hasMultipleEscalations`` and a string
    ``customerSentiment``.
    """
    return (
        isinstance(result, dict)
        and isinstance(result.get("hasMultipleEscalations"), bool)
        and isinstance(result.get("customerSentiment"), str)
    )


class JiraWebhookHandler:
    """Validates incoming Jira webhook payloads and runs LLM analysis on them."""

    def __init__(self):
        # The chain is module-level state shared by every handler instance.
        self.analysis_chain = analysis_chain

    async def handle_webhook(self, payload: JiraWebhookPayload):
        """Process one Jira webhook event.

        Validates required fields, builds the LLM input, invokes the analysis
        chain, and returns a dict with ``status`` and ``analysis_flags``.
        LLM failures are degraded to a neutral result (status "error") rather
        than propagated; payload validation failures raise BadRequestError
        (HTTP 400); anything unexpected becomes an HTTP 500.

        Raises:
            BadRequestError: if ``issueKey`` or ``summary`` is missing/empty.
            HTTPException: 500 on any other unexpected processing error.
        """
        try:
            if not payload.issueKey:
                raise BadRequestError("Missing required field: issueKey")
            if not payload.summary:
                raise BadRequestError("Missing required field: summary")

            logger.bind(
                issue_key=payload.issueKey,
                # timezone-aware UTC timestamp; datetime.utcnow() is
                # deprecated and produced a naive (tz-less) value.
                timestamp=datetime.now(timezone.utc).isoformat(),
            ).info("Received webhook")

            # Optional fields are substituted with explicit placeholder text
            # so the prompt template never receives None/empty values.
            llm_input = {
                "issueKey": payload.issueKey,
                "summary": payload.summary,
                "description": payload.description or "No description provided.",
                "status": payload.status or "Unknown",
                "labels": ", ".join(payload.labels) if payload.labels else "None",
                "assignee": payload.assignee or "Unassigned",
                "updated": payload.updated or "Unknown",
                "comment": payload.comment or "No new comment provided.",
            }

            try:
                analysis_result = await self.analysis_chain.ainvoke(llm_input)
                # Guard against malformed LLM output rather than crashing.
                if not validate_response(analysis_result):
                    logger.warning(f"Invalid LLM response format for {payload.issueKey}")
                    analysis_result = dict(_NEUTRAL_ANALYSIS)
                logger.debug(
                    f"LLM Analysis Result for {payload.issueKey}: "
                    f"{json.dumps(analysis_result, indent=2)}"
                )
                return {"status": "success", "analysis_flags": analysis_result}
            except Exception as e:
                # LLM errors are non-fatal: report them but answer with a
                # neutral analysis so the webhook still succeeds upstream.
                logger.error(f"LLM processing failed for {payload.issueKey}: {str(e)}")
                return {
                    "status": "error",
                    "analysis_flags": dict(_NEUTRAL_ANALYSIS),
                    "error": str(e),
                }
        except HTTPException:
            # BUG FIX: the original blanket `except Exception` caught the
            # BadRequestError raised above and re-raised it as a 500,
            # hiding the intended 400 from the client.  Re-raise untouched.
            raise
        except Exception as e:
            logger.error(f"Error processing webhook: {str(e)}")
            logger.error(f"Stack trace: {traceback.format_exc()}")
            raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")