Ireneusz Bachanowicz 2763b40b60
Refactor Jira Webhook LLM integration
- Simplified the FastAPI application structure and improved error handling with middleware.
- Introduced a retry decorator for asynchronous functions to enhance reliability (a sketch follows this message).
- Modularized the LLM initialization and prompt loading into separate functions for better maintainability.
- Updated the Pydantic models for the Jira webhook payload and analysis flags to ensure proper validation and structure (sketched after the code listing below).
- Implemented a structured logging configuration for better traceability and debugging (a sketch follows this message).
- Added comprehensive unit tests for prompt loading, response validation, and webhook handling (an example test is sketched after the code listing below).
- Established a CI/CD pipeline with GitHub Actions for automated testing and coverage reporting.
- Enhanced the prompt template for LLM analysis to include specific instructions for handling escalations.
2025-07-13 13:19:10 +02:00
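
The retry decorator itself is not part of the file below; a minimal sketch of what an async retry wrapper could look like, assuming exponential backoff and a configurable attempt count (the name retry_async and its parameters are illustrative, not taken from the repository):

import asyncio
import functools

from loguru import logger


def retry_async(attempts: int = 3, base_delay: float = 0.5):
    """Retry an async function with exponential backoff (illustrative sketch)."""
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            for attempt in range(1, attempts + 1):
                try:
                    return await func(*args, **kwargs)
                except Exception as e:
                    if attempt == attempts:
                        raise  # out of attempts: let the caller handle the failure
                    delay = base_delay * 2 ** (attempt - 1)
                    logger.warning(f"Attempt {attempt} failed ({e}); retrying in {delay:.1f}s")
                    await asyncio.sleep(delay)
        return wrapper
    return decorator

A small helper wrapping analysis_chain.ainvoke could then be decorated with @retry_async(attempts=3) to absorb transient LLM failures.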
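The structured logging configuration also lives outside this file; one plausible loguru setup, assuming JSON-line output is the goal (serialize=True emits each record as a JSON line, and logger.bind(...) in the handler below attaches per-request fields to those records):

import sys

from loguru import logger

# Replace loguru's default handler with a JSON-serializing one for traceability.
logger.remove()
logger.add(sys.stderr, level="INFO", serialize=True)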

81 lines
3.3 KiB
Python

from datetime import datetime, timezone
import json
import traceback

from fastapi import HTTPException
from loguru import logger

from llm.chains import analysis_chain
from llm.models import JiraWebhookPayload
from llm.validation import validate_response  # assumed module path for the response validator


class BadRequestError(HTTPException):
    def __init__(self, detail: str):
        super().__init__(status_code=400, detail=detail)


class RateLimitError(HTTPException):
    def __init__(self, detail: str):
        super().__init__(status_code=429, detail=detail)


class ValidationError(HTTPException):
    def __init__(self, detail: str):
        super().__init__(status_code=422, detail=detail)


class JiraWebhookHandler:
    def __init__(self):
        self.analysis_chain = analysis_chain

    async def handle_webhook(self, payload: JiraWebhookPayload):
        try:
            if not payload.issueKey:
                raise BadRequestError("Missing required field: issueKey")
            if not payload.summary:
                raise BadRequestError("Missing required field: summary")

            logger.bind(
                issue_key=payload.issueKey,
                timestamp=datetime.now(timezone.utc).isoformat(),
            ).info("Received webhook")

            # Substitute safe defaults for optional fields before prompting the LLM.
            llm_input = {
                "issueKey": payload.issueKey,
                "summary": payload.summary,
                "description": payload.description or "No description provided.",
                "status": payload.status or "Unknown",
                "labels": ", ".join(payload.labels) if payload.labels else "None",
                "assignee": payload.assignee or "Unassigned",
                "updated": payload.updated or "Unknown",
                "comment": payload.comment or "No new comment provided.",
            }

            try:
                analysis_result = await self.analysis_chain.ainvoke(llm_input)
                # Fall back to neutral flags when the LLM response fails validation.
                if not validate_response(analysis_result):
                    logger.warning(f"Invalid LLM response format for {payload.issueKey}")
                    analysis_result = {
                        "hasMultipleEscalations": False,
                        "customerSentiment": "neutral",
                    }
                logger.debug(
                    f"LLM Analysis Result for {payload.issueKey}: "
                    f"{json.dumps(analysis_result, indent=2)}"
                )
                return {"status": "success", "analysis_flags": analysis_result}
            except Exception as e:
                logger.error(f"LLM processing failed for {payload.issueKey}: {e}")
                return {
                    "status": "error",
                    "analysis_flags": {
                        "hasMultipleEscalations": False,
                        "customerSentiment": "neutral",
                    },
                    "error": str(e),
                }
        except HTTPException:
            # Re-raise client errors (e.g. BadRequestError) instead of masking them as 500s.
            raise
        except Exception as e:
            logger.error(f"Error processing webhook: {e}")
            logger.error(f"Stack trace: {traceback.format_exc()}")
            raise HTTPException(status_code=500, detail=f"Internal Server Error: {e}")
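
JiraWebhookPayload and AnalysisFlags are imported from llm.models, which is not shown in this commit view; a sketch of shapes consistent with the fields the handler reads (types and optionality are inferred from usage, not confirmed by the source):

from typing import List, Optional

from pydantic import BaseModel, ConfigDict


class JiraWebhookPayload(BaseModel):
    model_config = ConfigDict(extra="ignore")  # tolerate extra Jira webhook fields

    issueKey: str
    summary: str
    description: Optional[str] = None
    status: Optional[str] = None
    labels: Optional[List[str]] = None
    assignee: Optional[str] = None
    updated: Optional[str] = None
    comment: Optional[str] = None


class AnalysisFlags(BaseModel):
    hasMultipleEscalations: bool = False
    customerSentiment: str = "neutral"

Making every non-required field Optional with a None default lets the handler's fallback strings ("No description provided.", "Unassigned", and so on) do their job.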
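validate_response is imported above from an assumed module path; a minimal version consistent with how the handler uses it (the accepted sentiment values are a guess based on the "neutral" default):

def validate_response(result) -> bool:
    """Check that the LLM returned the two expected analysis flags."""
    if not isinstance(result, dict):
        return False
    if not isinstance(result.get("hasMultipleEscalations"), bool):
        return False
    # Assumed sentiment vocabulary; only "neutral" is confirmed by the handler.
    return result.get("customerSentiment") in {"positive", "neutral", "negative"}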
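Of the unit tests the commit mentions, a webhook-handling test might look like the following, stubbing the chain with unittest.mock.AsyncMock (the module name webhook_handler and the payload values are assumptions):

import asyncio
from unittest.mock import AsyncMock

from llm.models import JiraWebhookPayload
from webhook_handler import JiraWebhookHandler  # assumed module name for the handler


def test_handle_webhook_returns_analysis_flags():
    handler = JiraWebhookHandler()
    # Stub the LLM chain so the test runs without network access.
    handler.analysis_chain = AsyncMock()
    handler.analysis_chain.ainvoke.return_value = {
        "hasMultipleEscalations": True,
        "customerSentiment": "negative",
    }
    payload = JiraWebhookPayload(issueKey="ABC-123", summary="Outage reported")

    result = asyncio.run(handler.handle_webhook(payload))

    assert result["status"] == "success"
    assert result["analysis_flags"]["hasMultipleEscalations"] is True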