jira-webhook-llm/jira-webhook-llm.py

import os
from dotenv import load_dotenv
load_dotenv()
from fastapi import FastAPI, Request, HTTPException
from pydantic import BaseModel
from fastapi.responses import JSONResponse
from loguru import logger
import uuid
import sys
from typing import Optional
from datetime import datetime
import asyncio
from functools import wraps
from config import settings
from webhooks.handlers import JiraWebhookHandler
from llm.models import JiraWebhookPayload
from logging_config import configure_logging
# Initialize logging first
configure_logging(log_level="DEBUG")
import signal

try:
    app = FastAPI()
    logger.info("FastAPI application initialized")

    @app.on_event("shutdown")
    async def shutdown_event():
        """Handle application shutdown"""
        logger.info("Shutting down application...")
        try:
            # Clean up the Langfuse client if it exists
            if hasattr(settings, 'langfuse_handler') and hasattr(settings.langfuse_handler, 'close'):
                try:
                    await settings.langfuse_handler.close()
                except Exception as e:
                    logger.warning(f"Error closing handler: {str(e)}")
            logger.info("Cleanup completed successfully")
        except Exception as e:
            logger.error(f"Error during shutdown: {str(e)}")
            raise

    def handle_shutdown_signal(signum, frame):
        """Handle OS signals for graceful shutdown"""
        logger.info(f"Received signal {signum}, initiating shutdown...")
        # os._exit terminates the process immediately and does not run the
        # shutdown event above; use it only when a hard exit is acceptable.
        os._exit(0)

    # Register signal handlers
    signal.signal(signal.SIGTERM, handle_shutdown_signal)
    signal.signal(signal.SIGINT, handle_shutdown_signal)
except Exception as e:
    logger.critical(f"Failed to initialize FastAPI: {str(e)}")
    logger.warning("Application cannot continue without FastAPI initialization")
    sys.exit(1)


def retry(max_retries: int = 3, delay: float = 1.0):
    """Decorator for retrying failed operations"""
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            last_error = None
            for attempt in range(max_retries):
                try:
                    return await func(*args, **kwargs)
                except Exception as e:
                    last_error = e
                    logger.warning(f"Attempt {attempt + 1} failed: {str(e)}")
                    if attempt < max_retries - 1:
                        await asyncio.sleep(delay * (attempt + 1))
            raise last_error
        return wrapper
    return decorator
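
# Note: the retry decorator above is defined but not applied anywhere in this
# module. As an illustration only (fetch_issue_context is a hypothetical name,
# not part of this codebase), it could wrap any awaitable operation:
#
#     @retry(max_retries=3, delay=2.0)
#     async def fetch_issue_context(issue_key: str) -> dict:
#         ...
#
# Failed attempts back off linearly: delay * 1, delay * 2, delay * 3, ...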


class ErrorResponse(BaseModel):
    error_id: str
    timestamp: str
    status_code: int
    message: str
    details: Optional[str] = None


@app.middleware("http")
async def error_handling_middleware(request: Request, call_next):
    """Attach a per-request ID to logs and convert raised errors into structured JSON responses."""
    request_id = str(uuid.uuid4())
    logger.bind(request_id=request_id).info(f"Request started: {request.method} {request.url}")
    try:
        response = await call_next(request)
        return response
    except HTTPException as e:
        logger.error(f"HTTP Error: {e.status_code} - {e.detail}")
        error_response = ErrorResponse(
            error_id=request_id,
            timestamp=datetime.utcnow().isoformat(),
            status_code=e.status_code,
            message=e.detail,
            details=str(e)
        )
        return JSONResponse(status_code=e.status_code, content=error_response.model_dump())
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}")
        error_response = ErrorResponse(
            error_id=request_id,
            timestamp=datetime.utcnow().isoformat(),
            status_code=500,
            message="Internal Server Error",
            details=str(e)
        )
        return JSONResponse(status_code=500, content=error_response.model_dump())
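
# For reference, an error handled by the middleware above is returned as JSON of
# the following shape (values here are illustrative):
#     {"error_id": "<uuid4>", "timestamp": "<ISO 8601 UTC>",
#      "status_code": 500, "message": "Internal Server Error",
#      "details": "<exception text>"}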


webhook_handler = JiraWebhookHandler()


@app.post("/jira-webhook")
async def jira_webhook_handler(payload: JiraWebhookPayload):
    logger.info(f"Received webhook payload: {payload.model_dump()}")
    try:
        response = await webhook_handler.handle_webhook(payload)
        logger.info("Webhook processed successfully")
        return response
    except Exception as e:
        logger.error(f"Error processing webhook: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
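
# Example request for the endpoint above (illustrative only; field names mirror
# the test payload below, the exact required fields are defined by
# JiraWebhookPayload in llm.models, and localhost:8000 is assumed):
#     curl -X POST http://localhost:8000/jira-webhook \
#          -H "Content-Type: application/json" \
#          -d '{"issueKey": "TEST-123", "summary": "Test issue", "status": "Open"}'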
@app.post("/test-llm")
async def test_llm():
"""Test endpoint for LLM integration"""
test_payload = JiraWebhookPayload(
issueKey="TEST-123",
summary="Test issue",
description="This is a test issue for LLM integration",
comment="Testing OpenAI integration with Langfuse",
labels=["test"],
status="Open",
assignee="Tester",
updated="2025-07-04T21:40:00Z"
)
return await webhook_handler.handle_webhook(test_payload)
# if __name__ == "__main__":
# import uvicorn
# uvicorn.run(app, host="0.0.0.0", port=8000)
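
# Once the app is being served (for example by uncommenting the block above and
# running `python jira-webhook-llm.py`), the test endpoint can be exercised with:
#     curl -X POST http://localhost:8000/test-llm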