jira-webhook-llm/app/handlers.py
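
"""FastAPI route handlers for the jira-webhook-llm service.

Defines three routers (summarized from the code below):
  * /jira     - enqueue incoming Jira webhook payloads and fetch completed LLM responses
  * /queue    - inspect and clear the in-memory request queue
  * /webhooks - receive raw Jira and Ollama webhook callbacks
"""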

from datetime import datetime, timezone
from fastapi import APIRouter, Request, HTTPException, Depends
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from llm.models import JiraWebhookPayload
from shared_store import requests_queue, ProcessingRequest
from loguru import logger
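
# Routers: /jira handles payload intake and response retrieval; /queue exposes
# read/clear operations on the shared in-memory queue from shared_store.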
jira_router = APIRouter(
    prefix="/jira",
    tags=["Jira"]
)

queue_router = APIRouter(
    prefix="/queue",
    tags=["Queue"]
)


@jira_router.post("/sendRequest", status_code=201)
async def send_jira_request(payload: JiraWebhookPayload):
    """Add an incoming Jira webhook payload to the processing queue and return its request id."""
    request_id = requests_queue.add_request(payload.model_dump())
    return {"request_id": request_id}
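

# Example client call (a sketch, assuming the app is served locally on port 8000 and the
# body matches the JiraWebhookPayload schema from llm.models):
#
#     import requests
#     payload = {...}  # fields defined by JiraWebhookPayload
#     resp = requests.post("http://localhost:8000/jira/sendRequest", json=payload)
#     request_id = resp.json()["request_id"]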


class GetResponseRequest(BaseModel):
    issueKey: str


@jira_router.post("/getResponse")
async def get_jira_response(request: GetResponseRequest):
    """Return the Ollama response for the given issueKey from the latest completed request."""
    matched_request = requests_queue.get_latest_completed_by_issue_key(request.issueKey)
    if not matched_request:
        raise HTTPException(status_code=404, detail=f"No completed request found for issueKey: {request.issueKey}")
    return matched_request.response if matched_request.response else "No response yet"
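

# Example follow-up call (same assumptions as above; "PROJ-123" is a hypothetical issue key):
#
#     resp = requests.post("http://localhost:8000/jira/getResponse", json={"issueKey": "PROJ-123"})
#     print(resp.json())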


# @queue_router.get("/{issueKey}")
# async def get_queue_element_by_issue_key(issueKey: str):
#     """Get the element with the given issueKey. Return the latest one successfully processed by Ollama; skip pending or failed entries."""
#     matched_request = requests_queue.get_latest_completed_by_issue_key(issueKey)
#     if not matched_request:
#         raise HTTPException(status_code=404, detail=f"No completed request found for issueKey: {issueKey}")
#     return matched_request
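# Note: the same lookup is currently exposed via POST /jira/getResponse above.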


@queue_router.get("/getAll")
async def get_all_requests_in_queue():
    """Return all requests currently in the queue."""
    all_requests = requests_queue.get_all_requests()
    return {"requests": all_requests}


@queue_router.get("/getPending")
async def get_pending_requests_in_queue():
    """Return all requests that are still waiting to be processed."""
    all_requests = requests_queue.get_all_requests()
    pending = [req for req in all_requests if req.status == "pending"]
    return {"requests": pending}


@queue_router.delete("/clearAll")
async def clear_all_requests_in_queue():
    """Remove all requests from the queue."""
    requests_queue.clear_all_requests()
    return {"status": "cleared"}


# Original webhook_router remains unchanged for now, as it's not part of the /jira or /queue prefixes.
webhook_router = APIRouter(
    prefix="/webhooks",
    tags=["Webhooks"]
)
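

# Placeholder endpoint: acknowledges a Jira webhook without processing the payload.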
@webhook_router.post("/jira")
async def handle_jira_webhook():
    return {"status": "webhook received"}


@webhook_router.post("/ollama")
async def handle_ollama_webhook(request: Request):
    """Handle an incoming Ollama webhook and capture its raw output."""
    try:
        raw_body = await request.body()
        response_data = raw_body.decode('utf-8')
        logger.info(f"Received raw Ollama webhook response: {response_data}")
        # Here the raw body would be processed further, e.g. stored or passed to another component.
        return {"status": "ollama webhook received", "data": response_data}
    except Exception as e:
        logger.error(f"Error processing Ollama webhook: {e}")
        raise HTTPException(status_code=500, detail=f"Error processing webhook: {e}")
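

# A minimal sketch of how these routers are expected to be mounted, assuming a FastAPI
# application object defined elsewhere (e.g. app/main.py; the exact module is an assumption):
#
#     from fastapi import FastAPI
#     from app.handlers import jira_router, queue_router, webhook_router
#
#     app = FastAPI()
#     app.include_router(jira_router)
#     app.include_router(queue_router)
#     app.include_router(webhook_router)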