# llm_macro/workspace/api.py
# Last modified: 2025-10-30 10:32:31 +09:00
import logging
from contextlib import asynccontextmanager
from fastapi import Depends, FastAPI, HTTPException
from fastapi.staticfiles import StaticFiles
from routers.costs_router import router as costs_router
from services.api_key_service import load_api_keys_from_file
from utils.checking_keys import get_admin_key, get_api_key
from utils.redis_utils import get_redis_client
# Root logger configuration: timestamped, leveled records tagged with the
# emitting module's name. Applies to every logger in the process.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s [%(levelname)s] %(name)s - %(message)s"
)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook.

    Loads API keys from file once at startup, before the app starts serving
    requests. Nothing to tear down on shutdown, so the post-``yield`` part
    is empty.
    """
    # Use the logging setup configured above instead of print(), so the
    # startup message carries the standard timestamp/level/module format.
    logging.getLogger(__name__).info("Loading API keys from file...")
    load_api_keys_from_file()
    yield
# FastAPI application instance; Swagger UI served at /docs, startup work
# delegated to the `lifespan` context manager defined above.
app = FastAPI(
    title="LLM GATEWAY",
    description="LLM 모델이 업로드된 문서를 분석하여 구조화된 JSON으로 변환하는 API 서비스입니다.",
    docs_url="/docs",
    lifespan=lifespan,
)

# Dependencies for API-key validation: one for regular keys, one for admin keys.
api_key_dependency = Depends(get_api_key)
admin_key_dependency = Depends(get_admin_key)
# Custom label callback — presumably registered with a metrics middleware
# elsewhere; it is not referenced in this module (TODO confirm the caller).
def custom_labels(info):
    """Return per-request metric labels from the X-Job-ID header.

    ``info.request`` is the Starlette ``Request`` object; requests without
    the header are labeled ``"unknown"``.
    """
    request_headers = info.request.headers
    job_id = request_headers.get("X-Job-ID", "unknown")
    return {"job_id": job_id}
# Serve static assets under /static.
# NOTE(review): the directory is an absolute container path — assumes the
# deployment layout puts assets at /workspace/workspace/static; confirm.
app.mount(
    "/static", StaticFiles(directory="/workspace/workspace/static"), name="static"
)
@app.get("/health/API")
async def health_check():
"""애플리케이션 상태 확인"""
return {"status": "API ok"}
@app.get("/health/Redis")
def redis_health_check():
client = get_redis_client()
if client is None:
raise HTTPException(status_code=500, detail="Redis connection failed")
try:
client.ping()
return {"status": "Redis ok"}
except Exception:
raise HTTPException(status_code=500, detail="Redis ping failed")
# Mount the cost endpoints; every route in the router requires a valid API key.
app.include_router(costs_router, dependencies=[api_key_dependency])