llm_macro/workspace/services/ollama_service.py

import httpx

OLLAMA_API_URL = "http://172.16.10.176:11534/api/generate"


class OllamaService:
    async def generate_content(self, prompt: str, model: str = "gemma:latest") -> str:
        """Call the Ollama API to generate content."""
        async with httpx.AsyncClient(timeout=120.0) as client:
            try:
                payload = {
                    "model": model,
                    "prompt": prompt,
                    "stream": False,      # return the full completion in a single body
                    "keep_alive": "30m",  # keep the model loaded for 30 minutes
                }
                response = await client.post(OLLAMA_API_URL, json=payload)
                response.raise_for_status()
                response_json = response.json()
                return response_json.get("response", "")
            except httpx.HTTPStatusError as e:
                # raise_for_status() raises HTTPStatusError, not RequestError,
                # so 4xx/5xx responses need their own handler.
                print(f"Ollama API returned an error status: {e}")
                return f"Error: {e}"
            except httpx.RequestError as e:
                print(f"Error while sending the Ollama API request: {e}")
                return f"Error: {e}"
            except Exception as e:
                print(f"Unexpected error in the Ollama service: {e}")
                return f"An unexpected error occurred: {e}"