llm_service.py

from ..schemas.llm import ChatRequest, ChatResponse
from ..providers.base import LLMProvider


class LLMService:
    """Thin service layer that forwards chat requests to the injected provider."""

    def __init__(self, provider: LLMProvider) -> None:
        self._provider = provider

    def chat(self, req: ChatRequest) -> ChatResponse:
        # Delegate to the provider, passing the request fields through unchanged.
        return self._provider.chat(
            req.messages,
            model=req.model,
            temperature=req.temperature,
            max_tokens=req.max_tokens,
        )
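For context, a minimal wiring sketch of how this service might be used from elsewhere in the package. It assumes ChatRequest accepts the four fields read above as keyword arguments, that messages is a list of role/content dicts, and that a concrete provider exists; the OpenAIProvider name and its module path are hypothetical, not taken from this file.

    from ..providers.openai import OpenAIProvider  # hypothetical concrete provider
    from ..schemas.llm import ChatRequest
    from .llm_service import LLMService

    # Construct the service with a provider instance (constructor args assumed).
    provider = OpenAIProvider(api_key="...")
    service = LLMService(provider)

    # Build a request with the fields LLMService.chat passes through.
    response = service.chat(
        ChatRequest(
            messages=[{"role": "user", "content": "Hello"}],  # assumed message shape
            model="gpt-4o-mini",
            temperature=0.2,
            max_tokens=256,
        )
    )

Because the service only depends on the LLMProvider interface, a stub provider can be substituted in tests without touching LLMService itself.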