@@ -199,6 +199,7 @@ def run_openai_chat(messages, model_name, **kwargs):
         raise Exception('model not supported')
     response = llm_client.chat.completions.create(
         messages=messages, model=model_name, **kwargs)
+    logger.debug(response)
     return response
 
 def run_extractor_prompt(req_data):
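
For anyone trying this change locally: the new `logger.debug(response)` call only emits output if the module's logger is configured to pass DEBUG records, which Python's logging does not do by default (the root logger starts at WARNING). Below is a minimal sketch of the setup this patch assumes; the format string and use of `basicConfig` are illustrative, not taken from this diff:

```python
import logging

# Illustrative setup: without lowering the level to DEBUG,
# logger.debug(response) in run_openai_chat produces no output.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(name)s %(levelname)s %(message)s",
)
logger = logging.getLogger(__name__)
```

One caution on the design: the debug record carries the full ChatCompletion object returned by `llm_client.chat.completions.create`, including message content, so this logging may warrant gating or redaction before it reaches shared or production logs.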