# detail.py
  1. from utils.sync_mysql_help import mysql
  2. from utils.params import CapabilityEnum
  3. from loguru import logger
  4. import sys
  5. import json
  6. from typing import Optional, Dict, Any
# Route ERROR-and-above records to stderr with rich tracebacks for debugging.
logger.add(sink=sys.stderr, level="ERROR", backtrace=True, diagnose=True)
# Task status constants
STATUS_PENDING = 0  # pending
STATUS_RUNNING = 1  # running
STATUS_SUCCESS = 2  # succeeded
STATUS_FAILED = 3  # failed
# Statuses that never need a result-table lookup (FAILED is deliberately
# excluded: its error_message must still be fetched from the result table).
STATUS_WITHOUT_RESULT = {STATUS_PENDING, STATUS_RUNNING}
  15. def _build_response(data: Dict[str, Any]) -> Dict[str, Any]:
  16. """构建统一响应格式"""
  17. response = {
  18. "code": 0,
  19. "msg": "ok",
  20. "data": data
  21. }
  22. return response
  23. def _parse_result_payload(payload: Optional[str]) -> Any:
  24. """解析结果负载(JSON字符串转对象)"""
  25. if not payload:
  26. return None
  27. try:
  28. return json.loads(payload)
  29. except (json.JSONDecodeError, TypeError):
  30. return payload
  31. def _fetch_decode_result(task_id: str) -> Optional[Dict[str, Any]]:
  32. """获取解构任务结果"""
  33. sql = "SELECT result_payload, error_message FROM workflow_decode_task_result WHERE task_id = %s"
  34. result_record = mysql.fetchone(sql, (task_id,))
  35. if not result_record:
  36. return None
  37. return {
  38. "result": _parse_result_payload(result_record.get("result_payload")),
  39. "error_message": result_record.get("error_message")
  40. }
  41. def _build_result_data(task_id: str, status: int, result: Any = None, reason: Optional[str] = None) -> Dict[str, Any]:
  42. """构建结果数据"""
  43. return {
  44. "taskId": task_id,
  45. "status": status,
  46. "result": result,
  47. "reason": reason
  48. }
  49. def _handle_success_status(task_id: str, capability: int) -> Dict[str, Any]:
  50. """处理成功状态(status=2)"""
  51. # 只有解构任务需要查询结果表
  52. if capability != CapabilityEnum.DECODE.value:
  53. return _build_response(_build_result_data(task_id, STATUS_SUCCESS))
  54. # 查询解构结果
  55. decode_result = _fetch_decode_result(task_id)
  56. if not decode_result:
  57. return _build_response(_build_result_data(task_id, STATUS_SUCCESS))
  58. result_data = _build_result_data(
  59. task_id=task_id,
  60. status=STATUS_SUCCESS,
  61. result=decode_result.get("result"),
  62. reason=decode_result.get("error_message")
  63. )
  64. return _build_response(
  65. result_data
  66. )
  67. def get_decode_detail_by_task_id(task_id: str) -> Optional[Dict[str, Any]]:
  68. """获取任务详情"""
  69. # 查询任务基本信息
  70. sql = "SELECT task_id, status, capability FROM workflow_task WHERE task_id = %s"
  71. task = mysql.fetchone(sql, (task_id,))
  72. if not task:
  73. logger.info(f"task_id = {task_id} , 任务不存在")
  74. return None
  75. task_id_value = task.get("task_id")
  76. status = task.get("status")
  77. capability = task.get("capability")
  78. # 不需要查询结果的状态,直接返回
  79. if status in STATUS_WITHOUT_RESULT:
  80. result_data = _build_result_data(task_id_value, status)
  81. return _build_response(result_data)
  82. # 成功状态,需要查询结果
  83. if status == STATUS_SUCCESS:
  84. return _handle_success_status(task_id_value, capability)
  85. # 失败状态,需要返回 error_message 到 reason 字段,result 固定为 "[]"
  86. if status == STATUS_FAILED:
  87. error_message: Optional[str] = None
  88. # 仅对解构任务从结果表中查询失败原因
  89. if capability == CapabilityEnum.DECODE.value:
  90. decode_result = _fetch_decode_result(task_id_value)
  91. if decode_result:
  92. error_message = decode_result.get("error_message")
  93. result_data = _build_result_data(
  94. task_id=task_id_value,
  95. status=STATUS_FAILED,
  96. result="[]",
  97. reason=error_message or ""
  98. )
  99. return _build_response(
  100. result_data
  101. )
  102. # 其他未知状态,返回基础数据
  103. result_data = _build_result_data(task_id_value, status)
  104. return _build_response(result_data)