decode_dispatch_job.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483
  1. import json
  2. from datetime import datetime
  3. from typing import Any, Dict, List, Optional, Tuple
  4. from zoneinfo import ZoneInfo
  5. import requests
  6. from scheduler.odps_fetch import fetch_priority_posts
  7. from utils.scheduler_logger import get_scheduler_logger
  8. from utils.sync_mysql_help import mysql
# Shared scheduler-wide logger instance (loguru-style "{}" placeholders).
logger = get_scheduler_logger()
# Decode pipeline configuration id sent with every upstream API request.
CONFIG_ID = "57"
# Endpoint for submitting NEW decode tasks.
DECODE_URL = "https://aigc-api.aiddit.com/aigc/api/task/decode"
# Endpoint for querying results of previously submitted decode tasks.
DECODE_RESULT_URL = "https://aigc-api.aiddit.com/aigc/api/task/decode/result"
# Max number of new candidates submitted per scheduler cycle.
BATCH_SIZE = 20
# Page size used when scanning ODPS priority posts for candidates.
ODPS_PAGE_SIZE = 200
# Max vids sent per decode-result query request.
RESULT_POLL_CHUNK = 50
  16. def _map_api_status_to_int(api_status: str, vid: str) -> int:
  17. """Map upstream status string to DB status: 0待执行 1执行中 2成功 3失败."""
  18. s = (api_status or "").strip().upper()
  19. if s == "SUCCESS":
  20. return 2
  21. if s in ("FAILED", "FAILURE", "ERROR", "FAIL"):
  22. return 3
  23. if s in ("RUNNING", "PROCESSING", "DOING"):
  24. return 1
  25. if s in ("PENDING", "WAITING", "INIT", "QUEUED"):
  26. return 0
  27. if not s:
  28. return 0
  29. logger.warning("未知解码状态,按执行中处理 status={} vid={}", api_status, vid)
  30. return 1
  31. def _safe_json_loads(text: Optional[str]) -> Dict[str, Any]:
  32. if not text:
  33. return {}
  34. try:
  35. data = json.loads(text)
  36. return data if isinstance(data, dict) else {}
  37. except Exception:
  38. return {}
  39. def _today_dt() -> str:
  40. return datetime.now(ZoneInfo("Asia/Shanghai")).strftime("%Y%m%d")
  41. def _is_allowed_level(level_value: Any) -> bool:
  42. try:
  43. return int(level_value) in (0, 1, 2)
  44. except (TypeError, ValueError):
  45. return False
  46. def _is_decode_submit_open() -> bool:
  47. """
  48. Gate for submitting NEW decode tasks.
  49. Only controls whether to submit; polling/querying existing tasks is unaffected.
  50. """
  51. sql = """
  52. SELECT is_open
  53. FROM aigc_topic_decode_task_oprate
  54. ORDER BY id DESC
  55. LIMIT 1
  56. """
  57. try:
  58. row = mysql.fetchone(sql)
  59. if not row:
  60. # Fail-open if table is empty to avoid blocking by default.
  61. return True
  62. return int(row.get("is_open") or 0) == 1
  63. except Exception as exc:
  64. # Conservative: if we cannot confirm switch is open, skip submit this cycle.
  65. logger.exception("查询解构开关失败,本轮不发起新解构任务: {}", exc)
  66. return False
  67. def _fetch_today_pending_vids(dt: str) -> List[str]:
  68. sql = """
  69. SELECT DISTINCT vid
  70. FROM aigc_topic_decode_task_result
  71. WHERE dt = %s AND status IN (0, 1) AND vid IS NOT NULL AND vid != ''
  72. ORDER BY vid
  73. """
  74. rows = mysql.fetchall(sql, (dt,))
  75. return [str(row["vid"]) for row in rows if row.get("vid")]
  76. def _count_today_non_terminal(dt: str) -> int:
  77. sql = """
  78. SELECT COUNT(1) AS total
  79. FROM aigc_topic_decode_task_result
  80. WHERE dt = %s AND status IN (0, 1)
  81. """
  82. result = mysql.fetchone(sql, (dt,))
  83. return int((result or {}).get("total", 0))
  84. def _submit_decode_result_chunk(
  85. channel_content_ids: List[str],
  86. ) -> Tuple[bool, str, Dict[str, Any]]:
  87. payload = {"params": {"configId": CONFIG_ID, "channelContentIds": channel_content_ids}}
  88. try:
  89. resp = requests.post(DECODE_RESULT_URL, json=payload, timeout=60)
  90. if resp.status_code != 200:
  91. return False, f"http_status_{resp.status_code}", {}
  92. body = resp.json()
  93. ok = body.get("code") == 0
  94. return ok, body.get("msg") or "", body
  95. except Exception as exc:
  96. return False, str(exc), {}
  97. def _apply_result_row_to_db(dt: str, item: Dict[str, Any]) -> None:
  98. vid = str(item.get("channelContentId") or "").strip()
  99. if not vid:
  100. return
  101. api_status_raw = item.get("status") or ""
  102. err_msg = (item.get("err_msg") or item.get("errorMessage") or "") or ""
  103. data_content = item.get("dataContent")
  104. if data_content is not None and not isinstance(data_content, str):
  105. data_content = json.dumps(data_content, ensure_ascii=False)
  106. html = item.get("html")
  107. base_status = _map_api_status_to_int(str(api_status_raw), vid)
  108. sql = """
  109. UPDATE aigc_topic_decode_task_result
  110. SET status = %s,
  111. err_msg = %s,
  112. data_content = %s,
  113. html = %s
  114. WHERE dt = %s AND vid = %s
  115. """
  116. mysql.execute(
  117. sql,
  118. (
  119. base_status,
  120. err_msg[:512] if err_msg else "",
  121. data_content if data_content is not None else "",
  122. html if html is not None else None,
  123. dt,
  124. vid,
  125. ),
  126. )
  127. def _poll_decode_results_for_today(dt: str, vids: List[str]) -> None:
  128. if not vids:
  129. return
  130. total = len(vids)
  131. logger.info("开始查询解码结果 dt={} 总vid数={}", dt, total)
  132. overall_success = 0
  133. overall_returned = 0
  134. for i in range(0, total, RESULT_POLL_CHUNK):
  135. chunk = vids[i : i + RESULT_POLL_CHUNK]
  136. logger.info(
  137. "查询解码结果 dt={} 分片序号={} 分片大小={} 总数={}",
  138. dt,
  139. i // RESULT_POLL_CHUNK,
  140. len(chunk),
  141. total,
  142. )
  143. ok, msg, body = _submit_decode_result_chunk(chunk)
  144. if not ok:
  145. logger.error(
  146. "查询解码结果接口失败 dt={} msg={} body={}",
  147. dt,
  148. msg,
  149. body,
  150. )
  151. continue
  152. data_list = body.get("data")
  153. if not isinstance(data_list, list):
  154. logger.warning("查询解码结果返回中缺少data列表 body={}", body)
  155. continue
  156. chunk_success = 0
  157. returned_ids = {str(x.get("channelContentId") or "") for x in data_list}
  158. missing = set(chunk) - returned_ids
  159. if missing:
  160. logger.warning(
  161. "查询解码结果返回缺少{}个vid,示例={}",
  162. len(missing),
  163. list(missing)[:5],
  164. )
  165. for item in data_list:
  166. if not isinstance(item, dict):
  167. continue
  168. vid = str(item.get("channelContentId") or "").strip()
  169. api_status = str(item.get("status") or "")
  170. mapped_status = _map_api_status_to_int(api_status, vid)
  171. if mapped_status == 2:
  172. chunk_success += 1
  173. err_msg = (item.get("err_msg") or item.get("errorMessage") or "") or ""
  174. logger.info(
  175. "解码结果明细 dt={} vid={} 接口状态={} 映射状态={} 错误信息={}",
  176. dt,
  177. vid,
  178. api_status,
  179. mapped_status,
  180. err_msg[:512] if err_msg else "",
  181. )
  182. _apply_result_row_to_db(dt, item)
  183. overall_success += chunk_success
  184. overall_returned += len(data_list)
  185. logger.info(
  186. "解码结果分片处理完成 dt={} 查询数={} 返回数={} 成功数={}",
  187. dt,
  188. len(chunk),
  189. len(data_list),
  190. chunk_success,
  191. )
  192. logger.info(
  193. "解码结果查询完成 dt={} 查询总数={} 返回总数={} 成功总数={}",
  194. dt,
  195. total,
  196. overall_returned,
  197. overall_success,
  198. )
  199. def _build_posts_payload(records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
  200. posts: List[Dict[str, Any]] = []
  201. for item in records:
  202. extend_raw = item.get("extend")
  203. extend_obj: Dict[str, Any]
  204. if isinstance(extend_raw, dict):
  205. extend_obj = extend_raw
  206. else:
  207. extend_obj = _safe_json_loads(str(extend_raw)) if extend_raw is not None else {}
  208. cover_url = extend_obj.get("cover_url") or ""
  209. images = [cover_url] if cover_url else []
  210. posts.append(
  211. {
  212. "channelContentId": item.get("vid") or "",
  213. "title": item.get("title") or "",
  214. "video": item.get("url") or "",
  215. "images": images,
  216. "contentModal": 4,
  217. "channel": 10,
  218. }
  219. )
  220. return posts
  221. def _submit_decode(posts: List[Dict[str, Any]]) -> Tuple[bool, str, Dict[str, Any]]:
  222. payload = {"params": {"configId": CONFIG_ID, "posts": posts}}
  223. try:
  224. resp = requests.post(DECODE_URL, json=payload, timeout=60)
  225. if resp.status_code != 200:
  226. return False, f"http_status_{resp.status_code}", {}
  227. body = resp.json()
  228. ok = body.get("code") == 0
  229. return ok, body.get("msg") or "", body
  230. except Exception as exc:
  231. return False, str(exc), {}
  232. def _load_existing_vids(dt: str) -> set[str]:
  233. sql = """
  234. SELECT DISTINCT vid
  235. FROM aigc_topic_decode_task_result
  236. WHERE dt = %s AND vid IS NOT NULL AND vid != ''
  237. """
  238. rows = mysql.fetchall(sql, (dt,))
  239. return {str(row["vid"]) for row in rows if row.get("vid")}
  240. def _pick_candidate_records(dt: str, batch_size: int = BATCH_SIZE) -> List[Dict[str, Any]]:
  241. existing_vids = _load_existing_vids(dt)
  242. selected: List[Dict[str, Any]] = []
  243. selected_vids: set[str] = set()
  244. offset = 0
  245. while len(selected) < batch_size:
  246. page = fetch_priority_posts(limit=ODPS_PAGE_SIZE, offset=offset, dt=dt)
  247. if not page:
  248. break
  249. for item in page:
  250. vid = str(item.get("vid") or "")
  251. if (
  252. not vid
  253. or not _is_allowed_level(item.get("level"))
  254. or vid in existing_vids
  255. or vid in selected_vids
  256. ):
  257. continue
  258. selected.append(item)
  259. selected_vids.add(vid)
  260. if len(selected) >= batch_size:
  261. break
  262. offset += ODPS_PAGE_SIZE
  263. logger.info(
  264. "候选数据筛选完成 dt={} 已选数量={} 扫描offset={}",
  265. dt,
  266. len(selected),
  267. offset,
  268. )
  269. if selected:
  270. vid_title_pairs = [
  271. {"vid": str(item.get("vid") or ""), "title": item.get("title") or ""}
  272. for item in selected
  273. ]
  274. logger.info("已选候选数据 dt={} items={}", dt, vid_title_pairs)
  275. return selected
  276. def _row_status_after_decode_submit(
  277. vid: str, row_in_resp: Optional[Dict[str, Any]], full_body: Dict[str, Any]
  278. ) -> Tuple[int, str, str, Optional[str]]:
  279. """Returns (status, err_msg, data_content, html) for INSERT."""
  280. if not row_in_resp:
  281. payload = json.dumps({"decode_submit_response": full_body}, ensure_ascii=False)
  282. return 1, "", payload, None
  283. api_status_raw = row_in_resp.get("status") or ""
  284. err_msg = (row_in_resp.get("err_msg") or row_in_resp.get("errorMessage") or "") or ""
  285. mapped = _map_api_status_to_int(str(api_status_raw), vid)
  286. payload = json.dumps(
  287. {"decode_submit_item": row_in_resp, "decode_submit_response": full_body},
  288. ensure_ascii=False,
  289. )
  290. if mapped == 3:
  291. return 3, err_msg[:512], payload, None
  292. if mapped == 2:
  293. # New submit API only returns status/errorMessage.
  294. # Keep SUCCESS as terminal success; detailed result is queried via decode/result.
  295. return 2, "", payload, None
  296. if mapped == 0:
  297. return 0, err_msg[:512], payload, None
  298. return 1, err_msg[:512], payload, None
  299. def _insert_task_result_row(
  300. source: Dict[str, Any],
  301. status: int,
  302. err_msg: str,
  303. data_content: str,
  304. html: Optional[str],
  305. ) -> None:
  306. extend = _safe_json_loads(source.get("extend"))
  307. cover_url = extend.get("cover_url", "")
  308. cover_text = cover_url if isinstance(cover_url, str) else ""
  309. images_text = source.get("url") or ""
  310. sql = """
  311. INSERT INTO aigc_topic_decode_task_result
  312. (task_id, status, err_msg, vid, title, cover, video_url, images, type, channel, cate1, cate2, dt, data_content, html)
  313. VALUES
  314. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
  315. """
  316. params = (
  317. None,
  318. status,
  319. err_msg or "",
  320. str(source.get("vid") or ""),
  321. source.get("title") or "",
  322. cover_text,
  323. source.get("url") or "",
  324. images_text,
  325. source.get("type") or "",
  326. source.get("channel") or "",
  327. source.get("cate1") or "",
  328. source.get("cate2") or "",
  329. source.get("dt") or _today_dt(),
  330. data_content,
  331. html,
  332. )
  333. mysql.execute(sql, params)
  334. def _insert_rows_after_decode_submit(records: List[Dict[str, Any]], body: Dict[str, Any]) -> None:
  335. data_list = body.get("data") if isinstance(body.get("data"), list) else []
  336. by_vid = {str(x.get("channelContentId") or ""): x for x in data_list if isinstance(x, dict)}
  337. for item in records:
  338. vid = str(item.get("vid") or "")
  339. row = by_vid.get(vid)
  340. status, err_msg, data_content, html = _row_status_after_decode_submit(vid, row, body)
  341. _insert_task_result_row(item, status, err_msg, data_content, html)
def run_decode_dispatch_job() -> None:
    """Scheduler entry point for the decode pipeline.

    One cycle either (a) polls results for today's in-flight tasks when any
    exist — deferring new submissions until everything is terminal — or
    (b) submits a fresh batch of up to BATCH_SIZE candidates, gated by the
    DB switch. All failures are logged; the job never raises to the caller.
    """
    logger.info("解码调度任务开始执行")
    try:
        dt = _today_dt()
        # Startup guard: if there are in-flight tasks today, poll only in this run.
        # New batch submit will wait for next scheduler cycle after all are terminal.
        initial_non_terminal = _count_today_non_terminal(dt)
        if initial_non_terminal > 0:
            logger.info(
                "启动时发现当天存在进行中任务,本轮仅查询不发起新批次 dt={} count={}",
                dt,
                initial_non_terminal,
            )
            pending_vids = _fetch_today_pending_vids(dt)
            if pending_vids:
                logger.info("查询当天待执行/执行中记录 dt={} count={}", dt, len(pending_vids))
                _poll_decode_results_for_today(dt, pending_vids)
            else:
                # Non-terminal rows exist but none produced a queryable vid
                # (e.g. empty vid values) — nothing to poll this cycle.
                logger.warning(
                    "存在非终态记录但未获取到可查询vid dt={} count={}",
                    dt,
                    initial_non_terminal,
                )
            # Re-check after polling: submit only if everything reached a
            # terminal state within this same run.
            remaining_non_terminal = _count_today_non_terminal(dt)
            if remaining_non_terminal > 0:
                logger.info(
                    "查询后仍有待执行/执行中任务,跳过新批次发起 dt={} count={}",
                    dt,
                    remaining_non_terminal,
                )
                logger.info("解码调度任务结束(启动保护:仅查询)")
                return
            else:
                logger.info(
                    "查询后当天进行中任务已清空,立即发起新批次 dt={}",
                    dt,
                )
                # fallthrough: submit new batch in the same run
        # Submission gate: only NEW submits consult the DB switch; polling
        # above is intentionally unaffected.
        if not _is_decode_submit_open():
            logger.info("解构开关关闭(is_open!=1),跳过本轮新批次发起 dt={}", dt)
            logger.info("解码调度任务结束(开关关闭:不发起新任务)")
            return
        records = _pick_candidate_records(dt=dt, batch_size=BATCH_SIZE)
        if not records:
            logger.info("无可发起的新批次候选数据 dt={}", dt)
            logger.info("解码调度任务结束(无新增任务)")
            return
        logger.info("解码提交接口执行开始 records={}", records)
        posts = _build_posts_payload(records)
        logger.info("解码提交接口执行开始 posts={}", posts)
        ok, err_msg, body = _submit_decode(posts)
        logger.info(
            "解码提交接口执行完成 success={} records={} msg={} body={}",
            ok,
            len(records),
            err_msg,
            body,
        )
        if not ok:
            # Submit failed outright: record every candidate as failed (3),
            # keeping the raw response for debugging.
            fail_body = json.dumps({"decode_submit_response": body}, ensure_ascii=False)
            for item in records:
                _insert_task_result_row(
                    item,
                    status=3,
                    err_msg=err_msg or "解码提交失败",
                    data_content=fail_body,
                    html=None,
                )
        else:
            if isinstance(body.get("data"), list) and body["data"]:
                # Per-item detail available: derive each row's status from it.
                _insert_rows_after_decode_submit(records, body)
            else:
                # Accepted but no per-item detail: mark all as running (1) and
                # let the result poller resolve final status later.
                payload = json.dumps({"decode_submit_response": body}, ensure_ascii=False)
                for item in records:
                    _insert_task_result_row(
                        item,
                        status=1,
                        err_msg="",
                        data_content=payload,
                        html=None,
                    )
        logger.info("解码调度任务结束,本轮新发起数量={}", len(records))
    except Exception as exc:
        # Top-level boundary: swallow and log so the scheduler keeps running.
        logger.exception("解码调度任务异常退出: {}", exc)
        return