# decode_dispatch_job.py — decode task dispatch scheduler
  1. import json
  2. from datetime import datetime
  3. from typing import Any, Dict, List, Optional, Tuple
  4. from zoneinfo import ZoneInfo
  5. import requests
  6. from scheduler.odps_fetch import fetch_priority_posts
  7. from utils.scheduler_logger import get_scheduler_logger
  8. from utils.sync_mysql_help import mysql
logger = get_scheduler_logger()

# Upstream decode API configuration.
CONFIG_ID = "57"  # decode config id sent with every request
DECODE_URL = "https://aigc-api.aiddit.com/aigc/api/task/decode"
DECODE_RESULT_URL = "https://aigc-api.aiddit.com/aigc/api/task/decode/result"
# Dynamic window: try to keep today's count of rows with status IN (0, 1) at
# this value (when refilling, fetch only enough to cover the gap).
BATCH_SIZE = 40
ODPS_PAGE_SIZE = 200  # page size when scanning ODPS candidate posts
RESULT_POLL_CHUNK = 50  # max vids per decode/result query request
  17. def _map_api_status_to_int(api_status: str, vid: str) -> int:
  18. """Map upstream status string to DB status: 0待执行 1执行中 2成功 3失败."""
  19. s = (api_status or "").strip().upper()
  20. if s == "SUCCESS":
  21. return 2
  22. if s in ("FAILED", "FAILURE", "ERROR", "FAIL"):
  23. return 3
  24. if s in ("RUNNING", "PROCESSING", "DOING"):
  25. return 1
  26. if s in ("PENDING", "WAITING", "INIT", "QUEUED"):
  27. return 0
  28. if not s:
  29. return 0
  30. logger.warning("未知解码状态,按执行中处理 status={} vid={}", api_status, vid)
  31. return 1
  32. def _safe_json_loads(text: Optional[str]) -> Dict[str, Any]:
  33. if not text:
  34. return {}
  35. try:
  36. data = json.loads(text)
  37. return data if isinstance(data, dict) else {}
  38. except Exception:
  39. return {}
  40. def _today_dt() -> str:
  41. return datetime.now(ZoneInfo("Asia/Shanghai")).strftime("%Y%m%d")
  42. def _is_allowed_level(level_value: Any) -> bool:
  43. try:
  44. return int(level_value) in (0, 1, 2, 3)
  45. except (TypeError, ValueError):
  46. return False
  47. def _is_decode_submit_open() -> bool:
  48. """
  49. Gate for submitting NEW decode tasks.
  50. Only controls whether to submit; polling/querying existing tasks is unaffected.
  51. """
  52. sql = """
  53. SELECT is_open
  54. FROM aigc_topic_decode_task_oprate
  55. ORDER BY id DESC
  56. LIMIT 1
  57. """
  58. try:
  59. row = mysql.fetchone(sql)
  60. if not row:
  61. # Fail-open if table is empty to avoid blocking by default.
  62. return True
  63. return int(row.get("is_open") or 0) == 1
  64. except Exception as exc:
  65. # Conservative: if we cannot confirm switch is open, skip submit this cycle.
  66. logger.exception("查询解构开关失败,本轮不发起新解构任务: {}", exc)
  67. return False
  68. def _fetch_today_pending_vids(dt: str) -> List[str]:
  69. sql = """
  70. SELECT DISTINCT vid
  71. FROM aigc_topic_decode_task_result
  72. WHERE dt = %s AND status IN (0, 1) AND vid IS NOT NULL AND vid != ''
  73. ORDER BY vid
  74. """
  75. rows = mysql.fetchall(sql, (dt,))
  76. return [str(row["vid"]) for row in rows if row.get("vid")]
  77. def _fetch_history_pending_vids_by_dt(today_dt: str) -> Dict[str, List[str]]:
  78. sql = """
  79. SELECT DISTINCT dt, vid
  80. FROM aigc_topic_decode_task_result
  81. WHERE status IN (0, 1)
  82. AND dt < %s
  83. AND vid IS NOT NULL
  84. AND vid != ''
  85. ORDER BY dt, vid
  86. """
  87. rows = mysql.fetchall(sql, (today_dt,))
  88. grouped: Dict[str, List[str]] = {}
  89. for row in rows:
  90. dt = str(row.get("dt") or "").strip()
  91. vid = str(row.get("vid") or "").strip()
  92. if not dt or not vid:
  93. continue
  94. grouped.setdefault(dt, []).append(vid)
  95. return grouped
  96. def _count_today_non_terminal(dt: str) -> int:
  97. sql = """
  98. SELECT COUNT(1) AS total
  99. FROM aigc_topic_decode_task_result
  100. WHERE dt = %s AND status IN (0, 1)
  101. """
  102. result = mysql.fetchone(sql, (dt,))
  103. return int((result or {}).get("total", 0))
  104. def _count_today_total(dt: str) -> int:
  105. sql = """
  106. SELECT COUNT(1) AS total
  107. FROM aigc_topic_decode_task_result
  108. WHERE dt = %s
  109. """
  110. result = mysql.fetchone(sql, (dt,))
  111. return int((result or {}).get("total", 0))
  112. def _fetch_decode_daily_limit() -> int:
  113. sql = """
  114. SELECT `max` AS daily_limit
  115. FROM aigc_topic_decode_task_oprate
  116. ORDER BY id DESC
  117. LIMIT 1
  118. """
  119. row = mysql.fetchone(sql)
  120. return int((row or {}).get("daily_limit") or 0)
  121. def _submit_decode_result_chunk(
  122. channel_content_ids: List[str],
  123. ) -> Tuple[bool, str, Dict[str, Any]]:
  124. payload = {"params": {"configId": CONFIG_ID, "channelContentIds": channel_content_ids}}
  125. try:
  126. resp = requests.post(DECODE_RESULT_URL, json=payload, timeout=60)
  127. if resp.status_code != 200:
  128. return False, f"http_status_{resp.status_code}", {}
  129. body = resp.json()
  130. ok = body.get("code") == 0
  131. return ok, body.get("msg") or "", body
  132. except Exception as exc:
  133. return False, str(exc), {}
  134. def _apply_result_row_to_db(dt: str, item: Dict[str, Any]) -> None:
  135. vid = str(item.get("channelContentId") or "").strip()
  136. if not vid:
  137. return
  138. api_status_raw = item.get("status") or ""
  139. err_msg = (item.get("err_msg") or item.get("errorMessage") or "") or ""
  140. data_content = item.get("dataContent")
  141. if data_content is not None and not isinstance(data_content, str):
  142. data_content = json.dumps(data_content, ensure_ascii=False)
  143. html = item.get("html")
  144. base_status = _map_api_status_to_int(str(api_status_raw), vid)
  145. sql = """
  146. UPDATE aigc_topic_decode_task_result
  147. SET status = %s,
  148. err_msg = %s,
  149. data_content = %s,
  150. html = %s
  151. WHERE dt = %s AND vid = %s
  152. """
  153. mysql.execute(
  154. sql,
  155. (
  156. base_status,
  157. err_msg[:512] if err_msg else "",
  158. data_content if data_content is not None else "",
  159. html if html is not None else None,
  160. dt,
  161. vid,
  162. ),
  163. )
  164. def _poll_decode_results_for_today(dt: str, vids: List[str]) -> None:
  165. if not vids:
  166. return
  167. total = len(vids)
  168. logger.info("开始查询解码结果 dt={} 总vid数={}", dt, total)
  169. overall_success = 0
  170. overall_returned = 0
  171. for i in range(0, total, RESULT_POLL_CHUNK):
  172. chunk = vids[i : i + RESULT_POLL_CHUNK]
  173. logger.info(
  174. "查询解码结果 dt={} 分片序号={} 分片大小={} 总数={}",
  175. dt,
  176. i // RESULT_POLL_CHUNK,
  177. len(chunk),
  178. total,
  179. )
  180. ok, msg, body = _submit_decode_result_chunk(chunk)
  181. if not ok:
  182. logger.error(
  183. "查询解码结果接口失败 dt={} msg={} body={}",
  184. dt,
  185. msg,
  186. body,
  187. )
  188. continue
  189. data_list = body.get("data")
  190. if not isinstance(data_list, list):
  191. logger.warning("查询解码结果返回中缺少data列表 body={}", body)
  192. continue
  193. chunk_success = 0
  194. returned_ids = {str(x.get("channelContentId") or "") for x in data_list}
  195. missing = set(chunk) - returned_ids
  196. if missing:
  197. logger.warning(
  198. "查询解码结果返回缺少{}个vid,示例={}",
  199. len(missing),
  200. list(missing)[:5],
  201. )
  202. for item in data_list:
  203. if not isinstance(item, dict):
  204. continue
  205. vid = str(item.get("channelContentId") or "").strip()
  206. api_status = str(item.get("status") or "")
  207. mapped_status = _map_api_status_to_int(api_status, vid)
  208. if mapped_status == 2:
  209. chunk_success += 1
  210. err_msg = (item.get("err_msg") or item.get("errorMessage") or "") or ""
  211. logger.info(
  212. "解码结果明细 dt={} vid={} 接口状态={} 映射状态={} 错误信息={}",
  213. dt,
  214. vid,
  215. api_status,
  216. mapped_status,
  217. err_msg[:512] if err_msg else "",
  218. )
  219. _apply_result_row_to_db(dt, item)
  220. overall_success += chunk_success
  221. overall_returned += len(data_list)
  222. logger.info(
  223. "解码结果分片处理完成 dt={} 查询数={} 返回数={} 成功数={}",
  224. dt,
  225. len(chunk),
  226. len(data_list),
  227. chunk_success,
  228. )
  229. logger.info(
  230. "解码结果查询完成 dt={} 查询总数={} 返回总数={} 成功总数={}",
  231. dt,
  232. total,
  233. overall_returned,
  234. overall_success,
  235. )
  236. def _poll_decode_results_for_history(today_dt: str) -> None:
  237. vids_by_dt = _fetch_history_pending_vids_by_dt(today_dt)
  238. if not vids_by_dt:
  239. return
  240. total = sum(len(v) for v in vids_by_dt.values())
  241. logger.info(
  242. "开始查询历史未完成解码结果 today_dt={} 涉及dt数={} 总vid数={}",
  243. today_dt,
  244. len(vids_by_dt),
  245. total,
  246. )
  247. for dt, vids in vids_by_dt.items():
  248. _poll_decode_results_for_today(dt, vids)
  249. logger.info(
  250. "历史未完成解码结果查询结束 today_dt={} 涉及dt数={} 总vid数={}",
  251. today_dt,
  252. len(vids_by_dt),
  253. total,
  254. )
  255. def _build_posts_payload(records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
  256. posts: List[Dict[str, Any]] = []
  257. for item in records:
  258. extend_raw = item.get("extend")
  259. extend_obj: Dict[str, Any]
  260. if isinstance(extend_raw, dict):
  261. extend_obj = extend_raw
  262. else:
  263. extend_obj = _safe_json_loads(str(extend_raw)) if extend_raw is not None else {}
  264. # cover_url = extend_obj.get("cover_url") or ""
  265. # images = [cover_url] if cover_url else []
  266. posts.append(
  267. {
  268. "channelContentId": item.get("vid") or "",
  269. "title": item.get("title") or "",
  270. "video": item.get("url") or "",
  271. "images": [],
  272. "contentModal": 4,
  273. "channel": 10,
  274. }
  275. )
  276. return posts
  277. def _submit_decode(posts: List[Dict[str, Any]]) -> Tuple[bool, str, Dict[str, Any]]:
  278. payload = {"params": {"configId": CONFIG_ID, "posts": posts}}
  279. try:
  280. resp = requests.post(DECODE_URL, json=payload, timeout=60)
  281. if resp.status_code != 200:
  282. return False, f"http_status_{resp.status_code}", {}
  283. body = resp.json()
  284. ok = body.get("code") == 0
  285. return ok, body.get("msg") or "", body
  286. except Exception as exc:
  287. return False, str(exc), {}
  288. def _load_existing_vids(dt: str) -> set[str]:
  289. sql = """
  290. SELECT DISTINCT vid
  291. FROM aigc_topic_decode_task_result
  292. WHERE dt = %s AND vid IS NOT NULL AND vid != ''
  293. """
  294. rows = mysql.fetchall(sql, (dt,))
  295. return {str(row["vid"]) for row in rows if row.get("vid")}
  296. def _pick_candidate_records(dt: str, batch_size: int = BATCH_SIZE) -> List[Dict[str, Any]]:
  297. existing_vids = _load_existing_vids(dt)
  298. selected: List[Dict[str, Any]] = []
  299. selected_vids: set[str] = set()
  300. offset = 0
  301. while len(selected) < batch_size:
  302. page = fetch_priority_posts(limit=ODPS_PAGE_SIZE, offset=offset, dt=dt)
  303. if not page:
  304. break
  305. for item in page:
  306. vid = str(item.get("vid") or "")
  307. if (
  308. not vid
  309. or not _is_allowed_level(item.get("level"))
  310. or vid in existing_vids
  311. or vid in selected_vids
  312. ):
  313. continue
  314. selected.append(item)
  315. selected_vids.add(vid)
  316. if len(selected) >= batch_size:
  317. break
  318. offset += ODPS_PAGE_SIZE
  319. logger.info(
  320. "候选数据筛选完成 dt={} 已选数量={} 扫描offset={}",
  321. dt,
  322. len(selected),
  323. offset,
  324. )
  325. if selected:
  326. vid_title_pairs = [
  327. {"vid": str(item.get("vid") or ""), "title": item.get("title") or ""}
  328. for item in selected
  329. ]
  330. logger.info("已选候选数据 dt={} items={}", dt, vid_title_pairs)
  331. return selected
  332. def _row_status_after_decode_submit(
  333. vid: str, row_in_resp: Optional[Dict[str, Any]], full_body: Dict[str, Any]
  334. ) -> Tuple[int, str, str, Optional[str]]:
  335. """Returns (status, err_msg, data_content, html) for INSERT."""
  336. if not row_in_resp:
  337. payload = json.dumps({"decode_submit_response": full_body}, ensure_ascii=False)
  338. return 1, "", payload, None
  339. api_status_raw = row_in_resp.get("status") or ""
  340. err_msg = (row_in_resp.get("err_msg") or row_in_resp.get("errorMessage") or "") or ""
  341. mapped = _map_api_status_to_int(str(api_status_raw), vid)
  342. payload = json.dumps(
  343. {"decode_submit_item": row_in_resp, "decode_submit_response": full_body},
  344. ensure_ascii=False,
  345. )
  346. if mapped == 3:
  347. return 3, err_msg[:512], payload, None
  348. if mapped == 2:
  349. # New submit API only returns status/errorMessage.
  350. # Keep SUCCESS as terminal success; detailed result is queried via decode/result.
  351. return 2, "", payload, None
  352. if mapped == 0:
  353. return 0, err_msg[:512], payload, None
  354. return 1, err_msg[:512], payload, None
  355. def _insert_task_result_row(
  356. source: Dict[str, Any],
  357. status: int,
  358. err_msg: str,
  359. data_content: str,
  360. html: Optional[str],
  361. ) -> None:
  362. extend = _safe_json_loads(source.get("extend"))
  363. cover_url = extend.get("cover_url", "")
  364. cover_text = cover_url if isinstance(cover_url, str) else ""
  365. images_text = source.get("url") or ""
  366. sql = """
  367. INSERT INTO aigc_topic_decode_task_result
  368. (task_id, status, err_msg, vid, title, cover, video_url, images, type, channel, cate1, cate2, dt, data_content, html)
  369. VALUES
  370. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
  371. """
  372. params = (
  373. None,
  374. status,
  375. err_msg or "",
  376. str(source.get("vid") or ""),
  377. source.get("title") or "",
  378. cover_text,
  379. source.get("url") or "",
  380. images_text,
  381. source.get("type") or "",
  382. source.get("channel") or "",
  383. source.get("cate1") or "",
  384. source.get("cate2") or "",
  385. source.get("dt") or _today_dt(),
  386. data_content,
  387. html,
  388. )
  389. mysql.execute(sql, params)
  390. def _insert_rows_after_decode_submit(records: List[Dict[str, Any]], body: Dict[str, Any]) -> None:
  391. data_list = body.get("data") if isinstance(body.get("data"), list) else []
  392. by_vid = {str(x.get("channelContentId") or ""): x for x in data_list if isinstance(x, dict)}
  393. for item in records:
  394. vid = str(item.get("vid") or "")
  395. row = by_vid.get(vid)
  396. status, err_msg, data_content, html = _row_status_after_decode_submit(vid, row, body)
  397. _insert_task_result_row(item, status, err_msg, data_content, html)
def run_decode_dispatch_job() -> None:
    """Main entry point for one decode dispatch cycle.

    Each run, in order:
      1. Polls decode results for historical (pre-today) pending/running rows.
      2. Polls decode results for today's pending/running rows.
      3. If the submit switch is open, tops up today's in-flight window toward
         BATCH_SIZE, bounded by the configured daily limit, submits the new
         batch, and inserts one DB row per submitted record.

    Never raises: any exception is logged and swallowed at the top level so
    the scheduler can run the next cycle.
    """
    logger.info("解码调度任务开始执行")
    try:
        dt = _today_dt()
        # Phase 1: settle results for earlier days before touching today.
        _poll_decode_results_for_history(dt)
        before_poll_non_terminal = _count_today_non_terminal(dt)
        if before_poll_non_terminal > 0:
            logger.info(
                "当天存在待执行/执行中任务,先拉取解码结果 dt={} count={}",
                dt,
                before_poll_non_terminal,
            )
        # Phase 2: refresh today's pending/running rows from the result API.
        pending_vids = _fetch_today_pending_vids(dt)
        if pending_vids:
            logger.info("查询当天待执行/执行中记录 dt={} count={}", dt, len(pending_vids))
            _poll_decode_results_for_today(dt, pending_vids)
        elif before_poll_non_terminal > 0:
            # Count says non-terminal rows exist but no vid was fetched —
            # likely rows with empty vid; surface it for investigation.
            logger.warning(
                "存在非终态记录但未获取到可查询vid dt={} count={}",
                dt,
                before_poll_non_terminal,
            )
        after_poll_non_terminal = _count_today_non_terminal(dt)
        if pending_vids or before_poll_non_terminal > 0:
            logger.info(
                "解码结果查询阶段结束 dt={} 查询前非终态={} 查询后非终态={}",
                dt,
                before_poll_non_terminal,
                after_poll_non_terminal,
            )
        # Phase 3: gate on the submit switch before issuing any new tasks.
        if not _is_decode_submit_open():
            logger.info("解构开关关闭(is_open!=1),跳过本轮新批次发起 dt={}", dt)
            logger.info("解码调度任务结束(开关关闭:不发起新任务)")
            return
        # How many slots are free under the BATCH_SIZE in-flight window.
        window_need = max(0, BATCH_SIZE - after_poll_non_terminal)
        if window_need == 0:
            logger.info(
                "解构中已满{}条,本轮无需补充 dt={} non_terminal={}",
                BATCH_SIZE,
                dt,
                after_poll_non_terminal,
            )
            logger.info("解码调度任务结束(窗口已满)")
            return
        # Daily cap: total rows created today must stay under the configured max.
        daily_limit = _fetch_decode_daily_limit()
        today_total = _count_today_total(dt)
        daily_remaining = max(0, daily_limit - today_total)
        if daily_remaining == 0:
            logger.info(
                "当日解构已达到上限,不再发起新任务 dt={} daily_limit={} today_total={}",
                dt,
                daily_limit,
                today_total,
            )
            logger.info("解码调度任务结束(达到当日上限)")
            return
        # Submit the smaller of window gap and remaining daily quota.
        need = min(window_need, daily_remaining)
        logger.info(
            "动态窗口补充 dt={} 当前解构中={} 目标={} 窗口缺口={} 日上限={} 当日已发起={} 当日剩余额度={} 本次补充={}",
            dt,
            after_poll_non_terminal,
            BATCH_SIZE,
            window_need,
            daily_limit,
            today_total,
            daily_remaining,
            need,
        )
        records = _pick_candidate_records(dt=dt, batch_size=need)
        if not records:
            logger.info("无可发起的新批次候选数据 dt={} need={}", dt, need)
            logger.info("解码调度任务结束(无新增任务)")
            return
        posts = _build_posts_payload(records)
        logger.info("解码提交接口执行开始 posts={}", posts)
        ok, err_msg, body = _submit_decode(posts)
        logger.info(
            "解码提交接口执行完成 success={} records={} msg={} body={}",
            ok,
            len(records),
            err_msg,
            body,
        )
        if not ok:
            # Submit failed outright: record every candidate as failed (status 3)
            # with the raw response preserved for debugging.
            fail_body = json.dumps({"decode_submit_response": body}, ensure_ascii=False)
            for item in records:
                _insert_task_result_row(
                    item,
                    status=3,
                    err_msg=err_msg or "解码提交失败",
                    data_content=fail_body,
                    html=None,
                )
        else:
            if isinstance(body.get("data"), list) and body["data"]:
                # Per-vid echo available: derive each row's status from it.
                _insert_rows_after_decode_submit(records, body)
            else:
                # No per-vid echo: mark all as running (status 1); the poll
                # phase of later cycles will settle their final status.
                payload = json.dumps({"decode_submit_response": body}, ensure_ascii=False)
                for item in records:
                    _insert_task_result_row(
                        item,
                        status=1,
                        err_msg="",
                        data_content=payload,
                        html=None,
                    )
        logger.info("解码调度任务结束,本轮新发起数量={}", len(records))
    except Exception as exc:
        logger.exception("解码调度任务异常退出: {}", exc)
        return