recycle_published_articles_pipiline.py

# Insert article data
async def save_article_to_recycle_pool(
    pool,
    log_client,
    info_tuple: tuple,
    content_url: str,
    account_info: dict,
    table_name: str = "official_articles_v2",
):
    """Insert an article; update on duplicate key conflict."""
    query = f"""
        INSERT INTO {table_name}
            (ghId, accountName, appMsgId, title, Type, createTime, updateTime, Digest,
             ItemIndex, ContentUrl, SourceUrl, CoverImgUrl, CoverImgUrl_1_1,
             CoverImgUrl_255_1, ItemShowType, IsOriginal, ShowDesc, ori_content,
             show_view_count, show_like_count, show_zs_count, show_pay_count,
             wx_sn, baseInfo, title_md5, status)
        VALUES
            (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
             %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE
            show_view_count = VALUES(show_view_count),
            show_like_count = VALUES(show_like_count);
    """
    try:
        # Write the row; the tuple order must match the column list above
        await pool.async_save(
            query=query,
            params=info_tuple,
            db_name="piaoquan_crawler",
        )
        print("insert/update article success")
    except Exception as e:
        # Record the failure together with the article link and account name
        await log_client.log(
            contents={
                "function": "save_article_to_recycle_pool",
                "status": "fail",
                "message": "insert/update article failed",
                "data": {
                    "error": str(e),
                    "content_link": content_url,
                    "account_name": account_info["name"],
                },
            }
        )
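

# A minimal usage sketch, not part of the original file: the `pool` and
# `log_client` objects come from elsewhere in the project, and every field
# value below is a placeholder chosen only to illustrate that `info_tuple`
# must contain 26 items in the same order as the column list in the INSERT.
async def demo_save(pool, log_client):
    article_fields = (
        "gh_xxxx", "SomeAccount", 123456, "Sample title", 9,
        1700000000, 1700000000, "digest", 1,
        "https://mp.weixin.qq.com/s/xxxx", "", "", "", "",
        0, 1, "", "",
        1000, 50, 3, 0,
        "wx_sn_value", "{}", "title_md5_value", 1,
    )
    await save_article_to_recycle_pool(
        pool=pool,
        log_client=log_client,
        info_tuple=article_fields,
        content_url="https://mp.weixin.qq.com/s/xxxx",
        account_info={"name": "SomeAccount"},
    )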