insert_videos.py 6.1 KB
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/23
  4. import json
  5. import os
  6. import sys
  7. sys.path.append(os.getcwd())
  8. from common.common import Common
  9. from common.db import MysqlHelper
  10. from common.feishu import Feishu
  11. class Insert:
  12. @classmethod
  13. def insert_video_from_feishu_to_mysql(cls, log_type, crawler, env, machine):
  14. xigua_sheetid_list = ["QOWqMo", "3Ul6wZ", "e075e9"]
  15. for sheetid in xigua_sheetid_list:
  16. xigua_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  17. for i in range(1, len(xigua_sheet)):
  18. # for i in range(1, 3):
  19. if xigua_sheet[i][5] is None:
  20. continue
  21. video_id = xigua_sheet[i][9].replace("https://admin.piaoquantv.com/cms/post-detail/", "").replace("/info", "")
  22. if video_id == "None":
  23. continue
  24. video_id = int(video_id)
  25. user_id = 0
  26. out_user_id = str(xigua_sheet[i][19])
  27. platform = "西瓜视频"
  28. strategy = "定向爬虫策略"
  29. out_video_id = str(xigua_sheet[i][8])
  30. video_title = str(xigua_sheet[i][7])
  31. cover_url = str(xigua_sheet[i][21])
  32. video_url = str(xigua_sheet[i][22])
  33. duration = int(xigua_sheet[i][15])
  34. publish_time = str(xigua_sheet[i][17].replace("/", "-"))
  35. play_cnt = int(xigua_sheet[i][11])
  36. like_cnt = int(xigua_sheet[i][13])
  37. share_cnt = int(xigua_sheet[i][14])
  38. # collection_cnt = 0
  39. comment_cnt = int(xigua_sheet[i][12])
  40. crawler_rule = json.dumps({"play_cnt": 0, "comment_cnt": 0, "like_cnt": 0, "duration": 60, "publish_time": 10, "video_width": 720, "video_height": 720})
  41. width = int(xigua_sheet[i][16].split("*")[0])
  42. height = int(xigua_sheet[i][16].split("*")[1])
  43. # print(f"video_id:{video_id}, type:{type(video_id)}")
  44. # print(f"user_id:{user_id}, type:{type(user_id)}")
  45. # print(f"out_user_id:{out_user_id}, type:{type(out_user_id)}")
  46. # print(f"platform:{platform}, type:{type(platform)}")
  47. # print(f"strategy:{strategy}, type:{type(strategy)}")
  48. # print(f"out_video_id:{out_video_id}, type:{type(out_video_id)}")
  49. # print(f"video_title:{video_title}, type:{type(video_title)}")
  50. # print(f"cover_url:{cover_url}, type:{type(cover_url)}")
  51. # print(f"video_url:{video_url}, type:{type(video_url)}")
  52. # print(f"duration:{duration}, type:{type(duration)}")
  53. # print(f"publish_time:{publish_time}, type:{type(publish_time)}")
  54. # print(f"play_cnt:{play_cnt}, type:{type(play_cnt)}")
  55. # print(f"like_cnt:{like_cnt}, type:{type(like_cnt)}")
  56. # print(f"share_cnt:{share_cnt}, type:{type(share_cnt)}")
  57. # print(f"collection_cnt:{collection_cnt}, type:{type(collection_cnt)}")
  58. # print(f"comment_cnt:{comment_cnt}, type:{type(comment_cnt)}")
  59. # print(f"crawler_rule:{crawler_rule}, type:{type(crawler_rule)}")
  60. # print(f"width:{width}, type:{type(width)}")
  61. # print(f"height:{height}, type:{type(height)}\n")
  62. select_sql = f""" select * from crawler_video where platform="{platform}" and out_video_id="{out_video_id}" """
  63. Common.logger(log_type, crawler).info(f"select_sql:{select_sql}")
  64. repeat_video = MysqlHelper.get_values(log_type, crawler, select_sql, env, machine)
  65. Common.logger(log_type, crawler).info(f"repeat_video:{repeat_video}")
  66. if repeat_video is not None and len(repeat_video) != 0:
  67. Common.logger(log_type, crawler).info(f"{video_title} 已存在数据库中\n")
  68. else:
  69. # 视频信息保存数据库
  70. insert_sql = f""" insert into crawler_video(video_id,
  71. user_id,
  72. out_user_id,
  73. platform,
  74. strategy,
  75. out_video_id,
  76. video_title,
  77. cover_url,
  78. video_url,
  79. duration,
  80. publish_time,
  81. play_cnt,
  82. like_cnt,
  83. share_cnt,
  84. comment_cnt,
  85. crawler_rule,
  86. width,
  87. height)
  88. values({video_id},
  89. {user_id},
  90. "{out_user_id}",
  91. "{platform}",
  92. "{strategy}",
  93. "{out_video_id}",
  94. "{video_title}",
  95. "{cover_url}",
  96. "{video_url}",
  97. {duration},
  98. "{publish_time}",
  99. {play_cnt},
  100. {like_cnt},
  101. {share_cnt},
  102. {comment_cnt},
  103. '{crawler_rule}',
  104. {width},
  105. {height}) """
  106. Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
  107. MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
  108. Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
  109. if __name__ == "__main__":
  110. Insert.insert_video_from_feishu_to_mysql("insert", "xigua", "dev", "local")