conf_task.py

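# Flask service exposing configuration endpoints for the crawler platform:
# listing sources, creating/querying/updating tasks, and registering virtual
# author accounts. All persistence goes through the shared MysqlHelper.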
import os
import sys
import time

import requests
from flask import Flask, request
from flask import jsonify

# Make the project root importable so the shared helpers below resolve
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "..")))
from common.db.mysql_help import MysqlHelper
from user_spider.user_info import *

app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False  # return non-ASCII (e.g. Chinese) JSON unescaped
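
# GET /v1/crawler/source/getall
# Returns every crawler source with its task types, spiders, and descriptions.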
# Only GET requests are accepted
@app.route("/v1/crawler/source/getall", methods=["GET"])
def getSource():
    # Read the query-string parameters
    get_data = request.args.to_dict()
    fields = get_data.get('fields')
    # The column list must match the 7-tuple unpacked below
    sql = ('select source, task_type, spider_name, machine, description, '
           'task_type_desc, spider_name_desc from crawler_source')
    result = MysqlHelper.get_values(sql)
    if not result:
        return jsonify({'code': '200', 'result': [], 'message': 'no data'})
    source_list = list()
    for source, task_type, spider_name, machine, description, task_type_desc, spider_name_desc in result:
        source_dict = {}
        source_dict[source] = {
            'task_type': [
                {
                    'description': task_type_desc,
                    'author': [
                        {
                            'spider_name': spider_name,
                            'description': spider_name_desc
                        }
                    ]
                }
            ],
            'description': description
        }
        source_list.append(source_dict)
    return jsonify({'code': '200', 'result': source_list})
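
# POST /v1/crawler/task/insert
# Creates a crawler task from the posted form fields; rejects spider_link
# values that already exist for the given source.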
@app.route("/v1/crawler/task/insert", methods=["POST"])
def insertTask():
    data = request.form
    # spider_link arrives as the string form of a list, so parse it before iterating
    outer_info = data.get('spider_link')
    source = data.get('source')
    exist_outer_info = list()
    s_sql = f"""select spider_link from crawler_task where source="{source}" """
    result = MysqlHelper.get_values(s_sql)
    for link in eval(outer_info):
        if result and link in eval(result[0][0]):
            exist_outer_info.append(link)
    if exist_outer_info:
        return jsonify({'code': 200, 'message': 'duplicate spider_link entries', 'repeat_list': exist_outer_info})
    # Join the form keys into a comma-separated column list and build matching %s placeholders
    keys = ','.join(data.keys())
    values = ','.join(['%s'] * len(data))
    sql = 'insert into {table}({keys}) VALUES({values})'.format(table='crawler_task', keys=keys, values=values)
    MysqlHelper.insert_values(sql, tuple(data.values()))
    return jsonify({'code': 200, 'message': 'task create success'})
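
# GET /v1/crawler/task/gettask
# Paginated task listing; 'page' is 1-based and 'offset' is the page size.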
@app.route("/v1/crawler/task/gettask", methods=["GET"])
def getAllTask():
    get_data = request.args.to_dict()
    page = int(get_data.get('page', 1))
    offset = int(get_data.get('offset', 10))
    start_count = (page * offset) - offset
    # MySQL LIMIT takes (offset, row_count), so pass the page size, not the end index
    sql = f"""select task_id, task_name from crawler_task limit {start_count}, {offset}"""
    result = MysqlHelper.get_values(sql)
    if not result:
        return jsonify({'code': '200', 'result': [], 'message': 'no data'})
    source_list = list()
    for task_id, task_name in result:
        data = dict(
            task_id=task_id,
            task_name=task_name,
        )
        source_list.append(data)
    return jsonify({'code': '200', 'result': source_list})
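
# GET /v1/crawler/task/getone
# Fetches the spider_link value of a single task by task_id.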
@app.route("/v1/crawler/task/getone", methods=["GET"])
def getOneTask():
    get_data = request.args.to_dict()
    task_id = get_data['task_id']
    sql = f'select task_id, spider_link from crawler_task where task_id={task_id}'
    result = MysqlHelper.get_values(sql)
    if not result:
        return jsonify({'code': '200', 'result': [], 'message': 'no data'})
    for task_id, spider_link in result:
        data = dict(
            task_id=task_id,
            spider_link=spider_link,
        )
    return jsonify({'code': '200', 'result': data})
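
# POST /v1/crawler/task/update
# Overwrites the spider_link column of an existing task.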
@app.route("/v1/crawler/task/update", methods=["POST"])
def updateTask():
    task_id = request.form.get('task_id')
    spider_link = request.form.get('spider_link')
    print(spider_link, task_id)
    sql = f"""UPDATE crawler_task SET spider_link='{spider_link}' where task_id = {task_id}"""
    print(sql)
    result = MysqlHelper.update_values(sql)
    if result:
        return jsonify({'code': 200, 'message': 'task update success'})
    else:
        return jsonify({'code': 400, 'message': 'task update failed'})
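
# Maps a source name to its user-info fetcher (xigua_user_info comes from the
# user_spider.user_info star import); returns None for unknown sources.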
def get_user_info(source):
    source_spider = {
        'xigua': xigua_user_info
    }
    return source_spider.get(source)
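
# POST /v1/crawler/author/create
# For each author link: skip it if already mapped, otherwise register a
# virtual user via the internal longvideoapi service and persist the mapping
# in crawler_author_map.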
@app.route("/v1/crawler/author/create", methods=["POST"])
def createUser():
    get_media_url = 'http://videotest-internal.yishihui.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
    print(request.form.to_dict())
    spider_link = request.form.get('spider_link')
    source = request.form.get('source')
    task_type = request.form.get('task_type')
    applets_status = request.form.get('applets_status')
    app_status = request.form.get('app_status')
    user_tag = request.form.get('user_tag')
    user_content_tag = request.form.get('user_content_tag')
    success_list = list()
    fail_list = list()
    for author_url in eval(spider_link):
        try:
            # Skip author links that are already mapped, so no duplicate
            # virtual user gets registered upstream
            f_sql = f"""select spider_link from crawler_author_map where spider_link="{author_url}" """
            result = MysqlHelper.get_values(f_sql)
            if result:
                success_list.append(author_url)
                continue
            post_data = {
                # 'count': 1,  # (required) number of accounts: pass 1
                # 'accountType': 4,  # (required) account type: pass 4 for an app virtual account
                'pwd': '',  # password, defaults to 12346
                'nickName': '',  # nickname, defaults to vuser......
                'avatarUrl': '',
                # avatar URL, defaults to http://weapppiccdn.yishihui.com/resources/images/pic_normal.png
                'tagName': user_tag,  # multiple values separated by commas
            }
            response = requests.post(url=get_media_url, params=post_data)
            media_id = response.json()['data']
            data = dict(
                spider_link=author_url,
                media_id=media_id,
                source=source,
                task_type=task_type,
                applets_status=applets_status,
                app_status=app_status,
                user_tag=user_tag,
                user_content_tag=user_content_tag,
                insert_time=int(time.time()),
                update_time=int(time.time())
            )
            keys = ','.join(data.keys())
            values = ','.join(['%s'] * len(data))
            table = 'crawler_author_map'
            sql = f"""insert into {table}({keys}) VALUES({values})"""
            result = MysqlHelper.insert_values(sql, tuple(data.values()))
            if not result:
                fail_list.append(author_url)
            else:
                success_list.append(author_url)
        except Exception as e:
            fail_list.append(author_url)
            continue
    return jsonify({'code': 200, 'result': {'success': success_list, 'fail': fail_list}})
if __name__ == "__main__":
    app.run(debug=True, port=5050)
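
# A minimal client sketch (assumed usage, for illustration only; the host and
# example values are not part of this module):
#
#   import requests
#   base = 'http://127.0.0.1:5050'
#   requests.get(f'{base}/v1/crawler/task/gettask', params={'page': 1, 'offset': 10})
#   requests.post(f'{base}/v1/crawler/task/update',
#                 data={'task_id': 1, 'spider_link': "['https://example.com/author']"})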