# conf_task.py
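# Flask service for managing crawler tasks: adding/removing spider links,
# creating tasks and their virtual media accounts, and querying task, source,
# and category metadata stored in MySQL (via MysqlHelper) and a remote media API.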
import copy
import logging
import os
import sys
import time

import requests
from flask import Flask, request, jsonify
from dotenv import load_dotenv

sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "..")))
from conf.config import get_config
from common.db.mysql_help import MysqlHelper

load_dotenv(verbose=True)
env = os.getenv('env')

app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False
# MySQL helper instance
mysql_con = MysqlHelper()
conf = get_config()


@app.route("/v1/crawler/task/addlink", methods=["POST"])
def addSpiderLink():
    data = request.json
    spider_link = data['spider_link']
    task_id = data['task_id']
    try:
        sql = f'select * from crawler_author_map where spider_link="{spider_link}"'
        result = mysql_con.get_values(sql)
        now_time = int(time.time())
        if result:
            # The link already exists in the author map: re-enable it (or move it
            # to the new task), then append it back to the task's spider_link list.
            old_task_id = result[0]['task_id']
            if task_id == old_task_id:
                up_sql = f'update crawler_author_map set is_del=1 where spider_link="{spider_link}"'
            else:
                up_sql = f'update crawler_author_map set task_id={task_id} where spider_link="{spider_link}"'
            mysql_con.update_values(up_sql)
            task_sql = f'select spider_link from crawler_task where task_id={task_id}'
            task = mysql_con.get_values(task_sql)
            spider_links = eval(task[0]['spider_link'])
            spider_links.append(spider_link)
            str_spider_links = str(spider_links)
            u_sql = f'update crawler_task set spider_link="{str_spider_links}", update_time={now_time} where task_id={task_id}'
            mysql_con.update_values(u_sql)
            return jsonify({'code': 200, 'message': '抓取名单增加成功', 'add_link': spider_link})
        else:
            # New link: create its media account first, then append it to the task.
            sql = f'select * from crawler_task where task_id={task_id}'
            result = mysql_con.get_values(sql)
            success_list, fail_list = create_uid(result[0], task_id, spider_link=[spider_link])
            spider_links = eval(result[0]['spider_link'])
            spider_links.append(spider_link)
            str_spider_links = str(spider_links)
            u_sql = f'update crawler_task set spider_link="{str_spider_links}", update_time={now_time} where task_id={task_id}'
            mysql_con.update_values(u_sql)
            return jsonify({'code': 200, 'message': '抓取名单增加成功', 'add_link': success_list})
    except Exception as e:
        return jsonify({'code': 400, 'message': '抓取名单增加失败', 'spider_link': spider_link})


@app.route("/v1/crawler/task/dellink", methods=["POST"])
def delSpiderLink():
    data = request.json
    spider_link = data['spider_link']
    task_id = data['task_id']
    # Soft-delete the author mapping, then remove the link from the task's list.
    up_sql = f'update crawler_author_map set is_del=0 where spider_link="{spider_link}"'
    mysql_con.update_values(up_sql)
    sql = f'select * from crawler_task where task_id={task_id}'
    task = mysql_con.get_values(sql)
    spider_links = eval(task[0]['spider_link'])
    spider_links.remove(spider_link)
    now_time = int(time.time())
    u_sql = f'update crawler_task set spider_link="{spider_links}", update_time={now_time} where task_id={task_id}'
    result = mysql_con.update_values(u_sql)
    if result:
        return jsonify({'code': 200, 'message': '抓取名单删除成功', 'del_link': spider_link})
    else:
        return jsonify({'code': 400, 'message': '抓取名单删除失败', 'del_link': spider_link})


@app.route("/v1/crawler/task/getcategory", methods=["GET"])
def getCategory():
    sql = 'select id, content_category from crawler_content_category'
    result = mysql_con.get_values(sql)
    return jsonify({'code': 200, 'data': result})


@app.route("/v1/crawler/task/getboard", methods=["GET"])
def getBoard():
    sql = 'select id, mode_board from crawler_board'
    result = mysql_con.get_values(sql)
    return jsonify({'code': 200, 'data': result})


@app.route("/v1/crawler/task/getmodename", methods=["GET"])
def getModeName():
    sql = 'select id, mode_name from crawler_mode'
    result = mysql_con.get_values(sql)
    return jsonify({'code': 200, 'data': result})


@app.route("/v1/crawler/task/getrecommendboard", methods=["GET"])
def getRecommendBoard():
    sql = 'select id, mode_board from crawler_recommend_board'
    result = mysql_con.get_values(sql)
    return jsonify({'code': 200, 'data': result})


@app.route("/v1/crawler/user/findmedia", methods=["GET"])
def getMediaInfo():
    data = request.args.to_dict()
    task_id = data['task_id']
    sql = f'select * from crawler_author_map where task_id={task_id} and is_del=1'
    result = mysql_con.get_values(sql)
    task_user_info = []
    for task_info in result:
        media_id = task_info['media_id']
        media_info = requests.get(url=conf['select_media_url'], params={'uid': media_id}).json()['content']
        media_name = media_info['longvideoNickName'] if media_info['longvideoNickName'] else media_info['nickName']
        nick_name = task_info['nick_name']
        spider_link = task_info['spider_link']
        create_user_time = task_info['create_user_time']
        media_data = dict(
            media_name=media_name,
            nick_name=nick_name,
            spider_link=spider_link,
            media_id={'media_id': media_id, 'media_url': conf['media_main_url'].format(media_id)},
            create_user_time=create_user_time * 1000
        )
        task_user_info.append(media_data)
    return jsonify({'code': 200, 'data': task_user_info})


@app.route("/v1/crawler/task/findtask", methods=["GET"])
def getTaskUserInfo():
    # Look up a task by the crawler_author_map fields passed as query arguments.
    data = request.args.to_dict()
    values = ''
    for k, v in data.items():
        if isinstance(v, int):
            values += f'{k}={v} and '
        else:
            values += f'{k}="{v}" and '
    sql = f"select task_id from crawler_author_map where {values[:-4]}"  # [:-4] strips the trailing "and "
    res = mysql_con.get_values(sql)
    task_id = res[0]['task_id']
    sql = f'select task_name, source, task_type, create_task_user, insert_time, update_task_user, update_time from crawler_task where task_id={task_id}'
    task_info = mysql_con.get_values(sql)
    return jsonify({'code': 200, 'data': task_info})


# Accepts GET requests only
@app.route("/v1/crawler/source/getall", methods=["GET"])
def getSource():
    try:
        sql = 'select * from crawler_source'
        result = mysql_con.get_values(sql)
        if not result:
            return jsonify({'code': '200', 'result': [], 'message': '没有更多数据'})
    except Exception as e:
        return jsonify({'code': '400', 'message': '获取数据源信息失败'})
    return jsonify({'code': '200', 'result': result})


@app.route("/v1/crawler/source/getasktype", methods=["GET"])
def getTaskType():
    try:
        data = request.args.to_dict()
        source = data['source']
        # Fetch the task types configured for this source.
        sql = f'select * from crawler_task_type where source="{source}"'
        result = mysql_con.get_values(sql)
        if not result:
            return jsonify({'code': '200', 'result': [], 'message': '没有更多数据'})
        task_type_list = list()
        for task_type_info in result:
            task_info = {
                'type': task_type_info['task_type'],
                'description': task_type_info['task_type_desc'],
                'spider': {
                    'spider_name': task_type_info['spider_name'],
                    'description': task_type_info['spider_name_desc']
                }
            }
            task_type_list.append(task_info)
        source_dict = {
            'task_type': task_type_list,
        }
    except Exception as e:
        return jsonify({'code': '400', 'message': '获取数据源信息失败'})
    return jsonify({'code': '200', 'result': source_dict})


@app.route("/v1/crawler/task/checkrepeat", methods=["POST"])
def get_repeat_list():
    data = request.json
    spider_links = data.get('spider_link')
    repeat_list = list()
    # Collect every link that already exists in crawler_author_map.
    for spider_link in spider_links:
        if isinstance(spider_link, int):
            s_sql = f"""select spider_link from crawler_author_map where spider_link={spider_link}"""
        else:
            s_sql = f"""select spider_link from crawler_author_map where spider_link='{spider_link}'"""
        result = mysql_con.get_values(s_sql)
        if result:
            repeat_list.append(spider_link)
    if repeat_list:
        return jsonify({'code': 400, 'message': '名单重复', 'repeat_list': repeat_list})
    else:
        return jsonify({'code': 200, 'message': '抓取名单校验通过', 'repeat_list': repeat_list})


@app.route("/v1/crawler/task/insert", methods=["POST"])
def insertTask():
    try:
        data = request.json
        user_data = copy.deepcopy(data)
        tag_name_list = []
        content_tag_list = []
        user_tag = data['user_tag']
        user_content_tag = data['user_content_tag']
        for tag in user_tag:
            tag_name_list.append(tag['tagName'])
        for tag in user_content_tag:
            content_tag_list.append(tag['tagName'])
        # Timestamps arrive in milliseconds; store them as seconds.
        if data['min_publish_time']:
            data['min_publish_time'] = int(data['min_publish_time'] / 1000)
        else:
            data['min_publish_time'] = 0
        if not data['min_publish_day']:
            data['min_publish_day'] = 0
        data['next_time'] = int(data['next_time'] / 1000)
        data['insert_time'] = int(time.time())
        data['update_time'] = int(time.time())
        data['spider_link'] = str(data['spider_link'])
        data['spider_rule'] = str(data['spider_rule'])
        data['user_tag_info'] = str(user_tag)
        data['content_tag_info'] = str(user_content_tag)
        data['user_tag'] = ','.join(str(i) for i in tag_name_list)
        data['user_content_tag'] = ','.join(str(i) for i in content_tag_list)
        # data['crawler_interval'] = data.pop('interval')
        # Build comma-separated column names and "%s" placeholders for the insert.
        keys = ','.join(data.keys())
        values = ','.join(['%s'] * len(data))
        sql = 'insert into {table}({keys}) VALUES({values})'.format(table='crawler_task', keys=keys, values=values)
        task_id = mysql_con.insert_values(sql, tuple(data.values()))
        if task_id:
            spider_link = user_data['spider_link']
            success_list, fail_list = create_uid(user_data, task_id, spider_link)
            return jsonify(
                {'code': 200, 'message': 'task create success', 'success_list': success_list, 'fail_list': fail_list})
        return jsonify({'code': 400, 'message': '任务写入失败'})
    except Exception as e:
        return jsonify({'code': 500, 'message': f'任务写入失败,原因:{e}'})
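# Illustrative request body for /v1/crawler/task/insert (values are placeholders;
# the handler writes every key of the payload into crawler_task verbatim, so the
# real field set depends on that table's schema):
# {
#     "task_name": "...", "source": "...", "task_type": "author",
#     "spider_link": ["https://example.com/author/1"],
#     "spider_rule": {"frequency": 3600},
#     "user_tag": [{"tagName": "spider"}], "user_content_tag": [{"tagName": "video"}],
#     "min_publish_time": 0, "min_publish_day": 0, "next_time": 1672531200000,
#     "applets_status": 0, "app_status": 0,
#     "mode_name_id": 0, "mode_board_id": 0, "content_category_id": 0,
#     "create_task_user": "..."
# }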


@app.route("/v1/crawler/task/gettask", methods=["POST"])
def getAllTask():
    try:
        get_data = request.json
        page = int(get_data.get('page', 1))
        offset = int(get_data.get('offset', 10))
        start_count = (page * offset) - offset
        if get_data.get('fields'):
            # Filtered query: resolve task_ids through crawler_author_map first.
            select_data = get_data['fields']
            values = ''
            for k, v in select_data.items():
                if isinstance(v, int):
                    values += f'{k}={v} and '
                else:
                    values += f'{k}="{v}" and '
            sql = f"select task_id from crawler_author_map where {values[:-4]} and is_del=1"  # [:-4] strips the trailing "and "
            res = mysql_con.get_values(sql)
            task_id_set = set()
            for task in res:
                task_id_set.add(task['task_id'])
            task_list = list()
            for task_id in task_id_set:
                sql = f'select * from crawler_task where task_id={task_id}'
                task_info = mysql_con.get_values(sql)[0]
                task_data = dict(
                    task_id=task_info['task_id'],
                    task_name=task_info['task_name'],
                    source=task_info['source'],
                    task_type=task_info['task_type'],
                    create_task_user=task_info['create_task_user'],
                    insert_time=task_info['insert_time'] * 1000,
                    update_task_user=task_info['update_task_user'],
                    update_time=task_info['update_time'] * 1000
                )
                task_list.append(task_data)
            return jsonify({'code': 200, 'result': task_list, 'total': len(task_list)})
        # Unfiltered query: page through crawler_task directly.
        sql = f"""select * from crawler_task order by update_time desc limit {start_count}, {offset}"""
        result = mysql_con.get_values(sql)
        if not result:
            return jsonify({'code': '200', 'result': [], 'message': '没有更多任务'})
        task_list = list()
        for task_info in result:
            source = task_info['source']
            task_type = task_info['task_type']
            source_sql = f'select * from crawler_source where source="{source}"'
            source_info = mysql_con.get_values(source_sql)
            task_type_sql = f'select * from crawler_task_type where task_type="{task_type}"'
            type_info = mysql_con.get_values(task_type_sql)
            task_data = dict(
                task_id=task_info['task_id'],
                task_name=task_info['task_name'],
                source_name=source_info[0]['source_desc'],
                task_type_name=type_info[0]['task_type_desc'],
                source=task_info['source'],
                task_type=task_info['task_type'],
                create_task_user=task_info['create_task_user'],
                insert_time=task_info['insert_time'] * 1000,
                update_task_user=task_info['update_task_user'],
                update_time=task_info['update_time'] * 1000
            )
            task_list.append(task_data)
        t_sql = """select count(*) from crawler_task"""
        t_res = mysql_con.get_values(t_sql)
        total = t_res[0]['count(*)']
    except Exception as e:
        return jsonify({'code': '400', 'message': '任务列表获取失败'})
    return jsonify({'code': '200', 'result': task_list, 'total': total})


@app.route("/v1/crawler/task/getone", methods=["GET"])
def getOneTask():
    try:
        get_data = request.args.to_dict()
        task_id = get_data['task_id']
        sql = f'select * from crawler_task where task_id={task_id}'
        result = mysql_con.get_values(sql)
        if not result:
            return jsonify({'code': '400', 'result': [], 'message': 'no data'})
        data = result[0]
        # Convert stored seconds back to milliseconds and parse the stringified fields.
        if data['min_publish_time']:
            data['min_publish_time'] = data['min_publish_time'] * 1000
        else:
            data['min_publish_time'] = 0
        data['next_time'] = data['next_time'] * 1000
        data['spider_link'] = eval(data['spider_link'])
        data['spider_rule'] = eval(data['spider_rule'])
        data['user_tag_info'] = eval(data['user_tag_info'])
        data['content_tag_info'] = eval(data['content_tag_info'])
        if not data['mode_name_id']:
            data['mode_name_id'] = ''
        if not data['mode_board_id']:
            data['mode_board_id'] = ''
        if not data['content_category_id']:
            data['content_category_id'] = ''
    except Exception as e:
        return jsonify({'code': '500', 'message': '获取任务信息失败'})
    return jsonify({'code': '200', 'result': result})


@app.route("/v1/crawler/task/update", methods=["POST"])
def updateTask():
    try:
        data = request.json
        task_id = data.get('task_id')
        task_info = data.get('task_info')
        values = ''
        # Timestamps arrive in milliseconds; store them as seconds.
        if task_info['min_publish_time']:
            task_info['min_publish_time'] = int(task_info['min_publish_time'] / 1000)
        else:
            task_info['min_publish_time'] = 0
        if not task_info['min_publish_day']:
            task_info['min_publish_day'] = 0
        task_info['next_time'] = int(task_info['next_time'] / 1000)
        user_tag = task_info['user_tag']
        user_content_tag = task_info['user_content_tag']
        tag_name_list = []
        content_tag_list = []
        for tag in user_tag:
            tag_name_list.append(tag['tagName'])
        for tag in user_content_tag:
            content_tag_list.append(tag['tagName'])
        task_info['user_tag_info'] = str(user_tag)
        task_info['content_tag_info'] = str(user_content_tag)
        task_info['user_tag'] = ','.join(str(i) for i in tag_name_list)
        task_info['user_content_tag'] = ','.join(str(i) for i in content_tag_list)
        for k, v in task_info.items():
            if isinstance(v, int):
                values += f'{k}={v},'
            else:
                values += f'{k}="{v}",'
        sql = f'update crawler_task set {values[:-1]} where task_id={task_id}'  # [:-1] strips the trailing comma
        result = mysql_con.update_values(sql)
        if result:
            return jsonify({'code': 200, 'message': 'task update success'})
        else:
            return jsonify({'code': 400, 'message': 'task update failed'})
    except Exception as e:
        return jsonify({'code': 400, 'message': '任务更新失败'})


def create_uid(task, task_id, spider_link):
    """Create a virtual media account for every link in spider_link and record the
    mapping in crawler_author_map; returns (success_list, fail_list)."""
    if not isinstance(spider_link, list):
        spider_link = eval(spider_link)
    source = task.get('source')
    task_type = task.get('task_type')
    applets_status = task.get('applets_status')
    app_status = task.get('app_status')
    try:
        user_tag = eval(task.get('user_tag_info'))
        user_content_tag = eval(task.get('content_tag_info'))
    except Exception as e:
        user_tag = task.get('user_tag')
        user_content_tag = task.get('user_content_tag')
    mode_name_id = task.get('mode_name_id', 0)
    mode_board_id = task.get('mode_board_id', 0)
    content_category_id = task.get('content_category_id', 0)
    mn_sql = f'select * from crawler_mode where id={mode_name_id}'
    mode_name_list = mysql_con.get_values(mn_sql)
    mb_sql = f'select * from crawler_board where id={mode_board_id}'
    mode_board_list = mysql_con.get_values(mb_sql)
    cc_sql = f'select * from crawler_content_category where id={content_category_id}'
    content_category_list = mysql_con.get_values(cc_sql)
    source_sql = f'select * from crawler_source where source="{source}"'
    source_res = mysql_con.get_values(source_sql)[0]
    spider_platform = source_res['source_desc']
    if mode_name_list:
        task['mode_name_str'] = mode_name_list[0]['mode_name']
    else:
        task['mode_name_str'] = ''
    if mode_board_list:
        task['mode_board_str'] = mode_board_list[0]['mode_board']
    else:
        task['mode_board_str'] = ''
    if content_category_list:
        task['content_category_str'] = content_category_list[0]['content_category']
    else:
        task['content_category_str'] = ''
    success_list = list()
    fail_list = list()
    tag_name_list = list()
    content_tag_list = list()
    for tag in user_tag:
        tag_name_list.append(tag['tagName'])
    for tag in user_content_tag:
        content_tag_list.append(tag['tagName'])
    user_tags = ','.join(str(i) for i in tag_name_list)
    user_content_tags = ','.join(str(i) for i in content_tag_list)
    for author_url in spider_link:
        now_time = int(time.time())
        time_array = time.localtime(now_time)
        str_time = time.strftime("%Y%m%d", time_array)
        # Build the tag string for the account to be created.
        tags = ''
        if task['task_type'] == 'author':
            spider_task = '账号'
            tags_list = ['spider', spider_task, spider_platform, user_tags, task['content_category_str'], str_time]
        elif task['task_type'] == 'search':
            spider_task = '搜索'
            tags_list = ['spider', spider_task, spider_platform, user_tags, author_url, task['content_category_str'], str_time]
        elif task['task_type'] == 'board':
            spider_task = '榜单'
            mode_tags = task['mode_board_str']
            tags_list = ['spider', spider_task, spider_platform, user_tags, mode_tags, task['content_category_str'], str_time]
        elif task['task_type'] == 'recommend':
            spider_task = '推荐'
            mode_tags = task['mode_name_str'] + task['mode_board_str']
            tags_list = ['spider', spider_task, spider_platform, user_tags, mode_tags, task['content_category_str'], str_time]
        else:
            tags_list = ['spider', spider_platform, user_tags, task['content_category_str'], str_time]
        for v in tags_list:
            if v:
                tags += str(v) + ','
        post_data = {
            # 'count': 1,  # (required) number of accounts: pass 1
            # 'accountType': 4,  # (required) account type: pass 4 for an app virtual account
            'pwd': '',  # password, defaults to 12346
            'nickName': '',  # nickname, defaults to vuser......
            'avatarUrl': '',
            # avatar URL, defaults to http://weapppiccdn.yishihui.com/resources/images/pic_normal.png
            'tagName': tags[:-1],  # multiple values separated by commas
        }
        try:
            response = requests.post(url=conf['media_url'], params=post_data)
            media_id = response.json()['data']
            media_info = requests.get(url=conf['select_media_url'], params={'uid': media_id}).json()['content']
        except Exception as e:
            logging.warning(f'创建账户:{author_url},失败,原因:{e}')
            fail_list.append(author_url)
            continue
        data = dict(
            spider_link=author_url,
            media_id=media_id,
            media_name=media_info['longvideoNickName'] if media_info['longvideoNickName'] else media_info['nickName'],
            source=source,
            task_type=task_type,
            applets_status=applets_status,
            app_status=app_status,
            user_tag=user_tags,
            user_content_tag=user_content_tags,
            insert_time=int(time.time()),
            update_time=int(time.time()),
            create_user_time=now_time,
            mode_name_str=task['mode_name_str'],
            mode_board_str=task['mode_board_str'],
            content_category_str=task['content_category_str'],
            # mode_value_str=mode_value_str,
            task_id=task_id,
            media_main_url=conf['media_main_url'].format(media_id)
        )
        keys = ','.join(data.keys())
        values = ','.join(['%s'] * len(data))
        table = 'crawler_author_map'
        sql = f"""insert into {table}({keys}) VALUES({values})"""
        mysql_con.insert_values(sql, tuple(data.values()))
        user_info = dict(
            outer_id=author_url,
            uid=media_id
        )
        success_list.append(user_info)
    return success_list, fail_list


@app.route("/v1/crawler/author/create", methods=["POST"])
def createUser():
    spider_link = request.json.get('spider_link')
    source = request.json.get('source')
    task_type = request.json.get('task_type')
    applets_status = request.json.get('applets_status')
    app_status = request.json.get('app_status')
    user_tag = request.json.get('user_tag')
    user_content_tag = request.json.get('user_content_tag')
    success_list = list()
    fail_list = list()
    for author_url in spider_link:
        try:
            f_sql = f"""select spider_link from crawler_author_map where spider_link="{author_url}" """
            result = mysql_con.get_values(f_sql)
            if result:
                success_list.append(author_url)
                continue
            tag_name_list = []
            content_tag_list = []
            for tag in user_tag:
                tag_name_list.append(tag['tagName'])
            for tag in user_content_tag:
                content_tag_list.append(tag['tagName'])
            user_tags = ','.join(str(i) for i in tag_name_list)
            user_content_tags = ','.join(str(i) for i in content_tag_list)
            post_data = {
                # 'count': 1,  # (required) number of accounts: pass 1
                # 'accountType': 4,  # (required) account type: pass 4 for an app virtual account
                'pwd': '',  # password, defaults to 12346
                'nickName': '',  # nickname, defaults to vuser......
                'avatarUrl': '',
                # avatar URL, defaults to http://weapppiccdn.yishihui.com/resources/images/pic_normal.png
                'tagName': user_tags,  # multiple values separated by commas
            }
            response = requests.post(url=conf['media_url'], params=post_data)
            media_id = response.json()['data']
            data = dict(
                spider_link=author_url,
                media_id=media_id,
                source=source,
                task_type=task_type,
                applets_status=applets_status,
                app_status=app_status,
                user_tag=user_tags,
                user_content_tag=user_content_tags,
                insert_time=int(time.time()),
                update_time=int(time.time())
            )
            keys = ','.join(data.keys())
            values = ','.join(['%s'] * len(data))
            table = 'crawler_author_map'
            sql = f"""insert into {table}({keys}) VALUES({values})"""
            result = mysql_con.insert_values(sql, tuple(data.values()))
            if not result:
                fail_list.append(author_url)
            else:
                success_list.append(author_url)
        except Exception as e:
            fail_list.append(author_url)
            continue
    return jsonify({'code': 200, 'result': {'success': success_list, 'fail': fail_list}})


if __name__ == "__main__":
    app.run(debug=True, port=5050)
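# Quick local run (a sketch; assumes conf/config.py and the MySQL credentials in
# .env are reachable from this directory):
#   python conf_task.py                                   # dev server on 127.0.0.1:5050
#   curl "http://127.0.0.1:5050/v1/crawler/source/getall"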