@@ -32,12 +32,17 @@ def addSpiderLink():
     result = mysql_con.get_values(sql)
     now_time = int(time.time())
     if result:
-        old_task_id = result[0]['task_id']
-        if task_id == old_task_id:
-            up_sql = f'update crawler_author_map set is_del=1 where spider_link="{spider_link}"'
+        is_del = result[0]['is_del']
+        if is_del:
+            return jsonify({'code': 400, 'message': '抓取名单重复'})
         else:
-            up_sql = f'update crawler_author_map set task_id={task_id} where spider_link="{spider_link}"'
-        res = mysql_con.update_values(up_sql)
+            old_task_id = result[0]['task_id']
+            if task_id == old_task_id:
+                up_sql = f'update crawler_author_map set is_del=1 where spider_link="{spider_link}"'
+            else:
+                up_sql = f'update crawler_author_map set task_id={task_id},is_del=1 where spider_link="{spider_link}"'
+            mysql_con.update_values(up_sql)
+            return jsonify({'code': 200, 'message': '抓取名单增加成功'})

         # task_sql = f'select spider_link from crawler_task where task_id ={task_id}'
         # task = mysql_con.get_values(task_sql)
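The updated branch returns 400 ('抓取名单重复', i.e. the crawl entry already exists) when the existing row is already flagged, and otherwise re-activates the row, switching it to the new task_id when it differs, before returning 200. Both UPDATE statements are still built by interpolating task_id and spider_link into f-strings, as the old code did. If the underlying connector exposes a DB-API cursor, a parameterized form sidesteps quoting and injection issues. A minimal sketch assuming a pymysql connection; update_author_map, conn and move_to_new_task are illustrative names, not part of this patch:

import pymysql

def update_author_map(conn: "pymysql.connections.Connection",
                      task_id: int, spider_link: str,
                      move_to_new_task: bool = False) -> None:
    # Re-activate an existing crawler_author_map row; optionally move it to a new task.
    if move_to_new_task:
        sql = 'update crawler_author_map set task_id=%s, is_del=1 where spider_link=%s'
        args = (task_id, spider_link)
    else:
        sql = 'update crawler_author_map set is_del=1 where spider_link=%s'
        args = (spider_link,)
    with conn.cursor() as cursor:
        cursor.execute(sql, args)  # values are escaped by the driver, not by f-strings
    conn.commit()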
@@ -46,16 +51,13 @@ def addSpiderLink():
         # str_spider_links = str(spider_links)
         # u_sql = f'update crawler_task set spider_link="{str_spider_links}", update_time={now_time} where task_id={task_id}'
         # mysql_con.update_values(u_sql)
-        return jsonify({'code': 200, 'message': '抓取名单增加成功', 'del_link': spider_link})
     else:
         sql = f'select * from crawler_task where task_id={task_id}'
         result = mysql_con.get_values(sql)
         success_list, fail_list = create_uid(result[0], task_id, spider_link=[spider_link])
-
         spider_links = eval(result[0]['spider_link'])
         spider_links.append(spider_link)
         str_spider_links = str(spider_links)
-
         u_sql = f'update crawler_task set spider_link="{str_spider_links}", update_time={now_time} where task_id={task_id}'
         mysql_con.update_values(u_sql)

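The unchanged else branch still round-trips the stored list with eval(...) and str(...). This commit does not touch that, but ast.literal_eval parses the same str(list) representation without executing arbitrary expressions. A small sketch; append_spider_link and the empty-string guard are illustrative assumptions, not part of this patch:

import ast

def append_spider_link(raw_links: str, spider_link: str) -> str:
    # Parse the stored list literal, append the new link, and re-serialize it.
    # literal_eval only accepts Python literals, so a malformed value raises
    # instead of being executed.
    spider_links = ast.literal_eval(raw_links) if raw_links else []
    spider_links.append(spider_link)
    return str(spider_links)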