lierqiang 2 years ago
parent
commit
bce3617829
1 changed file with 3 additions and 8 deletions
  1. 3 8
      server/conf_task.py

+ 3 - 8
server/conf_task.py

@@ -31,10 +31,12 @@ def addSpiderLink():
         sql = f'select * from crawler_author_map where spider_link="{spider_link}"'
         result = mysql_con.get_values(sql)
         now_time = int(time.time())
+        repeat_list = list()
         if result:
             is_del = result[0]['is_del']
             if is_del:
-                return jsonify({'code': 400, 'message': '抓取名单重复'})
+                repeat_list.append(spider_link)
+                return jsonify({'code': 400, 'message': '抓取名单重复', 'repeat_list': repeat_list})
             else:
                 old_task_id = result[0]['task_id']
                 if task_id == old_task_id:
@@ -44,13 +46,6 @@ def addSpiderLink():
                 mysql_con.update_values(up_sql)
                 return jsonify({'code': 200, 'message': '抓取名单增加成功'})
 
-            # task_sql = f'select spider_link from crawler_task where task_id ={task_id}'
-            # task = mysql_con.get_values(task_sql)
-            # spider_links = eval(task[0]['spider_link'])
-            # spider_links.append(spider_link)
-            # str_spider_links = str(spider_links)
-            # u_sql = f'update crawler_task set spider_link="{str_spider_links}", update_time={now_time} where task_id={task_id}'
-            # mysql_con.update_values(u_sql)
         else:
             sql = f'select * from crawler_task where task_id={task_id}'
             result = mysql_con.get_values(sql)