lierqiang 2 years ago
parent
commit
17a39f5dad
1 changed file with 25 additions and 3 deletions
  1. 25 3
      server/conf_task.py

+ 25 - 3
server/conf_task.py

@@ -6,17 +6,25 @@ import time
 import requests
 from flask import Flask, request
 from flask import jsonify
-
+from dotenv import load_dotenv
 sys.path.append(os.path.abspath(os.path.join(os.getcwd(), "..")))
-from common.db.mysql_help_new import MysqlHelper
 from conf.config import get_config
-
+from common.db.mysql_help import MysqlHelper
+load_dotenv(verbose=True)
+env = os.getenv('env')
 app = Flask(__name__)
 app.config['JSON_AS_ASCII'] = False
 # mysql实例
 mysql_con = MysqlHelper()
 conf = get_config()
 
@app.route("/v1/crawler/task/dellink", methods=["POST"])
def delSpiderLink():
    """Mark the given spider links as deleted in crawler_author_map.

    Expects a JSON body of the form {"spider_link": ["<link>", ...]}.
    Returns a JSON status payload — the original returned None, which
    makes Flask raise "view function did not return a valid response".
    """
    data = request.json
    spider_links = data['spider_link']
    for link in spider_links:
        # Parameterized query. The original built an f-string that was cut
        # off after `spider_link=` (no value interpolated) and never
        # executed the statement, so the endpoint was a no-op. String-built
        # SQL here would also be injectable since the links come straight
        # from the request body.
        # NOTE(review): endpoint name says "del" but sets is_del=0 —
        # confirm against the schema whether the deleted flag should be 1.
        up_sql = 'update crawler_author_map set is_del=0 where spider_link=%s'
        # TODO confirm the exact MysqlHelper update method name/signature;
        # it is defined in common/db/mysql_help.py, outside this view.
        mysql_con.update_values(up_sql, (link,))
    return jsonify({'code': '200', 'message': 'success'})
 
 @app.route("/v1/crawler/task/getcategory", methods=["GET"])
 def getCategory():
@@ -236,6 +244,20 @@ def getOneTask():
         if not result:
             return jsonify({'code': '400', 'result': [], 'message': 'no data'})
 
+        data = result[0]
+        if data['min_publish_time']:
+            data['min_publish_time'] = data['min_publish_time'] * 1000
+        else:
+            data['min_publish_time'] = 0
+
+        data['next_time'] = data['next_time'] * 1000
+        data['spider_link'] = eval(data['spider_link'])
+        data['spider_rule'] = eval(data['spider_rule'])
+        #
+        data['user_tag_info'] = eval(data['user_tag_info'])
+        data['content_tag_info'] = eval(data['user_content_tag'])
+
+
     except Exception as e:
         return jsonify({'code': '500', "message": "获取任务信息失败"})