|
@@ -21,20 +21,29 @@ def getSource():
|
|
|
get_data = request.args.to_dict()
|
|
|
fields = get_data.get('fields')
|
|
|
# # 对参数进行操作
|
|
|
- sql = 'select source, task_type, spider_name, machine from crawler_source'
|
|
|
+ sql = 'select source, task_type, spider_name, machine, description, task_type_desc, spider_name_desc from crawler_source'
|
|
|
|
|
|
result = MysqlHelper.get_values(sql)
|
|
|
if not result:
|
|
|
return jsonify({'code': '200', 'result': [], 'message': 'no data'})
|
|
|
source_list = list()
|
|
|
- for source, task_type, spider_name, machine in result:
|
|
|
- data = dict(
|
|
|
- source=source,
|
|
|
- task_type=task_type,
|
|
|
- spider_name=spider_name,
|
|
|
- machine=machine
|
|
|
- )
|
|
|
- source_list.append(data)
|
|
|
+ for source, task_type, spider_name, machine, description, task_type_desc, spider_name_desc in result:
|
|
|
+ source_dict = {}
|
|
|
+ source_dict[source] = {
|
|
|
+ 'task_type': [
|
|
|
+ {
|
|
|
+ 'description': task_type_desc,
|
|
|
+ 'author': [
|
|
|
+ {
|
|
|
+ 'spider_name': spider_name,
|
|
|
+ 'description': spider_name_desc
|
|
|
+ }
|
|
|
+ ]
|
|
|
+ }
|
|
|
+ ],
|
|
|
+ 'description': description
|
|
|
+ }
|
|
|
+ source_list.append(source_dict)
|
|
|
return jsonify({'code': '200', 'result': source_list})
|
|
|
|
|
|
|
|
@@ -50,7 +59,7 @@ def insertTask():
|
|
|
if link in eval(result[0]):
|
|
|
exist_outer_info.append(link)
|
|
|
if exist_outer_info:
|
|
|
- return jsonify({'code': 200, 'message': '名单重复', 'spider_link': exist_outer_info})
|
|
|
+ return jsonify({'code': 200, 'message': '名单重复', 'repeat_list': exist_outer_info})
|
|
|
# 获取到一个以键且为逗号分隔的字符串,返回一个字符串
|
|
|
keys = ','.join(data.keys())
|
|
|
values = ','.join(['%s'] * len(data))
|
|
@@ -81,20 +90,20 @@ def getAllTask():
|
|
|
return jsonify({'code': '200', 'result': source_list})
|
|
|
|
|
|
|
|
|
-# @app.route("/v1/crawler/task/getone", methods=["GET"])
|
|
|
-# def getOneTask():
|
|
|
-# get_data = request.args.to_dict()
|
|
|
-# task_id = get_data['task_id']
|
|
|
-# sql = f'select task_id, spider_link from crawler_task where task_id={task_id}'
|
|
|
-# result = MysqlHelper.get_values(sql)
|
|
|
-# if not result:
|
|
|
-# return jsonify({'code': '200', 'result': [], 'message': 'no data'})
|
|
|
-# for task_id, spider_link in result:
|
|
|
-# data = dict(
|
|
|
-# task_id=task_id,
|
|
|
-# spider_link=spider_link,
|
|
|
-# )
|
|
|
-# return jsonify({'code': '200', 'result': data})
|
|
|
+@app.route("/v1/crawler/task/getone", methods=["GET"])
|
|
|
+def getOneTask():
|
|
|
+ get_data = request.args.to_dict()
|
|
|
+ task_id = get_data['task_id']
|
|
|
+ sql = f'select task_id, spider_link from crawler_task where task_id={int(task_id)}'
|
|
|
+ result = MysqlHelper.get_values(sql)
|
|
|
+ if not result:
|
|
|
+ return jsonify({'code': '200', 'result': [], 'message': 'no data'})
|
|
|
+ for task_id, spider_link in result:
|
|
|
+ data = dict(
|
|
|
+ task_id=task_id,
|
|
|
+ spider_link=spider_link,
|
|
|
+ )
|
|
|
+ return jsonify({'code': '200', 'result': data})
|
|
|
|
|
|
|
|
|
@app.route("/v1/crawler/task/update", methods=["POST"])
|
|
@@ -124,7 +133,7 @@ def createUser():
|
|
|
get_media_url = 'http://videotest-internal.yishihui.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
|
|
|
print(request.form.to_dict())
|
|
|
spider_link = request.form.get('spider_link')
|
|
|
- print(111111,spider_link,type(spider_link))
|
|
|
+ print(111111, spider_link, type(spider_link))
|
|
|
source = request.form.get('source')
|
|
|
task_type = request.form.get('task_type')
|
|
|
applets_status = request.form.get('applets_status')
|