@@ -25,11 +25,10 @@ def getSource():

     result = MysqlHelper.get_values(sql)
     if not result:
-        return jsonify({'code': '200', 'result': [], 'message': 'no data'})
+        return jsonify({'code': '200', 'result': [], 'message': '没有更多数据'})
     source_list = list()
     for source, task_type, spider_name, machine, source_desc, task_type_desc, spider_name_desc in result:
-        source_dict = {}
-        source_dict[source] = {
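+        # Build one flat dict per row: source and description sit at the top level,
+        # with the task type details nested under 'task_type'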
+        source_dict = {
             'task_type': [
                 {
                     'description': task_type_desc,
@@ -41,7 +40,8 @@ def getSource():
                     ]
                 }
             ],
-            'description': source_desc
+            'description': source_desc,
+            'source': source
         }
         source_list.append(source_dict)
     return jsonify({'code': '200', 'result': source_list})
@@ -49,76 +49,91 @@ def getSource():

 @app.route("/v1/crawler/task/insert", methods=["POST"])
 def insertTask():
-    data = request.form
-    outer_info = data.get(('spider_link'))
-    source = data.get('source')
-    exist_outer_info = list()
-    for link in outer_info:
-        s_sql = f"""select spider_link from crawler_task where source={source}"""
-        result = MysqlHelper.get_values(s_sql)
-        if link in eval(result[0]):
-            exist_outer_info.append(link)
-    if exist_outer_info:
-        return jsonify({'code': 200, 'message': '名单重复', 'repeat_list': exist_outer_info})
-    # 获取到一个以键且为逗号分隔的字符串,返回一个字符串
-    keys = ','.join(data.keys())
-    values = ','.join(['%s'] * len(data))
-    sql = 'insert into {table}({keys}) VALUES({values})'.format(table='crawler_task', keys=keys, values=values)
-    MysqlHelper.insert_values(sql, tuple(data.values()))
+    try:
+        data = request.form
+        outer_info = data.get(('spider_link'))
+        source = data.get('source')
+        exist_outer_info = list()
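+        # Compare each submitted link against the links already stored for this source;
+        # spider_link appears to be stored as a stringified list, hence the eval() below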
+        for link in outer_info:
+            s_sql = f"""select spider_link from crawler_task where source={source}"""
+            result = MysqlHelper.get_values(s_sql)
+            if link in eval(result[0]):
+                exist_outer_info.append(link)
+        if exist_outer_info:
+            return jsonify({'code': 200, 'message': '名单重复', 'repeat_list': exist_outer_info})
+        # 获取到一个以键且为逗号分隔的字符串,返回一个字符串
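+        # Build the INSERT statement from the form itself: column names come from the form keys,
+        # with one %s placeholder per field so the values are passed separately to insert_values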
+        keys = ','.join(data.keys())
+        values = ','.join(['%s'] * len(data))
+        sql = 'insert into {table}({keys}) VALUES({values})'.format(table='crawler_task', keys=keys, values=values)
+        MysqlHelper.insert_values(sql, tuple(data.values()))
+    except Exception as e:
+        return jsonify({'code': 400, 'message': '任务写入失败'})

     return jsonify({'code': 200, 'message': 'task create success'})
@app.route("/v1/crawler/task/gettask", methods=["GET"])
|
|
|
def getAllTask():
|
|
|
- get_data = request.args.to_dict()
|
|
|
- page = int(get_data.get('page', 1))
|
|
|
- offset = int(get_data.get('offset', 10))
|
|
|
- start_count = (page * offset) - offset
|
|
|
- end_count = page * offset
|
|
|
- sql = f"""select task_id, task_name from crawler_task limit {start_count}, {end_count}"""
|
|
|
- result = MysqlHelper.get_values(sql)
|
|
|
- if not result:
|
|
|
- return jsonify({'code': '200', 'result': [], 'message': 'no data'})
|
|
|
- source_list = list()
|
|
|
- for task_id, task_name in result:
|
|
|
- data = dict(
|
|
|
- task_id=task_id,
|
|
|
- task_name=task_name,
|
|
|
- )
|
|
|
- source_list.append(data)
|
|
|
+ try:
|
|
|
+ get_data = request.args.to_dict()
|
|
|
+ page = int(get_data.get('page', 1))
|
|
|
+ offset = int(get_data.get('offset', 10))
|
|
|
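+        # Translate page/offset into a LIMIT window; note that MySQL LIMIT takes (start, row_count),
+        # so end_count below is the number of rows requested by the query, not an end index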
+        start_count = (page * offset) - offset
+        end_count = page * offset
+        sql = f"""select task_id, task_name from crawler_task limit {start_count}, {end_count}"""
+        result = MysqlHelper.get_values(sql)
+        if not result:
+            return jsonify({'code': '200', 'result': [], 'message': 'no data'})
+        source_list = list()
+        for task_id, task_name in result:
+            data = dict(
+                task_id=task_id,
+                task_name=task_name,
+            )
+            source_list.append(data)
+    except Exception as e:
+        return jsonify({"code": "400", 'message': "任务列表获取失败"})
+
     return jsonify({'code': '200', 'result': source_list})


 @app.route("/v1/crawler/task/getone", methods=["GET"])
 def getOneTask():
-    get_data = request.args.to_dict()
-    task_id = get_data['task_id']
-    sql = f'select task_id, spider_link from crawler_task where task_id={task_id}'
-    result = MysqlHelper.get_values(sql)
-    if not result:
-        return jsonify({'code': '200', 'result': [], 'message': 'no data'})
-    for task_id, spider_link in result:
-        data = dict(
-            task_id=task_id,
-            spider_link=spider_link,
-        )
+    try:
+        get_data = request.args.to_dict()
+        task_id = get_data['task_id']
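+        # Fetch the requested task by task_id; the loop below repacks the row into a dict,
+        # keeping only the last row if the query happens to match more than one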
+        sql = f'select task_id, spider_link from crawler_task where task_id={task_id}'
+        result = MysqlHelper.get_values(sql)
+        if not result:
+            return jsonify({'code': '200', 'result': [], 'message': 'no data'})
+        for task_id, spider_link in result:
+            data = dict(
+                task_id=task_id,
+                spider_link=spider_link,
+            )
+    except Exception as e:
+        return jsonify({'code': '400', "message": "获取任务信息失败"})
+
     return jsonify({'code': '200', 'result': data})


 @app.route("/v1/crawler/task/update", methods=["POST"])
 def updateTask():
-    task_id = request.form.get('task_id')
-    spider_link = request.form.get('spider_link')
-    print(spider_link, task_id)
-
-    sql = f"""UPDATE crawler_task SET spider_link='{spider_link}' where task_id = {task_id}"""
-    print(sql)
-    result = MysqlHelper.update_values(sql)
-    if result:
-        return jsonify({'code': 200, 'message': 'task update success'})
-    else:
-        return jsonify({'code': 400, 'message': 'task update faild'})
+    try:
+        task_id = request.form.get('task_id')
+        spider_link = request.form.get('spider_link')
+        print(spider_link, task_id)
+
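+        # Overwrite spider_link for this task_id; update_values is assumed to return a truthy
+        # value (e.g. the affected row count) when the UPDATE succeeds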
+ sql = f"""UPDATE crawler_task SET spider_link='{spider_link}' where task_id = {task_id}"""
|
|
|
+ print(sql)
|
|
|
+ result = MysqlHelper.update_values(sql)
|
|
|
+ if result:
|
|
|
+ return jsonify({'code': 200, 'message': 'task update success'})
|
|
|
+ else:
|
|
|
+ return jsonify({'code': 400, 'message': 'task update faild'})
|
|
|
+ except Exception as e:
|
|
|
+
|
|
|
+ return jsonify({'code': 400, 'message': '任务更新失败'})
|
|
|
|
|
|
|
|
|
def get_user_info(source):
|