
add an API for fetching data

kk, 2 years ago
parent commit 7e89d0c510
1 file changed with 31 additions and 29 deletions
      server/conf_task.py

+ 31 - 29
server/conf_task.py

@@ -17,33 +17,37 @@ app.config['JSON_AS_ASCII'] = False
 # accept GET requests only
 @app.route("/v1/crawler/source/getall", methods=["GET"])
 def getSource():
-    # read the incoming query params
-    get_data = request.args.to_dict()
-    fields = get_data.get('fields')
-    # process the params
-    sql = 'select source, task_type, spider_name, machine, source_desc, task_type_desc, spider_name_desc from crawler_source'
-
-    result = MysqlHelper.get_values(sql)
-    if not result:
-        return jsonify({'code': '200', 'result': [], 'message': 'no more data'})
-    source_list = list()
-    for source, task_type, spider_name, machine, source_desc, task_type_desc, spider_name_desc in result:
-        source_dict = {
-            'task_type': [
-                {
-                    'description': task_type_desc,
-                    'author': [
-                        {
-                            'spider_name': spider_name,
-                            'description': spider_name_desc
-                        }
-                    ]
-                }
-            ],
-            'description': source_desc,
-            'source': source
-        }
-        source_list.append(source_dict)
+    try:
+        # read the incoming query params
+        get_data = request.args.to_dict()
+        fields = get_data.get('fields')
+        # process the params
+        sql = 'select source, task_type, spider_name, machine, source_desc, task_type_desc, spider_name_desc from crawler_source'
+
+        result = MysqlHelper.get_values(sql)
+        if not result:
+            return jsonify({'code': '200', 'result': [], 'message': 'no more data'})
+        source_list = list()
+        for source, task_type, spider_name, machine, source_desc, task_type_desc, spider_name_desc in result:
+            source_dict = {
+                'task_type': [
+                    {
+                        'description': task_type_desc,
+                        'author': [
+                            {
+                                'spider_name': spider_name,
+                                'description': spider_name_desc
+                            }
+                        ]
+                    }
+                ],
+                'description': source_desc,
+                'source': source
+            }
+            source_list.append(source_dict)
+    except Exception as e:
+        return jsonify({'code': '400', 'message': 'failed to fetch data source info'})
+
     return jsonify({'code': '200', 'result': source_list})
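
With this change, a failure anywhere in the handler (the database query included) now yields a JSON error payload instead of an unhandled 500. Note that jsonify() without an explicit status code still responds with HTTP 200, so callers must inspect the 'code' field in the body rather than the HTTP status. A minimal client-side sketch; the base URL is hypothetical, only the route path comes from this diff:

import requests

# Hypothetical base URL; the route path is taken from the diff above.
resp = requests.get('http://localhost:5000/v1/crawler/source/getall',
                    params={'fields': 'source'})
body = resp.json()
if body['code'] == '200':
    for item in body['result']:
        print(item['source'], item['description'])
else:
    # The new except branch reports failures as code '400' in the body.
    print('request failed:', body.get('message'))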
 
 
@@ -146,9 +150,7 @@ def get_user_info(source):
 @app.route("/v1/crawler/author/create", methods=["POST"])
 def createUser():
     get_media_url = 'http://videotest-internal.yishihui.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
-    print(request.form.to_dict())
     spider_link = request.form.get('spider_link')
-    print(111111, spider_link, type(spider_link))
     source = request.form.get('source')
     task_type = request.form.get('task_type')
     applets_status = request.form.get('applets_status')
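
The two deleted print() calls were ad-hoc debug output. If that trace is still wanted, Flask's built-in logger is the conventional channel; a sketch of an equivalent inside createUser(), not part of this commit:

# Replacement for the removed print statements, using Flask's app.logger.
app.logger.debug('create-user form payload: %s', request.form.to_dict())
app.logger.debug('spider_link=%r (%s)', spider_link, type(spider_link).__name__)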