lierqiang · 2 years ago
Current commit: 239a543806
1 changed file with 17 additions and 14 deletions

scheduling/scheduling_main/crawler_scheduling.py (+17, −14)

@@ -60,25 +60,28 @@ class Scheduling:
 
     @classmethod
     def get_redis(cls, log_type, crawler, env):
-        while True:
-            if env == 'hk':
-                task_key = 'crawler_config_task_queue:hk'
-            elif env == 'prod':
-                task_key = 'crawler_config_task_queue:aliyun'
-            else:
-                task_key = 'crawler_config_task_queue:dev'
+        if env == 'hk':
+            task_key = 'crawler_config_task_queue:hk'
+        elif env == 'prod':
+            task_key = 'crawler_config_task_queue:aliyun'
+        else:
+            task_key = 'crawler_config_task_queue:dev'
 
-            redis_data = RedisHelper.redis_pop(env, task_key)
-            if redis_data is None or len(redis_data) == 0:
-                Common.logger(log_type, crawler).info("Redis为空,等待1秒")
-                time.sleep(1)
-            else:
-                task = eval(str(redis_data, encoding="utf8"))
-                return task
+        redis_data = RedisHelper.redis_pop(env, task_key)
+        if redis_data is None or len(redis_data) == 0:
+            # Common.logger(log_type, crawler).info("Redis为空,程序退出")
+            # time.sleep(1)
+            return
+        else:
+            task = eval(str(redis_data, encoding="utf8"))
+            return task
 
     @classmethod
     def scheduling_task(cls, log_type, crawler, env):
         task = cls.get_redis(log_type, crawler, env)
+        if not task:
+            Common.logger(log_type, crawler).info("Redis为空,程序退出")
+            return
         Common.logger(log_type, crawler).info(f"task: {task}")
         Common.logger(log_type, crawler).info(f"已获取调度任务:{task}")
         task_id = task['task_id']
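
The net effect of the change is that `get_redis` no longer spins in a `while True` loop sleeping until the queue has data; it pops once, returns `None` on an empty queue, and the new guard in `scheduling_task` logs and exits cleanly. Below is a minimal, self-contained sketch of that non-blocking pop-and-exit pattern. It uses the plain `redis` client and `json.loads` as a stand-in for the repo's `RedisHelper.redis_pop` and `eval` call; names such as `get_task`, `TASK_KEYS`, and `redis_client` are illustrative, not taken from the repository.

```python
import json
import redis

# Queue keys per environment, mirroring the mapping in get_redis (illustrative constant).
TASK_KEYS = {
    "hk": "crawler_config_task_queue:hk",
    "prod": "crawler_config_task_queue:aliyun",
    "dev": "crawler_config_task_queue:dev",
}

# Assumed local Redis instance for the sketch.
redis_client = redis.Redis(host="127.0.0.1", port=6379, db=0)


def get_task(env: str):
    """Pop one task without blocking; return None when the queue is empty."""
    task_key = TASK_KEYS.get(env, TASK_KEYS["dev"])
    raw = redis_client.lpop(task_key)  # non-blocking; returns None if the list is empty
    if not raw:
        return None  # caller decides to exit, instead of sleeping in a loop
    # json.loads is a safer stand-in for eval() when the queued payload is valid JSON.
    return json.loads(raw.decode("utf-8"))


def run_once(env: str = "dev"):
    """Mirror the early return added to scheduling_task."""
    task = get_task(env)
    if not task:
        print("Redis queue is empty, exiting")
        return
    print(f"got task: {task}")
```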