@@ -40,11 +40,12 @@ class Scheduling:
         for pre_task in pre_task_list:
             # machine字段是用来区分海外爬虫和国内爬虫使用的,不涉及任何其他含义
             machine = pre_task.get('machine', 'dev')
+
             if machine == "hk":
                 # 写入 redis
                 task_key = 'crawler_config_task_queue:hk'
                 RedisHelper.redis_push(env, task_key, str(pre_task))
-            elif machine == "prod":
+            elif machine == "aliyun":
                 # 写入 redis
                 task_key = 'crawler_config_task_queue:aliyun'
                 RedisHelper.redis_push(env, task_key, str(pre_task))
@@ -56,7 +57,14 @@ class Scheduling:
     @classmethod
     def get_redis(cls, log_type, crawler, env):
         while True:
-            redis_data = RedisHelper.redis_pop(env)
+            if env == 'hk':
+                task_key = 'crawler_config_task_queue:hk'
+            elif env == 'prod':
+                task_key = 'crawler_config_task_queue:aliyun'
+            else:
+                task_key = 'crawler_config_task_queue:dev'
+
+            redis_data = RedisHelper.redis_pop(env, task_key)
             if redis_data is None or len(redis_data) == 0:
                 Common.logger(log_type, crawler).info("Redis为空,等待1秒")
                 time.sleep(1)