wangkun 2 سال پیش
والد
کامیت
ce1ae33b15
3 فایل تغییر یافته به همراه 23 افزوده شده و 8 حذف شده
  1. 10 0
      common/db.py
  2. 11 6
      scheduling/crawler_scheduling.py
  3. 2 2
      weixinzhishu/weixinzhishu_main/search_key.py

+ 10 - 0
common/db.py

@@ -122,6 +122,16 @@ class RedisHelper:
             redis_conn = redis.Redis(connection_pool=redis_pool)
         return redis_conn
 
+    @classmethod
+    def redis_push(cls, env, machine, name, data):
+        redis_conn = cls.connect_redis(env, machine)
+        redis_conn.lpush(name, data)
+
+    @classmethod
+    def redis_pop(cls, env, machine, name):
+        redis_conn = cls.connect_redis(env, machine)
+        redis_conn.rpop(name)
+
 
 
 if __name__ == "__main__":

+ 11 - 6
scheduling/crawler_scheduling.py

@@ -7,7 +7,7 @@ import sys
 import time
 sys.path.append(os.getcwd())
 from common.common import Common
-from common.db import MysqlHelper
+from common.db import MysqlHelper, RedisHelper
 
 
 class Scheduling:
@@ -92,7 +92,8 @@ class Scheduling:
             Common.logger(log_type, crawler).info("暂无新任务\n")
         else:
             for pre_task in pre_task_list:
-
+                print(type(pre_task))
+                print(pre_task)
                 if machine == "hk":
                     # 写入 redis
                     pass
@@ -101,13 +102,17 @@ class Scheduling:
                     pass
                 else:
                     # 写入 redis
-                    pass
+                    RedisHelper.redis_push(env, machine,pre_task['task_id'], str(pre_task))
 
     @classmethod
-    def main(cls):
+    def main(cls, log_type, crawler):
         # 当前时间 >= next_time,更新 next_time(调用update_task),然后启动该爬虫
-
-        pass
+        pre_task_list = cls.get_task(log_type=log_type, crawler=crawler, env=env, machine=machine)
+        if len(pre_task_list) == 0:
+            Common.logger(log_type, crawler).info("暂无新任务\n")
+        else:
+            for pre_task in pre_task_list:
+                task_list = RedisHelper.redis_pop()
 
 
 if __name__ == "__main__":

+ 2 - 2
weixinzhishu/weixinzhishu_main/search_key.py

@@ -48,8 +48,8 @@ class Searchkey:
             # cls.close_weixinzhishu(log_type, crawler)
             weixinzhishu_driver = cls.close_weixinzhishu(log_type, crawler)
             weixinzhishu_driver.find_elements(By.NAME, '关闭')[-1].click()
-            cls.kill_pid(log_type, crawler)
-            time.sleep(3)
+            # cls.kill_pid(log_type, crawler)
+            # time.sleep(3)
         except Exception as e:
             Common.logger(log_type, crawler).error(f'start_wechat异常:{e}\n')