# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/4/13
import ast
import time
from datetime import date, timedelta

from common.feishu import Feishu
from common.scheduling_db import MysqlHelper
  8. class Insert:
  9. @classmethod
  10. def get_config(cls, log_type, crawler, text, env):
  11. select_sql = f"""select * from crawler_config where source="benshanzhufu" """
  12. contents = MysqlHelper.get_values(log_type, crawler, select_sql, env, action='')
  13. title_list = []
  14. filter_list = []
  15. for content in contents:
  16. config = content['config']
  17. config_dict = eval(config)
  18. for k, v in config_dict.items():
  19. if k == "title":
  20. title_list_config = v.split(",")
  21. for title in title_list_config:
  22. title_list.append(title)
  23. if k == "filter":
  24. filter_list_config = v.split(",")
  25. for filter_word in filter_list_config:
  26. filter_list.append(filter_word)
  27. if text == "title":
  28. return title_list
  29. elif text == "filter":
  30. return filter_list
  31. @classmethod
  32. def before_day(cls):
  33. publish_time_str_rule = (date.today() + timedelta(days=-30)).strftime("%Y-%m-%d %H:%M:%S")
  34. publish_time_stamp_rule = int(time.mktime(time.strptime(publish_time_str_rule, "%Y-%m-%d %H:%M:%S")))
  35. print(publish_time_str_rule)
  36. print(publish_time_stamp_rule)
  37. @classmethod
  38. def insert_config(cls, log_type, crawler, env):
  39. filter_sheet = Feishu.get_values_batch(log_type, crawler, "DjXfqG")
  40. # title_sheet = Feishu.get_values_batch(log_type, crawler, "bHSW1p")
  41. filter_list = []
  42. # title_list = []
  43. for x in filter_sheet:
  44. for y in x:
  45. if y is None:
  46. pass
  47. else:
  48. filter_list.append(y)
  49. # for x in title_sheet:
  50. # for y in x:
  51. # if y is None:
  52. # pass
  53. # else:
  54. # title_list.append(y)
  55. # str_title = ','.join(title_list)
  56. str_filter = ','.join(filter_list)
  57. config_dict = {
  58. # "title": str_title,
  59. "filter": str_filter
  60. }
  61. str_config_dict = str(config_dict)
  62. # print(f"config_dict:{config_dict}")
  63. # print(f"str_config_dict:{str_config_dict}")
  64. insert_sql = f""" insert into crawler_config(title, source, config) values("本山祝福小程序", "benshanzhufu", "{str_config_dict}") """
  65. MysqlHelper.update_values(log_type, crawler, insert_sql, env)
  66. if __name__ == "__main__":
  67. # Insert.insert_config("insert", "benshanzhufu", "dev")
  68. print(Insert.get_config("insert", "benshanzhufu", "filter", "dev"))