  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/2
"""
Database connection and operation helpers.
"""
  7. import pymysql
  8. from common.common import Common
  9. class MysqlHelper:
  10. @classmethod
  11. def connect_mysql(cls, env, machine):
  12. if machine == 'aliyun_hk':
  13. # 创建一个 Connection 对象,代表了一个数据库连接
  14. connection = pymysql.connect(
  15. host="rm-j6cz4c6pt96000xi3.mysql.rds.aliyuncs.com",# 数据库IP地址,内网地址
  16. # host="rm-j6cz4c6pt96000xi3lo.mysql.rds.aliyuncs.com",# 数据库IP地址,外网地址
  17. port=3306, # 端口号
  18. user="crawler", # mysql用户名
  19. passwd="crawler123456@", # mysql用户登录密码
  20. db="piaoquan-crawler" , # 数据库名
  21. # 如果数据库里面的文本是utf8编码的,charset指定是utf8
  22. charset = "utf8")
  23. elif env == 'prod':
  24. # 创建一个 Connection 对象,代表了一个数据库连接
  25. connection = pymysql.connect(
  26. host="rm-bp1159bu17li9hi94.mysql.rds.aliyuncs.com",# 数据库IP地址,内网地址
  27. # host="rm-bp1159bu17li9hi94ro.mysql.rds.aliyuncs.com",# 数据库IP地址,外网地址
  28. port=3306, # 端口号
  29. user="crawler", # mysql用户名
  30. passwd="crawler123456@", # mysql用户登录密码
  31. db="piaoquan-crawler" , # 数据库名
  32. # 如果数据库里面的文本是utf8编码的,charset指定是utf8
  33. charset = "utf8")
  34. else:
  35. # 创建一个 Connection 对象,代表了一个数据库连接
  36. connection = pymysql.connect(
  37. host="rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com",# 数据库IP地址,内网地址
  38. # host="rm-bp1k5853td1r25g3ndo.mysql.rds.aliyuncs.com", # 数据库IP地址,外网地址
  39. port=3306, # 端口号
  40. user="crawler", # mysql用户名
  41. passwd="crawler123456@", # mysql用户登录密码
  42. db="piaoquan-crawler", # 数据库名
  43. # 如果数据库里面的文本是utf8编码的,charset指定是utf8
  44. charset="utf8")
  45. return connection
  46. @classmethod
  47. def get_values(cls, log_type, crawler, sql, env, machine):
  48. try:
  49. # 连接数据库
  50. connect = cls.connect_mysql(env, machine)
  51. # 返回一个 Cursor对象
  52. mysql = connect.cursor()
  53. # 执行 sql 语句
  54. mysql.execute(sql)
  55. # fetchall方法返回的是一个元组,里面每个元素也是元组,代表一行记录
  56. data = mysql.fetchall()
  57. # 关闭数据库连接
  58. connect.close()
  59. # 返回查询结果,元组
  60. return data
  61. except Exception as e:
  62. Common.logger(log_type, crawler).error(f"get_values异常:{e}\n")
  63. @classmethod
  64. def update_values(cls, log_type, crawler, sql, env, machine):
  65. # 连接数据库
  66. connect = cls.connect_mysql(env, machine)
  67. # 返回一个 Cursor对象
  68. mysql = connect.cursor()
  69. try:
  70. # 执行 sql 语句
  71. res = mysql.execute(sql)
  72. # 注意 一定要commit,否则添加数据不生效
  73. connect.commit()
  74. return res
  75. except Exception as e:
  76. Common.logger(log_type, crawler).error(f"update_values异常,进行回滚操作:{e}\n")
  77. # 发生错误时回滚
  78. connect.rollback()
  79. # 关闭数据库连接
  80. connect.close()
  81. if __name__ == "__main__":
  82. # sql_statement = f"INSERT INTO crawler_user ( user_id, out_user_id, out_user_name, out_avatar_url, platform, tag) " \
  83. # f"VALUES ('6282398', 'out_uid_003', 'out_user_name', '', 'xiaoniangao', 'xiaoniangao_play')"
  84. # edit_data = MysqlHelper.edit_data(sql=sql_statement)
  85. # print(edit_data)
  86. get_data = MysqlHelper.get_values("demo", "youtube", "select * from crawler_user", "dev", "local")
  87. print(get_data)