# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/2
"""
Database connection and operations (MySQL helpers for the piaoquan-crawler DB).
"""
import pymysql
from common.common import Common
  9. class MysqlHelper:
  10. @classmethod
  11. def connect_mysql(cls, env):
  12. if env == 'prod':
  13. # 创建一个 Connection 对象,代表了一个数据库连接
  14. connection = pymysql.connect(
  15. # host="rm-bp1159bu17li9hi94.mysql.rds.aliyuncs.com",# 数据库IP地址,内网地址
  16. host="rm-bp1159bu17li9hi94ro.mysql.rds.aliyuncs.com",# 数据库IP地址,外网地址
  17. port=3306, # 端口号
  18. user="crawler", # mysql用户名
  19. passwd="crawler123456@", # mysql用户登录密码
  20. db="piaoquan-crawler" , # 数据库名
  21. # 如果数据库里面的文本是utf8编码的,charset指定是utf8
  22. charset = "utf8")
  23. else:
  24. # 创建一个 Connection 对象,代表了一个数据库连接
  25. connection = pymysql.connect(
  26. # host="rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com",# 数据库IP地址,内网地址
  27. host="rm-bp1k5853td1r25g3ndo.mysql.rds.aliyuncs.com", # 数据库IP地址,外网地址
  28. port=3306, # 端口号
  29. user="crawler", # mysql用户名
  30. passwd="crawler123456@", # mysql用户登录密码
  31. db="piaoquan-crawler", # 数据库名
  32. # 如果数据库里面的文本是utf8编码的,charset指定是utf8
  33. charset="utf8")
  34. return connection
  35. @classmethod
  36. def get_values(cls, log_type, crawler, sql, env):
  37. # try:
  38. # 连接数据库
  39. connect = cls.connect_mysql(env)
  40. # 返回一个 Cursor对象
  41. mysql = connect.cursor()
  42. # 执行 sql 语句
  43. mysql.execute(sql)
  44. # fetchall方法返回的是一个元组,里面每个元素也是元组,代表一行记录
  45. data = mysql.fetchall()
  46. # 关闭数据库连接
  47. connect.close()
  48. # 返回查询结果,元组
  49. return data
  50. # except Exception as e:
  51. # Common.logger(log_type, crawler).error(f"get_values异常:{e}\n")
  52. @classmethod
  53. def update_values(cls, log_type, crawler, sql, env):
  54. # 连接数据库
  55. connect = cls.connect_mysql(env)
  56. # 返回一个 Cursor对象
  57. mysql = connect.cursor()
  58. try:
  59. # 执行 sql 语句
  60. res = mysql.execute(sql)
  61. # 注意 一定要commit,否则添加数据不生效
  62. connect.commit()
  63. return res
  64. except Exception as e:
  65. Common.logger(log_type, crawler).error(f"update_values异常,进行回滚操作:{e}\n")
  66. # 发生错误时回滚
  67. connect.rollback()
  68. # 关闭数据库连接
  69. connect.close()
  70. if __name__ == "__main__":
  71. # sql_statement = f"INSERT INTO crawler_user ( user_id, out_user_id, out_user_name, out_avatar_url, platform, tag) " \
  72. # f"VALUES ('6282398', 'out_uid_003', 'out_user_name', '', 'xiaoniangao', 'xiaoniangao_play')"
  73. # edit_data = MysqlHelper.edit_data(sql=sql_statement)
  74. # print(edit_data)
  75. get_data = MysqlHelper.get_values("demo", "youtube", "select * from crawler_user", "dev")
  76. print(get_data)