# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/2
"""
Database connection and operations
"""
import pymysql

from common.common import Common


class MysqlHelper:
    @classmethod
    def connect_mysql(cls, env):
        if env == 'prod':
            # Create a Connection object representing one database connection
            connection = pymysql.connect(
                # host="rm-bp1159bu17li9hi94.mysql.rds.aliyuncs.com",  # database IP address (intranet)
                host="rm-bp1159bu17li9hi94ro.mysql.rds.aliyuncs.com",  # database IP address (public)
                port=3306,  # port
                user="crawler",  # MySQL user name
                passwd="crawler123456@",  # MySQL password
                db="piaoquan-crawler",  # database name
                # If the text stored in the database is utf8-encoded, set charset to utf8
                charset="utf8")
        else:
            # Create a Connection object representing one database connection
            connection = pymysql.connect(
                # host="rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com",  # database IP address (intranet)
                host="rm-bp1k5853td1r25g3ndo.mysql.rds.aliyuncs.com",  # database IP address (public)
                port=3306,  # port
                user="crawler",  # MySQL user name
                passwd="crawler123456@",  # MySQL password
                db="piaoquan-crawler",  # database name
                # If the text stored in the database is utf8-encoded, set charset to utf8
                charset="utf8")
        return connection

    @classmethod
    def get_values(cls, log_type, crawler, sql, env):
        # Connect to the database
        connect = cls.connect_mysql(env)
        # Get a Cursor object
        mysql = connect.cursor()
        try:
            # Execute the SQL statement
            mysql.execute(sql)
            # fetchall returns a tuple of tuples, one inner tuple per row
            data = mysql.fetchall()
            # Return the query result as a tuple
            return data
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_values exception: {e}\n")
        finally:
            # Close the database connection
            connect.close()

    @classmethod
    def update_values(cls, log_type, crawler, sql, env):
        # Connect to the database
        connect = cls.connect_mysql(env)
        # Get a Cursor object
        mysql = connect.cursor()
        try:
            # Execute the SQL statement
            res = mysql.execute(sql)
            # commit is required, otherwise the write does not take effect
            connect.commit()
            return res
        except Exception as e:
            Common.logger(log_type, crawler).error(f"update_values exception, rolling back: {e}\n")
            # Roll back on error
            connect.rollback()
        finally:
            # Close the database connection
            connect.close()


if __name__ == "__main__":
    # sql_statement = f"INSERT INTO crawler_user ( user_id, out_user_id, out_user_name, out_avatar_url, platform, tag) " \
    #                 f"VALUES ('6282398', 'out_uid_003', 'out_user_name', '', 'xiaoniangao', 'xiaoniangao_play')"
    # edit_data = MysqlHelper.edit_data(sql=sql_statement)
    # print(edit_data)
    get_data = MysqlHelper.get_values("demo", "youtube", "select * from crawler_user", "dev")
    print(get_data)
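
    # Usage sketch (commented out): update_values takes the same arguments as
    # get_values and returns the affected row count. It reuses the crawler_user
    # table and the user_id/tag columns from the INSERT example above; the UPDATE
    # statement itself is only an illustration of the calling convention.
    # update_sql = "UPDATE crawler_user SET tag = 'xiaoniangao_play' WHERE user_id = '6282398'"
    # affected_rows = MysqlHelper.update_values("demo", "youtube", update_sql, "dev")
    # print(affected_rows)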