result_save.py 2.3 KB

import json

import mysql.connector

# Database connection parameters
db_config = {
    'host': 'rm-bp19uc56sud25ag4o.mysql.rds.aliyuncs.com',
    'database': 'longvideo',
    'port': 3306,
    'user': 'wx2016_longvideo',
    'password': 'wx2016_longvideoP@assword1234',
}

# Fields whose values are lists of tags (possibly JSON-encoded strings)
json_field_names = ['key_words', 'search_keys', 'extra_keys', 'category']
# Fields whose values are single scalar tags
normal_field_names = ['tone', 'target_audience',
                      'target_age', 'target_gender', 'address', 'theme']


def insert_content(gpt_res):
    """Connect to MySQL and insert the tag rows for one video."""
    try:
        # Connect to MySQL
        conn = mysql.connector.connect(
            host=db_config['host'],
            port=db_config['port'],
            database=db_config['database'],
            user=db_config['user'],
            password=db_config['password'],
        )
        if conn.is_connected():
            print('Connected to the database')

        cursor = conn.cursor()
        insert_batch = []

        # SQL statement for inserting rows
        sql = """
            INSERT INTO video_content_mapping (video_id, tag, tag_type)
            VALUES (%s, %s, %s)
        """

        video_id = gpt_res.get('video_id', '')

        for field_name in json_field_names:
            # Fetch the tag list; it may arrive as a JSON-encoded string
            tags = gpt_res.get(field_name, '')
            if tags and isinstance(tags, str):
                try:
                    tags = json.loads(tags)
                except json.JSONDecodeError:
                    continue
            # Skip missing or malformed values
            if not tags or not isinstance(tags, list):
                continue
            # Build the batch-insert parameters, one row per tag
            for tag in tags:
                insert_batch.append((video_id, tag, field_name))

        for field_name in normal_field_names:
            # Fetch the scalar field value
            value = gpt_res.get(field_name, '')
            # Build the batch-insert parameters
            insert_batch.append((video_id, value, field_name))

        # Execute the batch insert
        cursor.executemany(sql, insert_batch)
        print(f"Inserted {len(insert_batch)} rows...")
        insert_batch.clear()

        # Commit the transaction
        conn.commit()

        # Close the cursor and connection
        cursor.close()
        conn.close()
        print('Database connection closed')
    except mysql.connector.Error as e:
        print('Database connection or operation error:', e)
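

# --- Example usage (illustrative sketch, not part of the original script) ---
# The shape of `gpt_res` below is an assumption inferred from the field names
# used above; real input would come from an upstream GPT tagging step.
if __name__ == '__main__':
    sample_gpt_res = {
        'video_id': '10001',                 # hypothetical video id
        'key_words': ['travel', 'food'],     # list-valued tag field
        'category': '["documentary"]',       # JSON-encoded list is also accepted
        'tone': 'relaxed',                   # scalar tag field
        'target_audience': 'young adults',
    }
    insert_content(sample_gpt_res)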