|
@@ -12,7 +12,7 @@ from utils.aliyun_log import AliyunLogger
|
|
from utils.google_ai_studio import GoogleAI
|
|
from utils.google_ai_studio import GoogleAI
|
|
from utils.piaoquan import PQ
|
|
from utils.piaoquan import PQ
|
|
from utils.redis import RedisHelper, content_video_data
|
|
from utils.redis import RedisHelper, content_video_data
|
|
-
|
|
|
|
|
|
+from utils.mysql_db import MysqlHelper
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@@ -30,8 +30,15 @@ class ConsumptionRecommend(object):
|
|
task = orjson.loads(task)
|
|
task = orjson.loads(task)
|
|
logger.info(f"[处理] 获取redis数据{task}")
|
|
logger.info(f"[处理] 获取redis数据{task}")
|
|
video_id = task['video_id']
|
|
video_id = task['video_id']
|
|
|
|
+
|
|
|
|
+ count_sql = f"""select count(1) from video_demand_analysis where video_id = {video_id}"""
|
|
|
|
+ count = MysqlHelper.get_values(count_sql)
|
|
|
|
+            if count and count[0][0] > 0:
|
|
|
|
+ logger.info(f"[处理] 视频重复过滤")
|
|
|
|
+ return
|
|
|
|
+
|
|
logger.info(f"[处理] 开始获取原视频OSS地址")
|
|
logger.info(f"[处理] 开始获取原视频OSS地址")
|
|
- video_path = PQ.get_pq_oss(video_id)
|
|
|
|
|
|
+ video_title, video_path = PQ.get_pq_oss(video_id)
|
|
if not video_path:
|
|
if not video_path:
|
|
return
|
|
return
|
|
logger.info(f"[处理] 获取原视频OSS地址,视频链接:{video_path}")
|
|
logger.info(f"[处理] 获取原视频OSS地址,视频链接:{video_path}")
|
|
@@ -43,7 +50,37 @@ class ConsumptionRecommend(object):
|
|
text = GoogleAI.run(api_key, video_url)
|
|
text = GoogleAI.run(api_key, video_url)
|
|
if "[异常]" in text:
|
|
if "[异常]" in text:
|
|
content_video_data(json.dumps(task))
|
|
content_video_data(json.dumps(task))
|
|
- AliyunLogger.logging(str(video_id), orjson.dumps(text).decode())
|
|
|
|
|
|
+
|
|
|
|
+ # Parse JSON data
|
|
|
|
+            data = json.loads(text)
|
|
|
|
+ # Generate SQL insert statement
|
|
|
|
+ sql = """
|
|
|
|
+ INSERT INTO video_demand_analysis (
|
|
|
|
+ video_id, video_link, video_title, content_type,
|
|
|
|
+ demand_order, demand_score, user_demand, demand_category,
|
|
|
|
+ demand_reason, product_hook, hook_time, hook_desc,
|
|
|
|
+ hook_type, landing_desc, landing_type, platform_case
|
|
|
|
+ ) VALUES
|
|
|
|
+ """
|
|
|
|
+ # Add values for each entry
|
|
|
|
+ values = []
|
|
|
|
+ link = f"""https://admin.piaoquantv.com/cms/post-detail/{video_id}/detail"""
|
|
|
|
+
|
|
|
|
+ for entry in data:
|
|
|
|
+ value = f"""(
|
|
|
|
+                {video_id}, '{link}', '{video_title}', NULL,
|
|
|
|
+                {entry.get('需求排序序号', 'NULL')}, {entry.get('需求强烈程度分值', 'NULL')}, '{entry.get('用户具体的需求描述', '')}', '{entry.get('需求分类', '')}',
|
|
|
|
+ '{entry.get('推测出该点需求的原因', '')}', '{entry.get('描述出与需求对应的产品钩子', '')}', '{entry.get('产品形式出现到消失的时间点', '')}', '{entry.get('钩子形式描述', '')}',
|
|
|
|
+ '{entry.get('钩子形式类型', '')}', '{entry.get('点击钩子后的产品落地形态描述', '')}', '{entry.get('产品落地形态分类', '')}', '{entry.get('其他平台案例', '')}'
|
|
|
|
+ )"""
|
|
|
|
+ values.append(value)
|
|
|
|
+ # Combine SQL statement and values
|
|
|
|
+ sql += ",\n".join(values) + ";"
|
|
|
|
+ # Print SQL statement
|
|
|
|
+ print(sql)
|
|
|
|
+ MysqlHelper.update(sql)
|
|
|
|
+
|
|
|
|
+ # AliyunLogger.logging(str(video_id), orjson.dumps(text).decode())
|
|
logger.info(f"[处理] 写入日志成功")
|
|
logger.info(f"[处理] 写入日志成功")
|
|
|
|
|
|
|
|
|