
Script changes

jihuaqiang · 1 month ago · commit d06854b72f
5 changed files with 47 additions and 52 deletions
  1. Dockerfile (+1 -6)
  2. start.sh (+3 -5)
  3. supervisor.conf (+0 -39)
  4. workers/test_select.py (+41 -0)
  5. workers/video_insight_select_work.py (+2 -2)

Dockerfile (+1 -6)

@@ -9,11 +9,6 @@ ENV TZ=Asia/Shanghai
 RUN apt update && apt --no-install-recommends install -y curl jq \
     && apt-get clean && rm -rf /var/lib/apt/lists/* \
     && pip install -r requirements.txt --no-cache-dir \
-    && mkdir -p /app/cache \
-    && apt-get update && apt-get install -y supervisor && \
-    mkdir -p /var/log/supervisor
-
-# Copy the supervisor config file
-COPY supervisor.conf /app/supervisor.conf
+    && mkdir -p /app/cache 
 
 #ENTRYPOINT ["python", "/app/job.py"]

start.sh (+3 -5)

@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
 
# Set environment variables
 export CONTAINER_INFO="$(curl -s --unix-socket /var/run/docker.sock http://docker/containers/$HOSTNAME/json)"
@@ -6,8 +6,6 @@ export CONTAINER_INDEX="$(echo "$CONTAINER_INFO" | jq '.Name' | sed 's/^"\(.*\)"
 echo "export VIDEO_INSIGHT_GEMINI_API_KEY=$(eval echo \$"VIDEO_INSIGHT_GEMINI_API_KEY_${CONTAINER_INDEX}")" >> /root/.bashrc
 . /root/.bashrc
 
-# Ensure the log directory exists
-mkdir -p /app/logs
+# python /app/workers/video_insight_consumption_work.py
+python /app/workers/video_insight_trigger_work.py
 
-# Start supervisor
-/usr/bin/supervisord -c /app/supervisor.conf
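
With supervisor gone, start.sh launches a single worker directly, and the per-replica Gemini key is resolved through the exported CONTAINER_INDEX before Python starts. For orientation only, a minimal Python sketch of the equivalent lookup inside a worker process (an illustration, not code from this commit; it assumes the VIDEO_INSIGHT_GEMINI_API_KEY_<index> naming used in start.sh and that CONTAINER_INDEX is present in the environment):

    # Sketch only: resolve this replica's Gemini key, mirroring start.sh.
    import os

    def resolve_gemini_api_key() -> str:
        # Preferred path: start.sh has already exported the resolved key.
        key = os.environ.get("VIDEO_INSIGHT_GEMINI_API_KEY")
        if key:
            return key
        # Fallback: redo the per-replica lookup from CONTAINER_INDEX,
        # assuming keys are named VIDEO_INSIGHT_GEMINI_API_KEY_<index>.
        index = os.environ.get("CONTAINER_INDEX", "")
        key = os.environ.get(f"VIDEO_INSIGHT_GEMINI_API_KEY_{index}")
        if not key:
            raise RuntimeError("No Gemini API key configured for this replica")
        return key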

supervisor.conf (+0 -39)

@@ -1,39 +0,0 @@
-[supervisord]
-nodaemon=true
-logfile=/app/logs/supervisord.log
-logfile_maxbytes=50MB
-logfile_backups=10
-loglevel=info
-pidfile=/var/run/supervisord.pid
-
-[program:video_insight_consumption]
-command=python /app/workers/video_insight_consumption_work.py
-directory=/app
-user=root
-autostart=true
-autorestart=true
-startsecs=5
-startretries=3
-stderr_logfile=/app/logs/consumption.err.log
-stdout_logfile=/app/logs/consumption.out.log
-stdout_logfile_maxbytes=50MB
-stdout_logfile_backups=10
-stderr_logfile_maxbytes=50MB
-stderr_logfile_backups=10
-environment=PYTHONUNBUFFERED=1
-
-[program:video_insight_trigger]
-command=python /app/workers/video_insight_trigger_work.py
-directory=/app
-user=root
-autostart=true
-autorestart=true
-startsecs=5
-startretries=3
-stderr_logfile=/app/logs/trigger.err.log
-stdout_logfile=/app/logs/trigger.out.log
-stdout_logfile_maxbytes=50MB
-stdout_logfile_backups=10
-stderr_logfile_maxbytes=50MB
-stderr_logfile_backups=10
-environment=PYTHONUNBUFFERED=1 

workers/test_select.py (+41 -0)

@@ -0,0 +1,41 @@
+import datetime
+import os
+import sys
+import time
+
+import schedule
+from loguru import logger
+
+# Make the project root importable (utils.*) whether run from /app or /app/workers.
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+sys.path.append('/app')
+
+logger.add("/app/logs/select.log", rotation="10 MB")
+
+print("Current working directory:", os.getcwd())
+
+from utils.odps_data import OdpsDataCount
+from utils.redis import RedisHelper
+
+def requirement_insight():
+    """Video demand-point insight: select yesterday's top shared video."""
+    try:
+        dt = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime('%Y%m%d')
+        logger.info("Starting video demand-point insight selection")
+        redis_task = "task:video_insight"
+        redis_trigger_task = "task:video_trigger_insight"
+        sql = f'select clickobjectid as video_id from loghubods.user_share_log where dt = {dt} and topic = "click" group by clickobjectid order by count(distinct machinecode) desc limit 1'
+        data = OdpsDataCount.main(sql)
+        if not data:
+            return
+        RedisHelper().get_client().rpush(redis_task, *data)
+        RedisHelper().get_client().rpush(redis_trigger_task, *data)
+        logger.info(f"[R] Redis write succeeded, wrote {len(data)} item(s)")
+    except Exception as e:
+        logger.error(f"[R] Redis write failed: {e}")
+
+def schedule_tasks():
+    schedule.every().day.at("01:00").do(requirement_insight)
+
+
+if __name__ == "__main__":
+    # Test entry point: run the selection once; the scheduler loop is not started here.
+    requirement_insight()
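
Note that workers/test_select.py registers a daily 01:00 job in schedule_tasks() but never enters a run loop, so executing the file performs exactly one immediate selection, which fits a test script. For reference, the usual pattern for actually driving jobs registered with the schedule library is a polling loop like the sketch below (not part of this commit):

    # Sketch: the polling loop that would drive the job registered by
    # schedule_tasks() in workers/test_select.py above.
    import time
    import schedule

    def run_forever(poll_seconds: int = 60) -> None:
        schedule_tasks()              # register the daily 01:00 job (defined above)
        while True:
            schedule.run_pending()    # run any job whose scheduled time has passed
            time.sleep(poll_seconds)  # back off between checks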

workers/video_insight_select_work.py (+2 -2)

@@ -18,13 +18,13 @@ def requirement_insight():
     try:
         dt = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime('%Y%m%d')
         logger.info(f"视频需求点洞察")
-        redis_task = "task:video_insight"
+        # redis_task = "task:video_insight"
         redis_trigger_task = "task:video_trigger_insight"
         sql =f'select clickobjectid as video_id from loghubods.user_share_log where dt = {dt} and topic = "click" group by clickobjectid order by count(distinct machinecode) desc limit 1000'
         data = OdpsDataCount.main(sql)
         if not data:
             return
-        RedisHelper().get_client().rpush(redis_task, *data)
+        # RedisHelper().get_client().rpush(redis_task, *data)
         RedisHelper().get_client().rpush(redis_trigger_task, *data)
         logger.info(f"[R] 写入Redis 成功 共写入 {len(data)} 条")
     except Exception as e:
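
In the production selector, the push to task:video_insight is commented out, so only task:video_trigger_insight is fed going forward, which lines up with start.sh now launching only video_insight_trigger_work.py. For orientation, a consumer on that queue could drain it roughly as in the sketch below; this is an illustration, not the actual trigger worker, and it assumes RedisHelper().get_client() returns a standard redis-py client:

    # Illustrative consumer for the task:video_trigger_insight queue; the real
    # logic lives in workers/video_insight_trigger_work.py (not shown in this diff).
    import time
    from loguru import logger
    from utils.redis import RedisHelper

    REDIS_TRIGGER_TASK = "task:video_trigger_insight"

    def consume_trigger_tasks() -> None:
        client = RedisHelper().get_client()
        while True:
            video_id = client.lpop(REDIS_TRIGGER_TASK)  # FIFO relative to the rpush above
            if video_id is None:
                time.sleep(10)  # queue empty; wait before polling again
                continue
            logger.info(f"[R] popped video_id={video_id}")
            # ... run the insight pipeline for this video_id (not shown) ...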