소스 검색

feat:添加日常补数据脚本

zhaohaipeng 1 개월 전
부모
커밋
dc29c3b9c8
1개의 변경된 파일, 72개의 추가 작업 그리고 0개의 파일 삭제
  1. 72 0
      recommend/00_train_data_make_day.sh

+ 72 - 0
recommend/00_train_data_make_day.sh

@@ -0,0 +1,72 @@
#!/bin/sh
# Daily backfill job: rebuild the recsys origin features for the day before
# yesterday, derive the ROS / STR training sets from them, and finally delete
# the raw origin features to free HDFS space.
#
# Fixes vs. the original version:
#   - every `wait` status is checked, so the destructive cleanup at the end
#     no longer runs when a Spark job failed;
#   - dt is guarded with ${dt:?}: an empty dt would have expanded the final
#     delete to the whole 41_recsys_origin_date directory;
#   - the STR job now uses the same absolute jar path as every other job
#     (it previously used a cwd-dependent ./target/... path).
set -x
# NOTE(review): -u is deliberately not set — SPARK_HOME may be unset in cron
# environments and the original script tolerated that on the PATH line.
set -e

export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0
export PREDICT_CACHE_PATH=/root/zhaohp/XGB/predict_cache/
export SEGMENT_BASE_PATH=/dw/recommend/model/36_model_attachment/score_calibration_file

HADOOP=/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop
SPARK_SUBMIT=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2
JAR=/root/zhaohp/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-shaded.jar
ORIGIN_PATH=/dw/recommend/model/41_recsys_origin_date

# Backfill the day before yesterday (upstream table needs time to settle).
# Abort immediately if the date computation produced nothing — otherwise the
# trailing `rm -r ${ORIGIN_PATH}/${dt}*` would match every day's data.
dt="$(date -d '2 days ago' +%Y%m%d)"
: "${dt:?failed to compute dt}"

echo "开始处理: ${dt}"

# Submit the origin-feature Spark job for one hour range of the day.
#   $1 - begin hour (two digits, inclusive)
#   $2 - end hour   (two digits, inclusive)
make_origin_data() {
  "${SPARK_SUBMIT}" org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_originData_20250218 \
    --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 13 \
    --conf spark.yarn.executor.memoryOverhead=2G \
    "${JAR}" \
    tablePart:64 "beginStr:${dt}$1" "endStr:${dt}$2" repartition:32 \
    "savePath:${ORIGIN_PATH}" \
    table:dwd_recsys_alg_sample_all_20250212
}

# Fan the day out into three parallel hour-range jobs, then barrier on all
# three and fail hard if any of them failed.
make_origin_data 00 11 & pid1=$!
make_origin_data 12 17 & pid2=$!
make_origin_data 18 23 & pid3=$!

rv=0
wait "$pid1" || rv=1
wait "$pid2" || rv=1
wait "$pid3" || rv=1
if [ "$rv" -ne 0 ]; then
  echo "origin feature job failed for ${dt}, keeping raw data" >&2
  exit 1
fi
echo "${dt} 原始特征生产完成"

# ROS data filter (share label).
"${SPARK_SUBMIT}" org.apache.spark.deploy.SparkSubmit \
  --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_ros_train_data_20250304 \
  --master yarn --driver-memory 2G --executor-memory 5G --executor-cores 1 --num-executors 15 \
  --conf spark.driver.maxResultSize=2g \
  "${JAR}" \
  "readPath:${ORIGIN_PATH}/${dt}*/*" \
  "savePath:/dw/recommend/model/41_recsys_ros_train_data/${dt}" \
  whatLabel:is_share repartition:64 &
ros_pid=$!

# STR negative-sample downsampling (keep 5% of negatives).
"${SPARK_SUBMIT}" org.apache.spark.deploy.SparkSubmit \
  --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_str_train_data_20250218 \
  --master yarn --driver-memory 4G --executor-memory 5G --executor-cores 1 --num-executors 15 \
  --conf spark.driver.maxResultSize=2g \
  "${JAR}" \
  "readPath:${ORIGIN_PATH}/${dt}*/*" \
  "savePath:/dw/recommend/model/41_recsys_str_train_data/${dt}" \
  fuSampleRate:0.05 whatLabel:is_share repartition:64 &
str_pid=$!

rv=0
wait "$ros_pid" || rv=1
wait "$str_pid" || rv=1
if [ "$rv" -ne 0 ]; then
  echo "train-data job failed for ${dt}, keeping raw data" >&2
  exit 1
fi
echo "${dt} 负样本采样完成"

# Delete the raw origin features to free space — only reached on success.
# The glob is passed literally; `hadoop fs` expands it against HDFS itself.
"${HADOOP}" fs -rm -r "${ORIGIN_PATH}/${dt}*"
echo "删除 ${dt} 的原始特征数据"