00_train_data_make_day.sh (3.6 KB)
  1. #!/bin/sh
  2. set -x
  3. export PATH=$SPARK_HOME/bin:$PATH
  4. export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
  5. export JAVA_HOME=/usr/lib/jvm/java-1.8.0
  6. export PREDICT_CACHE_PATH=/root/zhaohp/XGB/predict_cache/
  7. export SEGMENT_BASE_PATH=/dw/recommend/model/36_model_attachment/score_calibration_file
  8. HADOOP=/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop
  9. dt="$(date -d '2 days ago' +%Y%m%d)"
  10. echo "开始处理: ${dt}"
  11. /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
  12. --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_originData_20250218 \
  13. --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 13 \
  14. --conf spark.yarn.executor.memoryOverhead=2G \
  15. /root/zhaohp/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
  16. tablePart:64 beginStr:${dt}00 endStr:${dt}11 repartition:32 \
  17. savePath:/dw/recommend/model/41_recsys_origin_date \
  18. table:dwd_recsys_alg_sample_all_20250212 &
  19. /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
  20. --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_originData_20250218 \
  21. --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 13 \
  22. --conf spark.yarn.executor.memoryOverhead=2G \
  23. /root/zhaohp/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
  24. tablePart:64 beginStr:${dt}12 endStr:${dt}17 repartition:32 \
  25. savePath:/dw/recommend/model/41_recsys_origin_date \
  26. table:dwd_recsys_alg_sample_all_20250212 &
  27. /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
  28. --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_originData_20250218 \
  29. --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 13 \
  30. --conf spark.yarn.executor.memoryOverhead=2G \
  31. /root/zhaohp/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
  32. tablePart:64 beginStr:${dt}18 endStr:${dt}23 repartition:32 \
  33. savePath:/dw/recommend/model/41_recsys_origin_date \
  34. table:dwd_recsys_alg_sample_all_20250212 &
  35. wait
  36. echo "${dt} 原始特征生产完成"
  37. # ROS数据过滤
  38. /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
  39. --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_ros_train_data_20250304 \
  40. --master yarn --driver-memory 2G --executor-memory 5G --executor-cores 1 --num-executors 15 \
  41. --conf spark.driver.maxResultSize=2g \
  42. /root/zhaohp/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
  43. readPath:/dw/recommend/model/41_recsys_origin_date/${dt}*/* \
  44. savePath:/dw/recommend/model/41_recsys_ros_train_data/${dt} \
  45. whatLabel:is_share repartition:64 &
  46. # STR负样本采样
  47. /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
  48. --class com.aliyun.odps.spark.examples.makedata_recsys.v20250218.makedata_recsys_41_str_train_data_20250218 \
  49. --master yarn --driver-memory 4G --executor-memory 5G --executor-cores 1 --num-executors 15 \
  50. --conf spark.driver.maxResultSize=2g \
  51. ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
  52. readPath:/dw/recommend/model/41_recsys_origin_date/${dt}*/* \
  53. savePath:/dw/recommend/model/41_recsys_str_train_data/${dt} \
  54. fuSampleRate:0.05 whatLabel:is_share repartition:64 &
  55. wait
  56. echo "${dt} 负样本采样完成"
  57. # 删除原始特征,释放空间
  58. ${HADOOP} fs -rm -r /dw/recommend/model/41_recsys_origin_date/${dt}*
  59. echo "删除 ${dt} 的原始特征数据"