rov_sample_v1.sh

#!/bin/sh
set -x

# args: start date, end date, feature file (all required)
start_date=""
end_date=""
feature_file=""
if [ $# -eq 3 ]; then
    start_date=$1
    end_date=$2
    feature_file=$3
else
    echo "usage: $0 <start_date> <end_date> <feature_file>"
    exit 1
fi
# env
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# param
sampleRate=2
readPath=/dw/recommend/model/83_origin_data/
savePath=/dw/recommend/model/831_recsys_rov_train_data/

echo "$(date) rov sample"
# submit the Spark sampling job; whatPages values are Chinese page names from the
# source logs (post-detail immersion page, post-return immersion page & inner feed,
# home feed, detail page)
${SPARK_HOME}/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata_recsys_r_rate.makedata_recsys_86_fm_sample_20250627 \
    --master yarn --driver-memory 2G --executor-memory 6G --executor-cores 1 --num-executors 24 \
    --files ${feature_file} \
    /mnt/disk1/jch/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-jar-with-dependencies.jar \
    readPath:${readPath} \
    beginStr:${start_date} endStr:${end_date} \
    whatApps:0,4,32,31,21,29,27,26,28,34,3,36,6,17,35 \
    whatPages:"详情后沉浸页,回流后沉浸页&内页feed,首页feed,详情页" \
    whatLabel:is_return_noself \
    fuSampleRate:${sampleRate} \
    notUseBucket:1 \
    featureName:${feature_file} \
    featureBucket:20241209_recsys_rov_bucket.txt \
    repartition:64 \
    savePath:${savePath}
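
# Example invocation (illustrative only; the dates and feature file name below are
# assumptions, not values taken from this repository):
#   sh rov_sample_v1.sh 20250620 20250626 20250627_recsys_rov_feature.txt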