#!/bin/bash
# nor_predict.sh
#
# Submit the recsys "nor" XGBoost prediction job to Spark on YARN.
# Builds a comma-separated list of per-day HDFS input paths between
# start_date and end_date (inclusive), then runs the predictor over
# them, writing results to PREDICT_RESULT_PATH.
#
# NOTE: shebang is bash (not sh) — the script uses bashisms
# (`for ((…))`, `[[ … ]]`), which break under dash/ash as /bin/sh.
set -x
set -e   # abort on any failed command (e.g. date or spark-submit errors)

export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# params
FEATURE_FILE=20250303_recsys_nor_name.txt
BASE_TRAIN_DATA_PATH=/dw/recommend/model/82_recsys_nor_train_data
PREDICT_RESULT_PATH=/dw/recommend/model/82_recsys_nor_predict_data
MODEL_SAVE_PATH=/dw/recommend/model/82_recsys_nor_model/model_xgb

# Date window for test data (YYYYMMDD; numeric compare works for this format).
start_date=20250301
end_date=20250301

# Collect up to 22 consecutive days of data paths, stopping at end_date.
test_data_path=""
for ((i = 0; i <= 21; i++)); do
  data_date=$(date -d "$start_date $i day" +"%Y%m%d")
  if [ "$data_date" -le "$end_date" ]; then
    one_day_data_path="${BASE_TRAIN_DATA_PATH}/${data_date}"
    if [ -z "$test_data_path" ]; then
      test_data_path="$one_day_data_path"
    else
      # Spark accepts multiple input paths as a comma-separated list.
      test_data_path="$test_data_path,$one_day_data_path"
    fi
  fi
done

/opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
  --class com.tzld.piaoquan.recommend.model.pred_recsys_61_xgb_nor_hdfsfile_20241209 \
  --master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 16 \
  --conf spark.yarn.executor.memoryoverhead=1024 \
  --conf spark.shuffle.service.enabled=true \
  --conf spark.shuffle.service.port=7337 \
  --conf spark.shuffle.consolidateFiles=true \
  --conf spark.shuffle.manager=sort \
  --conf spark.storage.memoryFraction=0.4 \
  --conf spark.shuffle.memoryFraction=0.5 \
  --conf spark.default.parallelism=200 \
  --conf spark.debug.maxToStringFields=100 \
  /mnt/disk1/jch/recommend-model/recommend-model-produce/target/recommend-model-produce-jar-with-dependencies.jar \
  labelLogType:0 \
  labelLogBase:1.5 \
  featureFile:"${FEATURE_FILE}" \
  testPath:"${test_data_path}" \
  savePath:"${PREDICT_RESULT_PATH}" \
  modelPath:"${MODEL_SAVE_PATH}"