#!/bin/bash

current_data=20250624       # train end date (last day of the training window)
today_early_1=20250625      # prediction / test data date
train_data_days=14          # number of non-holiday days to include in training
model_name=model_xgb_dev_20250623
feature_file=20240703_ad_feature_name.txt
BUCKET_FEATURE_PATH=/dw/recommend/model/dev_20250623/33_ad_train_data
model_ver=dev_20250623
MODEL_PATH=/root/yuehailiang/xgboost-dev/

sh_path=$(cd $(dirname $0); pwd)
# 00_common.sh is expected to provide the helpers used below, e.g.
# is_not_holidays, check_run_status and PREDICT_RESULT_SAVE_PATH.
source ${sh_path}/00_common.sh
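# --- Illustrative sketch (not part of the original pipeline) ----------------
# is_not_holidays is assumed to come from 00_common.sh. Its contract, as used
# in init() below, is: echo 1 when the given YYYYMMDD date should be trained
# on, 0 when it is a holiday to skip. A hypothetical fallback with the same
# contract, driven by an assumed holidays.txt (one YYYYMMDD per line), could
# look like this; it is only defined when 00_common.sh does not provide one.
if ! declare -F is_not_holidays >/dev/null; then
    is_not_holidays() {
        local check_date=$1
        if grep -qx "${check_date}" "${sh_path}/holidays.txt" 2>/dev/null; then
            echo 0
        else
            echo 1
        fi
    }
fi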
init() {
    set +x
    declare -a date_keys=()
    local count=1
    # Walk backwards from current_data and collect train_data_days non-holiday dates
    while [[ ${count} -le $train_data_days ]]; do
        date_key=$(date -d "${current_data}" +%Y%m%d)
        # Skip holidays; otherwise append this day's partition to the training path list
        if [ $(is_not_holidays ${date_key}) -eq 1 ]; then
            # Record the date
            date_keys+=("${date_key}")
            if [[ -z ${train_data_path} ]]; then
                train_data_path="${BUCKET_FEATURE_PATH}/${date_key}"
            else
                # Prepend older days so the list runs oldest first, newest last
                train_data_path="${BUCKET_FEATURE_PATH}/${date_key},${train_data_path}"
            fi
            count=$((count + 1))
        else
            echo "Date ${date_key} is a holiday, skipping"
        fi
        current_data=$(date -d "${current_data} -1 day" +%Y%m%d)
    done

    # date_keys is ordered newest to oldest, so the last element is the first training day
    last_index=$((${#date_keys[@]} - 1))
    train_first_day=${date_keys[$last_index]}
    train_last_day=${date_keys[0]}

    # Output locations are suffixed with the MMDD of the first and last training days
    model_save_path=${MODEL_PATH}/${model_name}_${train_first_day: -4}_${train_last_day: -4}
    predict_date_path=${BUCKET_FEATURE_PATH}/${today_early_1}
    new_model_predict_result_path=${PREDICT_RESULT_SAVE_PATH}/${today_early_1}_${model_ver}_${train_first_day: -4}_${train_last_day: -4}

    echo "init param train_data_path: ${train_data_path}"
    echo "init param predict_date_path: ${predict_date_path}"
    echo "init param new_model_predict_result_path: ${new_model_predict_result_path}"
    echo "init param model_save_path: ${model_save_path}"
    echo "init param feature_file: ${feature_file}"
    echo "init param model_name: ${model_name}"
}
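# --- Optional sanity check (illustrative, not in the original flow) ---------
# train_data_path is a comma-separated list of day-level HDFS directories,
# oldest day first and newest day last. A hypothetical helper that verifies
# each partition exists before submitting the Spark job might look like this;
# it is defined but never called here, e.g. run check_train_paths after init.
check_train_paths() {
    local path
    for path in ${train_data_path//,/ }; do
        if ! hdfs dfs -test -e "${path}"; then
            echo "WARNING: training path ${path} does not exist on HDFS"
        fi
    done
}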
xgb_train() {
    local step_start_time=$(date +%s)
    # Note: spark.shuffle.consolidateFiles and spark.*.memoryFraction are legacy
    # settings that Spark 3.x no longer reads; they are kept as in the original command.
    /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
        --class com.tzld.piaoquan.recommend.model.train_01_xgb_ad_20250104 \
        --master yarn --driver-memory 6G --executor-memory 10G --executor-cores 2 --num-executors 11 \
        --conf spark.yarn.executor.memoryOverhead=2048 \
        --conf spark.shuffle.service.enabled=true \
        --conf spark.shuffle.service.port=7337 \
        --conf spark.shuffle.consolidateFiles=true \
        --conf spark.shuffle.manager=sort \
        --conf spark.storage.memoryFraction=0.4 \
        --conf spark.shuffle.memoryFraction=0.5 \
        --conf spark.default.parallelism=200 \
        /root/yuehailiang/recommend-model/recommend-model-produce/target/recommend-model-produce-1.0.0.jar \
        featureFile:${feature_file} \
        trainPath:${train_data_path} \
        testPath:${predict_date_path} \
        savePath:${new_model_predict_result_path} \
        modelPath:${model_save_path} \
        eta:0.01 gamma:0.0 max_depth:5 num_round:1000 num_worker:10 repartition:20 \
        negSampleRate:0.04

    #local return_code=$?
    #check_run_status ${return_code} ${step_start_time} "XGB model training task" "XGB model training failed"
}
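# --- Illustrative sketch (not part of the original pipeline) ----------------
# check_run_status is assumed to come from 00_common.sh. Based on the
# commented-out call above, it takes a return code, the step start time, a
# task name and a failure message. A minimal hypothetical fallback that only
# logs elapsed time and aborts on failure might be:
if ! declare -F check_run_status >/dev/null; then
    check_run_status() {
        local return_code=$1 step_start_time=$2 task_name=$3 fail_msg=$4
        local elapsed=$(( $(date +%s) - step_start_time ))
        if [[ ${return_code} -ne 0 ]]; then
            echo "${task_name} failed after ${elapsed}s: ${fail_msg}"
            exit 1
        fi
        echo "${task_name} finished in ${elapsed}s"
    }
fi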

init
xgb_train