@@ -0,0 +1,51 @@
+#!/bin/bash
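+# Train the recsys "nor" XGBoost model on YARN: assemble daily samples for a date range, then submit the Spark training job.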
+set -x
+
+export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
+export JAVA_HOME=/usr/lib/jvm/java-1.8.0
+
+# params
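+# FEATURE_FILE is the feature-name list; the *_PATH variables point at the training data, prediction output and saved model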
+FEATURE_FILE=20250303_recsys_nor_name.txt
+BASE_TRAIN_DATA_PATH=/dw/recommend/model/82_recsys_nor_train_data
+PREDICT_RESULT_PATH=/dw/recommend/model/82_recsys_nor_predict_data
+MODEL_SAVE_PATH=/dw/recommend/model/82_recsys_nor_model/model_xgb
+
+start_date=20250221
+end_date=20250228
+train_data_path=""
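+# Build a comma-separated list of the daily training-data directories between start_date and end_date (inclusive)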
+for((i=0; i<=21; i++))
+do
+  data_date=$(date -d "$start_date $i day" +"%Y%m%d")
+  if [ "$data_date" -le "$end_date" ]
+  then
+    one_day_data_path="${BASE_TRAIN_DATA_PATH}/${data_date}"
+    if [[ -z $train_data_path ]]
+    then
+      train_data_path=$one_day_data_path
+    else
+      train_data_path="$train_data_path,$one_day_data_path"
+    fi
+  fi
+done
+
+## ******* train *******
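+# Submit the XGBoost training job (32 executors x 10G, 1 core each); trainer arguments follow the jar path as key:value pairs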
+/opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
+--class com.tzld.piaoquan.recommend.model.train_recsys_61_xgb_nor_20241209 \
+--master yarn --driver-memory 4G --executor-memory 10G --executor-cores 1 --num-executors 32 \
+--conf spark.yarn.executor.memoryOverhead=2048 \
+--conf spark.shuffle.service.enabled=true \
+--conf spark.shuffle.service.port=7337 \
+--conf spark.shuffle.consolidateFiles=true \
+--conf spark.shuffle.manager=sort \
+--conf spark.storage.memoryFraction=0.4 \
+--conf spark.shuffle.memoryFraction=0.5 \
+--conf spark.default.parallelism=200 \
+--conf spark.sql.debug.maxToStringFields=100 \
+/mnt/disk1/jch/recommend-model/recommend-model-produce/target/recommend-model-produce-jar-with-dependencies.jar \
+featureFile:${FEATURE_FILE} \
+trainPath:${train_data_path} \
+savePath:${PREDICT_RESULT_PATH} \
+modelPath:${MODEL_SAVE_PATH} \
+labelLogType:0 \
+labelLogBase:1.5 \
+eta:0.06 gamma:0.0 max_depth:5 num_round:1000 num_worker:32 repartition:20