spark-submit --class com.tzld.piaoquan.recommend.model.produce.xgboost.XGBoostTrain \
--master yarn --driver-memory 512M --executor-memory 512M --executor-cores 1 --num-executors 4 \
/root/recommend-model/recommend-model-produce-new.jar > ~/recommend-model/log 2>&1 &

recommend-model-produce-jar-with-dependencies.jar

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.produce.xgboost.XGBoostTrain \
--master yarn --driver-memory 512M --executor-memory 512M --executor-cores 1 --num-executors 2 \
./target/recommend-model-produce-jar-with-dependencies.jar \
> p.log 2>&1 &

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.train_01_xgb_ad_20240808 \
--master yarn --driver-memory 6G --executor-memory 6G --executor-cores 1 --num-executors 32 \
--conf spark.yarn.executor.memoryoverhead=1024 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
> p.log 2>&1 &

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.train_01_xgb_ad_20240808 \
--master yarn --driver-memory 6G --executor-memory 3G --executor-cores 1 --num-executors 160 \
--conf spark.yarn.executor.memoryoverhead=1000 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
featureFile:20240809_ad_feature_name_517.txt \
trainPath:/dw/recommend/model/33_ad_train_data_v4/2024080[6-9],/dw/recommend/model/33_ad_train_data_v4/2024081[0-2] \
testPath:/dw/recommend/model/33_ad_train_data_v4/20240813/ \
savePath:/dw/recommend/model/34_ad_predict_data/20240813_1000/ \
modelPath:/dw/recommend/model/35_ad_model/model_xgb_7day \
eta:0.01 gamma:0.0 max_depth:5 num_round:1000 num_worker:63 \
repartition:20 \
> p5.log 2>&1 &

0.7316512679739304 1000

2024072[5-9],2024073[0-1],2024080[1-4]
/dw/recommend/model/33_ad_train_data_v4/(20240725|20240726|20240727|20240728|20240729|20240730|20240731|20240801|20240802|20240803|20240804)
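For reference, a minimal Scala sketch of what a training entry point like train_01_xgb_ad_20240808 might do with the key:value arguments above (featureFile / trainPath / eta / num_round / ...). This is not the actual job source: the object name XgbTrainSketch, the libsvm input format, the binary:logistic objective, and the "features"/"label" column names are assumptions, and the real job presumably builds its feature columns from featureFile, which is not shown here.

// Hypothetical sketch only; not the source of train_01_xgb_ad_20240808.
import ml.dmlc.xgboost4j.scala.spark.XGBoostClassifier
import org.apache.spark.sql.SparkSession

object XgbTrainSketch {
  def main(args: Array[String]): Unit = {
    // Arguments arrive as "key:value" tokens, e.g. "eta:0.01", "num_round:1000".
    val argMap = args.map(_.split(":", 2)).collect { case Array(k, v) => k -> v }.toMap

    val spark = SparkSession.builder().appName("xgb-train-sketch").getOrCreate()

    // trainPath may hold several comma-separated globs, as in the command above.
    val train = spark.read.format("libsvm").load(argMap("trainPath").split(","): _*)

    val xgb = new XGBoostClassifier(Map[String, Any](
      "eta"         -> argMap.getOrElse("eta", "0.01").toDouble,
      "gamma"       -> argMap.getOrElse("gamma", "0.0").toDouble,
      "max_depth"   -> argMap.getOrElse("max_depth", "5").toInt,
      "num_round"   -> argMap.getOrElse("num_round", "1000").toInt,
      "num_workers" -> argMap.getOrElse("num_worker", "63").toInt,
      "objective"   -> "binary:logistic" // assumption: binary click/convert label
    )).setFeaturesCol("features").setLabelCol("label")

    // Fit and persist the model to the directory later exported with dfs -get / tar.
    val model = xgb.fit(train)
    model.write.overwrite().save(argMap("modelPath"))
    spark.stop()
  }
}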
nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.ana_01_xgb_ad_20240809 \
--master yarn --driver-memory 1G --executor-memory 1G --executor-cores 1 --num-executors 32 \
--conf spark.yarn.executor.memoryoverhead=1024 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
savePath:/dw/recommend/model/34_ad_predict_data/20240805_1000/ \
> p1.log 2>&1 &

dfs -get /dw/recommend/model/35_ad_model/model_xgb_1000 ./
tar -czvf model_xgb_1000.tar.gz -C model_xgb_1000 .
dfs -put model_xgb_1000.tar.gz oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/
oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model_xgb_1000.tar.gz

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.pred_01_xgb_ad_jsonfile_20240813 \
--master yarn --driver-memory 6G --executor-memory 6G --executor-cores 1 --num-executors 32 \
--conf spark.yarn.executor.memoryoverhead=1024 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
featureFile:20240809_ad_feature_name_517.txt \
savePath:/dw/recommend/model/34_ad_predict_data/case_tmp/ \
modelPath:/dw/recommend/model/35_ad_model/model_xgb_1000 \
> p5.log 2>&1 &

-------------------------------Prediction-----------------------------------------

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.pred_01_xgb_ad_hdfsfile_20240813 \
--master yarn --driver-memory 1G --executor-memory 1G --executor-cores 1 --num-executors 30 \
--conf spark.yarn.executor.memoryoverhead=1024 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
featureFile:20240809_ad_feature_name_517.txt \
testPath:/dw/recommend/model/33_ad_train_data_v4/20240815/ \
savePath:/dw/recommend/model/34_ad_predict_data/20240815_new2/ \
modelPath:/dw/recommend/model/35_ad_model/model_xgb_7day_2000 \
> p1_pred_20240815_new2.log 2>&1 &

cat p1_pred_20240815_new2.log | grep -E "^3353|^3606|^2670|^3585|^2912|^3869|^3857" | grep "实验组"   # "实验组" = experiment group
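Likewise, a hedged sketch of the offline scoring step that a job like pred_01_xgb_ad_hdfsfile_20240813 performs: load the saved model, score testPath, and write label/score pairs under savePath. The real job's feature building from featureFile and its exact output layout are not shown; the libsvm input, the tab-separated output, and the object name XgbPredictSketch are assumptions.

// Hypothetical sketch only; not the source of pred_01_xgb_ad_hdfsfile_20240813.
import ml.dmlc.xgboost4j.scala.spark.XGBoostClassificationModel
import org.apache.spark.ml.functions.vector_to_array
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object XgbPredictSketch {
  def main(args: Array[String]): Unit = {
    val argMap = args.map(_.split(":", 2)).collect { case Array(k, v) => k -> v }.toMap
    val spark = SparkSession.builder().appName("xgb-predict-sketch").getOrCreate()

    // Model directory written by the training job, e.g. /dw/recommend/model/35_ad_model/model_xgb_7day_2000.
    val model = XGBoostClassificationModel.load(argMap("modelPath"))

    // Input format is an assumption; the real job presumably rebuilds features from featureFile.
    val test = spark.read.format("libsvm").load(argMap("testPath"))

    model.transform(test)
      .withColumn("score", vector_to_array(col("probability")).getItem(1)) // positive-class probability
      .select("label", "score")
      .write.mode("overwrite").option("sep", "\t").csv(argMap("savePath"))

    spark.stop()
  }
}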
-------------------------------Training-----------------------------------------

nohup /opt/apps/SPARK3/spark-3.3.1-hadoop3.2-1.0.5/bin/spark-class org.apache.spark.deploy.SparkSubmit \
--class com.tzld.piaoquan.recommend.model.train_01_xgb_ad_20240808 \
--master yarn --driver-memory 6G --executor-memory 9G --executor-cores 1 --num-executors 31 \
--conf spark.yarn.executor.memoryoverhead=1000 \
--conf spark.shuffle.service.enabled=true \
--conf spark.shuffle.service.port=7337 \
--conf spark.shuffle.consolidateFiles=true \
--conf spark.shuffle.manager=sort \
--conf spark.storage.memoryFraction=0.4 \
--conf spark.shuffle.memoryFraction=0.5 \
--conf spark.default.parallelism=200 \
./target/recommend-model-produce-jar-with-dependencies.jar \
featureFile:20240809_ad_feature_name_517.txt \
trainPath:/dw/recommend/model/33_ad_train_data_v4/2024080[7-9],/dw/recommend/model/33_ad_train_data_v4/2024081[0-3] \
testPath:/dw/recommend/model/33_ad_train_data_v4/20240814/ \
savePath:/dw/recommend/model/34_ad_predict_data/20240814_2000/ \
modelPath:/dw/recommend/model/35_ad_model/model_xgb_7day_2000 \
eta:0.01 gamma:0.0 max_depth:5 num_round:2000 num_worker:30 \
repartition:20 \
> p2_train_0814.log 2>&1 &

dfs -get /dw/recommend/model/35_ad_model/model_xgb_7day ./
tar -czvf model_xgb_1000.tar.gz -C model_xgb_7day .
rm -rf .model.tar.gz.crc
dfs -rm -r -skipTrash oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model.tar.gz
dfs -put model_xgb_1000.tar.gz oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/
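The 0.7316512679739304 noted after the first training run reads like a test AUC (num_round 1000). For reference, a minimal sketch for recomputing AUC from a savePath of label/score pairs, assuming the tab-separated layout of the prediction sketch above; the real ana_01_xgb_ad_20240809 job may read a different format and compute it differently.

// Hypothetical sketch only; assumes "label<TAB>score" rows under savePath.
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
import org.apache.spark.sql.SparkSession

object XgbAucSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("xgb-auc-sketch").getOrCreate()
    val savePath = args.headOption.getOrElse("/dw/recommend/model/34_ad_predict_data/20240814_2000/")

    // BinaryClassificationMetrics expects (score, label) pairs.
    val scoreAndLabel = spark.read.option("sep", "\t").csv(savePath)
      .rdd
      .map(row => (row.getString(1).toDouble, row.getString(0).toDouble))

    val auc = new BinaryClassificationMetrics(scoreAndLabel).areaUnderROC()
    println(s"AUC = $auc")
    spark.stop()
  }
}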