Sfoglia il codice sorgente

训练表转样本增加在线日志样本

xueyiming 1 giorno fa
parent
commit
dcf14ec118
ha cambiato 2 file con 27 aggiunte e 2 eliminazioni
  1. 1 0
      ad/02_ad_model_dnn_v11_update.sh
  2. 26 2
      ad/25_xgb_make_data_origin_bucket.sh

+ 1 - 0
ad/02_ad_model_dnn_v11_update.sh

@@ -210,6 +210,7 @@ check_ad_hive() {
 bucket_feature_from_origin_to_hive() {
   (
     export outputTable=ad_easyrec_train_data_v3_sampled
+    export outputTable1=ad_easyrec_train_realtime_data_v3_sampled
     export outputTable2=ad_easyrec_eval_data_v3_sampled
     source ${sh_path}/25_xgb_make_data_origin_bucket.sh
     make_bucket_feature_from_origin_to_hive

+ 26 - 2
ad/25_xgb_make_data_origin_bucket.sh

@@ -124,7 +124,31 @@ make_bucket_feature_from_origin_to_hive() {
   outputTable:${outputTable} \
   inputTable:alg_recsys_ad_sample_all \
-  negSampleRate:${neg_sample_rate}
+  negSampleRate:${neg_sample_rate} &
+  local task1=$!
+
+  /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
+  --class com.aliyun.odps.spark.examples.makedata_ad.v20240718.makedata_ad_33_bucketDataFromOriginToHive_20250522 \
+  --master yarn --driver-memory 2G --executor-memory 3G --executor-cores 1 --num-executors 30 \
+  --conf spark.dynamicAllocation.enabled=true \
+  --conf spark.shuffle.service.enabled=true \
+  --conf spark.dynamicAllocation.maxExecutors=100 \
+  ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
+  beginStr:${today_early_1} endStr:${today_early_1} \
+  filterHours:${FILTER_HOURS:-00,01,02,03,04,05,06,07} \
+  filterAdverIds:${FILTER_ADVER_IDS} \
+  filterNames:_4h_,_5h_,adid_,targeting_conversion_ \
+  outputTable:${outputTable1} \
+  inputTable:alg_recsys_ad_sample_all \
+  negSampleRate:${neg_sample_rate} &
+  local task2=$!
+
+  wait ${task1}
+  local task1_return_code=$?
+
+  wait ${task2}
+  local task2_return_code=$?
+
+  check_run_status ${task1_return_code} ${step_start_time} "离线数据spark特征分桶任务"
+  check_run_status ${task2_return_code} ${step_start_time} "在线日志spark特征分桶任务"
 
-  local return_code=$?
-  check_run_status ${return_code} ${step_start_time} "spark特征分桶任务"
 }