|
#!/bin/bash
# Build one day of ad origin training samples (dev) via a Spark job on YARN.
# Requires: GNU date, Spark 2.4.8, conda env py37, ./00_common.sh next to this script.
set -x

# BUGFIX: SPARK_HOME must be exported BEFORE it is used to build PATH;
# the original prepended an empty "$SPARK_HOME/bin" to PATH.
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# Shared helpers live next to this script.
sh_path=$(dirname "$0")
source "${sh_path}/00_common.sh"

# Activate the py37 conda environment.
source /root/anaconda3/bin/activate py37

# Yesterday (yyyymmdd): the sample day processed by the Spark job below.
today_early_1="$(date -d '1 days ago' +%Y%m%d)"
TRAIN_PATH=/dw/recommend/model/31_ad_sample_data_dev_20250623
# BUGFIX: this was a bare word (executed as a nonexistent command). It is the
# source table name consumed via table:${TABLE} in make_origin_data.
TABLE=alg_recsys_ad_sample_all
|
|
|
#######################################
# Run the Spark job that extracts one day of origin ad-sample data.
# Globals (read): today_early_1, TRAIN_PATH, TABLE
# Outputs: progress/timing to stderr
# Returns: exit status of the Spark job
#######################################
make_origin_data() {
  # Split declaration from assignment so the command status is not masked.
  local step_start_time
  step_start_time=$(date +%s)

  /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata_ad.v20240718.makedata_ad_31_originData_20250623 \
    --master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 16 \
    ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
    tablePart:64 repartition:32 \
    beginStr:${today_early_1}00 endStr:${today_early_1}12 \
    savePath:${TRAIN_PATH} \
    table:${TABLE} \
    filterHours:00,01,02,03,04,05,06,07 \
    idDefaultValue:0.1 &
  local task1=$!

  # BUGFIX: the original forked the job and returned immediately without
  # waiting, so the script could exit before Spark finished and the job's
  # exit status was never checked. Also report elapsed time (the original
  # captured step_start_time but never used it).
  local rv=0
  wait "$task1" || rv=$?

  local step_end_time
  step_end_time=$(date +%s)
  printf 'make_origin_data: exit=%d elapsed=%ds\n' \
    "$rv" "$((step_end_time - step_start_time))" >&2
  return "$rv"
}
|
|
|
+
|
|
|
# Entry point: run the origin-data extraction step; the script's exit
# status is that of this call.
make_origin_data