#!/bin/bash
set -x

# Define JAVA_HOME and SPARK_HOME before prepending to PATH; in the original
# ordering, $SPARK_HOME expanded to an empty string when PATH was built.
# bash rather than sh, since the script relies on `source` and `local`.
export JAVA_HOME=/usr/lib/jvm/java-1.8.0
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf

sh_path=$(dirname "$0")
source "${sh_path}/00_common.sh"
source /root/anaconda3/bin/activate py37
make_32() {
    local step_start_time=$(date +%s)
    # Launch the submit in the background so that $! below actually captures
    # its PID. In the original, the command ran in the foreground (and a stray
    # trailing backslash after fileName: folded the `local task1=$!` line into
    # the jar arguments), so $! was never set to the job's PID.
    "$SPARK_HOME"/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
        --class com.aliyun.odps.spark.examples.makedata_ad.v20240718.makedata_ad_32_bucket_20250110 \
        --master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 16 \
        ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
        readPath:/dw/recommend/model/31_ad_sample_data_dev_20250623/*/* \
        savePath:/dw/recommend/model/32_bucket_data_dev_20250623 \
        fileName:ad_bucket_707.txt &
    local task1=$!
    wait ${task1}
    local task1_return_code=$?
    # Use the captured exit code (it was assigned but never read in the
    # original) and report elapsed time from step_start_time.
    local step_end_time=$(date +%s)
    echo "make_32 exit code: ${task1_return_code}, elapsed: $((step_end_time - step_start_time))s"
    if [ ${task1_return_code} -ne 0 ]; then
        echo "make_32 failed" >&2
        exit 1
    fi
}

make_32
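# Post-run sanity check (an added sketch, not part of the original script):
# list savePath so the log records what the job produced. Whether the job
# writes ad_bucket_707.txt (the fileName argument) directly under savePath is
# an assumption inferred from the argument names, not confirmed here.
hadoop fs -ls /dw/recommend/model/32_bucket_data_dev_20250623 \
    || echo "WARN: listing savePath failed; bucket output may be missing" >&2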