```bash
#!/bin/bash
start_date=""
end_date=""
start_hour=""
end_hour=""
table=""

# Use the five positional arguments if provided; otherwise default to
# all of yesterday against a fixed sample table.
if (( $# == 5 )); then
    start_date=$1
    end_date=$2
    start_hour=$3
    end_hour=$4
    table=$5
else
    start_date=$(date +%Y%m%d -d "-1 day")
    end_date=$start_date
    start_hour=00
    end_hour=23
    table=dwd_recsys_alg_sample_all_20250212
fi

set -x

# Environment
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# Job parameters
sampleRate=0.036
label=is_share
savePath=/dw/recommend/model/83_origin_data/

# Step 1: generate the raw data from ${table}
echo "$(date +%Y-%m-%d_%H-%M-%S)----------step1------------start generating raw data from ${table}"
/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata_recsys_r_rate.makedata_recsys_83_originData_20250317 \
    --master yarn --driver-memory 6G --executor-memory 10G --executor-cores 1 --num-executors 16 \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    /mnt/disk1/jch/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-jar-with-dependencies.jar \
    table:${table} tablePart:96 \
    beginStr:${start_date}${start_hour} endStr:${end_date}${end_hour} \
    whatPages:"详情后沉浸页,回流后沉浸页&内页feed,首页feed,详情页,回流页" \
    whatLabel:${label} \
    fuSampleRate:${sampleRate} \
    repartition:8 \
    savePath:${savePath}
```
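For reference, a hypothetical invocation that backfills a specific window by passing all five positional arguments might look like the sketch below (the script filename, dates, and hours are illustrative assumptions; the table name is the script's own default):

```bash
# Backfill 2025-03-20, hours 00 through 23, from the default sample table.
# "produce_origin_data.sh" is a placeholder name for the script above.
sh produce_origin_data.sh 20250320 20250320 00 23 dwd_recsys_alg_sample_all_20250212
```

If fewer than five arguments are given, the script falls back to yesterday's date, the full 00–23 hour range, and the hard-coded default table.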