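The driver script below takes a start date, an end date and an output sub-path, builds a comma-separated list of daily input directories under the ad_display data path, and submits the `stat_feature` Spark job. Argument handling comes first: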
```bash
#!/bin/bash
# Arguments: start_date (YYYYMMDD), end_date (YYYYMMDD), output sub-path
start_date=""
end_date=""
sub_path="feat_freq"
if (( $# == 3 )); then
    start_date=$1
    end_date=$2
    sub_path=$3
else
    echo "Usage: $0 <start_date> <end_date> <sub_path>"
    exit 1
fi
```
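A minimal invocation sketch; the script's file name is not given in the original, so `stat_feature.sh` and the dates below are assumptions, with dates in the YYYYMMDD form the comparison later in the script expects:

```bash
# Hypothetical file name and dates: aggregate 2024-01-01 through 2024-01-07
# into the output sub-path "feat_freq"
sh stat_feature.sh 20240101 20240107 feat_freq
```

The script then sets up the Spark/Hadoop environment and assembles the input paths: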
```bash
# Spark / Hadoop / JVM environment
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# Build a comma-separated list of daily input paths from start_date up to end_date
# (scans at most 22 days forward from start_date)
BASE_DATA_PATH=/dw/recommend/model/ad_display/data
data_path=""
for (( i = 0; i <= 21; i++ )); do
    data_date=$(date -d "$start_date $i day" +"%Y%m%d")
    # YYYYMMDD dates compare correctly as integers
    if [ "$data_date" -le "$end_date" ]; then
        one_day_data_path="${BASE_DATA_PATH}/${data_date}"
        if [[ -z $data_path ]]; then
            data_path=$one_day_data_path
        else
            data_path="$data_path,$one_day_data_path"
        fi
    fi
done
```
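For illustration, a quick check of what the loop produces (hypothetical dates; the comma-separated string is handed to the job as a multi-path input):

```bash
# Example only: with start_date=20240101 and end_date=20240103, this prints
#   /dw/recommend/model/ad_display/data/20240101,/dw/recommend/model/ad_display/data/20240102,/dw/recommend/model/ad_display/data/20240103
echo "data_path=${data_path}"
```

The remaining part of the script sets the job parameters and submits the Spark job: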
```bash
# Job parameters
featureIndex=2
repartition=1
savePath=/dw/recommend/model/832_recsys_analysis_data/${sub_path}

# Step 1: produce the raw data
echo "$(date +%Y-%m-%d_%H-%M-%S)----------step1------------start producing raw data from ${table}"
set -x
/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata_recsys_r_rate.stat_feature \
    --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 16 \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    /mnt/disk1/jch/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-jar-with-dependencies.jar \
    dataPath:${data_path} \
    featureIndex:${featureIndex} \
    repartition:${repartition} \
    savePath:${savePath}
```
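Because the script already puts `$SPARK_HOME/bin` on `PATH`, the same job can also be submitted with `spark-submit`, which wraps `org.apache.spark.deploy.SparkSubmit` via `spark-class` internally; a sketch under that assumption, followed by a check of the output directory:

```bash
# Equivalent submission via spark-submit (same class, jar and key:value arguments as above)
spark-submit \
    --class com.aliyun.odps.spark.examples.makedata_recsys_r_rate.stat_feature \
    --master yarn --driver-memory 4G --executor-memory 6G --executor-cores 1 --num-executors 16 \
    --conf spark.yarn.executor.memoryOverhead=2048 \
    /mnt/disk1/jch/recommend-emr-dataprocess/target/spark-examples-1.0.0-SNAPSHOT-jar-with-dependencies.jar \
    dataPath:${data_path} \
    featureIndex:${featureIndex} \
    repartition:${repartition} \
    savePath:${savePath}

# Inspect the job output on HDFS afterwards
hdfs dfs -ls "${savePath}"
```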