#!/bin/bash
# Daily driver for the recsys feature pipeline.
#
# Usage: $0 run [data_date]
#   $1  run_mode  — must be the literal string "run" (safety latch so the
#                   pipeline cannot be kicked off by accident)
#   $2  data_date — optional YYYYMMDD; defaults to yesterday

abs_path=$(cd "$(dirname "$0")" && pwd)
run_mode=""
# BUG FIX: the original used an undefined $days ('date -d "-1 $days day"'),
# which only worked because GNU date tolerates the empty expansion.
# Default explicitly to yesterday.
data_date=$(date +%Y%m%d -d "-1 day")

if (( $# == 1 )); then
  run_mode=$1
elif (( $# == 2 )); then
  run_mode=$1
  data_date=$2
else
  echo "usage: $0 run [yyyymmdd]" >&2
  exit 1    # 'exit -1' is non-portable; use a positive status code
fi

if [[ "$run_mode" != "run" ]]; then
  echo "run_mode must be 'run', got '$run_mode'" >&2
  exit 1
fi
# --- runtime environment ---------------------------------------------------
# Pin the Java/Spark/Hadoop toolchain used by every downstream step.
export JAVA_HOME=/usr/lib/jvm/java-1.8.0
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
# Put the pinned Spark binaries ahead of anything else on PATH.
export PATH="$SPARK_HOME/bin:$PATH"
# --- 1. feature statistics -------------------------------------------------
# Aggregate features over the trailing 14-day window [data_date-13, data_date].
stat_sh="${abs_path}/feat_stat.sh"
start_date=$(date -d "$data_date -13 day" +"%Y%m%d")
end_date=$data_date
echo "$(date) sh $stat_sh $start_date $end_date"
# Run in the foreground and fail fast: the original 'cmd & wait' gained
# nothing (single job) and silently discarded the child's exit status.
sh "$stat_sh" "$start_date" "$end_date" || exit 1
sleep 30s
# --- 2. dump feature file from HDFS ---------------------------------------
hadoop_bin=/opt/apps/HADOOP-COMMON/hadoop-3.2.1-1.2.7-alinux3/bin/hadoop
feat_file="${abs_path}/../feat/feat_${end_date}.txt"
feat_hdfs_dir="/dw/recommend/model/83_recsys_feature/"
# Make sure ../feat exists before redirecting into it.
mkdir -p "$(dirname "$feat_file")" || exit 1
echo "$(date) $hadoop_bin fs -text $feat_hdfs_dir/$end_date/part* > $feat_file"
# Foreground + status check: with the original '& wait', a failed 'fs -text'
# left an empty/truncated feature file that the v1 step then consumed.
# The part* glob is expanded by hadoop against HDFS, so it stays quoted here.
"$hadoop_bin" fs -text "$feat_hdfs_dir/$end_date/part*" > "$feat_file" || exit 1
sleep 30s
# --- 3. v1 pipeline --------------------------------------------------------
# Hand the dumped feature file to the v1 training/publishing pipeline.
v1_sh="${abs_path}/v1/v1_pipline_t1.sh"
echo "$(date) sh $v1_sh $feat_file $data_date"
# Foreground + status check; '& wait' before a no-op 'wait' hid failures.
sh "$v1_sh" "$feat_file" "$data_date" || exit 1
sleep 30s