rov_batch.sh

#!/bin/bash
# Resolve the directory this script lives in, so relative paths work
# regardless of the caller's working directory.
abs_path=$(cd "$(dirname "$0")" && pwd)
run_mode=""
# Default data date: two days ago, formatted as YYYYMMDD.
data_date=$(date +%Y%m%d -d "-2 day")
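# Argument handling: $1 must be the literal "run" for the pipeline to
# proceed; an optional $2 overrides data_date (format: YYYYMMDD).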
if (( $# == 1 )); then
    run_mode=$1
elif (( $# == 2 )); then
    run_mode=$1
    data_date=$2
else
    exit 1
fi
if [[ "$run_mode" != "run" ]]; then
    exit 1
fi
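# The pipeline runs three steps in sequence (feature stats, HDFS feature
# export, v1 pipeline), pausing 30s after each step completes.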
# env
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0
# 1. feat stat: compute feature statistics over the 14-day window ending at data_date
stat_sh="${abs_path}/feat_stat.sh"
start_date=$(date -d "$data_date -13 day" +"%Y%m%d")
end_date=$data_date
echo "$(date) sh +x $stat_sh $start_date $end_date"
sh +x "$stat_sh" "$start_date" "$end_date"
sleep 30s
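# "hadoop fs -text" decodes each matching part file (handling compressed
# and sequence-file formats) and writes the content to stdout, so the
# redirect below concatenates all parts into one local text file.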
# 2. feat file: merge the HDFS part files for end_date into a local text file
hadoop_bin=/opt/apps/HADOOP-COMMON/hadoop-3.2.1-1.2.7-alinux3/bin/hadoop
feat_file="${abs_path}/../feat/feat_${end_date}.txt"
feat_hdfs_dir="/dw/recommend/model/83_recsys_feature"
# Ensure the local output directory exists before redirecting into it.
mkdir -p "$(dirname "$feat_file")"
echo "$(date) $hadoop_bin fs -text $feat_hdfs_dir/$end_date/part* > $feat_file"
$hadoop_bin fs -text "$feat_hdfs_dir/$end_date"/part* > "$feat_file"
sleep 30s
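# Step 3 consumes the feature file exported in step 2 above.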
# 3. v1: run the v1 pipeline on the exported feature file
v1_sh="${abs_path}/v1/v1_pipline.sh"
echo "$(date) sh +x $v1_sh $feat_file"
sh +x "$v1_sh" "$feat_file"
sleep 30s
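
# Example invocations (the date below is a hypothetical placeholder;
# the second argument is optional):
#   sh rov_batch.sh run            # data_date defaults to two days ago
#   sh rov_batch.sh run 20240115   # backfill a specific date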