#!/bin/bash
set -x
# source /root/anaconda3/bin/activate py37

sh_path=$(dirname "$0")
source "${sh_path}"/00_common.sh
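
# is_holidays is provided by 00_common.sh. A minimal sketch of the assumed
# contract (hypothetical, for reference only): given a YYYYMMDD argument,
# echo 0 for a working day and 1 for a holiday.
# is_holidays() {
#   case "$(date -d "$1" +%u)" in
#     6|7) echo 1 ;;   # this sketch treats weekends as holidays
#     *)   echo 0 ;;
#   esac
# }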

export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# Global constants
HADOOP=/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop
TRAIN_PATH=/dw/recommend/model/31_ad_sample_data_v4
BUCKET_FEATURE_PATH=/dw/recommend/model/33_ad_train_data_v4
TABLE=alg_recsys_ad_sample_all

today_early_1="$(date -d '1 days ago' +%Y%m%d)"
train_dates=()

# Collect the 7 most recent non-holiday dates, walking backwards from two days ago
init() {
  local count=0
  local offset=0
  local handle_date="$(date -d '2 days ago' +%Y%m%d)"
  while [[ $count -lt 7 ]]; do
    # Candidate date, ${offset} days before handle_date
    date_key=$(date -d "$handle_date -${offset} day" +%Y%m%d)
    # is_holidays comes from 00_common.sh; assumed here to echo 0 for a non-holiday
    if [ "$(is_holidays "$date_key")" -eq 0 ]; then
      # Not a holiday: keep the date and count it
      train_dates+=("$date_key")
      count=$((count + 1))
    fi
    # Always step back one more day so holidays are skipped rather than re-checked
    offset=$((offset + 1))
  done
  echo "train_dates: ${train_dates[@]}"
}

# Entry point
main() {
  init
}

main
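
# Hypothetical example of consuming the collected dates, e.g. building a
# comma-separated list of HDFS paths under ${BUCKET_FEATURE_PATH}
# (not part of this script; shown only to illustrate how the array expands):
# partitions=""
# for d in "${train_dates[@]}"; do
#   partitions="${partitions},${BUCKET_FEATURE_PATH}/${d}"
# done
# partitions="${partitions#,}"   # strip the leading comma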