#!/bin/sh
# Clean up expired HDFS feature/sample data for the recommendation model.

export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

DATE="$(date -d '9 days ago' +%Y%m%d)"
HADOOP="/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop"

path="/dw/recommend/model/feature/user/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi

path="/dw/recommend/model/feature/video/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi

path="/dw/recommend/model/00_sample_data/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi

# Delete one dated HDFS partition: $1 = days ago, $2 = path prefix ending in "dt=".
delete_path() {
    if [ "$#" -ne 2 ]; then
        echo "Usage: delete_path <days_ago> <path_prefix>"
        return 1
    fi
    early=$1
    path=$2
    date="$(date -d "${early} days ago" +%Y%m%d)"
    path_delete="${path}${date}"
    if $HADOOP fs -test -e "${path_delete}"; then
        echo "${path_delete} exists, deleting."
        if $HADOOP fs -rm -r -skipTrash "${path_delete}"; then
            echo "Delete succeeded."
        else
            echo "Delete failed."
        fi
    else
        echo "${path_delete} does not exist."
    fi
}

delete_path 7 /dw/recommend/model/11_str_data_v3/dt=
delete_path 7 /dw/recommend/model/12_ros_data_v3/dt=
delete_path 7 /dw/recommend/model/10_sample_data_v3/dt=
delete_path 3 /dw/recommend/model/09_feature/user/all/dt=
delete_path 3 /dw/recommend/model/09_feature/user/true/dt=
delete_path 3 /dw/recommend/model/09_feature/video/dt=