50_delete_hdfs.sh

#!/bin/sh
# Housekeeping: delete recommend-model HDFS partitions that are 9 days old.
export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
export PATH=$SPARK_HOME/bin:$PATH
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
export JAVA_HOME=/usr/lib/jvm/java-1.8.0

# Partition date: 9 days before today, formatted as yyyyMMdd.
DATE="$(date -d '9 days ago' +%Y%m%d)"
HADOOP="/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop"

# User feature table.
path="/dw/recommend/model/feature/user/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi

# Video feature table.
path="/dw/recommend/model/feature/video/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi

# Sample data table.
path="/dw/recommend/model/00_sample_data/dt=${DATE}"
if $HADOOP fs -test -e "${path}"; then
    echo "${path} exists, deleting."
    $HADOOP fs -rm -r -skipTrash "${path}"
else
    echo "${path} does not exist."
fi