03_delete_timer_file.sh

#!/bin/bash
export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
HADOOP=/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop
PREDICT_HOME=/root/zhaohp/recommend-emr-dataprocess/predict
origin_data_hdfs_dir=/dw/recommend/model/31_ad_sample_data_v3_auto
bucket_feature_hdfs_dir=/dw/recommend/model/33_ad_train_data_v3_auto
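
# Cleanup script: removes local prediction result files older than five days
# and prunes HDFS feature-data directories older than a configurable number of
# days. Which cleanups actually run is controlled in main() below.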
# Delete prediction result files older than five days
delete_predict_5d_ago() {
    echo "=========== Start deleting prediction result files older than 5 days $(date "+%Y-%m-%d %H:%M:%S") ==========="

    tmp_file_name=./files_to_delete.txt

    # Find prediction result files older than five days and save the list to a temp file
    find "$PREDICT_HOME" -type f -mtime +5 > "${tmp_file_name}"

    # Read the temp file line by line and delete each file
    while IFS= read -r file; do
        echo "Deleting: $file"
        rm -f "$file"
    done < "${tmp_file_name}"

    # Remove the temp file
    rm -f "${tmp_file_name}"

    echo "=========== Finished deleting prediction result files older than 5 days $(date "+%Y-%m-%d %H:%M:%S") ==========="
}
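
# delete_hdfs_path <early> <path>: lists the children of <path> with
# `hadoop fs -ls`, reads the modification date (column 6) and the entry path
# (column 8) from each line, and flags entries older than <early> days.
# Because the dates are in YYYY-MM-DD form, a plain lexicographic string
# comparison is also a correct chronological comparison.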
delete_hdfs_path() {
    if [ "$#" -ne 2 ]; then
        echo "Usage: delete_hdfs_path <early> <path>"
        return 1
    fi

    early=$1
    path=$2

    echo "=========== $(date "+%Y-%m-%d %H:%M:%S") Start deleting files older than ${early} days under ${path} ==========="

    EARLY_DAYS_AGO=$(date -d "${early} days ago" +%Y-%m-%d)

    $HADOOP fs -ls "$path" | while read -r line; do
        dir=$(echo "$line" | awk '{print $8}')
        modified_date=$(echo "$line" | awk '{print $6}')
        # Skip the "Found N items" summary line, which has no path column
        [ -z "$dir" ] && continue
        if [[ "${modified_date}" < "${EARLY_DAYS_AGO}" ]]; then
            echo "Deleting: ${dir}"
            # Actual deletion is disabled (dry run); uncomment to really delete
            # $HADOOP fs -rm -r -skipTrash "${dir}"
        fi
    done

    echo "=========== $(date "+%Y-%m-%d %H:%M:%S") Finished deleting files older than ${early} days under ${path} ==========="
}
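
# Only the bucketed-feature cleanup is currently enabled in main(); the local
# prediction cleanup and the raw-data cleanup are kept commented out.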
main() {
    # # Delete prediction result files older than five days
    # delete_predict_5d_ago

    # # Delete raw feature data in HDFS older than seven days
    # delete_hdfs_path 7 $origin_data_hdfs_dir

    # Delete bucketed feature data in HDFS older than seven days
    delete_hdfs_path 7 "$bucket_feature_hdfs_dir"
}

main
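
# Hypothetical crontab entry (the schedule and script path are assumptions, not
# part of this file). Running from a fixed working directory keeps the
# ./files_to_delete.txt temp file in a predictable place:
# 0 1 * * * cd /root/zhaohp/recommend-emr-dataprocess && bash 03_delete_timer_file.sh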