@@ -9,7 +9,6 @@ origin_data_hdfs_dir=/dw/recommend/model/31_ad_sample_data_v3_auto
 bucket_feature_hdfs_dir=/dw/recommend/model/33_ad_train_data_v3_auto
 
 
-
 # Delete prediction result files older than five days
 delete_predict_5d_ago() {
 
@@ -42,7 +41,7 @@ delete_hdfs_path() {
     early=$1
     path=$2
 
-    echo "=========== $(date "+%Y-%m-%d %H:%M:%d") Start deleting files older than ${early} days in directory ${path}  ==========="
+    echo "=========== $(date "+%Y-%m-%d %H:%M:%d") Start deleting files older than ${early} days under directory ${path}  ==========="
 
     EARLY_DAYS_AGO=$(date -d "${early} days ago" +%Y-%m-%d)
 
@@ -50,15 +49,15 @@ delete_hdfs_path() {
     do
         dir=$(echo $line | awk '{print $8}')
         modified_date=$(echo $line | awk '{print $6}')
-
-        if [[ "${modified_date}" < "${SEVEN_DAYS_AGO}" ]]; then
+        echo "${line}"
+        if [[ "${modified_date}" < "${EARLY_DAYS_AGO}" ]]; then
             echo "Deleting: ${dir}"
             # $HADOOP fs -rm -r -skipTrash ${dir}
         fi
 
     done
 
-    echo "=========== $(date "+%Y-%m-%d %H:%M:%d") Finished deleting files older than ${early} days in directory ${path}  ==========="
+    echo "=========== $(date "+%Y-%m-%d %H:%M:%d") Finished deleting files older than ${early} days under directory ${path}  ==========="
 
 }
 
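For context, here is a minimal sketch of how the whole delete_hdfs_path helper might read after this patch. It is assembled from the hunks above plus labeled assumptions: the $HADOOP client is taken from the environment, the awk field positions assume the standard `hadoop fs -ls` column layout (date in field 6, path in field 8), the guard against the "Found N items" header and the example call at the bottom are additions, and the timestamp uses %H:%M:%S where the patch's %H:%M:%d prints the day of month and looks like a typo.

#!/bin/bash
# Sketch only -- reconstructed from the diff hunks, not the full script.

HADOOP=${HADOOP:-hadoop}   # assumed: the real script defines its own hadoop client path

delete_hdfs_path() {
    early=$1    # delete entries older than this many days
    path=$2     # HDFS directory whose children are inspected

    echo "=========== $(date "+%Y-%m-%d %H:%M:%S") Start deleting files older than ${early} days under directory ${path}  ==========="

    EARLY_DAYS_AGO=$(date -d "${early} days ago" +%Y-%m-%d)

    # Assumed `hadoop fs -ls` layout: perms, replicas, owner, group, size, date, time, name,
    # so field 6 is the modification date and field 8 the full path.
    $HADOOP fs -ls "${path}" | while read -r line
    do
        dir=$(echo "$line" | awk '{print $8}')
        modified_date=$(echo "$line" | awk '{print $6}')
        echo "${line}"

        # Skip the "Found N items" summary line, which has no path field (added guard).
        [[ -z "${dir}" ]] && continue

        # Plain string comparison is safe here because both sides are YYYY-MM-DD.
        if [[ "${modified_date}" < "${EARLY_DAYS_AGO}" ]]; then
            echo "Deleting: ${dir}"
            # The patch leaves the actual removal commented out (dry run):
            # $HADOOP fs -rm -r -skipTrash "${dir}"
        fi
    done

    echo "=========== $(date "+%Y-%m-%d %H:%M:%S") Finished deleting files older than ${early} days under directory ${path}  ==========="
}

# Illustration only -- directory reused from the variables above, not necessarily
# what delete_predict_5d_ago actually passes:
# delete_hdfs_path 5 "${bucket_feature_hdfs_dir}"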