#!/bin/bash
#
# process_mq.sh — watchdog for an MQ-driven crawler process.
#
# Refreshes the shell environment, pulls the latest code, then checks whether
# run_<crawler>_<log_type>.py is running; if not, restarts it in the background
# with the matching MQ topic/group id. Finally prunes log files older than 5 days.
#
# Usage: process_mq.sh <crawler> <path> <log_type> <env>
#   crawler  - crawler name (used in the run_*.py script / topic / GID names)
#   path     - crawler directory name under the repo root
#   log_type - crawler strategy / log type
#   env      - runtime environment: dev | hk | (anything else = prod default)

if [[ $# -lt 4 ]]; then
  echo "Usage: $0 <crawler> <path> <log_type> <env>" >&2
  exit 2
fi

crawler=$1   # which crawler
path=$2      # crawler path under the repo
log_type=$3  # crawler strategy
env=$4       # environment

if [[ "${env}" == "dev" ]]; then
  piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
elif [[ "${env}" == "hk" ]]; then
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
elif [[ "${crawler}" == "kyk" ]]; then
  # NOTE(review): this branch tests ${crawler}, not ${env}, unlike its siblings —
  # presumably a developer machine keyed off the crawler name; confirm intent.
  piaoquan_crawler_dir=/Users/lieyunye/Desktop/crawler/piaoquan_crawler/
  profile_path=./base_profile
  python=python3
else
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
fi
# Set once for every branch: the original "kyk" branch forgot log_path, which
# turned every '>> ${log_path}' below into an ambiguous redirect on that host.
log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log

echo "run_${crawler}_${log_type}.py"
echo "topic_${crawler}_${log_type}_${env}"
echo "GID_${crawler}_${log_type}_${env}"

echo "$(date "+%Y-%m-%d %H:%M:%S") 开始监测爬虫进程状态" >> "${log_path}"
echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量..." >> "${log_path}"
# Use the per-branch profile_path (it was assigned above but the original
# always sourced /etc/profile, leaving the variable dead).
cd ~ && source "${profile_path}"
echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量完成!" >> "${log_path}"

echo "$(date "+%Y-%m-%d %H:%M:%S") 正在更新代码..." >> "${log_path}"
cd "${piaoquan_crawler_dir}" && git pull origin master --force
echo "$(date "+%Y-%m-%d %H:%M:%S") 代码更新完成!" >> "${log_path}"

# ====================接入爬虫平台,且调用MQ进程检测====================
echo "$(date "+%Y-%m-%d %H:%M:%S") 正在监测 ${crawler}_${log_type} 进程状态" >> "${log_path}"
# grep -v grep exits non-zero when no crawler process is found.
if ps -ef | grep "run_${crawler}_${log_type}.py" | grep -v "grep" > /dev/null; then
  echo "$(date "+%Y-%m-%d %H:%M:%S") ${crawler}_${log_type} 进程状态正常" >> "${log_path}"
else
  echo "$(date "+%Y-%m-%d_%H:%M:%S") 异常停止,正在重启!" >> "${log_path}"
  cd "${piaoquan_crawler_dir}" && nohup ${python} -u "${path}/${path}_main/run_${crawler}_${log_type}.py" --log_type="${log_type}" --crawler="${path}" --topic_name="${crawler}_${log_type}_${env}" --group_id="${crawler}_${log_type}_${env}" --env="${env}" >> "${path}/logs/${log_type}-shell.log" 2>&1 &
  echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> "${log_path}"
fi
# ==================================================================

# 删除日志 — prune watchdog logs older than 5 days.
echo "$(date "+%Y-%m-%d %H:%M:%S") 开始清理 5 天前的日志文件" >> "${log_path}"
# Plain files, so rm -f is enough; '+' batches paths into one rm invocation.
find "${piaoquan_crawler_dir}main/main_logs/" -mtime +5 -name "*.log" -exec rm -f -- {} +
echo "$(date "+%Y-%m-%d %H:%M:%S") 日志文件清理完毕" >> "${log_path}"
exit 0