process_mq.sh 5.6 KB

#!/bin/bash
crawler=$1   # which crawler
path=$2      # crawler path
log_type=$3  # crawler strategy
env=$4       # environment
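# Illustrative invocation (a sketch: <spider_dir> and the env value "prod" are placeholders,
# while the crawler key "sph" and strategy "recommend" come from the branches below):
#   sh ./main/process_mq.sh sph <spider_dir> recommend prod
# Typically run periodically, e.g. from cron (hypothetical schedule and install path):
#   * * * * * /bin/sh /root/piaoquan_crawler/main/process_mq.sh sph <spider_dir> recommend prod >/dev/null 2>&1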
if [ "${env}" = "dev" ]; then
  piaoquan_crawler_dir=/Users/tzld/Desktop/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "zfqz" ]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=./base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${env}" = "hk" ]; then
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "kykhcm" ]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
#elif [ "${crawler}" = "hhxxzfd" ]; then
#  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
#  profile_path=/.base_profile
#  python=python3
#  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "kykjk" ]; then
  piaoquan_crawler_dir=/Users/kanyikan/Desktop/crawler/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "sph" ] && [ "${log_type}" = "search" ]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "kyk" ]; then
  piaoquan_crawler_dir=/Users/lieyunye/Desktop/crawler/piaoquan_crawler/
  profile_path=./base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "sph" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/lieyunye/Desktop/crawler/piaoquan_crawler/
  profile_path=./base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "xgms" ]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "xg" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/kanyikan/Desktop/crawler/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "xngplus" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/a123456/Desktop/spider/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "xngrule" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/tzld/Desktop/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
# NOTE: this branch is unreachable; the generic zfqz branch above matches first
elif [ "${crawler}" = "zfqz" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/tzld/Desktop/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
elif [ "${crawler}" = "xnguser" ] && [ "${log_type}" = "recommend" ]; then
  piaoquan_crawler_dir=/Users/tzld/Desktop/piaoquan_crawler/
  profile_path=/.base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
else
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log
fi
echo "run_${crawler}_${log_type}.py"
echo "topic:${crawler}_${log_type}_${env}"
echo "GID:${crawler}_${log_type}_${env}"
time=$(date +%H:%M:%S)
echo "$(date "+%Y-%m-%d %H:%M:%S") Starting crawler process check" >> "${log_path}"
#echo "$(date "+%Y-%m-%d %H:%M:%S") Refreshing environment variables..." >> "${log_path}"
#cd ~ && source /etc/profile
#echo "$(date "+%Y-%m-%d %H:%M:%S") Environment variables refreshed!" >> "${log_path}"
echo "$(date "+%Y-%m-%d %H:%M:%S") Updating code..." >> "${log_path}"
cd "${piaoquan_crawler_dir}" && git pull origin master --force
echo "$(date "+%Y-%m-%d %H:%M:%S") Code update finished!" >> "${log_path}"
# ==================== Crawler platform integration: MQ process check ====================
echo "$(date "+%Y-%m-%d %H:%M:%S") Checking the ${crawler}_${log_type} process" >> "${log_path}"
ps -ef | grep "run_${crawler}_${log_type}.py" | grep -v "grep"
if [ "$?" -eq 1 ]; then
  echo "$(date "+%Y-%m-%d %H:%M:%S") Process stopped unexpectedly, restarting!" >> "${log_path}"
  cd "${piaoquan_crawler_dir}" && nohup ${python} -u "${path}/${path}_main/run_${crawler}_${log_type}.py" --log_type="${log_type}" --crawler="${path}" --topic_name="${crawler}_${log_type}_${env}" --group_id="${crawler}_${log_type}_${env}" --env="${env}" >> "${path}/logs/${log_type}-shell.log" 2>&1 &
  echo "$(date "+%Y-%m-%d %H:%M:%S") Restart complete!" >> "${log_path}"
else
  echo "$(date "+%Y-%m-%d %H:%M:%S") ${crawler}_${log_type} process is running normally" >> "${log_path}"
fi
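# An equivalent liveness check (not part of the original script) could use pgrep,
# which matches the full command line with -f and avoids the extra "grep -v grep" filter:
#   if ! pgrep -f "run_${crawler}_${log_type}.py" > /dev/null; then
#       ... restart as above ...
#   fi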
# ==================================================================
# Clean up old logs
echo "$(date "+%Y-%m-%d %H:%M:%S") Cleaning up log files older than 10 days" >> "${log_path}"
find "${piaoquan_crawler_dir}main/main_logs/" -mtime +10 -name "*.log" -exec rm -rf {} \;
echo "$(date "+%Y-%m-%d %H:%M:%S") Log cleanup finished" >> "${log_path}"
exit 0