  1. #! /bin/bash
  2. # **********线下爬虫********** #
  3. env=$1 # 爬虫运行环境,正式环境: prod / 测试环境: dev
  4. #echo ${env}
  5. if [ ${env} = "dev" ];then
  6. piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  7. profile_path=/etc/profile
  8. node_path=/opt/homebrew/bin/node
  9. log_path=${piaoquan_crawler_dir}main/main_logs/process-$(date +%Y-%m-%d).log
  10. else
  11. piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  12. profile_path=./base_profile
  13. node_path=/usr/local/bin/node
  14. log_path=${piaoquan_crawler_dir}main/main_logs/process-$(date +%Y-%m-%d).log
  15. fi
  16. time=$(date +%H:%M:%S)
  17. echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量..." >> ${log_path}
  18. cd ~ && source ${profile_path}
  19. echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量完成!" >> ${log_path}
  20. echo "$(date "+%Y-%m-%d %H:%M:%S") 正在重启Appium..." >> ${log_path}
  21. ps -ef | grep "/Applications/Appium.app/Contents/Resources/app/node_modules/appium/build/lib/main.js" | grep -v "grep"
  22. if [ "$?" -eq 1 ];then
  23. echo "$(date "+%Y-%m-%d %H:%M:%S") Appium异常停止,正在重启!" >> ${log_path}
  24. nohup ${node_path} /Applications/Appium.app/Contents/Resources/app/node_modules/appium/build/lib/main.js >>./nohup.log 2>&1 &
  25. echo "$(date "+%Y-%m-%d %H:%M:%S") 重启Appium完毕!" >> ${log_path}
  26. else
  27. echo "$(date "+%Y-%m-%d %H:%M:%S") Appium 进程状态正常" >> ${log_path}
  28. fi
  29. # 吉祥幸福
  30. if [[ "$time" > "00:00:00" ]] && [[ "$time" < "08:59:59" ]]; then
  31. echo "$(date "+%Y-%m-%d %H:%M:%S") 开始启动 吉祥幸福 爬虫脚本任务" >> ${log_path}
  32. ps aux | grep run_zhongmiaoyinxin | grep -v grep | awk '{print $2}' | xargs kill -9
  33. ps aux | grep run_zhiqingtiantiankan | grep -v grep | awk '{print $2}' | xargs kill -9
  34. ps aux | grep run_ganggangdouchuan | grep -v grep | awk '{print $2}' | xargs kill -9
  35. ps -ef | grep "run_jixiangxingfu_recommend.py" | grep -v "grep"
  36. if [ "$?" -eq 1 ];then
  37. echo "$(date "+%Y-%m-%d %H:%M:%S") 吉祥幸福爬虫, 异常停止, 正在重启!" >> ${log_path}
  38. cd ${piaoquan_crawler_dir}
  39. nohup python3 -u jixiangxingfu/jixiangxingfu_main/run_jixiangxingfu_recommend.py --log_type="recommend" --crawler="jixiangxingfu" --env=${env} >>jixiangxingfu/logs/nohup-recommend.log 2>&1 &
  40. echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> ${log_path}
  41. else
  42. echo "$(date "+%Y-%m-%d %H:%M:%S") 吉祥幸福 进程状态正常" >> ${log_path}
  43. fi
  44. else
  45. echo "$(date "+%Y-%m-%d %H:%M:%S") 吉祥幸福 爬虫脚本任务结束" >> ${log_path}
  46. fi
  47. # 知青天天看
  48. if [[ "$time" > "09:00:00" ]] && [[ "$time" < "12:59:59" ]]; then
  49. echo "$(date "+%Y-%m-%d %H:%M:%S") 开始启动 知青天天看 爬虫脚本任务" >> ${log_path}
  50. ps aux | grep run_zhongmiaoyinxin | grep -v grep | awk '{print $2}' | xargs kill -9
  51. ps aux | grep run_ganggangdouchuan | grep -v grep | awk '{print $2}' | xargs kill -9
  52. ps aux | grep run_jixiangxingfu | grep -v grep | awk '{print $2}' | xargs kill -9
  53. ps -ef | grep "run_zhiqingtiantiankan_recommend.py" | grep -v "grep"
  54. if [ "$?" -eq 1 ];then
  55. echo "$(date "+%Y-%m-%d %H:%M:%S") 知青天天看小程序爬虫, 异常停止, 正在重启!" >> ${log_path}
  56. cd ${piaoquan_crawler_dir}
  57. nohup python3 -u zhiqingtiantiankan/zhiqingtiantiankan_main/run_zhiqingtiantiankan_recommend.py --log_type="recommend" --crawler="zhiqingtiantiankan" --env=${env} >>zhiqingtiantiankan/logs/nohup-recommend.log 2>&1 &
  58. echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> ${log_path}
  59. else
  60. echo "$(date "+%Y-%m-%d %H:%M:%S") 知青天天看小程序爬虫, 进程状态正常" >> ${log_path}
  61. fi
  62. else
  63. echo "$(date "+%Y-%m-%d %H:%M:%S") 知青天天看 爬虫脚本任务结束" >> ${log_path}
  64. fi
  65. # 刚刚都传
  66. if [[ "$time" > "13:00:00" ]] && [[ "$time" < "16:59:59" ]]; then
  67. echo "$(date "+%Y-%m-%d %H:%M:%S") 开始启动 刚刚都传 爬虫脚本任务" >> ${log_path}
  68. ps aux | grep run_zhongmiaoyinxin | grep -v grep | awk '{print $2}' | xargs kill -9
  69. ps aux | grep run_zhiqingtiantiankan | grep -v grep | awk '{print $2}' | xargs kill -9
  70. ps aux | grep run_jixiangxingfu | grep -v grep | awk '{print $2}' | xargs kill -9
  71. ps -ef | grep "run_ganggangdouchuan_recommend.py" | grep -v "grep"
  72. if [ "$?" -eq 1 ];then
  73. echo "$(date "+%Y-%m-%d %H:%M:%S") 刚刚都传小程序爬虫, 异常停止, 正在重启!" >> ${log_path}
  74. cd ${piaoquan_crawler_dir}
  75. nohup python3 -u ganggangdouchuan/ganggangdouchuan_main/run_ganggangdouchuan_recommend.py --log_type="recommend" --crawler="ganggangdouchuan" --env=${env} >>ganggangdouchuan/logs/nohup-recommend.log 2>&1 &
  76. echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> ${log_path}
  77. else
  78. echo "$(date "+%Y-%m-%d %H:%M:%S") 刚刚都传小程序爬虫, 进程状态正常" >> ${log_path}
  79. fi
  80. else
  81. echo "$(date "+%Y-%m-%d %H:%M:%S") 刚刚都传小程序爬虫, 任务结束" >> ${log_path}
  82. fi
  83. # 众妙音信
  84. if [[ "$time" > "17:00:00" ]] && [[ "$time" < "23:59:59" ]]; then
  85. echo "$(date "+%Y-%m-%d %H:%M:%S") 开始启动 众妙音信 爬虫脚本任务" >> ${log_path}
  86. ps aux | grep run_ganggangdouchuan | grep -v grep | awk '{print $2}' | xargs kill -9
  87. ps aux | grep run_zhiqingtiantiankan | grep -v grep | awk '{print $2}' | xargs kill -9
  88. ps aux | grep run_jixiangxingfu | grep -v grep | awk '{print $2}' | xargs kill -9
  89. ps -ef | grep "run_zhongmiaoyinxin_recommend.py" | grep -v "grep"
  90. if [ "$?" -eq 1 ];then
  91. echo "$(date "+%Y-%m-%d %H:%M:%S") 众妙音信小程序爬虫, 异常停止, 正在重启!" >> ${log_path}
  92. cd ${piaoquan_crawler_dir}
  93. nohup python3 -u zhongmiaoyinxin/zhongmiaoyinxin_main/run_zhongmiaoyinxin_recommend.py --log_type="recommend" --crawler="zhongmiaoyinxin" --env=${env} >>zhongmiaoyinxin/logs/nohup-recommend.log 2>&1 &
  94. echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> ${log_path}
  95. else
  96. echo "$(date "+%Y-%m-%d %H:%M:%S") 众妙音信小程序爬虫, 进程状态正常" >> ${log_path}
  97. fi
  98. else
  99. echo "$(date "+%Y-%m-%d %H:%M:%S") 众妙音信 爬虫脚本任务结束" >> ${log_path}
  100. fi
  101. # 删除日志
  102. echo "$(date "+%Y-%m-%d %H:%M:%S") 开始清理 10 天前的日志文件" >> ${log_path}
  103. find ${piaoquan_crawler_dir}main/main_logs/ -mtime +10 -name "*.log" -exec rm -rf {} \;
  104. echo "$(date "+%Y-%m-%d %H:%M:%S") 日志文件清理完毕" >> ${log_path}
  105. exit 0