process_offline.sh

#!/bin/bash
# ********** Offline crawler ********** #
env=$1  # environment: dev or prod
if [ "${env}" = "dev" ]; then
  piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  profile_path=/etc/profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-$(date +%Y-%m-%d).log
else
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/crawler/piaoquan_crawler/
  # profile_path=/etc/profile
  profile_path=./base_profile
  python=python3
  log_path=${piaoquan_crawler_dir}main/main_logs/process-$(date +%Y-%m-%d).log
fi
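# A minimal sketch of how this script could be invoked, assuming it is driven by
# cron on the crawler host (the install path and 10-minute interval below are
# assumptions, not taken from this script):
#   */10 * * * * /bin/bash /Users/piaoquan/Desktop/crawler/piaoquan_crawler/main/process_offline.sh prod
# Manual runs pass the environment the same way: `sh process_offline.sh dev`.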
time=$(date +%H:%M:%S)
echo "$(date "+%Y-%m-%d %H:%M:%S") Updating environment variables..." >> "${log_path}"
cd ~ && source "${profile_path}"
echo "$(date "+%Y-%m-%d %H:%M:%S") Environment variables updated!" >> "${log_path}"
## jixiangxingfu (吉祥幸福)
#if [[ "$time" > "00:00:0" ]] && [[ "$time" < "08:59:59" ]]; then
#  echo "Starting the jixiangxingfu crawler task" >> ${log_path}
#  ps aux | grep run_zhongmiaoyinxin | grep -v grep | awk '{print $2}' | xargs kill -9
#  ps aux | grep run_zhiqingtiantiankan | grep -v grep | awk '{print $2}' | xargs kill -9
#  ps aux | grep run_ganggangdouchuan | grep -v grep | awk '{print $2}' | xargs kill -9
#  ps -ef | grep "run_jixiangxingfu_recommend.py" | grep -v "grep"
#  if [ "$?" -eq 1 ];then
#    echo "$(date "+%Y-%m-%d_%H:%M:%S") Stopped unexpectedly, restarting!" >> ${log_path}
#    if [ ${env} = "dev" ];then
#      cd ${piaoquan_crawler_dir} && sh main/scheduling_main.sh ./jixiangxingfu/jixiangxingfu_main/run_jixiangxingfu_recommend.py --log_type="recommend" --crawler="jixiangxingfu" --env="dev" jixiangxingfu/logs/nohup-recommend.log
#    else
#      cd ${piaoquan_crawler_dir} && /bin/sh main/scheduling_main.sh ./jixiangxingfu/jixiangxingfu_main/run_jixiangxingfu_recommend.py --log_type="recommend" --crawler="jixiangxingfu" --env="prod" jixiangxingfu/logs/nohup-recommend.log
#    fi
#    echo "$(date "+%Y-%m-%d %H:%M:%S") Restart complete!" >> ${log_path}
#  else
#    echo "$(date "+%Y-%m-%d %H:%M:%S") jixiangxingfu process is running normally" >> ${log_path}
#  fi
#else
#  echo "jixiangxingfu crawler task window has ended" >> ${log_path}
#fi
## zhiqingtiantiankan (知青天天看)
#if [[ "$time" > "09:00:0" ]] && [[ "$time" < "12:59:59" ]]; then
#  echo "Starting the zhiqingtiantiankan crawler task" >> ${log_path}
#
#else
#  echo "zhiqingtiantiankan crawler task window has ended" >> ${log_path}
#fi
# ganggangdouchuan (刚刚都传)
if [[ "$time" > "13:00:0" ]] && [[ "$time" < "22:59:59" ]]; then
  echo "Starting the ganggangdouchuan crawler task" >> "${log_path}"
  # Kill the runners of the other crawlers before this window starts
  ps aux | grep run_zhongmiaoyinxin | grep -v grep | awk '{print $2}' | xargs kill -9
  ps aux | grep run_zhiqingtiantiankan | grep -v grep | awk '{print $2}' | xargs kill -9
  ps aux | grep run_jixiangxingfu | grep -v grep | awk '{print $2}' | xargs kill -9
  # The pipeline exits 1 when no matching process is found, i.e. the runner is down
  ps -ef | grep "run_ganggangdouchuan_recommend.py" | grep -v "grep"
  if [ "$?" -eq 1 ]; then
    echo "$(date "+%Y-%m-%d_%H:%M:%S") Stopped unexpectedly, restarting!" >> "${log_path}"
    if [ "${env}" = "dev" ]; then
      cd "${piaoquan_crawler_dir}" && sh main/scheduling_main.sh ./ganggangdouchuan/ganggangdouchuan_main/run_ganggangdouchuan_recommend.py --log_type="recommend" --crawler="ganggangdouchuan" --env="dev" ganggangdouchuan/logs/nohup-recommend.log
    else
      cd "${piaoquan_crawler_dir}" && /bin/sh main/scheduling_main.sh ./ganggangdouchuan/ganggangdouchuan_main/run_ganggangdouchuan_recommend.py --log_type="recommend" --crawler="ganggangdouchuan" --env="prod" ganggangdouchuan/logs/nohup-recommend.log
    fi
    echo "$(date "+%Y-%m-%d %H:%M:%S") Restart complete!" >> "${log_path}"
  else
    echo "$(date "+%Y-%m-%d %H:%M:%S") ganggangdouchuan process is running normally" >> "${log_path}"
  fi
else
  echo "ganggangdouchuan crawler task window has ended" >> "${log_path}"
fi
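# The window blocks above all repeat the same "grep for the runner, restart it if
# missing" pattern. A hedged, commented-out sketch of how that could be factored
# into one helper follows; the function name and the use of pgrep are assumptions,
# not part of the original script:
#check_and_restart() {
#  local runner=$1    # e.g. run_ganggangdouchuan_recommend.py
#  local crawler=$2   # e.g. ganggangdouchuan
#  if ! pgrep -f "${runner}" > /dev/null; then
#    echo "$(date "+%Y-%m-%d_%H:%M:%S") ${crawler} stopped unexpectedly, restarting!" >> "${log_path}"
#    cd "${piaoquan_crawler_dir}" && /bin/sh main/scheduling_main.sh \
#      "./${crawler}/${crawler}_main/${runner}" --log_type="recommend" \
#      --crawler="${crawler}" --env="${env}" "${crawler}/logs/nohup-recommend.log"
#  else
#    echo "$(date "+%Y-%m-%d %H:%M:%S") ${crawler} process is running normally" >> "${log_path}"
#  fi
#}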
## zhongmiaoyinxin (众妙音信)
#if [[ "$time" > "17:00:0" ]] && [[ "$time" < "23:59:59" ]]; then
#  echo "Starting the zhongmiaoyinxin crawler task" >> ${log_path}
#
#else
#  echo "zhongmiaoyinxin crawler task window has ended" >> ${log_path}
#fi
# Delete old logs
echo "$(date "+%Y-%m-%d %H:%M:%S") Cleaning up log files older than 5 days" >> "${log_path}"
find "${piaoquan_crawler_dir}main/main_logs/" -mtime +5 -name "*.log" -exec rm -rf {} \;
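# An equivalent cleanup using find's built-in -delete (supported by both GNU and
# BSD find) avoids spawning rm for every file; kept here as a commented-out alternative:
# find "${piaoquan_crawler_dir}main/main_logs/" -mtime +5 -name "*.log" -delete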
echo "$(date "+%Y-%m-%d %H:%M:%S") Log file cleanup complete" >> "${log_path}"
exit 0