main.sh

#!/bin/bash
# Kanyikan (看一看) mini program - Moments ranking list
# sh ./main/main.sh ./kanyikan/kanyikan_main/run_kanyikan_moment.py --log_type="moment" --crawler="kanyikan" --strategy="kanyikan_moment" --our_uid="kanyikan_moment" --oss_endpoint="out" --env="dev" ./kanyikan/nohup.log local
# YouTube targeted-crawling strategy
# sh ./main/main.sh ./youtube/youtube_main/run_youtube_follow.py --log_type="follow" --crawler="youtube" --strategy="定向爬虫策略" --oss_endpoint="out" --env="dev" --machine="local" youtube/nohup.log
# ps aux | grep run_youtube | grep Python | grep -v grep | awk '{print $2}' | xargs kill -9
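# Positional argument order (must match the assignments below); a generic template,
# with placeholder names chosen here only for illustration:
#   sh ./main/main.sh <crawler_dir> <log_type> <crawler> <strategy> <oss_endpoint> <env> <machine> <nohup_dir>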
crawler_dir=$1   # path to the crawler's Python entry script
log_type=$2      # log type
crawler=$3       # which crawler
strategy=$4      # crawler strategy
oss_endpoint=$5  # OSS endpoint: "inner" when the script is deployed on an Aliyun server, otherwise "out"
env=$6           # upload environment: "prod" for production, "dev" for testing
machine=$7       # deployment machine: "aliyun" for Aliyun; offline machines use "macpro", "macair" or "local"
nohup_dir=$8     # path of the nohup log file
echo "Start"
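# Per-machine configuration: repository directory, shell profile to source, and the node binary used to launch Appium.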
if [ "${machine}" = "macpro" ]; then
  piaoquan_crawler_dir=/Users/lieyunye/Desktop/piaoquan_crawler/
  profile_path=.bash_profile
  node_path=/usr/local/bin/node
elif [ "${machine}" = "macair" ]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=./base_profile
  node_path=/usr/local/bin/node
elif [ "${machine}" = "aliyun" ]; then
  piaoquan_crawler_dir=/data5/wangkun/piaoquan_crawler/
  profile_path=/etc/profile
elif [ "${machine}" = "local" ]; then
  piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  profile_path=/etc/profile
  node_path=/opt/homebrew/bin/node
else
  piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  profile_path=/etc/profile
  node_path=/opt/homebrew/bin/node
fi
echo "$(date "+%Y-%m-%d %H:%M:%S") Refreshing environment variables..."
cd ~ && source "${profile_path}"
echo "$(date "+%Y-%m-%d %H:%M:%S") Environment variables refreshed!"
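# Identify the running crawler from $3: ${crawler##*=} strips everything up to the last '='
# (e.g. "--crawler=youtube" becomes "youtube"), and the "run_" prefix matches the entry script's process name.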
echo "$(date "+%Y-%m-%d %H:%M:%S") Killing the old crawler process..."
grep_str=run_${crawler##*=}
ps aux | grep "${grep_str}" | grep Python | grep -v grep | awk '{print $2}' | xargs kill -9
echo "$(date "+%Y-%m-%d %H:%M:%S") Old process killed!"
echo "$(date "+%Y-%m-%d %H:%M:%S") Updating code..."
cd "${piaoquan_crawler_dir}" && git pull origin master --force && rm -f "${piaoquan_crawler_dir}"main/nohup.log && rm -f "${piaoquan_crawler_dir}${nohup_dir}"
#echo ${piaoquan_crawler_dir}
echo "$(date "+%Y-%m-%d %H:%M:%S") Code updated!"
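# Non-Aliyun (local) machines restart Appium and adb so device automation starts from a clean state;
# the Aliyun deployment does not need these services, so the step is skipped there.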
if [ "${machine}" != "aliyun" ]; then
  echo "$(date "+%Y-%m-%d %H:%M:%S") Restarting Appium..."
  ps aux | grep Appium.app | grep -v grep | awk '{print $2}' | xargs kill -9
  nohup "${node_path}" /Applications/Appium.app/Contents/Resources/app/node_modules/appium/build/lib/main.js >>./nohup.log 2>&1 &
  echo "$(date "+%Y-%m-%d %H:%M:%S") Appium restarted!"
  echo "$(date "+%Y-%m-%d %H:%M:%S") Restarting adb..."
  adb kill-server
  adb start-server
  echo "$(date "+%Y-%m-%d %H:%M:%S") adb restarted!"
else
  echo "No need to restart the Appium and adb services"
fi
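# Relaunch the crawler: the positional arguments are forwarded to the Python entry script,
# and its output is appended to the nohup log given as $8.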
echo "$(date "+%Y-%m-%d %H:%M:%S") Restarting the crawler service..."
nohup python3 -u "${crawler_dir}" "${log_type}" "${crawler}" "${strategy}" "${oss_endpoint}" "${env}" "${machine}" >>"${nohup_dir}" 2>&1 &
echo "$(date "+%Y-%m-%d %H:%M:%S") Service restarted!"
exit 0