#!/bin/bash
# Watchdog for piaoquan MQ crawler workers.
#
# Usage: $0 <crawler> <path> <log_type> <env>
#   crawler  - which crawler (e.g. xg, sph, kyk, zfqz)
#   path     - crawler module directory name inside the repo
#   log_type - crawler strategy (e.g. recommend, search)
#   env      - deployment environment (dev / hk / prod)
#
# Pulls the latest code, checks whether run_<crawler>_<log_type>.py is
# running, restarts it via nohup when it is not, then prunes worker logs
# older than 10 days.  All progress messages are appended to ${log_path}.

crawler=$1   # which crawler
path=$2      # crawler repo sub-path
log_type=$3  # crawler strategy
env=$4       # environment

# Select the install dir and shell profile for this host.  Branch order
# matters: dev/hk environment checks and crawler-specific hosts take
# precedence over the generic fallback.  [[ ]] with quoted operands keeps
# the tests valid even when an argument is empty or missing.
if [[ "${env}" == "dev" ]]; then
  piaoquan_crawler_dir=/Users/wangkun/Desktop/crawler/piaoquan_crawler/
  profile_path=/etc/profile
elif [[ "${crawler}" == "zfqz" ]]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=./base_profile
elif [[ "${env}" == "hk" ]]; then
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
elif [[ "${crawler}" == "kykjk" ]]; then
  piaoquan_crawler_dir=/Users/kanyikan/Desktop/crawler/piaoquan_crawler/
  profile_path=/.base_profile
elif [[ "${crawler}" == "sph" && "${log_type}" == "search" ]]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=/etc/profile
elif [[ "${crawler}" == "kyk" || "${crawler}" == "sph" ]]; then
  piaoquan_crawler_dir=/Users/lieyunye/Desktop/crawler/piaoquan_crawler/
  profile_path=./base_profile
elif [[ "${crawler}" == "xgms" ]]; then
  piaoquan_crawler_dir=/Users/piaoquan/Desktop/piaoquan_crawler/
  profile_path=/etc/profile
elif [[ "${crawler}" == "xg" && "${log_type}" == "recommend" ]]; then
  piaoquan_crawler_dir=/Users/kanyikan/Desktop/crawler/piaoquan_crawler/
  profile_path=/.base_profile
else
  piaoquan_crawler_dir=/root/piaoquan_crawler/
  profile_path=/etc/profile
fi
# NOTE(review): profile_path is currently unused (the `source` below is
# commented out) — kept for when env refresh is re-enabled.
# Identical in every branch above — set once instead of nine times.
python=python3
log_path=${piaoquan_crawler_dir}main/main_logs/process-mq-$(date +%Y-%m-%d).log

echo "run_${crawler}_${log_type}.py"
echo "topic:${crawler}_${log_type}_${env}"
echo "GID:${crawler}_${log_type}_${env}"

echo "$(date "+%Y-%m-%d %H:%M:%S") 开始监测爬虫进程状态" >> "${log_path}"
#echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量..." >> "${log_path}"
#cd ~ && source /etc/profile
#echo "$(date "+%Y-%m-%d %H:%M:%S") 更新环境变量完成!" >> "${log_path}"

echo "$(date "+%Y-%m-%d %H:%M:%S") 正在更新代码..." >> "${log_path}"
cd "${piaoquan_crawler_dir}" && git pull origin master --force
echo "$(date "+%Y-%m-%d %H:%M:%S") 代码更新完成!" >> "${log_path}"

# ==== Crawler-platform workers driven via MQ: liveness check ====
echo "$(date "+%Y-%m-%d %H:%M:%S") 正在监测 ${crawler}_${log_type} 进程状态" >> "${log_path}"
# `grep -v grep` excludes this pipeline's own grep from the match; a
# non-zero pipeline status therefore means the worker is not running.
# Testing the pipeline directly in the `if` avoids the fragile
# free-standing `$?` check of the original.
if ! ps -ef | grep "run_${crawler}_${log_type}.py" | grep -v "grep"; then
  echo "$(date "+%Y-%m-%d_%H:%M:%S") 异常停止,正在重启!" >> "${log_path}"
  # Relaunch detached; worker stdout/stderr goes to its own shell log.
  cd "${piaoquan_crawler_dir}" && nohup ${python} -u "${path}/${path}_main/run_${crawler}_${log_type}.py" \
    --log_type="${log_type}" --crawler="${path}" \
    --topic_name="${crawler}_${log_type}_${env}" --group_id="${crawler}_${log_type}_${env}" \
    --env="${env}" >> "${path}/logs/${log_type}-shell.log" 2>&1 &
  echo "$(date "+%Y-%m-%d %H:%M:%S") 重启完成!" >> "${log_path}"
else
  echo "$(date "+%Y-%m-%d %H:%M:%S") ${crawler}_${log_type} 进程状态正常" >> "${log_path}"
fi
# ================================================================

# Prune worker log files older than 10 days (plain files: -f suffices,
# no need for the original's recursive -rf).
echo "$(date "+%Y-%m-%d %H:%M:%S") 开始清理 10 天前的日志文件" >> "${log_path}"
find "${piaoquan_crawler_dir}main/main_logs/" -mtime +10 -name "*.log" -exec rm -f -- {} \;
echo "$(date "+%Y-%m-%d %H:%M:%S") 日志文件清理完毕" >> "${log_path}"
exit 0