"""Prometheus exporter for instance health, H5 probes and video-task metrics.

Collects gauges from several sources (Aliyun ECS instance list in the DB,
HTTP probes of the H5 pages, the long-video production-task tables, and TTS
logs) and serves them via prometheus_client's HTTP server on port 9091.
"""
from flask import Flask, Response
from DBSession import session_maker
from model import InstanceList
from utils import *
import atexit
from aliyunsdkcore.client import AcsClient
from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
from apscheduler.schedulers.background import BackgroundScheduler
import json
from model_longvideo import produce_video_task
from DBSession_longvideo import session_maker_longvideo
from prometheus_client import Gauge, Counter, generate_latest, start_http_server
from prometheus_client.core import CollectorRegistry
from scheduler_jobs import interface_info_count
from ex_response import ex_response
import time
import threading

app = Flask(__name__)
registry = CollectorRegistry(auto_describe=False)

# Background scheduler: refresh interface statistics once a minute and make
# sure it is shut down cleanly on interpreter exit.
scheduler = BackgroundScheduler()
scheduler.add_job(func=interface_info_count, trigger="interval", seconds=60)
scheduler.start()
atexit.register(lambda: scheduler.shutdown())

# SECURITY: hard-coded Aliyun AccessKey id/secret committed to source.
# These credentials are exposed and should be rotated immediately and loaded
# from the environment or a secrets manager instead of a literal.
client = AcsClient('LTAI4GBWbFvvXoXsSVBe1o9f', 'kRAikWitb4kDxaAyBqNrmLmllMEDO3', 'cn-hangzhou')

# --- Prometheus gauges -----------------------------------------------------
healthcheck_status = Gauge("healthcheck_status", "ipaddress", ['instance_id', 'server_name', 'ipaddress'], registry=registry)
url_http_avgtime = Gauge("url_http_times_avgs", "url of avgs", ['url'], registry=registry)
url_http_qps = Gauge("url_http_qps", "url of qps", ['url'], registry=registry)
probe_http_status_code = Gauge("http_status_code", 'h5', ['server_name'], registry=registry)
probe_http_total_time = Gauge("http_total_time", 'h5', ['server_name'], registry=registry)
probe_http_dns_time = Gauge("http_dns_time", 'h5', ['server_name'], registry=registry)
probe_http_connect_time = Gauge("http_connect_time", 'h5', ['server_name'], registry=registry)
probe_http_pretransfer_time = Gauge("http_pretransfer_time", 'h5', ['server_name'], registry=registry)
probe_http_first_byte_time = Gauge("http_first_byte_time", 'h5', ['server_name'], registry=registry)
slb_http_status_code = Gauge("slb_http_status_code", 'slb', ['server_name', 'status'], registry=registry)
produce_video_task_success_count = Gauge("produce_video_task_success_count", 'success_status_count', ['success_status_count'], registry=registry)
produce_video_task_fail_count = Gauge("produce_video_task_fail_count", 'fail_status_count', ['fail_status_count'], registry=registry)
produce_video_task_progress_count = Gauge("produce_video_task_progress_count", 'progress_status_count', ['progress_status_count'], registry=registry)
produce_video_task_rate = Gauge("produce_video_task_rate", 'produce_video_rate', ['produce_video_rate'], registry=registry)
produce_video_tts_count = Gauge("tts_aliyun_azure", 'success', ['tts_channel'], registry=registry)


def healthcheck():
    """Probe every ``longvideoapi.prod`` instance and export its HTTP status.

    Reads the instance list from the DB, probes each instance's address, and
    sets ``healthcheck_status`` labelled by instance id / server name / ip.
    """
    # BUG FIX: the original body called healthcheck(ipaddr, server_name) —
    # i.e. this very zero-argument function (this module-level def shadows the
    # helper star-imported from utils), which would raise TypeError on every
    # run. Import the utils probe explicitly under a non-shadowed name.
    # NOTE(review): assumes utils exposes a two-argument healthcheck(ip, name)
    # probe returning an HTTP status code — confirm against utils.
    from utils import healthcheck as probe_healthcheck
    with session_maker() as session:
        instance_infos = session.query(InstanceList).filter(InstanceList.server_name == "longvideoapi.prod").all()
        for info in instance_infos:
            http_code = probe_healthcheck(info.ipadd, info.server_name)
            healthcheck_status.labels(info.instance_id, info.server_name, info.ipadd).set(http_code)


def interface_qps():
    """Export per-URL average response time and QPS from the interface table."""
    # NOTE(review): IntfaceList is not imported by name in this file; it is
    # presumably provided by `from utils import *` — confirm.
    with session_maker() as session:
        intface_infos = session.query(IntfaceList).filter(IntfaceList.app_type == "1").all()
        for info in intface_infos:
            url_http_avgtime.labels(info.interface_url).set(info.avg_time)
            url_http_qps.labels(info.interface_url).set(info.qps)


def h5_healthcheck():
    """Probe the share/download H5 pages and export status + timings (ms)."""
    share_h5 = "share_h5"
    download_h5 = "download_h5"
    share_h5_url = "https://longvideoh5.piaoquantv.com/core/share?shareSource=customerMessage&fromAppType=0&qrAppType=0&versionCode=321&shareUid=12463024&shareMachineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&h5WxrootPageSource=vlog-pages___category&videoId=2689415&isRecommendShare=1&h5ShareId=backend493cd67dd28f4ee395781d59881567211625976055926&shareDepth=0&state=#"
    download_h5_url = "https://longvideoh5.piaoquantv.com/dist_1_3_4/upload?accessToken=fe8914eb2e99d1fe8ddaa2f753f5ec613eb2dfbb&versionCode=323&galleryId=0&fileType=2&machineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&platform=devtools&system=iOS%2010.0.1&appType=0&appId=wx89e7eb06478361d7&pageSource=vlog-pages%2Fwebview&loginUid=12463024&machineInfo=%7B%22sdkVersion%22%3A%222.4.1%22,%22brand%22%3A%22devtools%22,%22language%22%3A%22zh_CN%22,%22model%22%3A%22iPhone%20X%22,%22platform%22%3A%22devtools%22,%22system%22%3A%22iOS%2010.0.1%22,%22weChatVersion%22%3A%228.0.5%22,%22screenHeight%22%3A812,%22screenWidth%22%3A375,%22windowHeight%22%3A730,%22windowWidth%22%3A375,%22softVersion%22%3A%224.1.168%22%7D&wxHelpPagePath=%2Fpackage-my%2Fhelp-feedback%2Fhelp-feedback&transaction=2065ff98-6f27-4f09-c9eb-d366c99dd5d5&videoBarrageSwitch=true&addMusic=1&eventId=0&fromActivityId=0&sessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&subSessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&projectId=&entranceType=#wechat_redirec"
    share_h5_url_info = ex_response(share_h5_url).getinfo()
    download_h5_url_info = ex_response(download_h5_url).getinfo()
    probe_http_status_code.labels(share_h5).set(share_h5_url_info["http_code"])
    probe_http_status_code.labels(download_h5).set(download_h5_url_info["http_code"])
    # Timing values come back in seconds; export milliseconds.
    timing_gauges = (
        (probe_http_total_time, "total_time"),
        (probe_http_dns_time, "dns_time"),
        # BUG FIX: the original fed "dns_time" into the connect-time gauges
        # (copy/paste slip); use the matching "connect_time" key.
        # NOTE(review): assumes getinfo() exposes a "connect_time" key
        # alongside the other *_time keys — confirm against ex_response.
        (probe_http_connect_time, "connect_time"),
        (probe_http_pretransfer_time, "pretransfer_time"),
        (probe_http_first_byte_time, "first_byte_time"),
    )
    for gauge, key in timing_gauges:
        gauge.labels(share_h5).set(share_h5_url_info[key] * 1000)
        gauge.labels(download_h5).set(download_h5_url_info[key] * 1000)


def produce_video_task_status_count():
    """Export production-task status counts, latest synthesis rate, TTS stats."""
    with session_maker_longvideo() as session:
        # Status counts (1/2/3 presumably in-progress/success/failed, going by
        # the metric names — confirm against the produce_video_task model).
        # Removed dead locals from the original: an unexecuted lazy query
        # (`video_produce_speed`) and unused start/end-of-day timestamps.
        video_progress_count = session.query(produce_video_task).filter(produce_video_task.task_status == 1).count()
        video_success_count = session.query(produce_video_task).filter(produce_video_task.task_status == 2).count()
        video_fail_count = session.query(produce_video_task).filter(produce_video_task.task_status == 3).count()
        produce_video_task_success_count.labels("video_sucess_count").set(video_success_count)
        produce_video_task_fail_count.labels("video_fail_count").set(video_fail_count)
        produce_video_task_progress_count.labels("video_progress_count").set(video_progress_count)
        # Synthesis speed of the most recently completed task.
        res = session.query(produce_video_task).filter(produce_video_task.task_status == 2).order_by(produce_video_task.id.desc()).first()
        if res:
            # NOTE(review): original grouping is `duration / delta / 1000`,
            # preserved here — confirm `duration / (delta / 1000)` was not the
            # intent (units of duration/timestamps are not visible here).
            rate = res.duration / (res.complete_timestamp - res.submit_timestamp) / 1000
            produce_video_task_rate.labels("produce_video_task_rate").set(round(rate, 3))
        else:
            produce_video_task_rate.labels("produce_video_task_rate").set(0)
    # TTS success/failure counts per channel.
    tts_queries = (
        ("aliyun", 1, "aliyun_success"),
        ("aliyun", 0, "aliyun_fail"),
        ("azure", 1, "azure_success"),
        # BUG FIX: the original queried logs_tts_count("aliyun", 0) again for
        # the azure failure label; query the azure channel instead.
        ("azure", 0, "azure_fail"),
    )
    for channel, status, label in tts_queries:
        res = logs_tts_count(channel, status)
        if res[0]["count"]:
            produce_video_tts_count.labels(label).set(res[0]["count"])


def count_metric(url):
    """Refresh the QPS gauge for one URL.

    The average-time gauge update was commented out in the original; the
    helper call is kept for parity but its result is not exported.
    TODO(review): wire `avgs` into url_http_avgtime or drop the call.
    """
    avgs = count_avg_time(url)
    qps = count_qps(url)
    url_http_qps.labels(url).set(qps)


def count_threads(url):
    """Poll loop: every 10 s spawn a short-lived daemon thread refreshing
    the metrics for *url* via count_metric()."""
    while True:
        worker = threading.Thread(target=count_metric, args=(url,))
        worker.daemon = True  # setDaemon() is deprecated since Python 3.10
        worker.start()
        time.sleep(10)


if __name__ == '__main__':
    start_http_server(9091)
    # BUG FIX: the original referenced an undefined global `url` (NameError
    # before any thread started) and called thread.join() inside the start
    # loop, which would block forever on the first infinite-loop poller.
    # NOTE(review): one poller per monitored interface URL, taken from the
    # same IntfaceList query interface_qps() uses — confirm intent.
    with session_maker() as session:
        urls = [row.interface_url for row in
                session.query(IntfaceList).filter(IntfaceList.app_type == "1").all()]
    threads = []
    for target_url in urls:
        t = threading.Thread(target=count_threads, args=(target_url,))
        t.daemon = True
        threads.append(t)
    for t in threads:
        t.start()
    for t in threads:
        t.join()