# app.py

from flask import Flask, Response
from DBSession import session_maker
from model import *
from sqlalchemy.sql import func
from utils import *
import atexit
import datetime  # used for the timestamp-derived query bounds in all_metric()
from aliyunsdkcore.client import AcsClient
from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
from apscheduler.schedulers.background import BackgroundScheduler
import json
from model_longvideo import produce_video_task, produce_video_project
from DBSession_longvideo import session_maker_longvideo
from prometheus_client import Gauge, Counter, generate_latest
from prometheus_client.core import CollectorRegistry
from scheduler_jobs import interface_info_count
from ex_response import ex_response
import time
import threading

app = Flask(__name__)
registry = CollectorRegistry(auto_describe=False)

# Background scheduler for periodic jobs
scheduler = BackgroundScheduler()
scheduler.add_job(func=update_request_url_list, trigger="interval", seconds=300)
scheduler.add_job(produce_video_ratio_cnt, 'cron', hour='00', minute='03')
scheduler.start()
atexit.register(lambda: scheduler.shutdown())

client = AcsClient('LTAI4GBWbFvvXoXsSVBe1o9f', 'kRAikWitb4kDxaAyBqNrmLmllMEDO3', 'cn-hangzhou')

healthcheck_status = Gauge("healthcheck_status", "ipaddress", ['instance_id', 'server_name', 'ipaddress'], registry=registry)
url_http_avgtime = Gauge("url_http_times_avgs", "url of avgs", ['appType', 'url'], registry=registry)
url_http_qps = Gauge("url_http_qps", "url of qps", ['appType', 'url'], registry=registry)
url_http_expendtime_summary = Gauge("url_http_expendtime_summary", "expendtime summary", ['appType', 'url', 'duration'], registry=registry)
url_http_error_code_cnt = Gauge("url_http_error_code_cnt", "error code", ['appType', 'error_code'], registry=registry)
probe_http_status_code = Gauge("http_status_code", 'h5', ['server_name'], registry=registry)
probe_http_total_time = Gauge("http_total_time", 'h5', ['server_name'], registry=registry)
probe_http_dns_time = Gauge("http_dns_time", 'h5', ['server_name'], registry=registry)
probe_http_connect_time = Gauge("http_connect_time", 'h5', ['server_name'], registry=registry)
probe_http_pretransfer_time = Gauge("http_pretransfer_time", 'h5', ['server_name'], registry=registry)
probe_http_first_byte_time = Gauge("http_first_byte_time", 'h5', ['server_name'], registry=registry)
slb_http_status_code = Gauge("slb_http_status_code", 'slb', ['server_name', 'status'], registry=registry)
# Video composition metrics
produce_video_task_count = Gauge("produce_video_task_count", 'status', ['status'], registry=registry)
produce_video_task_rate = Gauge("produce_video_task_rate", 'rate', ['rate'], registry=registry)
produce_video_task_total = Gauge("produce_video_task_total", 'total', ['total_cnt'], registry=registry)
produce_video_task_sucess = Gauge("produce_video_task_sucess", 'sucess', ['sucess_cnt'], registry=registry)
produce_video_task_ratio = Gauge("produce_video_task_ratio", 'produce_video_ratio', ['ratio'], registry=registry)
produce_video_tts_count = Gauge("tts_aliyun_azure", 'success', ['tts_channel'], registry=registry)
logs_app_recommend_log_cnt_300 = Gauge("logs_app_recommend_log_null_cnt_300", "null cnt", ['cnt'], registry=registry)
logs_app_recommend_log_cnt_all = Counter("logs_app_recommend_log_null_cnt_all", "all cnt", ['cnt'], registry=registry)
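

# Routes: /update refreshes the ECS instance list; the */metrics routes below
# populate the gauges above and expose the shared registry via generate_latest(),
# so each of them can be configured as a Prometheus scrape target.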
@app.route('/update')
def update():
    """Refresh the ECS instance list from the Aliyun DescribeInstances API."""
    request = DescribeInstancesRequest()
    request.set_accept_format('json')
    request.set_PageSize(100)
    # request.set_InstanceNetworkType("vpc")
    request.set_Tags([
        {
            "Key": "ecs"
        }
    ])
    response = client.do_action_with_exception(request)
    instance_info = json.loads(response)
    intances_list_del()
    count = len(instance_info["Instances"]["Instance"])
    for i in range(count):
        instance = instance_info["Instances"]["Instance"][i]
        instance_id = instance["InstanceId"]
        if instance["InstanceNetworkType"] == "vpc":
            ipaddr = instance["VpcAttributes"]["PrivateIpAddress"]["IpAddress"][0]
        if instance["InstanceNetworkType"] == "classic":
            ipaddr = instance["InnerIpAddress"]["IpAddress"][0]
        server_name = instance["Tags"]["Tag"][0]["TagValue"]
        status = instance["Status"]
        instance_name = instance["HostName"]
        if status == "Running":
            status = 1
        instance_insert(instance_id, ipaddr, instance_name, server_name, status)
    return "Update complete"


@app.route('/app/healthcheck/metrics')
def app_healthcheck():
    """Probe each longvideoapi.prod instance and export its health-check status."""
    with session_maker() as session:
        instance_infos = session.query(InstanceList).filter(InstanceList.server_name == "longvideoapi.prod").all()
        for index in range(len(instance_infos)):
            ipaddr = instance_infos[index].ipadd
            server_name = instance_infos[index].server_name
            http_code = healthcheck(ipaddr, server_name)
            instance_id = instance_infos[index].instance_id
            healthcheck_status.labels(instance_id, server_name, ipaddr).set(http_code)
    return Response(generate_latest(registry), mimetype="text/plain")


@app.route('/app/qps/metrics')
def qps_avgtime_count():
    """Export average response time and QPS for every interface URL and app type."""
    with session_maker() as session:
        intface_infos = session.query(IntfaceList.interface_url).all()
        app_type = session.query(app_info.app_type).all()
        for i in range(len(intface_infos)):
            for index in range(len(app_type)):
                url = intface_infos[i].interface_url
                appType = app_type[index].app_type
                url_avgtime = count_avg_time(appType, url)
                url_qps = count_qps(appType, url)
                url_http_avgtime.labels(appType, url).set(url_avgtime)
                url_http_qps.labels(appType, url).set(url_qps)
    return Response(generate_latest(registry), mimetype="text/plain")


@app.route('/h5/metrics')
def h5_healthcheck():
    """Probe the share and download H5 pages and export HTTP timing metrics."""
    share_h5 = "share_h5"
    download_h5 = "download_h5"
    share_h5_url = "https://longvideoh5.piaoquantv.com/core/share?shareSource=customerMessage&fromAppType=0&qrAppType=0&versionCode=321&shareUid=12463024&shareMachineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&h5WxrootPageSource=vlog-pages___category&videoId=2689415&isRecommendShare=1&h5ShareId=backend493cd67dd28f4ee395781d59881567211625976055926&shareDepth=0&state=#"
    download_h5_url = "https://longvideoh5.piaoquantv.com/dist_1_3_4/upload?accessToken=fe8914eb2e99d1fe8ddaa2f753f5ec613eb2dfbb&versionCode=323&galleryId=0&fileType=2&machineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&platform=devtools&system=iOS%2010.0.1&appType=0&appId=wx89e7eb06478361d7&pageSource=vlog-pages%2Fwebview&loginUid=12463024&machineInfo=%7B%22sdkVersion%22%3A%222.4.1%22,%22brand%22%3A%22devtools%22,%22language%22%3A%22zh_CN%22,%22model%22%3A%22iPhone%20X%22,%22platform%22%3A%22devtools%22,%22system%22%3A%22iOS%2010.0.1%22,%22weChatVersion%22%3A%228.0.5%22,%22screenHeight%22%3A812,%22screenWidth%22%3A375,%22windowHeight%22%3A730,%22windowWidth%22%3A375,%22softVersion%22%3A%224.1.168%22%7D&wxHelpPagePath=%2Fpackage-my%2Fhelp-feedback%2Fhelp-feedback&transaction=2065ff98-6f27-4f09-c9eb-d366c99dd5d5&videoBarrageSwitch=true&addMusic=1&eventId=0&fromActivityId=0&sessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&subSessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&projectId=&entranceType=#wechat_redirec"
    share_h5_curl_response = ex_response(share_h5_url)
    share_h5_url_info = share_h5_curl_response.getinfo()
    download_h5_curl_response = ex_response(download_h5_url)
    download_h5_url_info = download_h5_curl_response.getinfo()
    probe_http_status_code.labels(share_h5).set(share_h5_url_info["http_code"])
    probe_http_status_code.labels(download_h5).set(download_h5_url_info["http_code"])
    probe_http_total_time.labels("share_h5").set(share_h5_url_info["total_time"] * 1000)
    probe_http_total_time.labels("download_h5").set(download_h5_url_info["total_time"] * 1000)
    probe_http_dns_time.labels("share_h5").set(share_h5_url_info["dns_time"] * 1000)
    probe_http_dns_time.labels("download_h5").set(download_h5_url_info["dns_time"] * 1000)
    # Connect time is exported from the dns_time field of getinfo()
    probe_http_connect_time.labels("share_h5").set(share_h5_url_info["dns_time"] * 1000)
    probe_http_connect_time.labels("download_h5").set(download_h5_url_info["dns_time"] * 1000)
    probe_http_pretransfer_time.labels("share_h5").set(share_h5_url_info["pretransfer_time"] * 1000)
    probe_http_pretransfer_time.labels("download_h5").set(download_h5_url_info["pretransfer_time"] * 1000)
    probe_http_first_byte_time.labels("share_h5").set(share_h5_url_info["first_byte_time"] * 1000)
    probe_http_first_byte_time.labels("download_h5").set(download_h5_url_info["first_byte_time"] * 1000)
    return Response(generate_latest(registry), mimetype="text/plain")


@app.route('/slbStatusCode/metrics')
def slb_request_status_metric():
    """Export SLB HTTP status-code counts per backend service."""
    svc_name = {'longvideoapi', 'clip', 'speed'}
    for name in svc_name:
        res = slb_status_code_count(name)
        if res:
            for i in range(len(res)):
                status = res[i]['status']
                cnt = float(res[i]['cnt'])
                slb_http_status_code.labels(name, status).set(cnt)
    return Response(generate_latest(registry), mimetype="text/plain")


@app.route('/metrics')
def all_metric():
    """Video composition metrics, SLB status codes, H5 probes and recommend-log counts."""
    # Video composition success rate over the last 5 minutes
    time_stamp = int(time.time())
    end_time = int(datetime.datetime.fromtimestamp(time_stamp).strftime('%Y%m%d%H%M%S')) * 1000000000
    start_time = int(datetime.datetime.fromtimestamp(time_stamp - 300).strftime('%Y%m%d%H%M%S')) * 1000000000
    print(start_time, end_time)
    query_sql = ("select totalCount, (successCount+processingCount1), round((successCount + processingCount1)/totalCount * 100,2) from "
                 "(select count(*) as totalCount,"
                 "sum(case when produce_status in (5,6,7,8) then 1 else 0 end) as successCount,"
                 "sum(case when produce_status = 99 then 1 else 0 end) as failCount, "
                 "sum(case when produce_status in(0,1,2,3,4) then 1 else 0 end) as processingCount,"
                 "sum(case when produce_status in(0,1,2,3,4) and (rate < 0.7 or rate is null) then 1 else 0 end) processingCount1,"
                 "sum(case when produce_status in(0,1,2,3,4) and rate >= 0.7 then 1 else 0 end) processingCount2 from "
                 "(select t1.project_id, t1.produce_status, round((t2.last_connect_timestamp - t1.submit_timestamp) / (t1.video_duration/1000), 1) as rate from produce_video_project t1 "
                 "left join produce_video_project_connect_time t2 on t1.project_id = t2.project_id "
                 "where t1.project_id > %s and t1.project_id < %s and t1.app_type not in (1,13,15)) s1) ss1" % (start_time, end_time))
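    # Note: the query filters project_id against the timestamp-derived bounds above,
    # which assumes project ids embed a creation timestamp in the same numeric format.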
    res = db_query(query_sql)
    total_cnt = res[0] if res[0] is not None else 0
    success_cnt = res[1] if res[1] is not None else 0
    if total_cnt == 0:
        rate = 100
    else:
        rate = (success_cnt / total_cnt) * 100
    produce_video_task_rate.labels("produce_video_task_rate").set(rate)
    produce_video_task_total.labels('total_cnt').set(total_cnt)
    produce_video_task_sucess.labels('success_cnt').set(success_cnt)
  174. """今日视频合成任务数"""
  175. """今日视频合成成功数量"""
  176. """今日视频合成失败数量"""
  177. """视频合成中任务数量"""
  178. res = produce_video_task_cnt()
  179. if res:
  180. if res[0]:
  181. total_cnt = res[0]
  182. produce_video_task_count.labels("total").set(total_cnt)
  183. if res[1]:
  184. success_cnt = res[1]
  185. produce_video_task_count.labels("success").set(success_cnt)
  186. if res[2]:
  187. fail_cnt = res[2]
  188. produce_video_task_count.labels("fail").set(fail_cnt)
  189. if res[3]:
  190. process_cnt = res[3]
  191. produce_video_task_count.labels("process").set(process_cnt)
  192. if res[4]:
  193. r1 = res[4]
  194. produce_video_task_ratio.labels("<0.5").set(r1)
  195. if res[5]:
  196. r2 = res[5]
  197. produce_video_task_ratio.labels("0.5-0.7").set(r2)
  198. if res[5]:
  199. r3 = res[6]
  200. produce_video_task_ratio.labels("0.7-1.0").set(r3)
  201. if res[5]:
  202. r4 = res[7]
  203. produce_video_task_ratio.labels("1.0-1.5").set(r4)
  204. if res[5]:
  205. r5 = res[8]
  206. produce_video_task_ratio.labels("1.5-2.0").set(r5)
  207. if res[5]:
  208. r6 = res[9]
  209. produce_video_task_ratio.labels(">2.0").set(r6)
    # sql = "select v1 as 平均合成耗时, v2 as 平均视频时长, round(v2/v1,1) as 时长耗时比 from (select avg(produce_done_timestamp - submit_timestamp) as v1, avg(video_duration/1000) as v2 from produce_video_project where project_id > {} and project_id < {} and app_type not in (13,15) and produce_status in (5,6,7,8)) as t1".format(start_time, end_time)
    # res = db_query(sql)
    # if res[2] is not None:
    #     produce_video_task_count.labels("video_progress").set(res[2])
    # else:
    #     produce_video_task_count.labels("video_progress").set(0)
    # TTS call counts per channel and result (aliyun/azure, success/fail)
    tts_channels = (("aliyun", 1, "aliyun_success"), ("aliyun", 0, "aliyun_fail"),
                    ("azure", 1, "azure_success"), ("azure", 0, "azure_fail"))
    for channel, flag, label in tts_channels:
        res = logs_tts_count(channel, flag)
        if res is not None:
            produce_video_tts_count.labels(label).set(res[0]["count"])
        else:
            produce_video_tts_count.labels(label).set(0)
    # Today's load-balancer HTTP status codes
    svc_name = {'longvideoapi', 'clip', 'speed', 'commonapi'}
    for name in svc_name:
        res = slb_status_code_count(name)
        if res:
            for i in range(len(res)):
                status = res[i]['status']
                cnt = float(res[i]['cnt'])
                slb_http_status_code.labels(name, status).set(cnt)
    # H5 health check
    share_h5 = "share_h5"
    download_h5 = "download_h5"
    share_h5_url = "https://longvideoh5.piaoquantv.com/core/share?shareSource=customerMessage&fromAppType=0&qrAppType=0&versionCode=321&shareUid=12463024&shareMachineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&h5WxrootPageSource=vlog-pages___category&videoId=2689415&isRecommendShare=1&h5ShareId=backend493cd67dd28f4ee395781d59881567211625976055926&shareDepth=0&state=#"
    download_h5_url = "https://longvideoh5.piaoquantv.com/dist_1_3_4/upload?accessToken=fe8914eb2e99d1fe8ddaa2f753f5ec613eb2dfbb&versionCode=323&galleryId=0&fileType=2&machineCode=weixin_openid_o0w175fPwp8yrtOGihYJhvnT9Ag4&platform=devtools&system=iOS%2010.0.1&appType=0&appId=wx89e7eb06478361d7&pageSource=vlog-pages%2Fwebview&loginUid=12463024&machineInfo=%7B%22sdkVersion%22%3A%222.4.1%22,%22brand%22%3A%22devtools%22,%22language%22%3A%22zh_CN%22,%22model%22%3A%22iPhone%20X%22,%22platform%22%3A%22devtools%22,%22system%22%3A%22iOS%2010.0.1%22,%22weChatVersion%22%3A%228.0.5%22,%22screenHeight%22%3A812,%22screenWidth%22%3A375,%22windowHeight%22%3A730,%22windowWidth%22%3A375,%22softVersion%22%3A%224.1.168%22%7D&wxHelpPagePath=%2Fpackage-my%2Fhelp-feedback%2Fhelp-feedback&transaction=2065ff98-6f27-4f09-c9eb-d366c99dd5d5&videoBarrageSwitch=true&addMusic=1&eventId=0&fromActivityId=0&sessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&subSessionId=1626833289618-583a312d-81cd-62f9-cdd4-cf914c682d55&projectId=&entranceType=#wechat_redirec"
    share_h5_curl_response = ex_response(share_h5_url)
    share_h5_url_info = share_h5_curl_response.getinfo()
    download_h5_curl_response = ex_response(download_h5_url)
    download_h5_url_info = download_h5_curl_response.getinfo()
    probe_http_status_code.labels(share_h5).set(share_h5_url_info["http_code"])
    probe_http_status_code.labels(download_h5).set(download_h5_url_info["http_code"])
    probe_http_total_time.labels("share_h5").set(share_h5_url_info["total_time"] * 1000)
    probe_http_total_time.labels("download_h5").set(download_h5_url_info["total_time"] * 1000)
    probe_http_dns_time.labels("share_h5").set(share_h5_url_info["dns_time"] * 1000)
    probe_http_dns_time.labels("download_h5").set(download_h5_url_info["dns_time"] * 1000)
    # Connect time is exported from the dns_time field of getinfo()
    probe_http_connect_time.labels("share_h5").set(share_h5_url_info["dns_time"] * 1000)
    probe_http_connect_time.labels("download_h5").set(download_h5_url_info["dns_time"] * 1000)
    probe_http_pretransfer_time.labels("share_h5").set(share_h5_url_info["pretransfer_time"] * 1000)
    probe_http_pretransfer_time.labels("download_h5").set(download_h5_url_info["pretransfer_time"] * 1000)
    probe_http_first_byte_time.labels("share_h5").set(share_h5_url_info["first_byte_time"] * 1000)
    probe_http_first_byte_time.labels("download_h5").set(download_h5_url_info["first_byte_time"] * 1000)
    # Recommendation service: null recommend-log count
    res_null_cnt = count_recommend_null()
    logs_app_recommend_log_cnt_300.labels("recommend").set(res_null_cnt)
    # logs_app_recommend_log_cnt_all.labels("recommend").inc(1)
    # logs_app_recommend_log_cnt_all.inc(res_null_cnt)
    return Response(generate_latest(registry), mimetype="text/plain")


@app.route('/qps_avgtime/metrics')
def qps_avgtime_metrics():
    """Per-interface QPM, average RT, and RT-distribution statistics."""
    res, url = app_openapi_qps_avgtime_count()
    qps = int(res.body[0]["cnt"])
    avgtime = int(float(res.body[0]["avg_time"]))
    app_type_temp = 1
    url_http_qps.labels(app_type_temp, url).set(qps)
    url_http_avgtime.labels(app_type_temp, url).set(avgtime)
    app_type = ['1', '0', '4', '5', '6', '12', '13', '15']
    for i in range(len(app_type)):
        appType = app_type[i]
        res = count_qps_avgtime(appType)
        for j in range(len(res.body)):
            url = res.body[j]["requestUri"]
            qps = int(res.body[j]["cnt"])
            avgtime = int(float(res.body[j]["avg_time"]))
            url_http_qps.labels(appType, url).set(qps)
            url_http_avgtime.labels(appType, url).set(avgtime)
        res = count_rt_less_time_count(appType, 0, 200)
        for j in range(len(res.body)):
            url = res.body[j]["requestUri"]
            count = res.body[j]["cnt"]
            url_http_expendtime_summary.labels(appType, url, "0-200").set(count)
        res = count_rt_less_time_count(appType, 200, 500)
        for j in range(len(res.body)):
            url = res.body[j]["requestUri"]
            count = res.body[j]["cnt"]
            url_http_expendtime_summary.labels(appType, url, "200-500").set(count)
        res = count_rt_less_time_count(appType, 500, 1000)
        for j in range(len(res.body)):
            url = res.body[j]["requestUri"]
            count = res.body[j]["cnt"]
            url_http_expendtime_summary.labels(appType, url, "500-1000").set(count)
        res = count_rt_less_time_count(appType, 1000, 10000)
        for j in range(len(res.body)):
            url = res.body[j]["requestUri"]
            count = res.body[j]["cnt"]
            url_http_expendtime_summary.labels(appType, url, ">1000").set(count)
        # Per-interface error-code counts
        res = error_cnt(appType)
        for j in range(len(res.body)):
            cnt = res.body[j]["cnt"]
            error_code = res.body[j]["resultCode"]
            url_http_error_code_cnt.labels(appType, error_code).set(cnt)
    return Response(generate_latest(registry), mimetype="text/plain")


def produce_video_data():
    """Build the count query for video projects in roughly the last 30 minutes (by project_id bounds)."""
    start_time = (int(time.strftime("%Y%m%d%H%M", time.localtime())) - 30) * 100000000000
    end_time = (int(time.strftime("%Y%m%d%H%M", time.localtime()))) * 100000000000
    sum_sql = ("select count(*) as totalCount from "
               "(select t1.project_id, t1.produce_status "
               "from produce_video_project t1 "
               "left join produce_video_project_connect_time t2 on t1.project_id = t2.project_id "
               "where t1.project_id > %s and t1.project_id < %s and t1.app_type not in (1,13,15)) s1" % (start_time, end_time))
    return sum_sql


@app.route('/healthcheck')
def healthcheck_route():
    # Uses a distinct name so it does not shadow the healthcheck() helper called in app_healthcheck()
    return "OK"


if __name__ == '__main__':
    # app.run()
    app.run(host='192.168.201.1', port=9091)
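
# Example scrape against the address configured above, e.g.:
#   curl http://192.168.201.1:9091/metrics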