Browse Source

初始化

罗俊辉 1 year ago
parent
commit
eee8880236
1 changed file with 28 additions and 31 deletions
  1. 28 31
      pressure_test.py

+ 28 - 31
pressure_test.py

@@ -3,41 +3,38 @@
 """
 import time
 import requests
+import argparse
 from concurrent.futures import ThreadPoolExecutor
 
-body = {
-    "version":"v2",
-    "features": {
-        "channel": "小年糕",
-        "out_user_id": "66506470",
-        "mode": "author",
-        "out_play_cnt": 698,
-        "out_like_cnt": 1,
-        "out_share_cnt": 41,
-        "title": "收藏好这九种中成药,让你少往医院跑。?",
-        "lop": 701 / 24698, # like_cnt + 700 / play_cnt + 24000
-        "duration": 180
-    }
-}
 
-features = {
-        "channel": "小年糕",
-        "out_user_id": "66506470",
-        "mode": "author",
-        "out_play_cnt": 698,
-        "out_like_cnt": 1,
-        "out_share_cnt": 41,
-        "title": "收藏好这九种中成药,让你少往医院跑。?",
-        "lop": 701 / 24698,
-        "duration": 180
def request_data(_url):
    """Fire one scoring request at the endpoint packed inside *_url*.

    Parameters
    ----------
    _url : str
        Encoded as "<index>#<endpoint-url>"; the index is only used to
        label this request in the timing printout.

    Side effects: one HTTP POST plus a timing line on stdout.
    """
    # maxsplit=1 so a '#' appearing inside the URL itself (query/fragment)
    # cannot corrupt the split -- the old split("#")[1] would truncate it.
    index, url = _url.split("#", 1)
    body = {
        "version": "v2",
        "features": {
            "channel": "小年糕",
            "out_user_id": "66506470",
            "mode": "author",
            "out_play_cnt": 698,
            "out_like_cnt": 1,
            "out_share_cnt": 41,
            "title": "收藏好这九种中成药,让你少往医院跑。?",
            # (like_cnt + 700) / (play_cnt + 24000)
            "lop": 701 / 24698,
            "duration": 180
        }
    }
    t = time.time()
    # timeout keeps the pressure test from hanging forever on a stalled
    # server; the response body is not inspected, only wall-clock latency.
    requests.post(url, json=body, timeout=30)
    e = time.time()
    print("请求", index, "cost time:", e - t, "s")
+
+
if __name__ == "__main__":
    # CLI entry point: --thread sets the worker-pool size of the pressure test.
    parser = argparse.ArgumentParser(
        description="Pressure-test the local lightgbm_score endpoint"
    )
    # type=int + default fixes the crash when --thread is omitted:
    # the old code did int(args.thread) on None -> TypeError.
    parser.add_argument("--thread", type=int, default=10)
    args = parser.parse_args()
    # 1000 numbered request descriptors of the form "<index>#<url>".
    dt = ["{}#http://127.0.0.1:5000/lightgbm_score".format(i) for i in range(1, 1001)]
    with ThreadPoolExecutor(max_workers=args.thread) as pool:
        pool.map(request_data, dt)