Przeglądaj źródła

feat: 添加 HDFS 数据获取逻辑

zhaohaipeng 10 miesięcy temu
rodzic
commit
281f049936
2 zmienionych plików z 42 dodań i 0 usunięć
  1. 22 0
      client/HDFSClient.py
  2. 20 0
      hadoop_monitor.py

+ 22 - 0
client/HDFSClient.py

@@ -0,0 +1,22 @@
+import json
+
+import requests
+
+
class HDFSClient(object):
    """Minimal HTTP client for the Hadoop NameNode JMX endpoint."""

    def __init__(self, cluster_ip, port):
        # Host and web-UI port of the NameNode (HTTP, default 9870).
        self.cluster_ip = cluster_ip
        self.port = port

    def get_name_node_info(self) -> dict:
        """Fetch the ``NameNodeInfo`` JMX bean.

        Returns:
            The decoded JSON document, or ``{}`` when the request fails
            (non-200 status or a network/timeout error).
        """
        url = (
            f"http://{self.cluster_ip}:{self.port}"
            "/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo"
        )
        try:
            # Bound the request so a hung NameNode cannot block the caller.
            resp = requests.get(url, timeout=10)
        except requests.RequestException:
            return {}
        if resp.status_code == 200:
            return resp.json()
        return {}

    def get_data_node_info(self) -> dict:
        """Return the live-DataNode map reported by the NameNode.

        The JMX ``LiveNodes`` attribute is itself a JSON-encoded string,
        so it needs a second decode. Returns ``{}`` when the NameNode is
        unreachable or the payload has no ``beans`` entry (the original
        code raised ``KeyError`` in that case).
        """
        info = self.get_name_node_info()
        beans = info.get('beans') or []
        if not beans:
            return {}
        return json.loads(beans[0].get('LiveNodes', '{}'))

+ 20 - 0
hadoop_monitor.py

@@ -0,0 +1,20 @@
+import json
+
+from client import HDFSClient
+
+from util import convert_util
+
# Kept at module level for backward compatibility with any importer.
hdfs_client = HDFSClient.HDFSClient("47.99.167.11", "9870")


def main():
    """Print capacity / usage statistics for every live DataNode."""
    live_nodes = hdfs_client.get_data_node_info()
    for node, info in live_nodes.items():
        capacity = info['capacity']
        used = info['used']
        # A freshly registered node can report capacity == 0; avoid
        # ZeroDivisionError and show the rate as a readable percentage.
        usage = f"{used / capacity * 100:.2f}%" if capacity else "N/A"
        print(
            f"节点: {node}, "
            f"容量: {convert_util.byte_convert(capacity)}, "
            f"已使用: {convert_util.byte_convert(used)}, "
            f"使用率: {usage}"
        )


if __name__ == '__main__':
    main()