丁云鹏 5 месяцев назад
Родитель
Commit
aa1276e0be

+ 3 - 2
recommend-model-produce/src/main/python/tools/static_ps_infer_v2.py

@@ -157,8 +157,9 @@ class Main(object):
         init_model_path = config.get("runner.infer_load_path")
         model_mode = config.get("runner.model_mode", 0)
         client = HangZhouOSSClient("art-recommend")
-        client.put_object_from_file("dyp/test.tar.gz", "test.tar.gz")
-        compress.uncompress_tar(test.tar.gz, init_model_path)
+        client.get_object_to_file("dyp/test.tar.gz", "test.tar.gz")
+        assert os.path.exists(data_path)
+        compress.uncompress_tar("test.tar.gz", init_model_path)
 
         #if fleet.is_first_worker():
         #fleet.load_inference_model(init_model_path, mode=int(model_mode))

+ 1 - 4
recommend-model-produce/src/main/python/tools/utils/static_ps/reader_helper.py

@@ -74,10 +74,7 @@ def get_infer_reader(input_var, config):
 def get_file_list(data_path, config):
     assert os.path.exists(data_path)
     file_list = [data_path + "/%s" % x for x in os.listdir(data_path)]
-    if config.get("runner.split_file_list"):
-        logger.info("Split file list for worker {}".format(fleet.worker_index(
-        )))
-        file_list = fleet.util.get_file_shard(file_list)
+
     logger.info("File list: {}".format(file_list))
     return file_list