|
@@ -37,10 +37,10 @@ from utils.oss_client import HangZhouOSSClient
|
|
|
import utils.compress as compress
|
|
|
|
|
|
|
|
|
-sys.path.append(os.path.dirname(os.path.abspath("")) + os.sep + "lib")
|
|
|
-print(os.path.dirname(os.path.abspath("")) + os.sep + "lib1")
|
|
|
+sys.path.append(os.path.abspath("") + os.sep + "lib")
|
|
|
+print(os.path.abspath("") + os.sep + "lib1")
|
|
|
import brpc_flags
|
|
|
-print(os.path.dirname(os.path.abspath("")) + os.sep + "lib2")
|
|
|
+print(os.path.abspath("") + os.sep + "lib2")
|
|
|
|
|
|
|
|
|
__dir__ = os.path.dirname(os.path.abspath(__file__))
|
|
@@ -237,15 +237,25 @@ class Main(object):
|
|
|
self.exe, model_dir,
|
|
|
[feed.name for feed in self.inference_feed_var],
|
|
|
self.inference_target_var)
|
|
|
+
|
|
|
else:
|
|
|
paddle.static.save_inference_model(
|
|
|
model_dir,
|
|
|
[feed.name for feed in self.inference_feed_var],
|
|
|
[self.inference_target_var], self.exe)
|
|
|
|
|
|
- compress.compress_tar(model_dir, "test")
|
|
|
+
|
|
|
+
|
|
|
+    # trans to new format: reload the just-saved model and re-save it via the
+    # 2.x API so the program is written in the new inference format
+    [program, feed_names, fetch_targets] = paddle.static.load_inference_model(
+        os.path.join(model_dir, "dnn_plugin"), self.exe)
+    paddle.static.save_inference_model(
+        os.path.join(model_dir, "dnn_plugin_new"),
+        [program.global_block().var(name) for name in feed_names],
+        fetch_targets, self.exe, program=program)
|
|
|
+
|
|
|
+ compress.compress_tar(os.path.join(model_dir, "dnn_plugin_new"), "dnn_plugin_new.tar.gz")
|
|
|
client = HangZhouOSSClient("art-recommend")
|
|
|
- client.put_object_from_file("dyp/test.tar.gz", "test.tar.gz")
|
|
|
+ client.put_object_from_file("dyp/model.tar.gz", "dnn_plugin_new.tar.gz")
|
|
|
|
|
|
if reader_type == "InmemoryDataset":
|
|
|
self.reader.release_memory()
|