@@ -49,31 +49,27 @@ public class DemoModel implements Model {
 
     @Override
     public boolean loadFromStream(InputStream in) throws Exception {
-        try {
-            String modelDir = PropertiesUtil.getString("model.dir") + "/demo";
-            CompressUtil.decompressGzFile(in, modelDir);
-
-            File modelFile = new File(modelDir + "/inference.pdmodel");
-            File paramFile = new File(modelDir + "/inference.pdiparams");
-
-            log.info("start init Config");
-            Config config = new Config();
-            config.setCppModel(modelFile.getAbsolutePath(), paramFile.getAbsolutePath());
-            config.enableMemoryOptim(true);
-            config.enableProfile();
-            config.enableMKLDNN();
-            config.getCpuMathLibraryNumThreads();
-            config.getFractionOfGpuMemoryForPool();
-            config.switchIrDebug(false);
-
-            log.info("start init Predictor");
-            Predictor predictor = Predictor.createPaddlePredictor(config);
-            Predictor temp = sourcePredictor;
-            this.sourcePredictor = predictor;
+        String modelDir = PropertiesUtil.getString("model.dir") + "/demo";
+        CompressUtil.decompressGzFile(in, modelDir);
+
+        String modelFile = modelDir + "/inference.pdmodel";
+        String paramFile = modelDir + "/inference.pdiparams";
+
+        Config config = new Config();
+        config.setCppModel(modelFile, paramFile);
+        config.enableMemoryOptim(true);
+        config.enableProfile();
+        config.enableMKLDNN();
+        config.getCpuMathLibraryNumThreads();
+        config.getFractionOfGpuMemoryForPool();
+        config.switchIrDebug(false);
+
+        Predictor temp = sourcePredictor;
+        sourcePredictor = Predictor.createPaddlePredictor(config);
+        if (temp != null) {
             temp.destroyNativePredictor();
-            return true;
-        } finally {
         }
+        return true;
     }
 
 }
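
For context, a minimal caller sketch of the reworked method (a sketch only: it assumes DemoModel has a public no-argument constructor, and the archive path below is illustrative, not a real location). Each call decompresses the stream into model.dir/demo, builds a fresh Predictor, and destroys the previously active native predictor only when one already exists.

import java.io.FileInputStream;
import java.io.InputStream;

public class DemoModelReloadExample {
    public static void main(String[] args) throws Exception {
        DemoModel model = new DemoModel();
        // First load: sourcePredictor is still null, so nothing is destroyed.
        // Later loads swap in the new predictor, then release the old one.
        try (InputStream in = new FileInputStream("/path/to/demo-model.gz")) {
            boolean loaded = model.loadFromStream(in);
            System.out.println("model loaded: " + loaded);
        }
    }
}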