@@ -0,0 +1,71 @@
+package com.tzld.piaoquan.recommend.model.service.model;
+
+import com.baidu.paddle.inference.Config;
+import com.baidu.paddle.inference.Predictor;
+import com.baidu.paddle.inference.Tensor;
+import com.tzld.piaoquan.recommend.feature.util.JSONUtils;
+import com.tzld.piaoquan.recommend.model.util.CompressUtil;
+import com.tzld.piaoquan.recommend.model.util.PropertiesUtil;
+import lombok.extern.slf4j.Slf4j;
+
+import java.io.InputStream;
+
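+/**
+ * Demo model backed by the Paddle Inference Java API: {@link #loadFromStream} unpacks a
+ * gzip-compressed model archive and builds a shared source predictor, while {@link #predict}
+ * clones that predictor per request and runs a fixed-shape forward pass. This is a sketch:
+ * the request parameter is not yet parsed into real input features.
+ */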
+@Slf4j
+public class DemoModel implements Model {
+
+ private Predictor sourcePredictor;
+
+ public void cleanModel() {
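+ // free the shared native predictor when the model is discarded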
+ this.sourcePredictor.destroyNativePredictor();
+ this.sourcePredictor = null;
+ }
+
+ public String predict(String param) {
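+ // run inference on a request-scoped clone so the shared source predictor stays reusable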
+ Predictor predictor = Predictor.clonePaddlePredictor(sourcePredictor);
+
+ String inNames = predictor.getInputNameById(0);
+ Tensor inHandle = predictor.getInputHandle(inNames);
+
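+ // demo input: a zero-filled tensor of shape [1, 3, 224, 224]; "param" is not used yet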
+ inHandle.reshape(4, new int[]{1, 3, 224, 224});
+
+ float[] inData = new float[1 * 3 * 224 * 224];
+ inHandle.copyFromCpu(inData);
+ predictor.run();
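+ // copy the first output tensor back to the CPU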
+ String outNames = predictor.getOutputNameById(0);
+ Tensor outHandle = predictor.getOutputHandle(outNames);
+ float[] outData = new float[outHandle.getSize()];
+ outHandle.copyToCpu(outData);
+
+
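+ // release the native tensors and the request-scoped predictor clone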
+ outHandle.destroyNativeTensor();
+ inHandle.destroyNativeTensor();
+ predictor.destroyNativePredictor();
+
+ return JSONUtils.toJson(outData);
+ }
+
+ @Override
+ public boolean loadFromStream(InputStream in) throws Exception {
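+ // unpack the gzip-compressed model archive into the configured model directory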
+ String modelDir = PropertiesUtil.getString("model.dir");
+ CompressUtil.decompressGzFile(in, modelDir + "/demo");
+
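+ // placeholder paths: point these at the model structure and parameter files,
+ // presumably decompressed under modelDir + "/demo" (exact file names depend on the exported model)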
+ String modelFile = "";
+ String paramFile = "";
+
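+ // CPU inference config: memory optimization, profiling and MKLDNN enabled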
+ Config config = new Config();
+ config.setCppModel(modelFile, paramFile);
+ config.enableMemoryOptim(true);
+ config.enableProfile();
+ config.enableMKLDNN();
+ // thread count and GPU memory pool fraction are left at Paddle defaults
+ config.switchIrDebug(false);
+
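+ // build the long-lived source predictor; it is cloned per request in predict() and
+ // released only in cleanModel()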
+ Predictor predictor = Predictor.createPaddlePredictor(config);
+ this.sourcePredictor = predictor;
+ return true;
+ }
+
+}