model service

丁云鹏 committed 5 months ago
parent commit 4177963988
23 files changed, 2344 insertions, 0 deletions
  1. recommend-model-jni/.gitignore (+33 -0)
  2. recommend-model-jni/paddle/build.sh (+23 -0)
  3. recommend-model-jni/paddle/readme.md (+93 -0)
  4. recommend-model-jni/paddle/test.sh (+23 -0)
  5. recommend-model-jni/pom.xml (+49 -0)
  6. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Config.cpp (+273 -0)
  7. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Config.h (+290 -0)
  8. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Predictor.cpp (+107 -0)
  9. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Predictor.h (+132 -0)
  10. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Tensor.cpp (+133 -0)
  11. recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Tensor.h (+169 -0)
  12. recommend-model-jni/src/main/java/com/baidu/paddle/inference/Config.java (+270 -0)
  13. recommend-model-jni/src/main/java/com/baidu/paddle/inference/Predictor.java (+97 -0)
  14. recommend-model-jni/src/main/java/com/baidu/paddle/inference/Tensor.java (+101 -0)
  15. recommend-model-jni/src/test/java/com/baidu/paddle/inference/test.java (+65 -0)
  16. recommend-model-service/pom.xml (+5 -0)
  17. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/WarmUpService.java (+16 -0)
  18. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/DSSMModel.java (+52 -0)
  19. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/Model.java (+14 -0)
  20. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/ModelManager.java (+239 -0)
  21. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/OssConfig.java (+15 -0)
  22. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/util/CompressUtil.java (+123 -0)
  23. recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/util/PropertiesUtil.java (+22 -0)

+ 33 - 0
recommend-model-jni/.gitignore

@@ -0,0 +1,33 @@
+HELP.md
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### VS Code ###
+.vscode/

+ 23 - 0
recommend-model-jni/paddle/build.sh

@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+
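+# Usage: ./build.sh <paddle_inference_c_dir> <jni_include_dir> <jni_platform_include_dir>
+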
+mkdir build && cd build
+export library_path=$1
+export jni_path=$2
+export jni_sub_path=$3
+mkldnn_lib=$library_path"/third_party/install/mkldnn/lib"
+mklml_lib=$library_path"/third_party/install/mklml/lib"
+export paddle_inference_lib=$library_path"/paddle/lib"
+export paddle_path=$library_path"/paddle/include"
+export LD_LIBRARY_PATH=$mkldnn_lib:$mklml_lib:$paddle_inference_lib
+cmake .. && make
+#g++ -fPIC -D_REENTRANT -I $jni_path -I $jni_sub_path -I $paddle_path -L $paddle_inference_lib -c com_baidu_paddle_inference_Predictor.cpp com_baidu_paddle_inference_Config.cpp com_baidu_paddle_inference_Tensor.cpp
+#g++ -shared -I $paddle_path -L $paddle_inference_lib com_baidu_paddle_inference_Config.o com_baidu_paddle_inference_Predictor.o com_baidu_paddle_inference_Tensor.o -o libpaddle_inference.so -lpaddle_inference_c
+
+cd ../src/main/java/com/baidu/paddle/inference
+javac Config.java Predictor.java Tensor.java
+cd ../../../../../../../
+cp ./native/libpaddle_inference.so libpaddle_inference.so
+pwd
+jar cvf JavaInference.jar -C src/main/java/ .

+ 93 - 0
recommend-model-jni/paddle/readme.md

@@ -0,0 +1,93 @@
+# Paddle Inference Java API
+
+The Paddle Inference Java API is built on the [C API](../capi_exp) and JNI. You need to prepare the C inference library in advance.
+
+## Installation (Linux)
+
+##### 1. Download the C inference library
+
+You can either download a prebuilt [paddle_inference_c](https://github.com/PaddlePaddle/Paddle-Inference-Demo/blob/master/docs/user_guides/download_lib.md) library, or build it from source following the official documentation. When building from source, pass `-DON_INFER=ON` to cmake; the build directory then contains `paddle_inference_c_install_dir`.
+
+##### 2. Prepare the inference model
+
+Download and extract the [resnet50](https://paddle-inference-dist.bj.bcebos.com/Paddle-Inference-Demo/resnet50.tgz) model to obtain a model in Paddle Combined format.
+
+```
+wget https://paddle-inference-dist.bj.bcebos.com/Paddle-Inference-Demo/resnet50.tgz
+tar zxf resnet50.tgz
+
+# The resulting resnet50 directory looks like this
+resnet50/
+├── inference.pdmodel
+├── inference.pdiparams
+└── inference.pdiparams.info
+```
+
+##### 3. Prepare the working directory
+
+```
+git clone github.com/paddlepaddle/paddle/paddle/fluid/inference/javaapi
+```
+
+##### 4. Build the shared library and the jar
+
+```bash
+# Run this in the javaapi directory:
+./build.sh {paddle_inference_c_dir} {jni_include_dir} {jni_platform_include_dir}
+
+# For example, with the author's directory layout:
+./build.sh /root/paddle_c/paddle_inference_c_2.2/paddle_inference_c /usr/lib/jvm/java-8-openjdk-amd64/include /usr/lib/jvm/java-8-openjdk-amd64/include/linux
+
+# Afterwards, JavaInference.jar and libpaddle_inference.so are generated in the current directory.
+```
+
+##### 5. Run the unit test to verify
+
+```
+# Run this in the javaapi directory:
+./test.sh {paddle_inference_c_dir} {pdmodel_file} {pdiparams_file}
+
+# For example, with the author's directory layout:
+./test.sh "/root/paddle_c/paddle_inference_c_2.2/paddle_inference_c" "/root/paddle_c/resnet50/inference.pdmodel" "/root/paddle_c/resnet50/inference.pdiparams"
+```
+
+## Using Paddle inference from Java
+
+First, create the inference config:
+```java
+Config config = new Config();
+config.setCppModel(model_file, params_file);
+```
+
+Create the predictor:
+```java
+Predictor predictor = Predictor.createPaddlePredictor(config);
+```
+
+Get the input tensor:
+```java
+String inNames = predictor.getInputNameById(0);
+Tensor inHandle = predictor.getInputHandle(inNames);
+```
+
+Set the input data (assuming a single input):
+```java
+inHandle.Reshape(4, new int[]{1, 3, 224, 224});
+float[] inData = new float[1*3*224*224];
+inHandle.CopyFromCpu(inData);
+```
+
+Run inference:
+```java
+predictor.run();
+```
+
+Get the output tensor:
+```java
+String outNames = predictor.getOutputNameById(0);
+Tensor outHandle = predictor.getOutputHandle(outNames);
+float[] outData = new float[outHandle.GetSize()];
+outHandle.CopyToCpu(outData);
+```
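+
+Putting the steps together, a minimal end-to-end sketch (the model paths are placeholders; the Tensor method names and signatures follow the snippets above):
+```java
+import com.baidu.paddle.inference.Config;
+import com.baidu.paddle.inference.Predictor;
+import com.baidu.paddle.inference.Tensor;
+
+public class InferenceDemo {
+    public static void main(String[] args) {
+        // 1. configure the model (Combined format: .pdmodel + .pdiparams)
+        Config config = new Config();
+        config.setCppModel("resnet50/inference.pdmodel", "resnet50/inference.pdiparams");
+
+        // 2. create the predictor
+        Predictor predictor = Predictor.createPaddlePredictor(config);
+
+        // 3. feed the input tensor
+        Tensor in = predictor.getInputHandle(predictor.getInputNameById(0));
+        in.Reshape(4, new int[]{1, 3, 224, 224});
+        in.CopyFromCpu(new float[1 * 3 * 224 * 224]);
+
+        // 4. run and read back the output
+        predictor.run();
+        Tensor out = predictor.getOutputHandle(predictor.getOutputNameById(0));
+        float[] outData = new float[out.GetSize()];
+        out.CopyToCpu(outData);
+        System.out.println("output length: " + outData.length);
+    }
+}
+```
+
+Note that `createPaddlePredictor` consumes the config: the predictor takes ownership of the native config object and zeroes the config's pointer, so the config must not be reused afterwards.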

+ 23 - 0
recommend-model-jni/paddle/test.sh

@@ -0,0 +1,23 @@
+#!/bin/bash
+
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
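+# Usage: ./test.sh <paddle_inference_c_dir> <pdmodel_file> <pdiparams_file>
+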
+library_path=$1
+mkldnn_lib=$library_path"/third_party/install/mkldnn/lib"
+mklml_lib=$library_path"/third_party/install/mklml/lib"
+paddle_inference_lib=$library_path"/paddle/lib"
+export LD_LIBRARY_PATH=$mkldnn_lib:$mklml_lib:$paddle_inference_lib:.
+javac -cp $CLASSPATH:JavaInference.jar:. test.java
+java -cp $CLASSPATH:JavaInference.jar:. test $2 $3

+ 49 - 0
recommend-model-jni/pom.xml

@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>recommend-model</artifactId>
+        <groupId>com.tzld.piaoquan</groupId>
+        <version>1.0.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>recommend-model-jni</artifactId>
+    <version>1.0.0</version>
+
+    <dependencies>
+    </dependencies>
+    <build>
+        <extensions>
+            <extension>
+                <groupId>kr.motd.maven</groupId>
+                <artifactId>os-maven-plugin</artifactId>
+                <version>1.6.0</version>
+            </extension>
+        </extensions>
+        <plugins>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <version>3.0.0</version>
+                <executions>
+                    <!-- Add the main source directory -->
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>${project.basedir}/src/main/java</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
+        </plugins>
+    </build>
+</project>

+ 273 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Config.cpp

@@ -0,0 +1,273 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "com_baidu_paddle_inference_Config.h"
+
+#include <iostream>
+
+#include "jni_convert_util.h"  // NOLINT
+#include "pd_inference_api.h"  // NOLINT
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_cppConfigDestroy(
+    JNIEnv*, jobject, jlong cppPaddleConfigPointer) {
+  PD_ConfigDestroy(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+// 1. create Config
+
+JNIEXPORT jlong JNICALL Java_com_baidu_paddle_inference_Config_createCppConfig(
+    JNIEnv* env, jobject obj) {
+  jlong cppPaddleConfigPointer = reinterpret_cast<jlong>(PD_ConfigCreate());
+  return cppPaddleConfigPointer;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_isCppConfigValid(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag =
+      PD_ConfigIsValid(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+// 2. not combined model settings
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_setCppModel(JNIEnv* env,
+                                                   jobject obj,
+                                                   jlong cppPaddleConfigPointer,
+                                                   jstring modelFile,
+                                                   jstring paramsFile) {
+  PD_ConfigSetModel(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                    jstring_to_cpp_string(env, modelFile).c_str(),
+                    jstring_to_cpp_string(env, paramsFile).c_str());
+}
+
+// 3. combined model settings
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppModelDir(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jstring modelDir) {
+  PD_ConfigSetModelDir(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                       jstring_to_cpp_string(env, modelDir).c_str());
+}
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppProgFile(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jstring progFile) {
+  PD_ConfigSetProgFile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                       jstring_to_cpp_string(env, progFile).c_str());
+}
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppParamsFile(
+    JNIEnv* env,
+    jobject obj,
+    jlong cppPaddleConfigPointer,
+    jstring paramsFile) {
+  PD_ConfigSetParamsFile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                         jstring_to_cpp_string(env, paramsFile).c_str());
+}
+
+JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_modelDir(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  return cpp_string_to_jstring(
+      env,
+      PD_ConfigGetModelDir(
+          reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
+}
+
+JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_progFile(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  return cpp_string_to_jstring(
+      env,
+      PD_ConfigGetProgFile(
+          reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
+}
+
+JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_paramsFile(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  return cpp_string_to_jstring(
+      env,
+      PD_ConfigGetParamsFile(
+          reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
+}
+
+// 4. cpu settings
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_setCpuMathLibraryNumThreads(
+    JNIEnv* env,
+    jobject obj,
+    jlong cppPaddleConfigPointer,
+    jint mathThreadsNum) {
+  int math_threads_num = static_cast<int>(mathThreadsNum);
+  PD_ConfigSetCpuMathLibraryNumThreads(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer), math_threads_num);
+}
+
+JNIEXPORT jint JNICALL
+Java_com_baidu_paddle_inference_Config_cpuMathLibraryNumThreads(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  jint mathThreadsNum =
+      static_cast<jint>(PD_ConfigGetCpuMathLibraryNumThreads(
+          reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
+  return mathThreadsNum;
+}
+
+// 5. MKLDNN settings
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableMKLDNN(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  PD_ConfigEnableMKLDNN(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_mkldnnEnabled(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag = PD_ConfigMkldnnEnabled(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_enableMkldnnBfloat16(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  PD_ConfigEnableMkldnnBfloat16(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_mkldnnBfloat16Enabled(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag = PD_ConfigMkldnnBfloat16Enabled(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+// 6. gpu setting
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableUseGpu(
+    JNIEnv* env,
+    jobject obj,
+    jlong cppPaddleConfigPointer,
+    jlong memorySize,
+    jint deviceId) {
+  PD_ConfigEnableUseGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                        (uint64_t)memorySize,
+                        (int32_t)deviceId,
+                        0);
+}
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_disableGpu(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  PD_ConfigDisableGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_useGpu(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag =
+      PD_ConfigUseGpu(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+JNIEXPORT jint JNICALL Java_com_baidu_paddle_inference_Config_gpuDeviceId(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  int device_id = PD_ConfigGpuDeviceId(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return static_cast<jint>(device_id);
+}
+
+JNIEXPORT jint JNICALL
+Java_com_baidu_paddle_inference_Config_memoryPoolInitSizeMb(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  int memory_pool_init_size_mb = PD_ConfigMemoryPoolInitSizeMb(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return static_cast<jint>(memory_pool_init_size_mb);
+}
+
+JNIEXPORT jfloat JNICALL
+Java_com_baidu_paddle_inference_Config_fractionOfGpuMemoryForPool(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  float fraction_of_gpuMemory_for_pool = PD_ConfigFractionOfGpuMemoryForPool(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return (jfloat)fraction_of_gpuMemory_for_pool;
+}
+
+// 7. TensorRT To Do
+
+// 8. optim setting
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrOptim(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
+  PD_ConfigSwitchIrOptim(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                         jboolean_to_cpp_bool(env, flag));
+}
+
+JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Config_irOptim(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag =
+      PD_ConfigIrOptim(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrDebug(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
+  PD_ConfigSwitchIrDebug(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+                         jboolean_to_cpp_bool(env, flag));
+}
+
+// 9. enable memory optimization
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableMemoryOptim(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer, jboolean flag) {
+  PD_ConfigEnableMemoryOptim(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer),
+      jboolean_to_cpp_bool(env, flag));
+}
+
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_memoryOptimEnabled(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag = PD_ConfigMemoryOptimEnabled(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+// 10. profile setting
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableProfile(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  PD_ConfigEnableProfile(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_profileEnabled(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  bool flag = PD_ConfigProfileEnabled(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+  return cpp_bool_to_jboolean(env, flag);
+}
+
+// 11. log setting
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_disableGlogInfo(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  PD_ConfigDisableGlogInfo(
+      reinterpret_cast<PD_Config*>(cppPaddleConfigPointer));
+}
+
+// 12. view config configuration
+
+JNIEXPORT jstring JNICALL Java_com_baidu_paddle_inference_Config_summary(
+    JNIEnv* env, jobject obj, jlong cppPaddleConfigPointer) {
+  return cpp_string_to_jstring(
+      env,
+      PD_ConfigSummary(reinterpret_cast<PD_Config*>(cppPaddleConfigPointer)));
+}

+ 290 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Config.h

@@ -0,0 +1,290 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class com_baidu_paddle_inference_Config */
+#pragma once
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    cppConfigDestroy
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_cppConfigDestroy(
+    JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    createCppConfig
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Config_createCppConfig(JNIEnv *, jobject);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    isCppConfigValid
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_isCppConfigValid(JNIEnv *,
+                                                        jobject,
+                                                        jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    setCppModel
+ * Signature: (JLjava/lang/String;Ljava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppModel(
+    JNIEnv *, jobject, jlong, jstring, jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    setCppModelDir
+ * Signature: (JLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppModelDir(
+    JNIEnv *, jobject, jlong, jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    setCppProgFile
+ * Signature: (JLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppProgFile(
+    JNIEnv *, jobject, jlong, jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    setCppParamsFile
+ * Signature: (JLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_setCppParamsFile(
+    JNIEnv *, jobject, jlong, jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    modelDir
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Config_modelDir(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    progFile
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Config_progFile(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    paramsFile
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Config_paramsFile(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    setCpuMathLibraryNumThreads
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_setCpuMathLibraryNumThreads(JNIEnv *,
+                                                                   jobject,
+                                                                   jlong,
+                                                                   jint);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    cpuMathLibraryNumThreads
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_com_baidu_paddle_inference_Config_cpuMathLibraryNumThreads(JNIEnv *,
+                                                                jobject,
+                                                                jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    enableMKLDNN
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_enableMKLDNN(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    mkldnnEnabled
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_mkldnnEnabled(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    enableMkldnnBfloat16
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_enableMkldnnBfloat16(JNIEnv *,
+                                                            jobject,
+                                                            jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    mkldnnBfloat16Enabled
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_mkldnnBfloat16Enabled(JNIEnv *,
+                                                             jobject,
+                                                             jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    enableUseGpu
+ * Signature: (JJI)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableUseGpu(
+    JNIEnv *, jobject, jlong, jlong, jint);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    disableGpu
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_disableGpu(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    useGpu
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_useGpu(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    gpuDeviceId
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_com_baidu_paddle_inference_Config_gpuDeviceId(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    memoryPoolInitSizeMb
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_com_baidu_paddle_inference_Config_memoryPoolInitSizeMb(JNIEnv *,
+                                                            jobject,
+                                                            jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    fractionOfGpuMemoryForPool
+ * Signature: (J)F
+ */
+JNIEXPORT jfloat JNICALL
+Java_com_baidu_paddle_inference_Config_fractionOfGpuMemoryForPool(JNIEnv *,
+                                                                  jobject,
+                                                                  jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    switchIrOptim
+ * Signature: (JZ)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrOptim(
+    JNIEnv *, jobject, jlong, jboolean);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    irOptim
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_irOptim(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    switchIrDebug
+ * Signature: (JZ)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_switchIrDebug(
+    JNIEnv *, jobject, jlong, jboolean);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    enableMemoryOptim
+ * Signature: (JZ)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_enableMemoryOptim(
+    JNIEnv *, jobject, jlong, jboolean);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    memoryOptimEnabled
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_memoryOptimEnabled(JNIEnv *,
+                                                          jobject,
+                                                          jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    enableProfile
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Config_enableProfile(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    profileEnabled
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Config_profileEnabled(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    disableGlogInfo
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Config_disableGlogInfo(
+    JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Config
+ * Method:    summary
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Config_summary(JNIEnv *, jobject, jlong);
+
+#ifdef __cplusplus
+}
+#endif

+ 107 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Predictor.cpp

@@ -0,0 +1,107 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "com_baidu_paddle_inference_Predictor.h"
+
+#include <jni.h>
+
+#include "jni_convert_util.h"  // NOLINT
+#include "pd_inference_api.h"  // NOLINT
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_cppPredictorDestroy(
+    JNIEnv*, jobject, jlong cppPaddlePredictorPointer) {
+  PD_PredictorDestroy(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_predictorTryShrinkMemory(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  PD_PredictorTryShrinkMemory(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_predictorClearIntermediateTensor(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  PD_PredictorClearIntermediateTensor(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_createPredictor(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  return (jlong)PD_PredictorCreate(
+      reinterpret_cast<PD_Config*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT jlong JNICALL Java_com_baidu_paddle_inference_Predictor_getInputNum(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  return (jlong)PD_PredictorGetInputNum(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT jlong JNICALL Java_com_baidu_paddle_inference_Predictor_getOutputNum(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  return (jlong)PD_PredictorGetOutputNum(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}
+
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Predictor_getInputNameByIndex(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer, jlong index) {
+  const char* c_str = PD_PredictorGetInputNames(reinterpret_cast<PD_Predictor*>(
+                                                    cppPaddlePredictorPointer))
+                          ->data[static_cast<int>(index)];
+  return env->NewStringUTF(c_str);
+}
+
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Predictor_getOutputNameByIndex(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer, jlong index) {
+  const char* c_str =
+      PD_PredictorGetOutputNames(
+          reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer))
+          ->data[static_cast<int>(index)];
+  return env->NewStringUTF(c_str);
+}
+
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_getInputHandleByName(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer, jstring name) {
+  // const char* input_name = env->GetStringUTFChars(name, 0);
+  PD_Predictor* pd_predictor =
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer);
+  jlong output_tensor = (jlong)PD_PredictorGetInputHandle(
+      pd_predictor, jstring_to_cpp_string(env, name).c_str());
+  return output_tensor;
+}
+
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_getOutputHandleByName(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer, jstring name) {
+  // const char* output_name = env->GetStringUTFChars(name, 0);
+  PD_Predictor* pd_predictor =
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer);
+  jlong output_tensor = (jlong)PD_PredictorGetOutputHandle(
+      pd_predictor, jstring_to_cpp_string(env, name).c_str());
+  return output_tensor;
+}
+
+JNIEXPORT jboolean JNICALL Java_com_baidu_paddle_inference_Predictor_runPD(
+    JNIEnv* env, jobject obj, jlong cppPaddlePredictorPointer) {
+  return (jboolean)PD_PredictorRun(
+      reinterpret_cast<PD_Predictor*>(cppPaddlePredictorPointer));
+}

+ 132 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Predictor.h

@@ -0,0 +1,132 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class com_baidu_paddle_inference_Predictor */
+
+#pragma once
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    cppPredictorDestroy
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_cppPredictorDestroy(JNIEnv *,
+                                                              jobject,
+                                                              jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    predictorTryShrinkMemory
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_predictorTryShrinkMemory(JNIEnv *,
+                                                                   jobject,
+                                                                   jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    predictorClearIntermediateTensor
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Predictor_predictorClearIntermediateTensor(
+    JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    createPredictor
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_createPredictor(JNIEnv *,
+                                                          jobject,
+                                                          jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getInputNum
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_getInputNum(JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getOutputNum
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_com_baidu_paddle_inference_Predictor_getOutputNum(
+    JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getInputNameByIndex
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Predictor_getInputNameByIndex(JNIEnv *,
+                                                              jobject,
+                                                              jlong,
+                                                              jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getOutputNameByIndex
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Predictor_getOutputNameByIndex(JNIEnv *,
+                                                               jobject,
+                                                               jlong,
+                                                               jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getInputHandleByName
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_getInputHandleByName(JNIEnv *,
+                                                               jobject,
+                                                               jlong,
+                                                               jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    getOutputHandleByName
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_com_baidu_paddle_inference_Predictor_getOutputHandleByName(JNIEnv *,
+                                                                jobject,
+                                                                jlong,
+                                                                jstring);
+
+/*
+ * Class:     com_baidu_paddle_inference_Predictor
+ * Method:    runPD
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_com_baidu_paddle_inference_Predictor_runPD(JNIEnv *, jobject, jlong);
+
+#ifdef __cplusplus
+}
+#endif

+ 133 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Tensor.cpp

@@ -0,0 +1,133 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "com_baidu_paddle_inference_Tensor.h"
+
+#include <jni.h>
+
+#include "pd_inference_api.h"  // NOLINT
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Tensor_cppTensorDestroy(
+    JNIEnv *, jobject, jlong tensorPointer) {
+  PD_TensorDestroy(reinterpret_cast<PD_Tensor *>(tensorPointer));
+}
+
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Tensor_cppTensorReshape(
+    JNIEnv *env, jobject, jlong tensorPointer, jint dim, jintArray array) {
+  int32_t *input_shape = env->GetIntArrayElements(array, nullptr);
+  PD_TensorReshape(reinterpret_cast<PD_Tensor *>(tensorPointer),
+                   static_cast<int>(dim),
+                   input_shape);
+  env->ReleaseIntArrayElements(array, input_shape, JNI_ABORT);
+}
+
+JNIEXPORT jintArray JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorGetShape(JNIEnv *env,
+                                                         jobject,
+                                                         jlong tensorPointer) {
+  PD_Tensor *tensor = reinterpret_cast<PD_Tensor *>(tensorPointer);
+  PD_OneDimArrayInt32 *output_shape = PD_TensorGetShape(tensor);
+  jintArray result = env->NewIntArray(output_shape->size);
+  env->SetIntArrayRegion(result, 0, output_shape->size, output_shape->data);
+  return result;
+}
+
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorGetName(JNIEnv *env,
+                                                        jobject,
+                                                        jlong tensorPointer) {
+  const char *c_str =
+      PD_TensorGetName(reinterpret_cast<PD_Tensor *>(tensorPointer));
+  return env->NewStringUTF(c_str);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuFloat(
+    JNIEnv *env, jobject, jlong tensorPointer, jfloatArray array) {
+  float *data = env->GetFloatArrayElements(array, nullptr);
+  PD_TensorCopyFromCpuFloat(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
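+  // JNI_ABORT: release the native copy without writing back; Paddle only read the input data.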
+  env->ReleaseFloatArrayElements(array, data, JNI_ABORT);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuInt(
+    JNIEnv *env, jobject, jlong tensorPointer, jintArray array) {
+  int32_t *data = env->GetIntArrayElements(array, nullptr);
+  PD_TensorCopyFromCpuInt32(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseIntArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuLong(
+    JNIEnv *env, jobject, jlong tensorPointer, jlongArray array) {
+  int64_t *data = env->GetLongArrayElements(array, nullptr);
+  PD_TensorCopyFromCpuInt64(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseLongArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuByte(
+    JNIEnv *env, jobject, jlong tensorPointer, jbyteArray array) {
+  int8_t *data = env->GetByteArrayElements(array, nullptr);
+  PD_TensorCopyFromCpuInt8(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseByteArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuBoolean(
+    JNIEnv *env, jobject, jlong tensorPointer, jbooleanArray array) {
+  uint8_t *data = env->GetBooleanArrayElements(array, nullptr);
+  PD_TensorCopyFromCpuUint8(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseBooleanArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuFloat(
+    JNIEnv *env, jobject, jlong tensorPointer, jfloatArray array) {
+  float *data = env->GetFloatArrayElements(array, nullptr);
+  PD_TensorCopyToCpuFloat(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
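+  // Mode 0: copy the values Paddle wrote back into the Java array, then release it.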
+  env->ReleaseFloatArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuInt(
+    JNIEnv *env, jobject, jlong tensorPointer, jintArray array) {
+  int32_t *data = env->GetIntArrayElements(array, nullptr);
+  PD_TensorCopyToCpuInt32(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseIntArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuLong(
+    JNIEnv *env, jobject, jlong tensorPointer, jlongArray array) {
+  int64_t *data = env->GetLongArrayElements(array, nullptr);
+  PD_TensorCopyToCpuInt64(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseLongArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuByte(
+    JNIEnv *env, jobject, jlong tensorPointer, jbyteArray array) {
+  int8_t *data = env->GetByteArrayElements(array, nullptr);
+  PD_TensorCopyToCpuInt8(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseByteArrayElements(array, data, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuBoolean(
+    JNIEnv *env, jobject, jlong tensorPointer, jbooleanArray array) {
+  uint8_t *data = env->GetBooleanArrayElements(array, nullptr);
+  PD_TensorCopyToCpuUint8(reinterpret_cast<PD_Tensor *>(tensorPointer), data);
+  env->ReleaseBooleanArrayElements(array, data, 0);
+}

+ 169 - 0
recommend-model-jni/src/main/c/com/baidu/paddle/inference/com_baidu_paddle_inference_Tensor.h

@@ -0,0 +1,169 @@
+// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class com_baidu_paddle_inference_Tensor */
+
+#pragma once
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorDestroy
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Tensor_cppTensorDestroy(
+    JNIEnv *, jobject, jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorReshape
+ * Signature: (JI[I)V
+ */
+JNIEXPORT void JNICALL Java_com_baidu_paddle_inference_Tensor_cppTensorReshape(
+    JNIEnv *, jobject, jlong, jint, jintArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorGetShape
+ * Signature: (J)[I
+ */
+JNIEXPORT jintArray JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorGetShape(JNIEnv *,
+                                                         jobject,
+                                                         jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorGetName
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorGetName(JNIEnv *,
+                                                        jobject,
+                                                        jlong);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyFromCpuFloat
+ * Signature: (J[F)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuFloat(JNIEnv *,
+                                                                 jobject,
+                                                                 jlong,
+                                                                 jfloatArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyFromCpuInt
+ * Signature: (J[I)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuInt(JNIEnv *,
+                                                               jobject,
+                                                               jlong,
+                                                               jintArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyFromCpuLong
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuLong(JNIEnv *,
+                                                                jobject,
+                                                                jlong,
+                                                                jlongArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyFromCpuByte
+ * Signature: (J[B)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuByte(JNIEnv *,
+                                                                jobject,
+                                                                jlong,
+                                                                jbyteArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyFromCpuBoolean
+ * Signature: (J[Z)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyFromCpuBoolean(
+    JNIEnv *, jobject, jlong, jbooleanArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyToCpuFloat
+ * Signature: (J[F)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuFloat(JNIEnv *,
+                                                               jobject,
+                                                               jlong,
+                                                               jfloatArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyToCpuInt
+ * Signature: (J[I)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuInt(JNIEnv *,
+                                                             jobject,
+                                                             jlong,
+                                                             jintArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyToCpuLong
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuLong(JNIEnv *,
+                                                              jobject,
+                                                              jlong,
+                                                              jlongArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyToCpuByte
+ * Signature: (J[B)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuByte(JNIEnv *,
+                                                              jobject,
+                                                              jlong,
+                                                              jbyteArray);
+
+/*
+ * Class:     com_baidu_paddle_inference_Tensor
+ * Method:    cppTensorCopyToCpuBoolean
+ * Signature: (J[Z)V
+ */
+JNIEXPORT void JNICALL
+Java_com_baidu_paddle_inference_Tensor_cppTensorCopyToCpuBoolean(JNIEnv *,
+                                                                 jobject,
+                                                                 jlong,
+                                                                 jbooleanArray);
+
+#ifdef __cplusplus
+}
+#endif

+ 270 - 0
recommend-model-jni/src/main/java/com/baidu/paddle/inference/Config.java

@@ -0,0 +1,270 @@
+package com.baidu.paddle.inference;
+
+public class Config {
+    private long cppPaddleConfigPointer;
+    private String modelFile;
+    private String paramsFile;
+    private String modelDir;
+    private String progFile;
+    private int mathThreadsNum;
+
+
+    public Config() {
+        this.cppPaddleConfigPointer = createCppConfig();
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        destroyNativeConfig();
+    }
+
+    public void destroyNativeConfig() {
+        if(cppPaddleConfigPointer != 0) cppConfigDestroy(cppPaddleConfigPointer);
+        cppPaddleConfigPointer = 0;
+    }
+
+    public boolean isValid() {
+        if(cppPaddleConfigPointer == 0) return false;
+        return isCppConfigValid(cppPaddleConfigPointer);
+    }
+
+
+    public void setCppModel(String modelFile, String paramsFile) {
+
+        this.modelFile = modelFile;
+        this.paramsFile = paramsFile;
+        setCppModel(this.cppPaddleConfigPointer, modelFile, paramsFile);
+    }
+
+    public void setCppModelDir(String modelDir) {
+        this.modelDir = modelDir;
+        setCppModelDir(this.cppPaddleConfigPointer, modelDir);
+    }
+
+    public void setCppProgFile(String progFile){
+        this.progFile = progFile;
+        setCppProgFile(this.cppPaddleConfigPointer, progFile);
+    }
+
+    public void setCppParamsFile(String paramsFile){
+        this.paramsFile = paramsFile;
+        setCppParamsFile(this.cppPaddleConfigPointer, paramsFile);
+    }
+
+    public String getCppModelDir() {
+        return modelDir(this.cppPaddleConfigPointer);
+    }
+
+    public String getCppProgFile(){
+        return progFile(this.cppPaddleConfigPointer);
+    }
+
+    public String getCppParamsFile() {
+        return paramsFile(this.cppPaddleConfigPointer);
+    }
+
+    public void setCpuMathLibraryNumThreads(int mathThreadsNum){
+        this.mathThreadsNum = mathThreadsNum;
+        setCpuMathLibraryNumThreads(this.cppPaddleConfigPointer, mathThreadsNum);
+    }
+
+    public int getCpuMathLibraryNumThreads(){
+        return cpuMathLibraryNumThreads(this.cppPaddleConfigPointer);
+    }
+
+    public void enableMKLDNN(){
+        enableMKLDNN(this.cppPaddleConfigPointer);
+    }
+
+    public boolean mkldnnEnabled(){
+        return mkldnnEnabled(this.cppPaddleConfigPointer);
+    }
+
+    public void enableMkldnnBfloat16(){
+        enableMkldnnBfloat16(this.cppPaddleConfigPointer);
+    }
+
+    public boolean mkldnnBfloat16Enabled(){
+        return mkldnnBfloat16Enabled(this.cppPaddleConfigPointer);
+    }
+
+    public void enableUseGpu(long memorySize, int deviceId){
+        enableUseGpu(this.cppPaddleConfigPointer, memorySize, deviceId);
+    }
+
+    public void disableGpu(){
+        disableGpu(this.cppPaddleConfigPointer);
+    }
+
+    public boolean useGpu(){
+        return useGpu(this.cppPaddleConfigPointer);
+    }
+
+    public int getGpuDeviceId(){
+        return gpuDeviceId(this.cppPaddleConfigPointer);
+    }
+
+    public int getMemoryPoolInitSizeMb(){
+        return memoryPoolInitSizeMb(this.cppPaddleConfigPointer);
+    }
+
+    public float getFractionOfGpuMemoryForPool(){
+        return fractionOfGpuMemoryForPool(this.cppPaddleConfigPointer);
+    }
+
+    public void switchIrOptim(boolean flag){
+        switchIrOptim(this.cppPaddleConfigPointer, flag);
+    }
+
+    public boolean irOptim(){
+        return irOptim(this.cppPaddleConfigPointer);
+    }
+
+    public void switchIrDebug(boolean flag){
+        switchIrDebug(this.cppPaddleConfigPointer, flag);
+    }
+
+    public void enableMemoryOptim(boolean flag){
+        enableMemoryOptim(this.cppPaddleConfigPointer, flag);
+    }
+
+    public boolean memoryOptimEnabled(){
+        return memoryOptimEnabled(this.cppPaddleConfigPointer);
+    }
+
+    public void enableProfile(){
+        enableProfile(this.cppPaddleConfigPointer);
+    }
+
+    public boolean profileEnabled(){
+        return profileEnabled(this.cppPaddleConfigPointer);
+    }
+
+    public void disableGlogInfo(){
+        disableGlogInfo(this.cppPaddleConfigPointer);
+    }
+
+    public String summary(){
+        return summary(this.cppPaddleConfigPointer);
+    }
+
+    public long getCppPaddleConfigPointer() {
+        return cppPaddleConfigPointer;
+    }
+
+    public String getModelFile() {
+        return modelFile;
+    }
+
+    public String getParamsFile() {
+        return paramsFile;
+    }
+
+    public String getModelDir() {
+        return modelDir;
+    }
+
+    public String getProgFile() {
+        return progFile;
+    }
+
+    public int getMathThreadsNum() {
+        return mathThreadsNum;
+    }
+
+    public void resetCppPaddleConfigPointer() {
+        cppPaddleConfigPointer = 0;
+    }
+
+    private native void cppConfigDestroy(long cppPaddleConfigPointer);
+
+    // 1. create Config
+
+    private native long createCppConfig();
+
+    private native boolean isCppConfigValid(long cppPaddleConfigPointer);
+
+    // 2. not combined model settings
+
+    private native void setCppModel(long cppPaddleConfigPointer, String modelFile, String paramsFile);
+
+    // 3. combined model settings
+
+    private native void setCppModelDir(long cppPaddleConfigPointer, String modelDir);
+
+    private native void setCppProgFile(long cppPaddleConfigPointer, String modelFile);
+
+    private native void setCppParamsFile(long cppPaddleConfigPointer, String paramsFile);
+
+    private native String modelDir(long cppPaddleConfigPointer);
+
+    private native String progFile(long cppPaddleConfigPointer);
+
+    private native String paramsFile(long cppPaddleConfigPointer);
+
+    // 4. cpu settings
+
+    private native void setCpuMathLibraryNumThreads(long cppPaddleConfigPointer, int mathThreadsNum);
+
+    private native int cpuMathLibraryNumThreads(long cppPaddleConfigPointer);
+
+    // 5. MKLDNN settings
+
+    private native void enableMKLDNN(long cppPaddleConfigPointer);
+
+    private native boolean mkldnnEnabled(long cppPaddleConfigPointer);
+
+    private native void enableMkldnnBfloat16(long cppPaddleConfigPointer);
+
+    private native boolean mkldnnBfloat16Enabled(long cppPaddleConfigPointer);
+
+    // 6. gpu setting
+
+    // Known limitation: Java has no uint64, so long is used instead.
+    // When memorySize is too large, the Java long becomes negative.
+    private native void enableUseGpu(long cppPaddleConfigPointer, long memorySize, int deviceId);
+
+    private native void disableGpu(long cppPaddleConfigPointer);
+
+    private native boolean useGpu(long cppPaddleConfigPointer);
+
+    private native int gpuDeviceId(long cppPaddleConfigPointer);
+
+    private native int memoryPoolInitSizeMb(long cppPaddleConfigPointer);
+
+    private native float fractionOfGpuMemoryForPool(long cppPaddleConfigPointer);
+
+    // 7. TensorRT settings (TODO)
+
+    // 8. optim setting
+
+    private native void switchIrOptim(long cppPaddleConfigPointer, boolean flag);
+
+    private native boolean irOptim(long cppPaddleConfigPointer);
+
+    private native void switchIrDebug(long cppPaddleConfigPointer, boolean flag);
+
+    // 9. enable memory optimization
+
+    private native void enableMemoryOptim(long cppPaddleConfigPointer, boolean flag);
+
+    private native boolean memoryOptimEnabled(long cppPaddleConfigPointer);
+
+    // 10. profile setting
+
+    private native void enableProfile(long cppPaddleConfigPointer);
+
+    private native boolean profileEnabled(long cppPaddleConfigPointer);
+
+    // 11. log setting
+
+    private native void disableGlogInfo(long cppPaddleConfigPointer);
+
+    // 12. view config configuration
+
+    private native String summary(long cppPaddleConfigPointer);
+
+
+}
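
A minimal usage sketch for the Config API above, assuming the compiled JNI library is on java.library.path; the model/params paths are hypothetical:

    System.loadLibrary("paddle_inference");
    Config config = new Config();
    config.setCppModel("/path/to/model.pdmodel", "/path/to/model.pdiparams"); // hypothetical paths
    config.enableMKLDNN();            // CPU path
    // config.enableUseGpu(512, 0);   // GPU path: 512 MB initial memory pool on device 0
    config.enableMemoryOptim(true);
    System.out.println(config.summary());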

+ 97 - 0
recommend-model-jni/src/main/java/com/baidu/paddle/inference/Predictor.java

@@ -0,0 +1,97 @@
+package com.baidu.paddle.inference;
+
+public class Predictor {
+
+    private long cppPaddlePredictorPointer;
+
+    private long inputNum;
+
+    private long outputNum;
+
+    public Predictor(Config config) {
+        cppPaddlePredictorPointer = createPredictor(config.getCppPaddleConfigPointer());
+        config.resetCppPaddleConfigPointer();
+        inputNum = getInputNum(cppPaddlePredictorPointer);
+        outputNum = getOutputNum(cppPaddlePredictorPointer);
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        destroyNativePredictor();
+    }
+
+    public static Predictor createPaddlePredictor(Config config){
+        Predictor predictor = new Predictor(config);
+        return predictor.cppPaddlePredictorPointer == 0L ? null : predictor;
+    }
+
+    public void destroyNativePredictor() {
+        if(cppPaddlePredictorPointer != 0) cppPredictorDestroy(cppPaddlePredictorPointer);
+        cppPaddlePredictorPointer = 0;
+    }
+
+    public String getInputNameById(long id){
+        return getInputNameByIndex(this.cppPaddlePredictorPointer, id);
+    }
+
+    public String getOutputNameById(long id){
+        return getOutputNameByIndex(this.cppPaddlePredictorPointer, id);
+    }
+
+    public Tensor getInputHandle(String name){
+        long cppTensorPointer = getInputHandleByName(this.cppPaddlePredictorPointer, name);
+        return cppTensorPointer == 0 ? null : new Tensor(cppTensorPointer);
+    }
+
+    public Tensor getOutputHandle(String name){
+        long cppTensorPointer = getOutputHandleByName(this.cppPaddlePredictorPointer, name);
+        return cppTensorPointer == 0 ? null : new Tensor(cppTensorPointer);
+    }
+
+    public void clearIntermediateTensor(){
+        predictorClearIntermediateTensor(this.cppPaddlePredictorPointer);
+    }
+
+    public void tryShrinkMemory(){
+        predictorTryShrinkMemory(this.cppPaddlePredictorPointer);
+    }
+
+
+    public boolean run(){
+        return runPD(this.cppPaddlePredictorPointer);
+    }
+
+    public long getCppPaddlePredictorPointer() {
+        return cppPaddlePredictorPointer;
+    }
+
+    public long getInputNum() {
+        return inputNum;
+    }
+
+    public long getOutputNum() {
+        return outputNum;
+    }
+
+    private native void cppPredictorDestroy(long cppPaddlePredictorPointer);
+
+    private native void predictorTryShrinkMemory(long cppPaddlePredictorPointer);
+
+    private native void predictorClearIntermediateTensor(long cppPaddlePredictorPointer);
+
+    private native long createPredictor(long cppPaddleConfigPointer);
+
+    private native long getInputNum(long cppPaddlePredictorPointer);
+
+    private native long getOutputNum(long cppPaddlePredictorPointer);
+
+    private native String getInputNameByIndex(long cppPaddlePredictorPointer, long index);
+
+    private native String getOutputNameByIndex(long cppPaddlePredictorPointer, long index);
+
+    private native long getInputHandleByName(long cppPaddlePredictorPointer, String name);
+
+    private native long getOutputHandleByName(long cppPaddlePredictorPointer, String name);
+
+    private native boolean runPD(long cppPaddlePredictorPointer);
+}
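
Note that the Predictor constructor takes ownership of the native config (it resets the Config's pointer), so a Config must not be reused after the predictor is created. A minimal inference sketch, assuming a model with one float input of shape [1, 128] (the shape is hypothetical):

    Predictor predictor = Predictor.createPaddlePredictor(config);
    Tensor input = predictor.getInputHandle(predictor.getInputNameById(0));
    input.reshape(2, new int[]{1, 128});
    input.copyFromCpu(new float[128]);
    if (predictor.run()) {
        Tensor output = predictor.getOutputHandle(predictor.getOutputNameById(0));
        float[] result = new float[output.getSize()];
        output.copyToCpu(result);
    }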

+ 101 - 0
recommend-model-jni/src/main/java/com/baidu/paddle/inference/Tensor.java

@@ -0,0 +1,101 @@
+package com.baidu.paddle.inference;
+
+public class Tensor {
+
+    long nativeTensorPointer;
+
+    // Constructor
+    public Tensor(long nativeTensorPointer) {
+        this.nativeTensorPointer = nativeTensorPointer;
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        destroyNativeTensor();
+    }
+
+    public void destroyNativeTensor() {
+        if(nativeTensorPointer != 0) cppTensorDestroy(nativeTensorPointer);
+        nativeTensorPointer = 0;
+    }
+
+    public void reshape(int dimNum, int[] shape) {
+        cppTensorReshape(nativeTensorPointer, dimNum, shape);
+    }
+
+    public int getSize() {
+        int[] shape = getShape();
+        if (shape.length == 0) return 0;
+        int size = 1;
+        for (int i : shape) size *= i;
+        return size;
+    }
+
+    public int[] getShape() {
+        return cppTensorGetShape(nativeTensorPointer);
+    }
+
+    public String getName() {
+        return cppTensorGetName(nativeTensorPointer);
+    }
+
+    public long getCppPaddleTensorPointer() {
+        return nativeTensorPointer;
+    }
+
+    public void copyFromCpu(Object obj) {
+        if (obj instanceof float[]) {
+            cppTensorCopyFromCpuFloat(this.nativeTensorPointer, (float[]) obj);
+        } else if (obj instanceof long[]) {
+            cppTensorCopyFromCpuLong(this.nativeTensorPointer, (long[]) obj);
+        } else if (obj instanceof int[]) {
+            cppTensorCopyFromCpuInt(this.nativeTensorPointer, (int[]) obj);
+        } else if (obj instanceof byte[]) {
+            cppTensorCopyFromCpuByte(this.nativeTensorPointer, (byte[]) obj);
+        } else if (obj instanceof boolean[]) {
+            cppTensorCopyFromCpuBoolean(this.nativeTensorPointer, (boolean[]) obj);
+        }
+    }
+
+    public void copyToCpu(Object obj) {
+        if (obj instanceof float[]) {
+            cppTensorCopyToCpuFloat(this.nativeTensorPointer, (float[]) obj);
+        } else if (obj instanceof long[]) {
+            cppTensorCopyToCpuLong(this.nativeTensorPointer, (long[]) obj);
+        } else if (obj instanceof int[]) {
+            cppTensorCopyToCpuInt(this.nativeTensorPointer, (int[]) obj);
+        } else if (obj instanceof byte[]) {
+            cppTensorCopyToCpuByte(this.nativeTensorPointer, (byte[]) obj);
+        } else if (obj instanceof boolean[]) {
+            cppTensorCopyToCpuBoolean(this.nativeTensorPointer, (boolean[]) obj);
+        }
+    }
+
+    private native void cppTensorDestroy(long tensorPointer);
+
+    private native void cppTensorReshape(long tensorPointer, int dimNum, int[] shape);
+
+    private native int[] cppTensorGetShape(long tensorPointer);
+
+    private native String cppTensorGetName(long tensorPointer);
+
+    private native void cppTensorCopyFromCpuFloat(long tensorPointer, float[] data);
+
+    private native void cppTensorCopyFromCpuInt(long tensorPointer, int[] data);
+
+    private native void cppTensorCopyFromCpuLong(long tensorPointer, long[] data);
+
+    private native void cppTensorCopyFromCpuByte(long tensorPointer, byte[] data);
+
+    private native void cppTensorCopyFromCpuBoolean(long tensorPointer, boolean[] data);
+
+    private native void cppTensorCopyToCpuFloat(long tensorPointer, float[] data);
+
+    private native void cppTensorCopyToCpuInt(long tensorPointer, int[] data);
+
+    private native void cppTensorCopyToCpuLong(long tensorPointer, long[] data);
+
+    private native void cppTensorCopyToCpuByte(long tensorPointer, byte[] data);
+
+    private native void cppTensorCopyToCpuBoolean(long tensorPointer, boolean[] data);
+}
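
Note that copyFromCpu/copyToCpu dispatch on the runtime array type and silently do nothing for unsupported types, and that output buffers must be pre-sized by the caller. A sketch:

    float[] ok = new float[tensor.getSize()];    // size = product of getShape() dims
    tensor.copyToCpu(ok);                        // supported type: buffer is filled
    double[] bad = new double[tensor.getSize()];
    tensor.copyToCpu(bad);                       // no double[] overload: silent no-op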

+ 65 - 0
recommend-model-jni/src/test/java/com/baidu/paddle/inference/test.java

@@ -0,0 +1,65 @@
+package com.baidu.paddle.inference;
+
+public class test {
+
+    static {
+        System.loadLibrary("paddle_inference");
+    }
+
+    public static void main(String[] args) {
+        Config config = new Config();
+
+        config.setCppModel(args[0], args[1]);
+        config.enableMemoryOptim(true);
+        config.enableProfile();
+        config.enableMKLDNN();
+
+        System.out.println("summary:\n" + config.summary());
+        System.out.println("model dir:\n" + config.getCppModelDir());
+        System.out.println("prog file:\n" + config.getProgFile());
+        System.out.println("params file:\n" + config.getCppParamsFile());
+
+        config.getCpuMathLibraryNumThreads();
+        config.getFractionOfGpuMemoryForPool();
+        config.switchIrDebug(false);
+        System.out.println(config.summary());
+
+        Predictor predictor = Predictor.createPaddlePredictor(config);
+
+        long n = predictor.getInputNum();
+
+        String inNames = predictor.getInputNameById(0);
+
+        Tensor inHandle = predictor.getInputHandle(inNames);
+
+        inHandle.reshape(4, new int[]{1, 3, 224, 224});
+
+        float[] inData = new float[1 * 3 * 224 * 224];
+        inHandle.copyFromCpu(inData);
+        predictor.run();
+        String outNames = predictor.getOutputNameById(0);
+        Tensor outHandle = predictor.getOutputHandle(outNames);
+        float[] outData = new float[outHandle.getSize()];
+        outHandle.copyToCpu(outData);
+
+        predictor.tryShrinkMemory();
+        predictor.clearIntermediateTensor();
+
+        System.out.println(outData[0]);
+        System.out.println(outData.length);
+
+        outHandle.destroyNativeTensor();
+        inHandle.destroyNativeTensor();
+        predictor.destroyNativePredictor();
+
+        Config newConfig = new Config();
+        newConfig.setCppModelDir("/model_dir");
+        newConfig.setCppProgFile("/prog_file");
+        newConfig.setCppParamsFile("/param");
+        System.out.println("model dir:\n" + newConfig.getCppModelDir());
+        System.out.println("prog file:\n" + newConfig.getProgFile());
+        System.out.println("params file:\n" + newConfig.getCppParamsFile());
+        newConfig.destroyNativeConfig();
+
+    }
+}

+ 5 - 0
recommend-model-service/pom.xml

@@ -104,6 +104,11 @@
             <artifactId>commons-collections4</artifactId>
             <version>4.1</version>
         </dependency>
+        <dependency>
+            <groupId>com.aliyun.oss</groupId>
+            <artifactId>aliyun-sdk-oss</artifactId>
+            <version>3.15.1</version>
+        </dependency>
     </dependencies>
     <build>
         <finalName>recommend-model-service</finalName>

+ 16 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/WarmUpService.java

@@ -0,0 +1,16 @@
+package com.tzld.piaoquan.recommend.model.service;
+
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+
+/**
+ * @author dyp
+ */
+@Component
+public class WarmUpService {
+    @PostConstruct
+    public void warmup() {
+
+    }
+}
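
The warmup hook is still empty. One hedged sketch of what it could do at startup is to register models with the ModelManager below (the model name and OSS key here are hypothetical):

    @PostConstruct
    public void warmup() throws Exception {
        ModelManager.getInstance()
                .registerModel("dssm-v1", "models/dssm/model.tar.gz", DSSMModel.class);
    }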

+ 52 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/DSSMModel.java

@@ -0,0 +1,52 @@
+package com.tzld.piaoquan.recommend.model.service.model;
+
+
+import com.baidu.paddle.inference.Predictor;
+import com.tzld.piaoquan.recommend.model.util.CompressUtil;
+import com.tzld.piaoquan.recommend.model.util.PropertiesUtil;
+import org.apache.commons.lang.math.NumberUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.Map;
+
+
+public class DSSMModel extends Model {
+    private static final Logger LOGGER = LoggerFactory.getLogger(DSSMModel.class);
+
+    private Predictor predictor;
+
+    @Override
+    public boolean loadFromStream(InputStreamReader in) throws Exception {
+        // A character stream cannot carry the binary model archive;
+        // use the InputStream overload below instead.
+        return false;
+    }
+
+    public void cleanModel() {
+        this.predictor = null;
+    }
+
+    public Float infer(Map<String, String> featureMap, String[] features) {
+        // Leftover scoring path from the XGBoost model: it built a DMatrix from the
+        // feature values and returned model._booster().predict(dm, false, 100)[0][0].
+        // The DSSM scoring path through the Paddle predictor is still to be wired up.
+        try {
+            float[] values = new float[features.length];
+            for (int i = 0; i < features.length; i++) {
+                values[i] = NumberUtils.toFloat(featureMap.getOrDefault(features[i], "0.0"), 0.0f);
+            }
+            // TODO: feed values into this.predictor and return its output.
+            return 0f;
+        } catch (Exception e) {
+            return 0f;
+        }
+    }
+
+    @Override
+    public boolean loadFromStream(InputStream in) throws Exception {
+        // Unpack the tar.gz model archive into the local model directory.
+        String modelDir = PropertiesUtil.getString("model.xgboost.path");
+        CompressUtil.decompressGzFile(in, modelDir);
+        // TODO: build a Paddle Config from the unpacked files and create the Predictor.
+        return true;
+    }
+
+}

+ 14 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/Model.java

@@ -0,0 +1,14 @@
+package com.tzld.piaoquan.recommend.model.service.model;
+
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+public abstract class Model {
+
+    public abstract boolean loadFromStream(InputStreamReader in) throws Exception;
+    public boolean loadFromStream(InputStream is) throws Exception {
+        return loadFromStream(new InputStreamReader(is));
+    }
+}
+

+ 239 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/ModelManager.java

@@ -0,0 +1,239 @@
+package com.tzld.piaoquan.recommend.model.service.model;
+
+
+import com.aliyun.oss.OSS;
+import com.aliyun.oss.OSSClientBuilder;
+import com.aliyun.oss.common.auth.CredentialsProvider;
+import com.aliyun.oss.common.auth.DefaultCredentialProvider;
+import com.aliyun.oss.model.OSSObject;
+import com.ctrip.framework.apollo.Config;
+import com.ctrip.framework.apollo.ConfigService;
+import lombok.extern.slf4j.Slf4j;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+
+@Slf4j
+public class ModelManager {
+    private static final int SCHEDULE_PERIOD = 10;
+    private static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
+    private static volatile ModelManager instance;
+    private final Map<String, ModelLoadTask> loadTasks = new ConcurrentHashMap<>();
+    private final Map<String, String> modelPathMap = new ConcurrentHashMap<>();
+    private OSS client;
+    private String bucketName;
+
+    private final String modelOssEndpoint = "model.oss.internal.endpoint";
+    private final String modelOssAccessKeyId = "model.oss.accessKeyId";
+    private final String modelOssAccessKeySecret = "model.oss.accessKetSecret";
+    private final String modelOssBucketName = "model.oss.bucketName";
+
+    private ModelManager() {
+        // config load
+        Config config = ConfigService.getAppConfig();
+        String endpoint = config.getProperty(modelOssEndpoint, "");
+        String accessKeyId = config.getProperty(modelOssAccessKeyId, "");
+        String accessKeySecret = config.getProperty(modelOssAccessKeySecret, "");
+        // oss client
+        CredentialsProvider credentialsProvider = new DefaultCredentialProvider(accessKeyId, accessKeySecret);
+        this.client = new OSSClientBuilder().build(endpoint, credentialsProvider);
+        this.bucketName = config.getProperty(modelOssBucketName, "");
+
+        config.addChangeListener(changeEvent -> {
+            if (changeEvent.isChanged(modelOssEndpoint)
+                    || changeEvent.isChanged(modelOssAccessKeyId)
+                    || changeEvent.isChanged(modelOssAccessKeySecret)) {
+                String endpointNew = config.getProperty(modelOssEndpoint, "");
+                String accessKeyIdNew = config.getProperty(modelOssAccessKeyId, "");
+                String accessKeySecretNew = config.getProperty(modelOssAccessKeySecret, "");
+                CredentialsProvider credentialsProviderNew = new DefaultCredentialProvider(accessKeyIdNew,
+                        accessKeySecretNew);
+                this.client = new OSSClientBuilder().build(endpointNew, credentialsProviderNew);
+            }
+            if (changeEvent.isChanged(modelOssBucketName)) {
+                this.bucketName = config.getProperty(modelOssBucketName, "");
+            }
+        });
+
+
+        start(SCHEDULE_PERIOD);
+    }
+
+    public static ModelManager getInstance() {
+        if (instance == null) {
+            synchronized (ModelManager.class) {
+                if (instance == null) {
+                    instance = new ModelManager();
+                }
+            }
+        }
+        return instance;
+    }
+
+    /**
+     * Register a load task with the manager.
+     *
+     * @param modelName  model name; each model registered with the ModelManager needs a distinct name
+     * @param path       full path of the model object on OSS
+     * @param modelClass concrete Model subclass
+     */
+    public void registerModel(String modelName, String path, Class<? extends Model> modelClass) throws ModelRegisterException, IOException {
+        if (modelPathMap.containsKey(modelName)) {
+            // fail fast
+            // throw new RuntimeException(modelName + " already exists");
+            // hard-coded: ads need the video model's scores, so the two configs must stay separate
+            return;
+
+        }
+
+        modelPathMap.put(modelName, path);
+        if (loadTasks.containsKey(path)) {
+            ModelLoadTask loadTask = loadTasks.get(path);
+            loadTask.refCount++;
+        } else {
+            ModelLoadTask task = new ModelLoadTask(path, modelClass);
+            task.refCount++;
+            loadTasks.put(path, task);
+            loadModel(task, false, true);
+        }
+    }
+
+    /**
+     * Remove a load task.
+     *
+     * @param modelName model name; must match the name passed to registerModel
+     */
+    private void unRegisterModel(String modelName) {
+        if (modelPathMap.containsKey(modelName)) {
+            String path = modelPathMap.get(modelName);
+            if (loadTasks.containsKey(path)) {
+                ModelLoadTask task = loadTasks.get(path);
+                task.refCount--;
+                if (task.refCount == 0) {
+                    loadTasks.remove(path);
+                }
+            }
+            modelPathMap.remove(modelName);
+        }
+    }
+
+    /**
+     * @param modelName model name used at registration
+     * @return the currently loaded model, or null if none is available
+     */
+    public Model getModel(String modelName) {
+        if (modelPathMap.containsKey(modelName) && loadTasks.containsKey(modelPathMap.get(modelName))) {
+            return loadTasks.get(modelPathMap.get(modelName)).model;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Start the periodic update schedule.
+     *
+     * @param period schedule period, in minutes
+     */
+    protected void start(long period) {
+        final Runnable task = new Runnable() {
+            public void run() {
+                // model update switch
+                // boolean modelUpdateSwitch = Configuration.getBoolean("recommend-service-framework.model-update-switch", true);
+                boolean modelUpdateSwitch = true;
+                log.info("model update switch [{}]", modelUpdateSwitch);
+                if (modelUpdateSwitch) {
+                    updateModels(false);
+                }
+            }
+        };
+        scheduler.scheduleAtFixedRate(task, 10, period, TimeUnit.MINUTES); // initial delay: 10 minutes
+    }
+
+    /**
+     * Update all registered models.
+     */
+    public void updateModels(final boolean isForceLoads) {
+        log.info("begin to update: [{}]", loadTasks.keySet().size());
+        for (String modelPath : loadTasks.keySet()) {
+            log.debug("load task model path [{}]", modelPath);
+            ModelLoadTask task = loadTasks.get(modelPath);
+            loadModel(task, isForceLoads, false);
+        }
+    }
+
+    /**
+     * Check the model file on OSS and reload it when it has changed.
+     * <p>
+     * Streaming download from OSS:
+     * https://help.aliyun.com/zh/oss/developer-reference/streaming-download-7?spm=a2c4g.11186623.0.0.4b527c7dm8LejC
+     *
+     * @param loadTask
+     */
+    private void loadModel(final ModelLoadTask loadTask, final boolean isForceLoads, final boolean isRegister) {
+        if (loadTask.isLoading) {
+            return;
+        }
+        loadTask.isLoading = true;
+        OSSObject ossObj = null;
+        try {
+            ossObj = client.getObject(bucketName, loadTask.path);
+            long timeStamp = ossObj.getObjectMetadata().getLastModified().getTime();
+            if (loadTask.lastModifyTime < timeStamp || isForceLoads) {
+                log.info("model file changed, ready to update, last modify: [{}], current model time: [{}]",
+                        loadTask.lastModifyTime, timeStamp);
+
+                Model model = loadTask.modelClass.newInstance();
+                if (model.loadFromStream(new InputStreamReader(ossObj.getObjectContent()))) {
+                    loadTask.model = model;
+                    loadTask.lastModifyTime = timeStamp;
+                }
+            }
+        } catch (Exception e) {
+            log.error("update model fail", e);
+        } finally {
+            loadTask.isLoading = false;
+            if (ossObj != null) {
+                try {
+                    ossObj.close();
+                } catch (IOException e) {
+                    log.error("close ossObj fail", e);
+                }
+            }
+        }
+    }
+
+    public class ModelRegisterException extends Exception {
+
+        public ModelRegisterException(String s) {
+            super(s);
+        }
+    }
+
+    /**
+     * Task unit handled by the scheduler.
+     */
+    private class ModelLoadTask {
+
+        private int refCount;
+        private final String path;
+        private long lastModifyTime;
+        private boolean isLoading;
+        private final Class<? extends Model> modelClass;
+        private Model model;
+
+        ModelLoadTask(String path, Class<? extends Model> modelClass) {
+            this.refCount = 0;
+            this.path = path;
+            this.lastModifyTime = 0;
+            this.modelClass = modelClass;
+        }
+    }
+}
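
A usage sketch for the manager (the model name and OSS object key are hypothetical):

    // Registration triggers a synchronous first load; afterwards the scheduler
    // re-checks the OSS object's Last-Modified timestamp every SCHEDULE_PERIOD minutes.
    ModelManager manager = ModelManager.getInstance();
    manager.registerModel("dssm-v1", "models/dssm/model.tar.gz", DSSMModel.class);
    Model model = manager.getModel("dssm-v1"); // null if the initial load failed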

+ 15 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/service/model/OssConfig.java

@@ -0,0 +1,15 @@
+package com.tzld.piaoquan.recommend.model.service.model;
+
+import lombok.Data;
+
+/**
+ * @author dyp
+ */
+@Data
+public class OssConfig {
+
+    private String accessKeyId;
+    private String accessKeySecret;
+    private String endpoint;
+    private String bucketName;
+}

+ 123 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/util/CompressUtil.java

@@ -0,0 +1,123 @@
+package com.tzld.piaoquan.recommend.model.util;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
+
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/**
+ * @author dyp
+ */
+@Slf4j
+public class CompressUtil {
+    public static void compressDirectoryToGzip(String sourceDirPath, String outputFilePath) {
+        // output stream for the .gz file
+        try (OutputStream out = new FileOutputStream(outputFilePath);
+             GzipCompressorOutputStream gzipOut = new GzipCompressorOutputStream(out);
+             TarArchiveOutputStream taos = new TarArchiveOutputStream(gzipOut)) {
+
+            taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
+
+            // walk the directory tree
+            Files.walk(Paths.get(sourceDirPath))
+                    .filter(Files::isRegularFile)
+                    .forEach(filePath -> {
+                        try {
+                            // create a tar entry for each regular file
+                            TarArchiveEntry entry = new TarArchiveEntry(filePath.toFile(), filePath.toString().substring(sourceDirPath.length() + 1));
+                            taos.putArchiveEntry(entry);
+
+                            // copy the file contents into the tar stream
+                            try (InputStream is = Files.newInputStream(filePath)) {
+                                byte[] buffer = new byte[1024];
+                                int len;
+                                while ((len = is.read(buffer)) > 0) {
+                                    taos.write(buffer, 0, len);
+                                }
+                            }
+                            // close the entry
+                            taos.closeArchiveEntry();
+                        } catch (IOException e) {
+                            log.error("", e);
+                        }
+                    });
+        } catch (Exception e) {
+            log.error("", e);
+        }
+    }
+
+    public static void decompressGzFile(String gzipFilePath, String destDirPath) {
+        // delegate to the stream-based overload below
+        try (InputStream gzipIn = new FileInputStream(gzipFilePath)) {
+            decompressGzFile(gzipIn, destDirPath);
+        } catch (Exception e) {
+            log.error("", e);
+        }
+    }
+
+    public static void decompressGzFile(InputStream gzipIn, String destDirPath) {
+        try (GzipCompressorInputStream gzIn = new GzipCompressorInputStream(gzipIn);
+             TarArchiveInputStream tais = new TarArchiveInputStream(gzIn)) {
+
+            TarArchiveEntry entry;
+            Files.createDirectories(Paths.get(destDirPath));
+            while ((entry = tais.getNextTarEntry()) != null) {
+                if (entry.isDirectory()) {
+                    // directory entry: create the directory
+                    Files.createDirectories(Paths.get(destDirPath, entry.getName()));
+                } else {
+                    // file entry: create the file and write its contents
+                    File outputFile = new File(destDirPath, entry.getName());
+                    if (!outputFile.exists()) {
+                        File parent = outputFile.getParentFile();
+                        if (!parent.exists()) {
+                            parent.mkdirs();
+                        }
+                        outputFile.createNewFile();
+                    }
+                    try (OutputStream out = new FileOutputStream(outputFile)) {
+                        byte[] buffer = new byte[1024];
+                        int len;
+                        while ((len = tais.read(buffer)) > 0) {
+                            out.write(buffer, 0, len);
+                        }
+                    }
+                }
+            }
+        } catch (Exception e) {
+            log.error("", e);
+        }
+    }
+}
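
A round-trip sketch for the utility (paths are hypothetical):

    // pack a model directory into a tar.gz archive, then unpack it elsewhere
    CompressUtil.compressDirectoryToGzip("/data/model/dssm", "/tmp/dssm.tar.gz");
    CompressUtil.decompressGzFile("/tmp/dssm.tar.gz", "/data/model/dssm-copy");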

+ 22 - 0
recommend-model-service/src/main/java/com/tzld/piaoquan/recommend/model/util/PropertiesUtil.java

@@ -0,0 +1,22 @@
+package com.tzld.piaoquan.recommend.model.util;
+
+import org.springframework.context.EnvironmentAware;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+@Component
+public class PropertiesUtil implements EnvironmentAware {
+
+    private static Environment environment;
+
+    @Override
+    public void setEnvironment(Environment environment) {
+        PropertiesUtil.environment = environment;
+    }
+
+    public static String getString(String name) {
+        return environment.getProperty(name);
+    }
+}
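
Because the Environment is injected into a static field, getString is only safe to call after the Spring context has created the PropertiesUtil bean. For example, with a property such as model.xgboost.path defined in the application configuration:

    String modelDir = PropertiesUtil.getString("model.xgboost.path"); // null if the key is absent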