@@ -14,6 +14,7 @@ import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SparkSession;
 
+import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
 
@@ -24,6 +25,7 @@ import java.util.Map;
 public class I2IDSSMPredict {
 
     private static HDFSService hdfsService = new HDFSService();
+
     public static void main(String[] args) {
         System.loadLibrary("paddle_inference");
         CMDService cmd = new CMDService();
@@ -116,7 +118,11 @@ public class I2IDSSMPredict {
         });
         // Write the processed data to a new output file, compressed with Gzip
         String outputPath = "hdfs:/dyp/vec2";
-        hdfsService.deleteOnExit(outputPath);
+        try {
+            hdfsService.deleteOnExit(outputPath);
+        } catch (IOException e) {
+            log.error("Failed to delete existing output path " + outputPath, e);
+        }
         processedRdd.coalesce(repartition).saveAsTextFile(outputPath, GzipCodec.class);
     }
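
Note for reviewers: `HDFSService` is project-internal and not shown in this diff. The sketch below is only an assumption of what its `deleteOnExit` helper might look like if it wraps Hadoop's `FileSystem#deleteOnExit(Path)`; the class layout, field names, and `Configuration` setup are illustrative, while the Hadoop calls themselves are real API. It shows where the checked `IOException` handled by the new try/catch would originate.

```java
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Hypothetical sketch of the project-internal HDFSService referenced above.
 * Only the Hadoop FileSystem calls are real API; everything else is assumed.
 */
public class HDFSService {

    private final Configuration conf = new Configuration();

    /**
     * Marks the given path for deletion when the FileSystem is closed,
     * so stale output from a previous run does not block saveAsTextFile.
     * Propagates the checked IOException from the Hadoop FileSystem API.
     */
    public boolean deleteOnExit(String path) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(path), conf);
        return fs.deleteOnExit(new Path(path));
    }
}
```

With a helper shaped like this, the caller in `main` catches the `IOException` and logs it, so a failed cleanup of the old output path does not abort the job before `saveAsTextFile` runs.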