Browse Source

Merge branch 'feature/jch_makedata' of algorithm/recommend-emr-dataprocess into master

zhaohaipeng 4 months ago
parent
commit
652e2837b1

+ 5 - 0
pom.xml

@@ -181,6 +181,11 @@
             <artifactId>xgboost4j-spark_2.11</artifactId>
             <version>1.1.2</version>
         </dependency>
+        <dependency>
+            <groupId>com.tzld.piaoquan</groupId>
+            <artifactId>recommend-similarity</artifactId>
+            <version>1.0.0</version>
+        </dependency>
     </dependencies>
 
     <build>

+ 22 - 0
src/main/java/examples/utils/PropertiesUtil.java

@@ -0,0 +1,22 @@
+package examples.utils;
+
+import org.springframework.context.EnvironmentAware;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+@Component
+public class PropertiesUtil implements EnvironmentAware {
+
+
+    private static Environment environment;
+
+
+    @Override
+    public void setEnvironment(Environment environment) {
+        PropertiesUtil.environment = environment; // assign the static field explicitly; Spring calls this once at startup
+    }
+
+    public static String getString(String name) {
+        return environment.getProperty(name);
+    }
+}
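
A minimal usage sketch (not part of this commit). It assumes a Spring context has instantiated PropertiesUtil so that setEnvironment has already run; the property key is illustrative:

    import examples.utils.PropertiesUtil

    object PropertiesUtilDemo {
      def main(args: Array[String]): Unit = {
        // getString returns null when the key is absent from the Spring Environment
        val endpoint = PropertiesUtil.getString("oss.endpoint")
        println(s"oss.endpoint = $endpoint")
      }
    }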

+ 43 - 0
src/main/java/examples/utils/SimilarityUtils.java

@@ -0,0 +1,43 @@
+package examples.utils;
+
+import com.tzld.piaoquan.recommend.similarity.word2vec.Segment;
+import com.tzld.piaoquan.recommend.similarity.word2vec.Word2Vec;
+import lombok.extern.slf4j.Slf4j;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author dyp
+ * Copied over from recommend-server. @zhangbo 20241211
+ */
+@Slf4j
+public final class SimilarityUtils {
+
+    private static Word2Vec vec = new Word2Vec();
+
+    public static void init() {
+        Segment.getWords("1");
+        try {
+            long start = System.currentTimeMillis();
+            String endpoint = "oss-cn-hangzhou-internal.aliyuncs.com";
+            String bucketName = "art-recommend";
+            String path = "similarity/word2vec/Google_word2vec_zhwiki210720_300d.bin";
+            String accessKeyId = "LTAIP6x1l3DXfSxm";
+            String accessKeySecret = "KbTaM9ars4OX3PMS6Xm7rtxGr1FLon";
+            Word2Vec temp = new Word2Vec();
+            temp.loadGoogleModelFromOss(endpoint, bucketName, path, accessKeyId, accessKeySecret);
+            vec = temp;
+            long end = System.currentTimeMillis();
+            log.info("Word2Vec model loaded from OSS, cost {} ms.", end - start);
+        } catch (IOException e) {
+            log.error("loadGoogleModelFromOss error", e);
+        }
+    }
+
+    public static float word2VecSimilarity(String str1, String str2) {
+        List<String> words1 = Segment.getWords(str1);
+        List<String> words2 = Segment.getWords(str2);
+        return vec.sentenceSimilarity(words1, words2);
+    }
+}
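
A short calling sketch (not part of the commit; the sentences are placeholders). init() loads the model once per JVM, which is why the Spark job below invokes it inside mapPartitions rather than per record:

    import examples.utils.SimilarityUtils

    object SimilarityDemo {
      def main(args: Array[String]): Unit = {
        // one-time model load from OSS; on IOException the empty fallback model is kept
        SimilarityUtils.init()
        println(SimilarityUtils.word2VecSimilarity("今天天气不错", "今天天气很好"))
      }
    }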

+ 756 - 0
src/main/resources/20241209_recsys_feature_name_756.txt

@@ -0,0 +1,756 @@
+b111213_12h_ROS
+b111213_12h_ROV
+b111213_12h_ROV*log(return)
+b111213_12h_STR
+b111213_12h_log(return)
+b111213_12h_log(share)
+b111213_1d_ROS
+b111213_1d_ROV
+b111213_1d_ROV*log(return)
+b111213_1d_STR
+b111213_1d_log(return)
+b111213_1d_log(share)
+b111213_1h_ROS
+b111213_1h_ROV
+b111213_1h_ROV*log(return)
+b111213_1h_STR
+b111213_1h_log(return)
+b111213_1h_log(share)
+b111213_2h_ROS
+b111213_2h_ROV
+b111213_2h_ROV*log(return)
+b111213_2h_STR
+b111213_2h_log(return)
+b111213_2h_log(share)
+b111213_3d_ROS
+b111213_3d_ROV
+b111213_3d_ROV*log(return)
+b111213_3d_STR
+b111213_3d_log(return)
+b111213_3d_log(share)
+b111213_3h_ROS
+b111213_3h_ROV
+b111213_3h_ROV*log(return)
+b111213_3h_STR
+b111213_3h_log(return)
+b111213_3h_log(share)
+b111213_4h_ROS
+b111213_4h_ROV
+b111213_4h_ROV*log(return)
+b111213_4h_STR
+b111213_4h_log(return)
+b111213_4h_log(share)
+b111213_7d_ROS
+b111213_7d_ROV
+b111213_7d_ROV*log(return)
+b111213_7d_STR
+b111213_7d_log(return)
+b111213_7d_log(share)
+b123_12h_ROS
+b123_12h_ROV
+b123_12h_ROV*log(return)
+b123_12h_STR
+b123_12h_log(return)
+b123_12h_log(share)
+b123_1d_ROS
+b123_1d_ROV
+b123_1d_ROV*log(return)
+b123_1d_STR
+b123_1d_log(return)
+b123_1d_log(share)
+b123_1h_ROS
+b123_1h_ROV
+b123_1h_ROV*log(return)
+b123_1h_STR
+b123_1h_log(return)
+b123_1h_log(share)
+b123_2h_ROS
+b123_2h_ROV
+b123_2h_ROV*log(return)
+b123_2h_STR
+b123_2h_log(return)
+b123_2h_log(share)
+b123_3d_ROS
+b123_3d_ROV
+b123_3d_ROV*log(return)
+b123_3d_STR
+b123_3d_log(return)
+b123_3d_log(share)
+b123_3h_ROS
+b123_3h_ROV
+b123_3h_ROV*log(return)
+b123_3h_STR
+b123_3h_log(return)
+b123_3h_log(share)
+b123_4h_ROS
+b123_4h_ROV
+b123_4h_ROV*log(return)
+b123_4h_STR
+b123_4h_log(return)
+b123_4h_log(share)
+b123_7d_ROS
+b123_7d_ROV
+b123_7d_ROV*log(return)
+b123_7d_STR
+b123_7d_log(return)
+b123_7d_log(share)
+b167_12h_ROS
+b167_12h_ROV
+b167_12h_ROV*log(return)
+b167_12h_STR
+b167_12h_log(return)
+b167_12h_log(share)
+b167_1d_ROS
+b167_1d_ROV
+b167_1d_ROV*log(return)
+b167_1d_STR
+b167_1d_log(return)
+b167_1d_log(share)
+b167_1h_ROS
+b167_1h_ROV
+b167_1h_ROV*log(return)
+b167_1h_STR
+b167_1h_log(return)
+b167_1h_log(share)
+b167_2h_ROS
+b167_2h_ROV
+b167_2h_ROV*log(return)
+b167_2h_STR
+b167_2h_log(return)
+b167_2h_log(share)
+b167_3d_ROS
+b167_3d_ROV
+b167_3d_ROV*log(return)
+b167_3d_STR
+b167_3d_log(return)
+b167_3d_log(share)
+b167_3h_ROS
+b167_3h_ROV
+b167_3h_ROV*log(return)
+b167_3h_STR
+b167_3h_log(return)
+b167_3h_log(share)
+b167_4h_ROS
+b167_4h_ROV
+b167_4h_ROV*log(return)
+b167_4h_STR
+b167_4h_log(return)
+b167_4h_log(share)
+b167_7d_ROS
+b167_7d_ROV
+b167_7d_ROV*log(return)
+b167_7d_STR
+b167_7d_log(return)
+b167_7d_log(share)
+b171819_12h_ROS
+b171819_12h_ROV
+b171819_12h_ROV*log(return)
+b171819_12h_STR
+b171819_12h_log(return)
+b171819_12h_log(share)
+b171819_1d_ROS
+b171819_1d_ROV
+b171819_1d_ROV*log(return)
+b171819_1d_STR
+b171819_1d_log(return)
+b171819_1d_log(share)
+b171819_1h_ROS
+b171819_1h_ROV
+b171819_1h_ROV*log(return)
+b171819_1h_STR
+b171819_1h_log(return)
+b171819_1h_log(share)
+b171819_2h_ROS
+b171819_2h_ROV
+b171819_2h_ROV*log(return)
+b171819_2h_STR
+b171819_2h_log(return)
+b171819_2h_log(share)
+b171819_3d_ROS
+b171819_3d_ROV
+b171819_3d_ROV*log(return)
+b171819_3d_STR
+b171819_3d_log(return)
+b171819_3d_log(share)
+b171819_3h_ROS
+b171819_3h_ROV
+b171819_3h_ROV*log(return)
+b171819_3h_STR
+b171819_3h_log(return)
+b171819_3h_log(share)
+b171819_4h_ROS
+b171819_4h_ROV
+b171819_4h_ROV*log(return)
+b171819_4h_STR
+b171819_4h_log(return)
+b171819_4h_log(share)
+b171819_7d_ROS
+b171819_7d_ROV
+b171819_7d_ROV*log(return)
+b171819_7d_STR
+b171819_7d_log(return)
+b171819_7d_log(share)
+b20_12h_r_cnt
+b20_12h_r_cnt4s
+b20_12h_r_rate
+b20_12h_return
+b20_12h_ros
+b20_12h_rov
+b20_12h_share
+b20_12h_share_hasreturn
+b20_12h_str
+b20_12h_view_hasreturn
+b20_1h_r_cnt
+b20_1h_r_cnt4s
+b20_1h_r_rate
+b20_1h_return
+b20_1h_ros
+b20_1h_rov
+b20_1h_share
+b20_1h_share_hasreturn
+b20_1h_str
+b20_1h_view_hasreturn
+b20_24h_r_cnt
+b20_24h_r_cnt4s
+b20_24h_r_rate
+b20_24h_return
+b20_24h_ros
+b20_24h_rov
+b20_24h_share
+b20_24h_share_hasreturn
+b20_24h_str
+b20_24h_view_hasreturn
+b20_2h_r_cnt
+b20_2h_r_cnt4s
+b20_2h_r_rate
+b20_2h_return
+b20_2h_ros
+b20_2h_rov
+b20_2h_share
+b20_2h_share_hasreturn
+b20_2h_str
+b20_2h_view_hasreturn
+b20_4h_r_cnt
+b20_4h_r_cnt4s
+b20_4h_r_rate
+b20_4h_return
+b20_4h_ros
+b20_4h_rov
+b20_4h_share
+b20_4h_share_hasreturn
+b20_4h_str
+b20_4h_view_hasreturn
+b20_6h_r_cnt
+b20_6h_r_cnt4s
+b20_6h_r_rate
+b20_6h_return
+b20_6h_ros
+b20_6h_rov
+b20_6h_share
+b20_6h_share_hasreturn
+b20_6h_str
+b20_6h_view_hasreturn
+b20_7d_r_cnt
+b20_7d_r_cnt4s
+b20_7d_r_rate
+b20_7d_return
+b20_7d_ros
+b20_7d_rov
+b20_7d_share
+b20_7d_share_hasreturn
+b20_7d_str
+b20_7d_view_hasreturn
+b21_12h_r_cnt
+b21_12h_r_cnt4s
+b21_12h_r_rate
+b21_12h_return
+b21_12h_ros
+b21_12h_rov
+b21_12h_share
+b21_12h_share_hasreturn
+b21_12h_str
+b21_12h_view_hasreturn
+b21_1h_r_cnt
+b21_1h_r_cnt4s
+b21_1h_r_rate
+b21_1h_return
+b21_1h_ros
+b21_1h_rov
+b21_1h_share
+b21_1h_share_hasreturn
+b21_1h_str
+b21_1h_view_hasreturn
+b21_24h_r_cnt
+b21_24h_r_cnt4s
+b21_24h_r_rate
+b21_24h_return
+b21_24h_ros
+b21_24h_rov
+b21_24h_share
+b21_24h_share_hasreturn
+b21_24h_str
+b21_24h_view_hasreturn
+b21_2h_r_cnt
+b21_2h_r_cnt4s
+b21_2h_r_rate
+b21_2h_return
+b21_2h_ros
+b21_2h_rov
+b21_2h_share
+b21_2h_share_hasreturn
+b21_2h_str
+b21_2h_view_hasreturn
+b21_4h_r_cnt
+b21_4h_r_cnt4s
+b21_4h_r_rate
+b21_4h_return
+b21_4h_ros
+b21_4h_rov
+b21_4h_share
+b21_4h_share_hasreturn
+b21_4h_str
+b21_4h_view_hasreturn
+b21_6h_r_cnt
+b21_6h_r_cnt4s
+b21_6h_r_rate
+b21_6h_return
+b21_6h_ros
+b21_6h_rov
+b21_6h_share
+b21_6h_share_hasreturn
+b21_6h_str
+b21_6h_view_hasreturn
+b21_7d_r_cnt
+b21_7d_r_cnt4s
+b21_7d_r_rate
+b21_7d_return
+b21_7d_ros
+b21_7d_rov
+b21_7d_share
+b21_7d_share_hasreturn
+b21_7d_str
+b21_7d_view_hasreturn
+b22_12h_r_cnt
+b22_12h_r_cnt4s
+b22_12h_r_rate
+b22_12h_return
+b22_12h_ros
+b22_12h_rov
+b22_12h_share
+b22_12h_share_hasreturn
+b22_12h_str
+b22_12h_view_hasreturn
+b22_1h_r_cnt
+b22_1h_r_cnt4s
+b22_1h_r_rate
+b22_1h_return
+b22_1h_ros
+b22_1h_rov
+b22_1h_share
+b22_1h_share_hasreturn
+b22_1h_str
+b22_1h_view_hasreturn
+b22_24h_r_cnt
+b22_24h_r_cnt4s
+b22_24h_r_rate
+b22_24h_return
+b22_24h_ros
+b22_24h_rov
+b22_24h_share
+b22_24h_share_hasreturn
+b22_24h_str
+b22_24h_view_hasreturn
+b22_2h_r_cnt
+b22_2h_r_cnt4s
+b22_2h_r_rate
+b22_2h_return
+b22_2h_ros
+b22_2h_rov
+b22_2h_share
+b22_2h_share_hasreturn
+b22_2h_str
+b22_2h_view_hasreturn
+b22_4h_r_cnt
+b22_4h_r_cnt4s
+b22_4h_r_rate
+b22_4h_return
+b22_4h_ros
+b22_4h_rov
+b22_4h_share
+b22_4h_share_hasreturn
+b22_4h_str
+b22_4h_view_hasreturn
+b22_6h_r_cnt
+b22_6h_r_cnt4s
+b22_6h_r_rate
+b22_6h_return
+b22_6h_ros
+b22_6h_rov
+b22_6h_share
+b22_6h_share_hasreturn
+b22_6h_str
+b22_6h_view_hasreturn
+b22_7d_r_cnt
+b22_7d_r_cnt4s
+b22_7d_r_rate
+b22_7d_return
+b22_7d_ros
+b22_7d_rov
+b22_7d_share
+b22_7d_share_hasreturn
+b22_7d_str
+b22_7d_view_hasreturn
+b23_14d_r_cnt
+b23_14d_r_cnt4s
+b23_14d_r_rate
+b23_14d_return
+b23_14d_ros
+b23_14d_rov
+b23_14d_share
+b23_14d_share_hasreturn
+b23_14d_str
+b23_14d_view_hasreturn
+b23_30d_r_cnt
+b23_30d_r_cnt4s
+b23_30d_r_rate
+b23_30d_return
+b23_30d_ros
+b23_30d_rov
+b23_30d_share
+b23_30d_share_hasreturn
+b23_30d_str
+b23_30d_view_hasreturn
+b24_14d_r_cnt
+b24_14d_r_cnt4s
+b24_14d_r_rate
+b24_14d_return
+b24_14d_ros
+b24_14d_rov
+b24_14d_share
+b24_14d_share_hasreturn
+b24_14d_str
+b24_14d_view_hasreturn
+b24_30d_r_cnt
+b24_30d_r_cnt4s
+b24_30d_r_rate
+b24_30d_return
+b24_30d_ros
+b24_30d_rov
+b24_30d_share
+b24_30d_share_hasreturn
+b24_30d_str
+b24_30d_view_hasreturn
+b25_14d_r_cnt
+b25_14d_r_cnt4s
+b25_14d_r_rate
+b25_14d_return
+b25_14d_ros
+b25_14d_rov
+b25_14d_share
+b25_14d_share_hasreturn
+b25_14d_str
+b25_14d_view_hasreturn
+b25_30d_r_cnt
+b25_30d_r_cnt4s
+b25_30d_r_rate
+b25_30d_return
+b25_30d_ros
+b25_30d_rov
+b25_30d_share
+b25_30d_share_hasreturn
+b25_30d_str
+b25_30d_view_hasreturn
+b26_35d_r_cnt
+b26_35d_r_cnt4s
+b26_35d_r_rate
+b26_35d_return
+b26_35d_ros
+b26_35d_rov
+b26_35d_share
+b26_35d_share_hasreturn
+b26_35d_str
+b26_35d_view_hasreturn
+b26_365d_r_cnt
+b26_365d_r_cnt4s
+b26_365d_r_rate
+b26_365d_return
+b26_365d_ros
+b26_365d_rov
+b26_365d_share
+b26_365d_share_hasreturn
+b26_365d_str
+b26_365d_view_hasreturn
+b26_7d_r_cnt
+b26_7d_r_cnt4s
+b26_7d_r_rate
+b26_7d_return
+b26_7d_ros
+b26_7d_rov
+b26_7d_share
+b26_7d_share_hasreturn
+b26_7d_str
+b26_7d_view_hasreturn
+b26_90d_r_cnt
+b26_90d_r_cnt4s
+b26_90d_r_rate
+b26_90d_return
+b26_90d_ros
+b26_90d_rov
+b26_90d_share
+b26_90d_share_hasreturn
+b26_90d_str
+b26_90d_view_hasreturn
+b27_35d_r_cnt
+b27_35d_r_cnt4s
+b27_35d_r_rate
+b27_35d_return
+b27_35d_ros
+b27_35d_rov
+b27_35d_share
+b27_35d_share_hasreturn
+b27_35d_str
+b27_35d_view_hasreturn
+b27_365d_r_cnt
+b27_365d_r_cnt4s
+b27_365d_r_rate
+b27_365d_return
+b27_365d_ros
+b27_365d_rov
+b27_365d_share
+b27_365d_share_hasreturn
+b27_365d_str
+b27_365d_view_hasreturn
+b27_7d_r_cnt
+b27_7d_r_cnt4s
+b27_7d_r_rate
+b27_7d_return
+b27_7d_ros
+b27_7d_rov
+b27_7d_share
+b27_7d_share_hasreturn
+b27_7d_str
+b27_7d_view_hasreturn
+b27_90d_r_cnt
+b27_90d_r_cnt4s
+b27_90d_r_rate
+b27_90d_return
+b27_90d_ros
+b27_90d_rov
+b27_90d_share
+b27_90d_share_hasreturn
+b27_90d_str
+b27_90d_view_hasreturn
+b28_12h_r_cnt
+b28_12h_r_cnt4s
+b28_12h_r_rate
+b28_12h_return
+b28_12h_ros
+b28_12h_rov
+b28_12h_share
+b28_12h_share_hasreturn
+b28_12h_str
+b28_12h_view_hasreturn
+b28_1h_r_cnt
+b28_1h_r_cnt4s
+b28_1h_r_rate
+b28_1h_return
+b28_1h_ros
+b28_1h_rov
+b28_1h_share
+b28_1h_share_hasreturn
+b28_1h_str
+b28_1h_view_hasreturn
+b28_24h_r_cnt
+b28_24h_r_cnt4s
+b28_24h_r_rate
+b28_24h_return
+b28_24h_ros
+b28_24h_rov
+b28_24h_share
+b28_24h_share_hasreturn
+b28_24h_str
+b28_24h_view_hasreturn
+b28_2h_r_cnt
+b28_2h_r_cnt4s
+b28_2h_r_rate
+b28_2h_return
+b28_2h_ros
+b28_2h_rov
+b28_2h_share
+b28_2h_share_hasreturn
+b28_2h_str
+b28_2h_view_hasreturn
+b28_4h_r_cnt
+b28_4h_r_cnt4s
+b28_4h_r_rate
+b28_4h_return
+b28_4h_ros
+b28_4h_rov
+b28_4h_share
+b28_4h_share_hasreturn
+b28_4h_str
+b28_4h_view_hasreturn
+b28_6h_r_cnt
+b28_6h_r_cnt4s
+b28_6h_r_rate
+b28_6h_return
+b28_6h_ros
+b28_6h_rov
+b28_6h_share
+b28_6h_share_hasreturn
+b28_6h_str
+b28_6h_view_hasreturn
+b28_7d_r_cnt
+b28_7d_r_cnt4s
+b28_7d_r_rate
+b28_7d_return
+b28_7d_ros
+b28_7d_rov
+b28_7d_share
+b28_7d_share_hasreturn
+b28_7d_str
+b28_7d_view_hasreturn
+b8910_12h_ROS
+b8910_12h_ROV
+b8910_12h_ROV*log(return)
+b8910_12h_STR
+b8910_12h_log(return)
+b8910_12h_log(share)
+b8910_1d_ROS
+b8910_1d_ROV
+b8910_1d_ROV*log(return)
+b8910_1d_STR
+b8910_1d_log(return)
+b8910_1d_log(share)
+b8910_1h_ROS
+b8910_1h_ROV
+b8910_1h_ROV*log(return)
+b8910_1h_STR
+b8910_1h_log(return)
+b8910_1h_log(share)
+b8910_2h_ROS
+b8910_2h_ROV
+b8910_2h_ROV*log(return)
+b8910_2h_STR
+b8910_2h_log(return)
+b8910_2h_log(share)
+b8910_3d_ROS
+b8910_3d_ROV
+b8910_3d_ROV*log(return)
+b8910_3d_STR
+b8910_3d_log(return)
+b8910_3d_log(share)
+b8910_3h_ROS
+b8910_3h_ROV
+b8910_3h_ROV*log(return)
+b8910_3h_STR
+b8910_3h_log(return)
+b8910_3h_log(share)
+b8910_4h_ROS
+b8910_4h_ROV
+b8910_4h_ROV*log(return)
+b8910_4h_STR
+b8910_4h_log(return)
+b8910_4h_log(share)
+b8910_7d_ROS
+b8910_7d_ROV
+b8910_7d_ROV*log(return)
+b8910_7d_STR
+b8910_7d_log(return)
+b8910_7d_log(share)
+bit_rate
+c3_feature_tags_1d_avgscore
+c3_feature_tags_1d_matchnum
+c3_feature_tags_1d_maxscore
+c3_feature_tags_3d_avgscore
+c3_feature_tags_3d_matchnum
+c3_feature_tags_3d_maxscore
+c3_feature_tags_7d_avgscore
+c3_feature_tags_7d_matchnum
+c3_feature_tags_7d_maxscore
+c4_feature_tags_1d_avgscore
+c4_feature_tags_1d_matchnum
+c4_feature_tags_1d_maxscore
+c4_feature_tags_3d_avgscore
+c4_feature_tags_3d_matchnum
+c4_feature_tags_3d_maxscore
+c4_feature_tags_7d_avgscore
+c4_feature_tags_7d_matchnum
+c4_feature_tags_7d_maxscore
+c5_feature_tags_1d_avgscore
+c5_feature_tags_1d_matchnum
+c5_feature_tags_1d_maxscore
+c5_feature_tags_3d_avgscore
+c5_feature_tags_3d_matchnum
+c5_feature_tags_3d_maxscore
+c5_feature_tags_7d_avgscore
+c5_feature_tags_7d_matchnum
+c5_feature_tags_7d_maxscore
+c6_feature_tags_1d_avgscore
+c6_feature_tags_1d_matchnum
+c6_feature_tags_1d_maxscore
+c6_feature_tags_3d_avgscore
+c6_feature_tags_3d_matchnum
+c6_feature_tags_3d_maxscore
+c6_feature_tags_7d_avgscore
+c6_feature_tags_7d_matchnum
+c6_feature_tags_7d_maxscore
+c7_feature_tags_1d_avgscore
+c7_feature_tags_1d_matchnum
+c7_feature_tags_1d_maxscore
+c7_feature_tags_3d_avgscore
+c7_feature_tags_3d_matchnum
+c7_feature_tags_3d_maxscore
+c7_feature_tags_7d_avgscore
+c7_feature_tags_7d_matchnum
+c7_feature_tags_7d_maxscore
+c8_feature_return_num
+c8_feature_return_rank
+c8_feature_return_score
+c8_feature_share_num
+c8_feature_share_rank
+c8_feature_share_score
+c9_feature_return_num
+c9_feature_return_rank
+c9_feature_return_score
+c9_feature_share_num
+c9_feature_share_rank
+c9_feature_share_score
+d1_exp
+d1_return_n
+d1_rovn
+d2_exp
+d2_return_n
+d2_rosn
+d3_exp
+d3_return_n
+d3_rosn
+d4_exp
+d4_return_n
+d4_rovn
+d5_exp
+d5_return_n
+d5_rovn
+d6
+playcnt_1d
+playcnt_3d
+playcnt_6h
+playcnt_7d
+return_uv_12h
+return_uv_1d
+return_uv_3d
+return_uv_7d
+share_pv_12h
+share_pv_1d
+share_pv_3d
+share_pv_7d
+total_time
+video_sim_cate1_list
+video_sim_cate2
+video_sim_cate2_list
+video_sim_keywords
+video_sim_style
+video_sim_theme
+video_sim_title
+video_sim_topic
+video_sim_user_value

+ 102 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys_r_rate/makedata_recsys_61_bucket_20241209.scala

@@ -0,0 +1,102 @@
+package com.aliyun.odps.spark.examples.makedata_recsys_r_rate
+
+import com.alibaba.fastjson.JSON
+import com.aliyun.odps.spark.examples.myUtils.{MyHdfsUtils, ParamUtils}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import scala.io.Source
+
+/*
+   20241209: compute per-feature bucket split points for the 756 recsys features.
+ */
+
+object makedata_recsys_61_bucket_20241209 {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    val loader = getClass.getClassLoader
+    val resourceUrl = loader.getResource("20241209_recsys_feature_name_756.txt")
+    val content =
+      if (resourceUrl != null) {
+        val source = Source.fromURL(resourceUrl)
+        val content = try source.getLines().mkString("\n") finally source.close()
+        content
+      } else {
+        ""
+      }
+    println(content)
+    val contentList = content.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty).toList
+
+
+    // 1 read parameters
+    val param = ParamUtils.parseArgs(args)
+    val readPath = param.getOrElse("readPath", "/dw/recommend/model/61_origin_data/20241210*")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_recsys_bucket/")
+    val fileName = param.getOrElse("fileName", "20241209_756_200")
+    val sampleRate = param.getOrElse("sampleRate", "1.0").toDouble
+    val bucketNum = param.getOrElse("bucketNum", "200").toInt
+
+    val data = sc.textFile(readPath)
+    println("Number of malformed rows: " + data.filter(r => r.split("\t").length != 3).count())
+    val data1 = data.map(r => {
+      val rList = r.split("\t")
+      val jsons = JSON.parseObject(rList(2))
+      val doubles = scala.collection.mutable.Map[String, Double]()
+      jsons.foreach(r => {
+        doubles.put(r._1, jsons.getDoubleValue(r._1))
+      })
+      doubles
+    }).sample(false, sampleRate).repartition(32).persist()
+
+    val result = new ArrayBuffer[String]()
+
+    for (i <- contentList.indices) {
+      println("Feature: " + contentList(i))
+      val data2 = data1.map(r => r.getOrDefault(contentList(i), 0D)).filter(_ > 1E-8).collect().sorted
+      val len = data2.length
+      if (len == 0) {
+        result.add(contentList(i) + "\t" + bucketNum.toString + "\t" + "0")
+      } else {
+        val oneBucketNum = (len - 1) / (bucketNum - 1) + 1 // ensure each bucket holds at least one element
+        val buffers = new ArrayBuffer[Double]()
+
+        var lastBucketValue = data2(0) // track the previous split point
+        for (j <- 0 until len by oneBucketNum) {
+          val d = data2(j)
+          if (j > 0 && d != lastBucketValue) {
+            // keep this split point only if it differs from the previous one
+            buffers += d
+          }
+          lastBucketValue = d // update the previous split point
+        }
+
+        // the last bucket must end at the array's final element
+        if (!buffers.contains(data2.last)) {
+          buffers += data2.last
+        }
+        result.add(contentList(i) + "\t" + bucketNum.toString + "\t" + buffers.mkString(","))
+      }
+    }
+    val data3 = sc.parallelize(result)
+
+
+    // 4 save data to HDFS
+    val hdfsPath = savePath + "/" + fileName
+    if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+      println("Deleting path and starting write: " + hdfsPath)
+      MyHdfsUtils.delete_hdfs_path(hdfsPath)
+      data3.repartition(1).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+    } else {
+      println("Invalid path, not writing: " + hdfsPath)
+    }
+  }
+}
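
The split-point loop above strides through the sorted values in steps of oneBucketNum and keeps only distinct boundaries. A self-contained sketch of the same logic with a worked example (not part of the commit):

    object BucketSketch {
      // stride through sorted values; keep a boundary only when it changes
      def splitPoints(sorted: Array[Double], bucketNum: Int): Seq[Double] = {
        val stride = (sorted.length - 1) / (bucketNum - 1) + 1 // >= 1 element per bucket
        val buffers = scala.collection.mutable.ArrayBuffer[Double]()
        var last = sorted(0)
        for (j <- 0 until sorted.length by stride) {
          val d = sorted(j)
          if (j > 0 && d != last) buffers += d
          last = d
        }
        if (!buffers.contains(sorted.last)) buffers += sorted.last // close the last bucket
        buffers
      }

      def main(args: Array[String]): Unit = {
        // 10 sorted values, 4 buckets -> stride 4 -> split points 5.0, 9.0, 10.0
        println(splitPoints((1 to 10).map(_.toDouble).toArray, 4).mkString(","))
      }
    }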

+ 144 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys_r_rate/makedata_recsys_61_nor_sample_20241209.scala

@@ -0,0 +1,144 @@
+package com.aliyun.odps.spark.examples.makedata_recsys_r_rate
+
+import com.alibaba.fastjson.JSON
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils}
+import examples.extractor.ExtractorUtils
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import scala.io.Source
+import scala.util.Random
+
+object makedata_recsys_61_nor_sample_20241209 {
+  def main(args: Array[String]): Unit = {
+
+    // 1 read parameters
+    val param = ParamUtils.parseArgs(args)
+    val readPath = param.getOrElse("readPath", "/dw/recommend/model/61_origin_data/")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_recsys_nor_train_data/")
+    val beginStr = param.getOrElse("beginStr", "20241210")
+    val endStr = param.getOrElse("endStr", "20241210")
+    val repartition = param.getOrElse("repartition", "100").toInt
+    val whatLabel = param.getOrElse("whatLabel", "total_return_uv_new")
+    val whatApps = param.getOrElse("whatApps", "0,4,5,21,3,6").split(",").toSet
+    val fuSampleRate = param.getOrElse("fuSampleRate", "-1.0").toDouble
+    val featureNameFile = param.getOrElse("featureName", "20241209_recsys_nor_name.txt")
+    val featureBucketFile = param.getOrElse("featureBucket", "20241209_recsys_nor_bucket.txt")
+
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    val loader = getClass.getClassLoader
+    val featureNameSet = loadUseFeatureNames(loader, featureNameFile)
+    val featureBucketMap = loadUseFeatureBuckets(loader, featureBucketFile)
+    val bucketsMap_br = sc.broadcast(featureBucketMap)
+
+    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
+    for (date <- dateRange) {
+      println("Processing date: " + date)
+      val data = sc.textFile(readPath + "/" + date + "*").map(r => {
+          val rList = r.split("\t")
+          val logKey = rList(0)
+          val labelKey = rList(1)
+          val jsons = JSON.parseObject(rList(2))
+          val features = scala.collection.mutable.Map[String, Double]()
+          jsons.foreach(r => {
+            features.put(r._1, jsons.getDoubleValue(r._1))
+          })
+          (logKey, labelKey, features)
+        })
+        .filter {
+          case (logKey, labelKey, features) =>
+            val logKeyList = logKey.split(",")
+            val apptype = logKeyList(0)
+            val pagesource = logKeyList(1)
+            whatApps.contains(apptype) && pagesource.endsWith("recommend")
+        }.filter {
+          case (logKey, labelKey, features) =>
+            val label = JSON.parseObject(labelKey).getOrDefault(whatLabel, "0").toString.toInt
+            label > 0 || new Random().nextDouble() <= fuSampleRate
+        }
+        .map {
+          case (logKey, labelKey, features) =>
+            val label = JSON.parseObject(labelKey).getOrDefault(whatLabel, "0").toString
+            (label, features)
+        }
+        .mapPartitions(row => {
+          val result = new ArrayBuffer[String]()
+          val bucketsMap = bucketsMap_br.value
+          row.foreach {
+            case (label, features) =>
+              val featuresBucket = features.map {
+                case (name, score) =>
+                  if (!featureNameSet.contains(name)) {
+                    ""
+                  } else {
+                    if (score > 1E-8) {
+                      if (bucketsMap.contains(name)) {
+                        val (bucketsNum, buckets) = bucketsMap(name)
+                        val scoreNew = 1.0 / bucketsNum * (ExtractorUtils.findInsertPosition(buckets, score).toDouble + 1.0)
+                        name + ":" + scoreNew.toString
+                      } else {
+                        name + ":" + score.toString
+                      }
+                    } else {
+                      ""
+                    }
+                  }
+              }.filter(_.nonEmpty)
+              result.add(label + "\t" + featuresBucket.mkString("\t"))
+          }
+          result.iterator
+        })
+
+      // 4 save data to HDFS
+      val hdfsPath = savePath + "/" + date
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+        println("Deleting path and starting write: " + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        data.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      } else {
+        println("Invalid path, not writing: " + hdfsPath)
+      }
+    }
+  }
+
+  private def loadFileData(loader: ClassLoader, nameFile: String): String = {
+    val resourceUrlBucket = loader.getResource(nameFile)
+    val data =
+      if (resourceUrlBucket != null) {
+        val source = Source.fromURL(resourceUrlBucket)
+        val buckets = try source.getLines().mkString("\n") finally source.close()
+        buckets
+      } else {
+        ""
+      }
+    data
+  }
+
+  private def loadUseFeatureNames(loader: ClassLoader, nameFile: String): Set[String] = {
+    val names = loadFileData(loader, nameFile)
+    println(names)
+    names.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty)
+      .toSet
+  }
+
+  private def loadUseFeatureBuckets(loader: ClassLoader, nameFile: String): Map[String, (Double, Array[Double])] = {
+    val buckets = loadFileData(loader, nameFile)
+    println(buckets)
+    buckets.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty)
+      .map(r => {
+        val rList = r.split("\t")
+        (rList(0), (rList(1).toDouble, rList(2).split(",").map(_.toDouble)))
+      }).toMap
+  }
+}
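
The bucketing step above maps a raw feature value into (0, 1] from its bucket index. A sketch under the assumption that ExtractorUtils.findInsertPosition returns the insertion index of the score within the sorted split-point array (the linear count below stands in for the binary search):

    object BucketTransformSketch {
      def findInsertPosition(buckets: Array[Double], score: Double): Int =
        buckets.count(_ < score) // stand-in for ExtractorUtils.findInsertPosition

      def transform(buckets: Array[Double], bucketsNum: Double, score: Double): Double =
        1.0 / bucketsNum * (findInsertPosition(buckets, score) + 1.0)

      def main(args: Array[String]): Unit = {
        val buckets = Array(5.0, 9.0, 10.0) // split points produced by the bucket job
        println(transform(buckets, 200.0, 7.3)) // (1 + 1) / 200 = 0.01
      }
    }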

+ 307 - 177
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys_r_rate/makedata_recsys_61_originData_20241209.scala

@@ -5,14 +5,16 @@ import com.aliyun.odps.TableSchema
 import com.aliyun.odps.data.Record
 import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
 import examples.extractor.RankExtractorFeature_20240530
+import examples.utils.SimilarityUtils
 import org.apache.hadoop.io.compress.GzipCodec
 import org.apache.spark.sql.SparkSession
-import org.xm.Similarity
 
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
-/*
+import scala.util.Random
 
+/*
+   20241211 feature extraction
  */
 
 object makedata_recsys_61_originData_20241209 {
@@ -25,15 +27,18 @@ object makedata_recsys_61_originData_20241209 {
 
    // 1 read parameters
     val param = ParamUtils.parseArgs(args)
+
+    val beginStr = param.getOrElse("beginStr", "2024120912")
+    val endStr = param.getOrElse("endStr", "2024120912")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "alg_recsys_sample_all_v2")
     val tablePart = param.getOrElse("tablePart", "64").toInt
-    val beginStr = param.getOrElse("beginStr", "2023010100")
-    val endStr = param.getOrElse("endStr", "2023010123")
     val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_origin_data/")
-    val project = param.getOrElse("project", "loghubods")
-    val table = param.getOrElse("table", "XXXX")
     val repartition = param.getOrElse("repartition", "32").toInt
+    val whatLabel = param.getOrElse("whatLabel", "is_share")
+    val fuSampleRate = param.getOrElse("fuSampleRate", "0.1").toDouble
 
-    // 2 read ODPS table info
+    // 2 odps
     val odpsOps = env.getODPS(sc)
 
    // 3 loop over dates and produce the data
@@ -48,197 +53,274 @@ object makedata_recsys_61_originData_20241209 {
           partition = partition,
           transfer = func,
           numPartition = tablePart)
-        .map(record => {
-
-          val featureMap = new JSONObject()
-
-          // a video features
-          val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b1_feature"))
-          val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b2_feature"))
-          val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b3_feature"))
-          val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b6_feature"))
-          val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b7_feature"))
-
-          val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b8_feature"))
-          val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b9_feature"))
-          val b10: JSONObject = if (record.isNull("b10_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b10_feature"))
-          val b11: JSONObject = if (record.isNull("b11_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b11_feature"))
-          val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b12_feature"))
-          val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b13_feature"))
-          val b17: JSONObject = if (record.isNull("b17_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b17_feature"))
-          val b18: JSONObject = if (record.isNull("b18_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b18_feature"))
-          val b19: JSONObject = if (record.isNull("b19_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("b19_feature"))
-
-
-          val origin_data = List(
-            (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
-            (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
-            (b17, b18, b19, "b171819")
-          )
-          for ((b_1, b_2, b_3, prefix1) <- origin_data) {
-            for (prefix2 <- List(
-              "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
-            )) {
-              val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
-              val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
-              val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
-              val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
-              val f2 = RankExtractorFeature_20240530.calLog(share)
-              val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
-              val f4 = RankExtractorFeature_20240530.calLog(returns)
-              val f5 = f3 * f4
-              val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
-              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
+        .filter(record => {
+          val label = if (record.isNull(whatLabel)) "0" else record.getString(whatLabel)
+          "1".equals(label) || new Random().nextDouble() <= fuSampleRate
+        })
+        .mapPartitions(p => {
+          SimilarityUtils.init()
+          p.map(record => {
+            val featureMap = new JSONObject()
+
+            // a video features
+            val b1: JSONObject = getJsonObject(record, "b1_feature")
+            val b2: JSONObject = getJsonObject(record, "b2_feature")
+            val b3: JSONObject = getJsonObject(record, "b3_feature")
+            val b6: JSONObject = getJsonObject(record, "b6_feature")
+            val b7: JSONObject = getJsonObject(record, "b7_feature")
+
+            val b8: JSONObject = getJsonObject(record, "b8_feature")
+            val b9: JSONObject = getJsonObject(record, "b9_feature")
+            val b10: JSONObject = getJsonObject(record, "b10_feature")
+            val b11: JSONObject = getJsonObject(record, "b11_feature")
+            val b12: JSONObject = getJsonObject(record, "b12_feature")
+            val b13: JSONObject = getJsonObject(record, "b13_feature")
+            val b17: JSONObject = getJsonObject(record, "b17_feature")
+            val b18: JSONObject = getJsonObject(record, "b18_feature")
+            val b19: JSONObject = getJsonObject(record, "b19_feature")
+
+            val origin_data = List(
+              (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+              (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+              (b17, b18, b19, "b171819")
+            )
+            for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+              for (prefix2 <- List(
+                "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+              )) {
+                val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+                val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+                val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+                val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
+                val f2 = RankExtractorFeature_20240530.calLog(share)
+                val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
+                val f4 = RankExtractorFeature_20240530.calLog(returns)
+                val f5 = f3 * f4
+                val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
+              }
             }
-          }
 
-          val video_info: JSONObject = if (record.isNull("t_v_info_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("t_v_info_feature"))
-          featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
-          featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
-
-          val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("c1_feature"))
-          if (c1.nonEmpty) {
-            featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
-            featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
-            featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
-            featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
-          }
-          val c2: JSONObject = if (record.isNull("c2_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("c2_feature"))
-          if (c2.nonEmpty) {
-            featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
-            featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
-            featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
-            featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
-            featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
-            featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
-            featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
-            featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
-          }
+            val video_info: JSONObject = getJsonObject(record, "t_v_info_feature")
+            featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+            featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+            val c1: JSONObject = getJsonObject(record, "c1_feature")
+            if (c1.nonEmpty) {
+              featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+              featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+              featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+              featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+            }
+            val c2: JSONObject = getJsonObject(record, "c2_feature")
+            if (c2.nonEmpty) {
+              featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+              featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+              featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+              featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+              featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+              featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+              featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+              featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+            }
 
-          val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
-          if (!title.equals("")) {
-            for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
-              val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
-                JSON.parseObject(record.getString(key_feature))
-              for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
-                val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
-                if (!tags.equals("")) {
-                  val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
-                  featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
-                  featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
-                  featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+            val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+            if (!title.equals("")) {
+              for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
+                val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                  JSON.parseObject(record.getString(key_feature))
+                for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+                  val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+                  if (!tags.equals("")) {
+                    val (f1, f2, f3, f4) = funcC34567ForTagsW2V(tags, title)
+                    featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
+                    featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
+                    featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+                  }
                 }
               }
             }
-          }
 
-          val vid = if (record.isNull("vid")) "" else record.getString("vid")
-          if (!vid.equals("")) {
-            for (key_feature <- List("c8_feature", "c9_feature")) {
-              val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
-                JSON.parseObject(record.getString(key_feature))
-              for (key_action <- List("share", "return")) {
-                val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
-                if (!cfListStr.equals("")) {
-                  val cfMap = cfListStr.split(",").map(r => {
-                    val rList = r.split(":")
-                    (rList(0), (rList(1), rList(2), rList(3)))
-                  }).toMap
-                  if (cfMap.contains(vid)) {
-                    val (score, num, rank) = cfMap(vid)
-                    featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
-                    featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
-                    featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+            val vid = if (record.isNull("vid")) "" else record.getString("vid")
+            if (!vid.equals("")) {
+              for (key_feature <- List("c8_feature", "c9_feature")) {
+                val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                  JSON.parseObject(record.getString(key_feature))
+                for (key_action <- List("share", "return")) {
+                  val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+                  if (!cfListStr.equals("")) {
+                    val cfMap = cfListStr.split(",").map(r => {
+                      val rList = r.split(":")
+                      (rList(0), (rList(1), rList(2), rList(3)))
+                    }).toMap
+                    if (cfMap.contains(vid)) {
+                      val (score, num, rank) = cfMap(vid)
+                      featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
+                      featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
+                      featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+                    }
                   }
                 }
               }
             }
-          }
 
-          val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
-            JSON.parseObject(record.getString("d1_feature"))
-          if (d1.nonEmpty) {
-            featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
-            featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
-            featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
-          }
+            val d1: JSONObject = getJsonObject(record, "d1_feature")
+            if (d1.nonEmpty) {
+              featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+              featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+              featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+            }
 
+            // ************* new feature *************
+            val shortPeriod = List("1h", "2h", "4h", "6h", "12h", "24h", "7d")
+            val middlePeriod = List("14d", "30d")
+            val longPeriod = List("7d", "35d", "90d", "365d")
+            val vidStatFeat = List(
+              ("b20", shortPeriod, getJsonObject(record, "b20_feature")), // cate2_feature
+              ("b21", shortPeriod, getJsonObject(record, "b21_feature")), // cate1_feature
+              ("b22", shortPeriod, getJsonObject(record, "b22_feature")), // source_feature
+              ("b28", shortPeriod, getJsonObject(record, "b28_feature")), // sence_type_feature
+              ("b23", middlePeriod, getJsonObject(record, "b23_feature")), // cate2_feature_day
+              ("b24", middlePeriod, getJsonObject(record, "b24_feature")), // cate1_feature_day
+              ("b25", middlePeriod, getJsonObject(record, "b25_feature")), // source_feature_day
+              ("b26", longPeriod, getJsonObject(record, "b26_feature")), // unionid_feature_day
+              ("b27", longPeriod, getJsonObject(record, "b27_feature")) // vid_feature_day
+            )
+            for ((featType, featPeriod, featData) <- vidStatFeat) {
+              for (period <- featPeriod) {
+                // val view = if (featData.isEmpty) 0D else featData.getDoubleValue("view_" + period)
+                val share = if (featData.isEmpty) 0D else featData.getDoubleValue("share_" + period)
+                val return_ = if (featData.isEmpty) 0D else featData.getDoubleValue("return_" + period)
+                val view_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("view_hasreturn_" + period)
+                val share_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("share_hasreturn_" + period)
+                val ros = if (featData.isEmpty) 0D else featData.getDoubleValue("ros_" + period)
+                val rov = if (featData.isEmpty) 0D else featData.getDoubleValue("rov_" + period)
+                val r_cnt = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt_" + period)
+                val r_rate = if (featData.isEmpty) 0D else featData.getDoubleValue("r_rate_" + period)
+                val r_cnt4s = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt4s_" + period)
+                val str = if (featData.isEmpty) 0D else featData.getDoubleValue("str_" + period)
+                // scale
+                // val view_s = RankExtractorFeature_20240530.calLog(view)
+                val share_s = RankExtractorFeature_20240530.calLog(share)
+                val return_s = RankExtractorFeature_20240530.calLog(return_)
+                val view_hasreturn_s = RankExtractorFeature_20240530.calLog(view_hasreturn)
+                val share_hasreturn_s = RankExtractorFeature_20240530.calLog(share_hasreturn)
 
-          /*
+                // featureMap.put(featType + "_" + period + "_" + "view", view_s)
+                featureMap.put(featType + "_" + period + "_" + "share", share_s)
+                featureMap.put(featType + "_" + period + "_" + "return", return_s)
+                featureMap.put(featType + "_" + period + "_" + "view_hasreturn", view_hasreturn_s)
+                featureMap.put(featType + "_" + period + "_" + "share_hasreturn", share_hasreturn_s)
+                featureMap.put(featType + "_" + period + "_" + "ros", ros)
+                featureMap.put(featType + "_" + period + "_" + "rov", rov)
+                featureMap.put(featType + "_" + period + "_" + "r_cnt", r_cnt)
+                featureMap.put(featType + "_" + period + "_" + "r_rate", r_rate)
+                featureMap.put(featType + "_" + period + "_" + "r_cnt4s", r_cnt4s)
+                featureMap.put(featType + "_" + period + "_" + "str", str)
+              }
+            }
 
+            // new cf
+            val d2345Data = List(
+              ("d2", "rosn", getJsonObject(record, "d2_feature")),
+              ("d3", "rosn", getJsonObject(record, "d3_feature")),
+              ("d4", "rovn", getJsonObject(record, "d4_feature")),
+              ("d5", "rovn", getJsonObject(record, "d5_feature"))
+            )
+            for ((featType, valType, featData) <- d2345Data) {
+              if (featData.nonEmpty) {
+                val exp = if (featData.containsKey("exp")) featData.getString("exp").toDouble else 0D
+                val return_n = if (featData.containsKey("return_n")) featData.getString("return_n").toDouble else 0D
+                val value = if (featData.containsKey(valType)) featData.getString(valType).toDouble else 0D
+                // scale
+                val exp_s = RankExtractorFeature_20240530.calLog(exp)
+                val return_n_s = RankExtractorFeature_20240530.calLog(return_n)
+                featureMap.put(featType + "_exp", exp_s)
+                featureMap.put(featType + "_return_n", return_n_s)
+                featureMap.put(featType + "_" + valType, value)
+              }
+            }
 
-          Video:
-          exposure uses pv, share uses pv, return uses uv --> 1h 2h 3h 4h 12h 1d 3d 7d
-          STR log(share) ROV log(return) ROV*log(return)
-          40 feature combinations
-          overall, overall-exposure, recommend non-cold-start root, recommend cold-start root, per-province root
-          200 feature values
+            if (!vid.equals("")) {
+              val idScoreObj = getJsonObject(getJsonObject(record, "d6_feature"), "vids", "scores")
+              if (idScoreObj.nonEmpty && idScoreObj.containsKey(vid)) {
+                val score = idScoreObj.getString(vid).toDouble
+                featureMap.put("d6", score)
+              }
+            }
 
-          Video:
-          duration, bit rate
+            // head video & rank video
+            val headVideo = getJsonObject(record, "v2_feature")
+            val rankVideo = getJsonObject(record, "v1_feature")
+            if (headVideo.nonEmpty && rankVideo.nonEmpty) {
+              val videoAttrs = List("title", "topic", "keywords", "cate1_list", "cate2", "cate2_list", "style", "theme", "user_value")
+              for (attr <- videoAttrs) {
+                val headAttr = if (headVideo.containsKey(attr)) headVideo.getString(attr) else ""
+                val rankAttr = if (rankVideo.containsKey(attr)) rankVideo.getString(attr) else ""
+                if (!headAttr.equals("") && !rankAttr.equals("")) {
+                  val simScore = SimilarityUtils.word2VecSimilarity(headAttr, rankAttr)
+                  featureMap.put("video_sim_" + attr, simScore)
+                }
+              }
+            }
 
-          User:
-          play count --> 6h 1d 3d 7d --> 4
-          share pv that brought returns, return uv --> 12h 1d 3d 7d --> 8
-          User + vid-title:
-          play / return / share points, cumulative shares, cumulative returns --> 1d 3d 7d --> match count, max semantic similarity, avg semantic similarity --> 45
-          User + vid-cf
-          share-based / return-based --> "share cf" + "return-click cf": similarity score, similarity count, reciprocal of rank --> 12
+            /*
+            Video stat features: 5*8*6 = 240
+                      exposure uses pv, share uses pv, return uses uv --> 1h 2h 3h 4h 12h 1d 3d 7d
+                      STR log(share) ROV log(return) ROV*log(return) ROS
+                      overall, overall-exposure, recommend non-cold-start root, recommend cold-start root, per-province root
+            Video basics: 2 (duration, bit rate)
+            User: 4 + 8 = 12
+                      play count --> 6h 1d 3d 7d --> 4
+                      share pv that brought returns, return uv --> 12h 1d 3d 7d --> 8
+            User + vid-title: 5*3*3 = 45
+                      play / return / share points, cumulative shares, cumulative returns --> 1d 3d 7d --> match count, max semantic similarity, avg semantic similarity --> 45
+            User + vid-cf: 2*2*3 = 12
+                      share-based / return-based --> "share cf" + "return-click cf": similarity score, similarity count, reciprocal of rank --> 12
+            Head video: 3
+                      exposure, return, ROVn --> 3 features
+            Context:  hour, weekday, apptype, city, province, pagesource, device model
+            Total:    240 + 2 + 12 + 45 + 12 + 3 = 314
+            ---------------------------------------------------------------
+            New video stat features: (4*7 + 3*2 + 2*4)*10 = 420
+            CF: 13
+            Video similarity features: 9
 
-          Head video:
-          exposure, return, ROVn --> 3 features

-          Context:
-          hour, weekday, apptype, city, province, pagesource, device model
-           */
+             */
 
 
-          //4 process label info.
-          val labels = new JSONObject
-          for (labelKey <- List(
-            "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
-            "share_pv", "total_share_uv"
-          )) {
-            if (!record.isNull(labelKey)) {
-              labels.put(labelKey, record.getString(labelKey))
+            //4 process label info.
+            val labels = new JSONObject
+            for (labelKey <- List(
+              "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
+              "share_pv", "total_share_uv", "view_24h", "total_return_uv_new"
+            )) {
+              if (!record.isNull(labelKey)) {
+                labels.put(labelKey, record.getString(labelKey))
+              }
             }
-          }
-          //5 build the log key header.
-          val apptype = record.getString("apptype")
-          val pagesource = record.getString("pagesource")
-          val mid = record.getString("mid")
-          // vid was extracted above
-          val ts = record.getString("ts")
-          val abcode = record.getString("abcode")
-          val level = if (record.isNull("level")) "0" else record.getString("level")
-          val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
-          val labelKey = labels.toString()
-          val featureKey = featureMap.toString()
-          //6 join the fields and save.
-          logKey + "\t" + labelKey + "\t" + featureKey
+            //5 build the log key header.
+            val apptype = record.getString("apptype")
+            val pagesource = record.getString("pagesource")
+            val mid = record.getString("mid")
+            // vid was extracted above
+            val ts = record.getString("ts")
+            val abcode = record.getString("abcode")
+            val level = if (record.isNull("level")) "0" else record.getString("level")
+            val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
+            val labelKey = labels.toString()
+            // val featureKey = featureMap.toString()
+            val featureKey = truncateDecimal(featureMap).toString()
+            //6 join the fields and save.
+            logKey + "\t" + labelKey + "\t" + featureKey
 
+          })
         })
 
      // 4 save data to HDFS
@@ -247,7 +329,7 @@ object makedata_recsys_61_originData_20241209 {
       if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
        println("Deleting path and starting write: " + hdfsPath)
         MyHdfsUtils.delete_hdfs_path(hdfsPath)
-        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+        odpsData.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
       } else {
        println("Invalid path, not writing: " + hdfsPath)
       }
@@ -258,7 +340,55 @@ object makedata_recsys_61_originData_20241209 {
     record
   }
 
-  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
+  def getJsonObject(record: Record, key: String): JSONObject = {
+    val data = if (record.isNull(key)) new JSONObject() else JSON.parseObject(record.getString(key))
+    val data2 = new JSONObject()
+    data.foreach(r => {
+      if (r._2 != null) {
+        data2.put(r._1, r._2)
+      }
+    })
+    data2
+  }
+
+  def getJsonObject(obj: JSONObject, keyName: String, valueName: String): JSONObject = {
+    val map = new JSONObject()
+    if (obj.nonEmpty) {
+      val keys = if (obj.containsKey(keyName)) obj.getString(keyName) else ""
+      val values = if (obj.containsKey(valueName)) obj.getString(valueName) else ""
+      if (!keys.equals("") && !values.equals("")) {
+        val key_list = keys.split(",")
+        val value_list = values.split(",")
+        if (key_list.length == value_list.length) {
+          for (index <- 0 until key_list.length) {
+            map.put(key_list(index), value_list(index))
+          }
+        }
+      }
+    }
+    map
+  }
+
+  def truncateDecimal(obj: JSONObject, scale: Int = 6): JSONObject = {
+    val data = new JSONObject()
+    for (key <- obj.keySet()) {
+      try {
+        val value = obj.getDoubleValue(key)
+        if (value == value.floor) {
+          data.put(key, value)
+        } else {
+          val newValue = BigDecimal(value).setScale(scale, BigDecimal.RoundingMode.HALF_UP).toDouble
+          data.put(key, newValue)
+        }
+      } catch {
+        case e: Exception => System.err.println(e.getMessage)
+      }
+    }
+    data
+  }
+
+
+  def funcC34567ForTagsW2V(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
    // match count, matched words, max semantic similarity, avg semantic similarity
     val tagsList = tags.split(",")
     var d1 = 0.0
@@ -270,7 +400,7 @@ object makedata_recsys_61_originData_20241209 {
         d1 = d1 + 1.0
         d2.add(tag)
       }
-      val score = Similarity.conceptSimilarity(tag, title)
+      val score = SimilarityUtils.word2VecSimilarity(tag, title)
       d3 = if (score > d3) score else d3
       d4 = d4 + score
     }

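truncateDecimal above keeps integer-valued doubles unchanged and rounds everything else HALF_UP to six decimal places, shrinking the serialized feature JSON. A standalone sketch of the same rule (not part of the commit):

    object TruncateSketch {
      def truncate(value: Double, scale: Int = 6): Double =
        if (value == value.floor) value // integer-valued doubles pass through
        else BigDecimal(value).setScale(scale, BigDecimal.RoundingMode.HALF_UP).toDouble

      def main(args: Array[String]): Unit = {
        println(truncate(3.0))        // 3.0
        println(truncate(0.12345678)) // 0.123457
      }
    }
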
+ 148 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys_r_rate/makedata_recsys_61_rov_sample_20241209.scala

@@ -0,0 +1,148 @@
+package com.aliyun.odps.spark.examples.makedata_recsys_r_rate
+
+import com.alibaba.fastjson.JSON
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils}
+import examples.extractor.ExtractorUtils
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import scala.io.Source
+import scala.util.Random
+
+/*
+   Generate rov (is_return) training samples with bucketized features from 61_origin_data.
+ */
+
+object makedata_recsys_61_rov_sample_20241209 {
+  def main(args: Array[String]): Unit = {
+
+    // 1 read parameters
+    val param = ParamUtils.parseArgs(args)
+    val readPath = param.getOrElse("readPath", "/dw/recommend/model/61_origin_data/")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_recsys_rov_train_data/")
+    val beginStr = param.getOrElse("beginStr", "20241210")
+    val endStr = param.getOrElse("endStr", "20241210")
+    val repartition = param.getOrElse("repartition", "100").toInt
+    val whatLabel = param.getOrElse("whatLabel", "is_return")
+    val whatApps = param.getOrElse("whatApps", "0,4,5,21,3,6").split(",").toSet
+    val fuSampleRate = param.getOrElse("fuSampleRate", "1.0").toDouble
+    val featureNameFile = param.getOrElse("featureName", "20241209_recsys_rov_name.txt")
+    val featureBucketFile = param.getOrElse("featureBucket", "20241209_recsys_rov_bucket.txt")
+
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    val loader = getClass.getClassLoader
+    val featureNameSet = loadUseFeatureNames(loader, featureNameFile)
+    val featureBucketMap = loadUseFeatureBuckets(loader, featureBucketFile)
+    val bucketsMap_br = sc.broadcast(featureBucketMap)
+
+    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
+    for (date <- dateRange) {
+      println("Processing date: " + date)
+      val data = sc.textFile(readPath + "/" + date + "*").map(r => {
+          val rList = r.split("\t")
+          val logKey = rList(0)
+          val labelKey = rList(1)
+          val jsons = JSON.parseObject(rList(2))
+          val features = scala.collection.mutable.Map[String, Double]()
+          jsons.foreach(r => {
+            features.put(r._1, jsons.getDoubleValue(r._1))
+          })
+          (logKey, labelKey, features)
+        })
+        .filter {
+          case (logKey, labelKey, features) =>
+            val logKeyList = logKey.split(",")
+            val apptype = logKeyList(0)
+            val pagesource = logKeyList(1)
+            whatApps.contains(apptype) && pagesource.endsWith("recommend")
+        }.filter {
+          case (logKey, labelKey, features) =>
+            val label = JSON.parseObject(labelKey).getOrDefault(whatLabel, "0").toString
+            "1".equals(label) || new Random().nextDouble() <= fuSampleRate
+        }
+        .map {
+          case (logKey, labelKey, features) =>
+            val label = JSON.parseObject(labelKey).getOrDefault(whatLabel, "0").toString
+            (label, features)
+        }
+        .mapPartitions(row => {
+          val result = new ArrayBuffer[String]()
+          val bucketsMap = bucketsMap_br.value
+          row.foreach {
+            case (label, features) =>
+              val featuresBucket = features.map {
+                case (name, score) =>
+                  if (!featureNameSet.contains(name)) {
+                    ""
+                  } else {
+                    if (score > 1E-8) {
+                      if (bucketsMap.contains(name)) {
+                        val (bucketsNum, buckets) = bucketsMap(name)
+                        val scoreNew = 1.0 / bucketsNum * (ExtractorUtils.findInsertPosition(buckets, score).toDouble + 1.0)
+                        name + ":" + scoreNew.toString
+                      } else {
+                        name + ":" + score.toString
+                      }
+                    } else {
+                      ""
+                    }
+                  }
+              }.filter(_.nonEmpty)
+              result.add(label + "\t" + featuresBucket.mkString("\t"))
+          }
+          result.iterator
+        })
+
+      // 4 save data to HDFS
+      val hdfsPath = savePath + "/" + date
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+        println("Deleting path and starting write: " + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        data.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      } else {
+        println("Invalid path, not writing: " + hdfsPath)
+      }
+    }
+  }
+
+  private def loadFileData(loader: ClassLoader, nameFile: String): String = {
+    val resourceUrlBucket = loader.getResource(nameFile)
+    val data =
+      if (resourceUrlBucket != null) {
+        val source = Source.fromURL(resourceUrlBucket)
+        val buckets = try source.getLines().mkString("\n") finally source.close()
+        buckets
+      } else {
+        ""
+      }
+    data
+  }
+
+  private def loadUseFeatureNames(loader: ClassLoader, nameFile: String): Set[String] = {
+    val names = loadFileData(loader, nameFile)
+    println(names)
+    names.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty)
+      .toSet
+  }
+
+  private def loadUseFeatureBuckets(loader: ClassLoader, nameFile: String): Map[String, (Double, Array[Double])] = {
+    val buckets = loadFileData(loader, nameFile)
+    println(buckets)
+    buckets.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty)
+      .map(r => {
+        val rList = r.split("\t")
+        (rList(0), (rList(1).toDouble, rList(2).split(",").map(_.toDouble)))
+      }).toMap
+  }
+}
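
Both sample jobs parse the file written by the bucket job, one feature per line in the form <featureName> TAB <bucketNum> TAB <comma-separated split points>. A parsing sketch with illustrative values (not part of the commit):

    object BucketFileSketch {
      def parseLine(line: String): (String, (Double, Array[Double])) = {
        val cols = line.split("\t")
        (cols(0), (cols(1).toDouble, cols(2).split(",").map(_.toDouble)))
      }

      def main(args: Array[String]): Unit = {
        val (name, (num, splits)) = parseLine("b123_1h_STR\t200\t0.01,0.05,0.2")
        println(s"$name bucketNum=$num splits=${splits.mkString(",")}")
      }
    }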