
Sample regeneration: hour-level features.

zhangbo 1 year ago
parent
commit
a653ff0067

+ 6 - 0
pom.xml

@@ -37,6 +37,12 @@
 
     <dependencies>
 
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>1.2.83</version>
+        </dependency>
+
         <dependency>
             <groupId>com.tzld.piaoquan</groupId>
             <artifactId>recommend-server-client</artifactId>
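
The new fastjson dependency backs the JSON parsing introduced in makedata_07_strData.scala below. A minimal sketch of that usage (the sample JSON is illustrative):

```scala
import com.alibaba.fastjson.JSON

// Parse one sample line's label JSON, defaulting is_share to "0" when absent.
val labelJson = JSON.parseObject("""{"is_share":"1"}""")
val label = if (labelJson.containsKey("is_share")) labelJson.getString("is_share") else "0"
println(label) // 1
```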

+ 127 - 0
src/main/java/examples/dataloader/OfflineVlogFeatureGroupV2.java

@@ -0,0 +1,127 @@
+package examples.dataloader;
+
+public enum OfflineVlogFeatureGroupV2 {
+
+    machineinfo_brand,
+    machineinfo_model,
+    machineinfo_platform,
+    machineinfo_system,
+    u_1day_exp_cnt,
+    u_1day_click_cnt,
+    u_1day_share_cnt,
+    u_1day_return_cnt,
+    u_ctr_1day,
+    u_str_1day,
+    u_rov_1day,
+    u_ros_1day,
+
+    u_3day_exp_cnt,
+    u_3day_click_cnt,
+    u_3day_share_cnt,
+    u_3day_return_cnt,
+    u_ctr_3day,
+    u_str_3day,
+    u_rov_3day,
+    u_ros_3day,
+
+
+    total_time,
+
+    play_count_total,
+    i_1day_exp_cnt,
+    i_1day_click_cnt,
+    i_1day_share_cnt,
+    i_1day_return_cnt,
+    i_ctr_1day,
+    i_str_1day,
+    i_rov_1day,
+    i_ros_1day,
+
+    i_3day_exp_cnt,
+    i_3day_click_cnt,
+    i_3day_share_cnt,
+    i_3day_return_cnt,
+    i_ctr_3day,
+    i_str_3day,
+    i_rov_3day,
+    i_ros_3day,
+
+    ctx_week,
+    ctx_hour,
+    ctx_region,
+    ctx_city,
+
+    share_uv_list_1day_6_avg,
+    share_uv_list_1day_6_var,
+    share_uv_list_1day_diff_6_avg,
+    share_uv_list_1day_diff_6_var,
+    return_uv_list_1day_6_avg,
+    return_uv_list_1day_6_var,
+    return_uv_list_1day_diff_6_avg,
+    return_uv_list_1day_diff_6_var,
+    share_uv_list_1h_6_avg,
+    share_uv_list_1h_6_var,
+    share_uv_list_1h_diff_6_avg,
+    share_uv_list_1h_diff_6_var,
+    return_uv_list_1h_6_avg,
+    return_uv_list_1h_6_var,
+    return_uv_list_1h_diff_6_avg,
+    return_uv_list_1h_diff_6_var,
+    view_pv_list_1day,
+    view_uv_list_1day,
+    play_pv_list_1day,
+    play_uv_list_1day,
+    share_pv_list_1day,
+    share_uv_list_1day,
+    return_uv_list_1day,
+    p_view_uv_list_1day,
+    p_view_pv_list_1day,
+    p_return_uv_list_1day,
+    share_uv_list_2day,
+    share_pv_list_2day,
+    share_uv_list_3day,
+    share_pv_list_3day,
+    view_uv_list_1h,
+    view_pv_list_1h,
+    play_uv_list_1h,
+    play_pv_list_1h,
+    share_uv_list_1h,
+    share_pv_list_1h,
+    return_uv_list_1h,
+    p_return_uv_list_1h,
+    i_1day_ctr_rt,
+    i_1day_str_rt,
+    i_1day_ros_rt,
+    i_1day_rov_rt,
+    i_1h_ctr_rt,
+    i_1h_str_rt,
+    i_1h_ros_rt,
+    i_1h_rov_rt
+    ;
+
+
+    private final byte[] idBytes;
+    private final byte[] nameBytes;
+
+    OfflineVlogFeatureGroupV2() {
+        this.idBytes = String.valueOf(ordinal()).getBytes();
+        this.nameBytes = name().toLowerCase().getBytes();
+    }
+
+    public final int getId() {
+        return ordinal();
+    }
+
+    public final String getGroupName() {
+        return name().toLowerCase();
+    }
+
+    public final byte[] getGroupNameBytes() {
+        // Reuse the bytes cached in the constructor instead of re-encoding.
+        return nameBytes;
+    }
+
+    public final byte[] getIdBytes() {
+        return idBytes;
+    }
+
+}
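
A quick sketch of the contract this enum exposes: ids are ordinals, so feature identifiers stay stable only while the declaration order above is unchanged.

```scala
import examples.dataloader.OfflineVlogFeatureGroupV2

val g = OfflineVlogFeatureGroupV2.ctx_hour
println(g.getId)        // ordinal position in the declaration order
println(g.getGroupName) // "ctx_hour"
```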

+ 7 - 23
src/main/java/examples/dataloader/OfflineVlogShareLRFeatureExtractor.java

@@ -44,6 +44,13 @@ public class OfflineVlogShareLRFeatureExtractor {
             }
         }
     }
+    public void makeFeature4String(Map<String, String> maps){
+        for (Map.Entry<String, String> entry : maps.entrySet()){
+            OfflineVlogFeatureGroup ovf = OfflineVlogFeatureGroup.valueOf(entry.getKey());
+            String value = entry.getValue();
+            this.makeFea(ovf, value.getBytes());
+        }
+    }
 
     private FeatureGroup makeGroup(OfflineVlogFeatureGroup group) {
         FeatureGroup.Builder g = FeatureGroup.newBuilder();
@@ -58,27 +65,4 @@ public class OfflineVlogShareLRFeatureExtractor {
         this.featureMap.put(featureGroup, feature);
     }
 
-//    public synchronized LRSamples single(UserBytesFeature userBytesFeature,
-//                                         VideoBytesFeature videoBytesFeature,
-//                                         RequestContextBytesFeature requestContextBytesFeature) {
-//        featureMap.clear();
-//        // extract features todo zhangbo
-//
-//
-//        LRSamples.Builder lr = LRSamples.newBuilder();
-//        lr.setGroupNum(groupCount);
-//        List<FeatureGroup> keys = new ArrayList<>(featureMap.keySet());
-//        int count = 0;
-//        for(FeatureGroup group : keys) {
-//            List<BaseFeature> fea = featureMap.get(group);
-//            GroupedFeature.Builder gf = GroupedFeature.newBuilder();
-//            gf.setGroup(group);
-//            gf.setCount(fea.size());
-//            gf.addAllFeatures(fea);
-//            count += fea.size();
-//            lr.addFeatures(gf);
-//        }
-//        lr.setCount(count);
-//        return lr.build();
-//    }
 }

+ 66 - 0
src/main/java/examples/dataloader/OfflineVlogShareLRFeatureExtractorV2.java

@@ -0,0 +1,66 @@
+package examples.dataloader;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import com.tzld.piaoquan.recommend.feature.domain.video.feature.BytesGroup;
+import com.tzld.piaoquan.recommend.feature.domain.video.feature.BytesUtils;
+import com.tzld.piaoquan.recommend.feature.model.sample.BaseFeature;
+import com.tzld.piaoquan.recommend.feature.model.sample.FeatureGroup;
+
+import java.util.Map;
+
+public class OfflineVlogShareLRFeatureExtractorV2 {
+
+    public ListMultimap<FeatureGroup, BaseFeature> featureMap = ArrayListMultimap.create();
+
+    final private BytesUtils utils;
+    final private int groupCount = OfflineVlogFeatureGroupV2.values().length;
+    public OfflineVlogShareLRFeatureExtractorV2() {
+        BytesGroup[] groups = new BytesGroup[OfflineVlogFeatureGroupV2.values().length];
+        // Build one BytesGroup per enum constant, indexed by ordinal.
+        for (OfflineVlogFeatureGroupV2 g : OfflineVlogFeatureGroupV2.values()) {
+            groups[g.ordinal()] = new BytesGroup(g.ordinal(), g.getGroupName(), g.getGroupNameBytes());
+        }
+        this.utils = new BytesUtils(groups);
+    }
+    public void makeFeature(Map<String, Object> maps){
+        for (Map.Entry<String, Object> entry : maps.entrySet()){
+            OfflineVlogFeatureGroupV2 ovf = OfflineVlogFeatureGroupV2.valueOf(entry.getKey());
+            Object value = entry.getValue();
+            if (value instanceof String){
+                this.makeFea(ovf, ((String)value).getBytes());
+            }else if (value instanceof Double){
+                this.makeFea(ovf, String.valueOf((Double)value).getBytes());
+            }else if (value instanceof Integer){
+                this.makeFea(ovf, String.valueOf((Integer)value).getBytes());
+            }else{
+                // Fall back to the value's string form for any other type; an
+                // unchecked (String) cast would throw ClassCastException here.
+                this.makeFea(ovf, String.valueOf(value).getBytes());
+            }
+        }
+    }
+    public void makeFeature4String(Map<String, String> maps){
+        for (Map.Entry<String, String> entry : maps.entrySet()){
+            OfflineVlogFeatureGroupV2 ovf = OfflineVlogFeatureGroupV2.valueOf(entry.getKey());
+            String value = entry.getValue();
+            this.makeFea(ovf, value.getBytes());
+        }
+    }
+
+    private FeatureGroup makeGroup(OfflineVlogFeatureGroupV2 group) {
+        FeatureGroup.Builder g = FeatureGroup.newBuilder();
+        g.setType("1");
+        g.setName(group.getGroupName());
+        g.setId(group.ordinal());
+        return g.build();
+    }
+    void makeFea(OfflineVlogFeatureGroupV2 group, byte[] value) {
+        FeatureGroup featureGroup = this.makeGroup(group);
+        BaseFeature feature = this.utils.makeFea(group.ordinal(), value);
+        this.featureMap.put(featureGroup, feature);
+    }
+}
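
A usage sketch, assuming the map keys match the OfflineVlogFeatureGroupV2 constant names (makeFeature4String resolves keys via valueOf, which throws on anything else):

```scala
import examples.dataloader.OfflineVlogShareLRFeatureExtractorV2
import scala.collection.JavaConversions._

val feaMap = new java.util.HashMap[String, String]()
feaMap.put("ctx_hour", "13")
feaMap.put("u_ctr_1day", "0.25")

val extractor = new OfflineVlogShareLRFeatureExtractorV2()
extractor.makeFeature4String(feaMap)
// The same identifier:1 encoding that makedata_07_strData writes out.
extractor.featureMap.entries().foreach(e => println(e.getValue.getIdentifier + ":1"))
```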

+ 2 - 16
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_06_strData.scala → src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_06_originData.scala

@@ -3,7 +3,6 @@ package com.aliyun.odps.spark.examples.makedata
 import com.aliyun.odps.TableSchema
 import com.aliyun.odps.data.Record
 import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
-import examples.dataloader.{OfflineVlogShareLRFeatureExtractor, RequestContextOffline}
 import examples.extractor.RankExtractorUserFeature
 import examples.extractor.RankExtractorItemFeature
 import org.apache.hadoop.io.compress.GzipCodec
@@ -19,7 +18,7 @@ import com.alibaba.fastjson.JSONObject
 Note: for all constructed features, a raw value of 0.0 is treated as meaningless and is not kept; a 0.0 produced by the change transform is kept.
  */
 
-object makedata_06_strData {
+object makedata_06_originData {
   def main(args: Array[String]) {
     val spark = SparkSession
       .builder()
@@ -33,7 +32,7 @@ object makedata_06_strData {
     val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
     val beginStr = param.getOrElse("beginStr", "20230101")
     val endStr = param.getOrElse("endStr", "20230101")
-    val savePath = param.getOrElse("savePath", "/dw/recommend/model/sample_data/")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/00_sample_data/")
     val project = param.getOrElse("project", "loghubods")
     val table = param.getOrElse("table", "alg_recsys_view_sample_v2")
 
@@ -227,19 +226,6 @@ object makedata_06_strData {
   def func(record: Record, schema: TableSchema): Record = {
     record
   }
-//
-//  def singleParse(record: Record, label: String): String = {
-//    //2 处理特征
-//    val reqContext: RequestContextOffline = new RequestContextOffline()
-//    reqContext.putUserFeature(record)
-//    reqContext.putItemFeature(record)
-//    reqContext.putSceneFeature(record)
-//    val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractor()
-//    bytesFeatureExtractor.makeFeature(reqContext.featureMap)
-//    val featureMap = bytesFeatureExtractor.featureMap
-//    label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
-//    ""
-//  }
 
   def getFeatureFromSet(set: Set[String], record: Record): mutable.HashMap[String, String] = {
     val result = mutable.HashMap[String, String]()
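
A small sketch of the 0.0 rule stated in this file's doc comment; `change` here is a hypothetical stand-in for the actual transform applied to constructed features:

```scala
// Raw 0.0 is treated as meaningless and dropped before the transform;
// a 0.0 that comes out of the transform itself would be kept.
def change(x: Double): Double = math.log(x + 1.0)

val raw = Map("u_1day_click_cnt" -> 0.0, "u_ctr_1day" -> 0.1)
val kept = raw.filter { case (_, v) => v != 0.0 }.map { case (k, v) => (k, change(v)) }
println(kept) // only u_ctr_1day survives the raw-value filter
```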

+ 117 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_07_strData.scala

@@ -0,0 +1,117 @@
+package com.aliyun.odps.spark.examples.makedata
+
+
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import java.util
+import scala.collection.JavaConversions._
+import com.alibaba.fastjson.JSON
+import examples.dataloader.OfflineVlogShareLRFeatureExtractorV2
+import examples.dataloader.OfflineVlogShareLRFeatureExtractor
+
+
+object makedata_07_strData {
+  def main(args: Array[String]) {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
+    val beginStr = param.getOrElse("beginStr", "20230101")
+    val endStr = param.getOrElse("endStr", "20230101")
+    val readPath = param.getOrElse("savePath", "/dw/recommend/model/00_sample_data/")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/01_str_data/")
+    val featureVersion = param.getOrElse("featureVersion", "v2")
+
+    // 3 Produce data for each date in the range
+    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
+    for (date <- dateRange) {
+      val partition = partitionPrefix + date
+      println("执行partiton:" + partition)
+      var hdfsPath = readPath + "/" + partition
+      val data = sc.textFile(hdfsPath).map(r=>{
+        val rList = r.split("\t")
+        val labelStr = rList(1)
+        val feaStr = rList(2)
+        val labelJson = JSON.parseObject(labelStr)
+        val label = if (labelJson.containsKey("is_share")) labelJson.getString("is_share") else "0"
+        val feaJson = JSON.parseObject(feaStr)
+        val feaSet = Set(
+          "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
+          "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_system",
+          "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+          "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+          "total_time", "play_count_total",
+          "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+          "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+          "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
+          "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
+          "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
+          "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
+
+          "share_uv_list_1day_6_avg", "share_uv_list_1day_6_var", "share_uv_list_1day_diff_6_avg", "share_uv_list_1day_diff_6_var",
+          "return_uv_list_1day_6_avg", "return_uv_list_1day_6_var", "return_uv_list_1day_diff_6_avg", "return_uv_list_1day_diff_6_var",
+          "share_uv_list_1h_6_avg", "share_uv_list_1h_6_var", "share_uv_list_1h_diff_6_avg", "share_uv_list_1h_diff_6_var",
+          "return_uv_list_1h_6_avg", "return_uv_list_1h_6_var", "return_uv_list_1h_diff_6_avg", "return_uv_list_1h_diff_6_var",
+
+          "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
+          "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
+          "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
+          "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
+
+          "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
+          "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h",
+
+          "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
+          "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
+        )
+        val feaMap = new util.HashMap[String, String]()
+        feaSet.foreach(r=> {
+          if (feaJson.containsKey(r)){
+            feaMap.put(r, feaJson.getString(r))
+          }
+        })
+
+        if ("v1".equals(featureVersion)){
+          val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractor()
+          bytesFeatureExtractor.makeFeature4String(feaMap)
+          val featureMap = bytesFeatureExtractor.featureMap
+          label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
+
+        }else if ("v2".equals(featureVersion)){
+          val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractorV2()
+          bytesFeatureExtractor.makeFeature4String(feaMap)
+          val featureMap = bytesFeatureExtractor.featureMap
+          label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
+        }else{
+          // Fail fast on an unrecognized featureVersion instead of emitting "()".
+          throw new IllegalArgumentException("Unknown featureVersion: " + featureVersion)
+        }
+
+      })
+      // 4 Save data to HDFS
+      hdfsPath = savePath + "/" + partition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        data.saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      }else{
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+  }
+
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+
+
+}