zhangbo 1 gadu atpakaļ
vecāks
revīzija
8ee1505831

+ 243 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_07_rosData.scala

@@ -0,0 +1,243 @@
+package com.aliyun.odps.spark.examples.makedata
+
+
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils}
+import examples.dataloader.{OfflineVlogShareLRFeatureExtractorV1, OfflineVlogShareLRFeatureExtractorV2}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import java.util
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import com.alibaba.fastjson.JSON
+import com.alibaba.fastjson.JSONObject
+
+
object makedata_07_rosData {

  /**
   * Spark job that builds ROS (return-over-share) training samples.
   *
   * Pipeline per date partition:
   *   1. read tab-separated rows of (logKey, labelJson, feaJson)
   *   2. keep only samples that were shared (is_share == "1")
   *   3. attach a label according to `labelVersion`
   *   4. one-hot encode the selected feature set according to `featureVersion`
   *   5. write "label\tfea:1\tfea:1..." lines to HDFS, gzip-compressed
   *
   * Args (via ParamUtils): partitionPrefix, beginStr, endStr, readPath,
   * savePath, featureVersion (v1/v2/v4/v5), ifRepart, labelVersion (v1/v2).
   */
  def main(args: Array[String]) {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 Parse job parameters.
    val param = ParamUtils.parseArgs(args)
    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
    val beginStr = param.getOrElse("beginStr", "20230101")
    val endStr = param.getOrElse("endStr", "20230101")
    val readPath = param.getOrElse("readPath", "/dw/recommend/model/00_sample_data/")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/01_str_data/")
    val featureVersion = param.getOrElse("featureVersion", "v2")
    val ifRepart = param.getOrElse("ifRepart", "100").toInt
    val labelVersion = param.getOrElse("labelVersion", "v1")

    // 2 Feature groups. The per-version sets used to be four near-identical
    // copy-pasted literals; they are compositions of these three groups.
    // Declared as local vals so Spark closures capture plain serializable Sets.

    // Context / device / user-item counter and ratio features (all versions).
    val baseFea: Set[String] = Set(
      "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
      "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_system",
      "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
      "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
      "total_time", "play_count_total",
      "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
      "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
      "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
      "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
      "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
      "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros"
    )

    // Windowed uv/pv list features (v2 and v5).
    val listFea: Set[String] = Set(
      "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
      "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
      "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
      "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
      "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
      "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h"
    )

    // Realtime ratio features (v2 and v4).
    val rtFea: Set[String] = Set(
      "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
      "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
    )

    // Resolve the feature set once (loop/record invariant). An unknown
    // version yields an empty set, which makes every record produce an
    // empty feature string and be filtered out — same as the original.
    val feaSet: Set[String] = featureVersion match {
      case "v1" => baseFea
      case "v2" => baseFea ++ listFea ++ rtFea
      case "v4" => baseFea ++ rtFea
      case "v5" => baseFea ++ listFea
      case _    => Set.empty[String]
    }
    // Only v1 uses the V1 extractor; v2/v4/v5 all use the V2 extractor.
    val useV1Extractor = "v1".equals(featureVersion)

    // 3 Produce data for each date in the range.
    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
    for (date <- dateRange) {
      val partition = partitionPrefix + date
      println("执行partiton:" + partition)
      val readHdfsPath = readPath + "/" + partition

      // 4 Keep only shared samples (is_share == "1").
      // Row layout: logKey \t labelJson \t feaJson; logKey's third ":"-field
      // is the exposure timestamp (ms).
      val dataFilter = sc.textFile(readHdfsPath).map(r => {
        val rList = r.split("\t")
        val logKeyStr = rList(0)
        val labelStr = rList(1)
        val feaStr = rList(2)
        val logTs = logKeyStr.split(":")(2)
        val labelJson = JSON.parseObject(labelStr)
        val feaJson = JSON.parseObject(feaStr)
        val is_share = if (labelJson.containsKey("is_share")) labelJson.getString("is_share") else "0"
        (logTs, feaJson, labelJson, is_share)
      }).filter(_._4.equals("1"))

      // 5 Label handling.
      val dataTrain = labelVersion match {
        case "v2" =>
          // v2: each share sample emits one negative ("0"); every return
          // landing within 1h of exposure emits an extra positive ("1").
          // Samples exposed at hour 23 are dropped entirely (their 1h
          // return window crosses the partition boundary).
          dataFilter.flatMap({
            case (logTs, feaJson, labelJson, _) =>
              val samples = ArrayBuffer[(String, JSONObject)]()
              val hour = feaJson.getString("ctx_hour").toInt
              val expTs = logTs.toLong / 1000 // exposure time, seconds
              if (hour != 23) {
                samples += (("0", feaJson))
                val isReturn = if (labelJson.containsKey("is_return")) labelJson.getString("is_return") else "0"
                if ("1".equals(isReturn) && labelJson.containsKey("return_mid_ts_list")) {
                  // return_mid_ts_list is "mid:tsMs,mid:tsMs,..."
                  labelJson.getString("return_mid_ts_list").split(",")
                    .map(r => r.split(":")(1).toLong / 1000)
                    .foreach(ts => {
                      if (ts - expTs < 3600) {
                        samples += (("1", feaJson))
                      }
                    })
                }
              }
              samples
          })
        case _ =>
          // Default (v1): the label is simply is_return.
          dataFilter.map({
            case (_, feaJson, labelJson, _) =>
              val isReturn = if (labelJson.containsKey("is_return")) labelJson.getString("is_return") else "0"
              (isReturn, feaJson)
          })
      }

      // 6 Feature selection + one-hot encoding via the LR feature extractor.
      val data = dataTrain.map {
        case (label, feaJson) =>
          if (feaSet.isEmpty) {
            // Unknown featureVersion: empty feature string, dropped below.
            (label, "")
          } else {
            val feaMap = new util.HashMap[String, String]()
            feaSet.foreach(key => {
              if (feaJson.containsKey(key)) {
                feaMap.put(key, feaJson.getString(key))
              }
            })
            // V1 and V2 extractors share no common supertype here, so the
            // two branches are kept explicit.
            val featureStr = if (useV1Extractor) {
              val extractor = new OfflineVlogShareLRFeatureExtractorV1()
              extractor.makeFeature4String(feaMap)
              extractor.featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
            } else {
              val extractor = new OfflineVlogShareLRFeatureExtractorV2()
              extractor.makeFeature4String(feaMap)
              extractor.featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
            }
            (label, featureStr)
          }
      }.filter(_._2.nonEmpty).map(r => r._1 + "\t" + r._2)

      // 7 Save to HDFS (gzip). Guard: only paths under the model root may
      // ever be deleted and rewritten.
      val saveHdfsPath = savePath + "/" + partition
      if (saveHdfsPath.nonEmpty && saveHdfsPath.startsWith("/dw/recommend/model/")) {
        println("删除路径并开始数据写入:" + saveHdfsPath)
        MyHdfsUtils.delete_hdfs_path(saveHdfsPath)
        if (ifRepart == 0) {
          data.saveAsTextFile(saveHdfsPath, classOf[GzipCodec])
        } else {
          data.repartition(ifRepart).saveAsTextFile(saveHdfsPath, classOf[GzipCodec])
        }
      } else {
        println("路径不合法,无法写入:" + saveHdfsPath)
      }
    }
  }

  /** Identity record mapper; kept for ODPS table-read API compatibility. */
  def func(record: Record, schema: TableSchema): Record = {
    record
  }

}

+ 1 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_07_strData.scala

@@ -30,7 +30,7 @@ object makedata_07_strData {
     val readPath = param.getOrElse("readPath", "/dw/recommend/model/00_sample_data/")
     val savePath = param.getOrElse("savePath", "/dw/recommend/model/01_str_data/")
     val featureVersion =  param.getOrElse("featureVersion", "v2")
-    val ifRepart = param.getOrElse("ifRepart", "0").toInt
+    val ifRepart = param.getOrElse("ifRepart", "100").toInt