
New sample data production

zhangbo 1 year ago
commit d9eff5289f

+ 4 - 4
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_10_originData_v3.scala

@@ -145,12 +145,12 @@ object makedata_10_originData_v3 {
             "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
             "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
             "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt",
-            // ----------
+            // ---------- rate
             "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
             "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
             "u_7day_ctr", "u_7day_str", "u_7day_rov", "u_7day_ros",
             "u_3month_ctr", "u_3month_str", "u_3month_rov", "u_3month_ros",
-            // ----------
+            // ---------- rate
             "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
             "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
             "i_7day_ctr", "i_7day_str", "i_7day_rov", "i_7day_ros",
@@ -168,7 +168,7 @@ object makedata_10_originData_v3 {
             // ----------
             "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
             "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h",
-            // ----------
+            // ---------- rate
             "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
             "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
           )
@@ -201,7 +201,7 @@ object makedata_10_originData_v3 {
           val apptype = record.getString("apptype")
           val pagesource_change = record.getString("pagesource_change")
           val abcode = record.getString("abcode")
-          val video_recommend = if (record.isNull("video_recommend")) record.getString("video_recommend") else "111"
+          val video_recommend = if (!record.isNull("video_recommend")) record.getString("video_recommend") else "111"
 
           val logKey = (mid, videoid, logtimestamp, apptype, pagesource_change, abcode, video_recommend).productIterator.mkString(":")
           val labelKey = labelNew.toString()
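
The one-line fix above inverts a null check: the original condition read video_recommend only when the column was NULL, so the default "111" was applied exactly backwards. A minimal sketch of the corrected default-on-null pattern, using Option as a hypothetical stand-in for the ODPS Record API:

    // Read the value when present, otherwise fall back to the sentinel "111".
    def videoRecommend(col: Option[String]): String = col.getOrElse("111")

    assert(videoRecommend(Some("-6")) == "-6") // column present: use it
    assert(videoRecommend(None) == "111")      // column NULL: default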

+ 198 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_12_rosData_v3.scala

@@ -0,0 +1,198 @@
+package com.aliyun.odps.spark.examples.makedata
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils}
+import examples.dataloader.{OfflineVlogShareLRFeatureExtractorV1, OfflineVlogShareLRFeatureExtractorV2}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import java.util
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import examples.extractor.ExtractorUtils
+
+object makedata_12_rosData_v3 {
+  def main(args: Array[String]) {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
+    val beginStr = param.getOrElse("beginStr", "20230101")
+    val endStr = param.getOrElse("endStr", "20230101")
+    val readPath = param.getOrElse("readPath", "/dw/recommend/model/10_sample_data_v3/")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/12_ros_data_v3/")
+    val featureVersion = param.getOrElse("featureVersion", "v2")
+    val ifRepart = param.getOrElse("ifRepart", "10").toInt
+    val labelVersion = param.getOrElse("labelVersion", "v1")
+
+    // 2 Loop over dates and produce the data
+    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
+    for (date <- dateRange) {
+      val partition = partitionPrefix + date
+      println("开始执行partiton:" + partition)
+      var hdfsPath = readPath + "/" + partition
+
+      // 1 Sample filtering (share samples, allowed page sources, recommendable videos, selected app types)
+      val data1 = sc.textFile(hdfsPath).map(r => {
+        val rList = r.split("\t")
+        val logKeyStr = rList(0)
+        val (mid, videoid, logtimestamp, apptype, pagesource_change, abcode, video_recommend) = ParamUtils.parseLogKey(logKeyStr)
+        val labelStr = rList(1)
+        val feaStr = rList(2)
+        val labelJson = JSON.parseObject(labelStr)
+        val is_share = labelJson.getString("is_share")
+        (logKeyStr, labelJson, feaStr, is_share, pagesource_change, video_recommend, apptype, logtimestamp.toLong)
+      }).filter({
+        case (logKeyStr, labelJson, feaStr, is_share, pagesource_change, video_recommend, apptype, logtimestamp) =>
+          val pages = Set("2")
+          val video_status = Set("-6")
+          val apps = Set("0", "4", "5", "21", "3", "6")
+          "1".equals(is_share) && pages.contains(pagesource_change) && video_status.contains(video_recommend) && apps.contains(apptype)
+      })
+
+      // 2 Sample replication (a sample with multiple returns is copied once per valid return, i.e. weighted by return volume)
+      val data2 = data1.flatMap({
+        case (logKeyStr, labelJson, feaStr, is_share, pagesource_change, video_recommend, apptype, logtimestamp) =>
+          val res = ArrayBuffer[(String, JSONObject)]()
+          val feaJson = JSON.parseObject(feaStr)
+          val is_return = labelJson.getString("is_return")
+          if ("0".equals(is_return)){
+            res.add(("0", feaJson))
+          }else{
+            val return_mid_ts_list = labelJson.getString("return_mid_ts_list").split(",").map(r => {
+              val midReturn = r.split(":")(0)
+              val ts = r.split(":")(1).toLong / 1000
+              (midReturn, ts)
+            }).sortBy(_._2)
+            val midSet = scala.collection.mutable.Set[String]() // mutable, to dedup returning mids; immutable Set() has no add
+            for ((midReturn, tsReturn) <- return_mid_ts_list){
+              if (!midSet.contains(midReturn)){
+                midSet.add(midReturn)
+                if (tsReturn - logtimestamp <= 3600 && tsReturn - logtimestamp > 0){
+                  res.add(("1", feaJson))
+                }
+              }
+            }
+          }
+          res.iterator
+      })
+
+      // 3 Print the label distribution as a sanity check
+      println("Label distribution")
+      data2.map(r=> (r._1, 1)).reduceByKey(_+_).map(r=> r._1 + "\t" + r._2).collect().foreach(println)
+
+      // 4 Discretize absolute feature values, e.g. 0.456 becomes 19
+      val data3 = data2.map({
+        case (label, feaJson) =>
+          Set(
+            "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
+            "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
+            "u_7day_ctr", "u_7day_str", "u_7day_rov", "u_7day_ros",
+            "u_3month_ctr", "u_3month_str", "u_3month_rov", "u_3month_ros",
+            // ----------
+            "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
+            "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
+            "i_7day_ctr", "i_7day_str", "i_7day_rov", "i_7day_ros",
+            "i_3month_ctr", "i_3month_str", "i_3month_rov", "i_3month_ros",
+            // ----------
+            "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
+            "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
+          ).foreach(key =>{
+            if (feaJson.containsKey(key)){
+              val value = ExtractorUtils.ceilLogRate(feaJson.getString(key).toDouble)
+              feaJson.put(key, value.toString)
+            }
+          })
+          Set(
+            "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+            "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+            "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
+            "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt",
+            // ----------
+            "total_time", "play_count", "play_count_total",
+            "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+            "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+            "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
+            "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt",
+            // ----------
+            "share_uv_list_1day_6_avg", "share_uv_list_1day_6_var", "share_uv_list_1day_diff_6_avg", "share_uv_list_1day_diff_6_var",
+            "return_uv_list_1day_6_avg", "return_uv_list_1day_6_var", "return_uv_list_1day_diff_6_avg", "return_uv_list_1day_diff_6_var",
+            "share_uv_list_1h_6_avg", "share_uv_list_1h_6_var", "share_uv_list_1h_diff_6_avg", "share_uv_list_1h_diff_6_var",
+            "return_uv_list_1h_6_avg", "return_uv_list_1h_6_var", "return_uv_list_1h_diff_6_avg", "return_uv_list_1h_diff_6_var",
+            // ----------
+            "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
+            "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
+            "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
+            "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
+            // ----------
+            "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
+            "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h"
+
+          ).foreach(key => {
+            if (feaJson.containsKey(key)) {
+              val value = ExtractorUtils.bucketCnt(feaJson.getString(key).toDouble)
+              feaJson.put(key, value.toString)
+            }
+          })
+          (label, feaJson)
+      })
+      // 5 Convert to libsvm format
+      val data4 = data3.map({
+        case (label, feaJson) =>
+          val feaSet = Set(
+            "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
+            "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_system",
+            "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+            "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+            "total_time", "play_count_total",
+            "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+            "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+            "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
+            "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
+            "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
+            "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
+
+            "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
+            "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
+          )
+          val feaMap = new util.HashMap[String, String]()
+          feaSet.foreach(r => {
+            if (feaJson.containsKey(r)) {
+              feaMap.put(r, feaJson.getString(r))
+            }
+          })
+          val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractorV2()
+          bytesFeatureExtractor.makeFeature4String(feaMap)
+          val featureMap = bytesFeatureExtractor.featureMap
+          label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
+
+      })
+
+      // 6 Save data to HDFS
+      hdfsPath = savePath + "/" + partition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        if (ifRepart == 0){
+          data4.saveAsTextFile(hdfsPath, classOf[GzipCodec])
+        }else{
+          data4.repartition(ifRepart).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+        }
+      }else{
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+  }
+
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+}
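
Worth noting in step 2 of this new job: a shared sample is emitted once per distinct returning user whose return lands within one hour after the view, so a positive sample's replica count equals its deduplicated one-hour return count. A self-contained Scala sketch of that weighting, with made-up values:

    // Toy data: (returning mid, return timestamp in seconds); the view happened at logTs.
    val logTs = 1000L
    val returns = Seq(("midA", 1200L), ("midA", 1300L), ("midB", 1500L), ("midC", 99999L))
    val seen = scala.collection.mutable.Set[String]()
    val copies = returns.sortBy(_._2).count { case (mid, ts) =>
      // add() returns true only for the first return of each mid (dedup),
      // then the return must fall inside the (0, 3600] second window.
      seen.add(mid) && ts - logTs > 0 && ts - logTs <= 3600
    }
    // midA counted once (deduplicated), midB once, midC is outside the window.
    assert(copies == 2)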

+ 12 - 0
src/main/scala/com/aliyun/odps/spark/examples/myUtils/ParamUtils.scala

@@ -25,4 +25,16 @@ object ParamUtils {
     }
     rst
   }
+
+  def parseLogKey(logKey: String): (String, String, String, String, String, String, String) = {
+    val l = logKey.split(":") // assumes none of the seven fields contains ':'
+    val mid = l(0)
+    val videoid = l(1)
+    val logtimestamp = l(2)
+    val apptype = l(3)
+    val pagesource_change = l(4)
+    val abcode = l(5)
+    val video_recommend = l(6)
+    (mid, videoid, logtimestamp, apptype, pagesource_change, abcode, video_recommend)
+  }
 }
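
parseLogKey simply inverts the ':'-joined logKey assembled in makedata_10_originData_v3. A usage sketch with made-up field values, assuming ParamUtils is in scope:

    val logKey = Seq("mid123", "vid456", "1708905600", "4", "2", "ab0", "-6").mkString(":")
    val (mid, videoid, logtimestamp, apptype, pagesource_change, abcode, video_recommend) =
      ParamUtils.parseLogKey(logKey)
    assert(videoid == "vid456" && apptype == "4")

Because the key is split on ':', none of the fields may contain a colon (see the comment in parseLogKey).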

+ 5 - 0
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本

@@ -0,0 +1,5 @@
+nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
+--class com.aliyun.odps.spark.examples.makedata.makedata_10_originData_v3 \
+--master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 32 \
+./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
+tablePart:32 savePath:/dw/recommend/model/10_sample_data_v3/ beginStr:20240226 endStr:20240226 > p10.log 2>&1 &
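
For the new ROS job, an analogous (untested) submit command, reusing the same pattern with the parameter names and default paths from makedata_12_rosData_v3; the log file name p12.log is arbitrary:

    nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
    --class com.aliyun.odps.spark.examples.makedata.makedata_12_rosData_v3 \
    --master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 32 \
    ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
    readPath:/dw/recommend/model/10_sample_data_v3/ savePath:/dw/recommend/model/12_ros_data_v3/ beginStr:20240226 endStr:20240226 > p12.log 2>&1 &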