
feat: add vid-dimension ROS sample data production

zhaohaipeng 1 month ago
commit 12a81db9a7

+ 4 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys/v20250218/makedata_recsys_41_ros_train_data_20250304.scala

@@ -9,6 +9,8 @@ import org.apache.hadoop.io.compress.GzipCodec
 import org.apache.spark.sql.SparkSession
 import org.xm.Similarity
 
+import java.time.LocalDateTime
+import java.time.format.DateTimeFormatter
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 
@@ -18,9 +20,10 @@ import scala.collection.mutable.ArrayBuffer
 
 object makedata_recsys_41_ros_train_data_20250304 {
   def main(args: Array[String]): Unit = {
+    val dt = DateTimeFormatter.ofPattern("yyyyMMddHHmm").format(LocalDateTime.now())
     val spark = SparkSession
       .builder()
-      .appName(this.getClass.getName)
+      .appName(this.getClass.getName + ": " + dt)
       .getOrCreate()
     val sc = spark.sparkContext
 

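The change to all three existing jobs is the same: a run timestamp is appended to the Spark application name so that repeated runs of the same job can be told apart in the cluster UI. A minimal standalone sketch of what the formatter produces (the object name and printed values are illustrative):

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

object AppNameDemo {
  def main(args: Array[String]): Unit = {
    // "yyyyMMddHHmm" renders 2025-03-24 15:30 as "202503241530"
    val dt = DateTimeFormatter.ofPattern("yyyyMMddHHmm").format(LocalDateTime.now())
    // prints something like "AppNameDemo$: 202503241530"
    println(getClass.getName + ": " + dt)
  }
}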
+ 4 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys/v20250218/makedata_recsys_41_str_train_data_20250218.scala

@@ -9,6 +9,8 @@ import org.apache.hadoop.io.compress.GzipCodec
 import org.apache.spark.sql.SparkSession
 import org.xm.Similarity
 
+import java.time.LocalDateTime
+import java.time.format.DateTimeFormatter
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.util.Random
@@ -19,9 +21,10 @@ import scala.util.Random
 
 object makedata_recsys_41_str_train_data_20250218 {
   def main(args: Array[String]): Unit = {
+    val dt = DateTimeFormatter.ofPattern("yyyyMMddHHmm").format(LocalDateTime.now())
     val spark = SparkSession
       .builder()
-      .appName(this.getClass.getName)
+      .appName(this.getClass.getName + ": " + dt)
       .getOrCreate()
     val sc = spark.sparkContext
 

+ 5 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys/v20250218/makedata_recsys_41_str_train_data_sample_20250319.scala

@@ -11,6 +11,8 @@ import org.apache.hadoop.io.compress.GzipCodec
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.SparkSession
 
+import java.time.LocalDateTime
+import java.time.format.DateTimeFormatter
 import java.util
 import scala.util.Random
 
@@ -20,9 +22,11 @@ import scala.util.Random
 
 object makedata_recsys_41_str_train_data_sample_20250319 {
   def main(args: Array[String]): Unit = {
+    val dt = DateTimeFormatter.ofPattern("yyyyMMddHHmm").format(LocalDateTime.now())
+
     val spark = SparkSession
       .builder()
-      .appName(this.getClass.getName)
+      .appName(this.getClass.getName + ": " + dt)
       .getOrCreate()
     val sc = spark.sparkContext
 

+ 145 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys/v20250218/makedata_recsys_41_vid_ros_train_data_20250324.scala

@@ -0,0 +1,145 @@
+package com.aliyun.odps.spark.examples.makedata_recsys.v20250218
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.v20250218.ExtractFeature20250218
+import examples.utils.{FestiveUtil, SimilarityUtils}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.SparkSession
+
+import java.time.LocalDateTime
+import java.time.format.DateTimeFormatter
+import java.util
+
+/*
+   20250218 vid-dimension ROS training data production
+ */
+
+object makedata_recsys_41_vid_ros_train_data_20250324 {
+  def main(args: Array[String]): Unit = {
+
+    val dt = DateTimeFormatter.ofPattern("yyyyMMddHHmm").format(LocalDateTime.now())
+
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName + ": " + dt)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val beginStr = param.getOrElse("beginStr", "2025021812")
+    val endStr = param.getOrElse("endStr", "2025021812")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "dwd_recsys_alg_sample_all_20250212")
+    val repartition = param.getOrElse("repartition", "32").toInt
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/41_sample_data/")
+    val fuSampleRate = param.getOrElse("fuSampleRate", "0.05").toDouble
+    val whatLabel = param.getOrElse("whatLabel", "is_share")
+    val whatApps = param.getOrElse("whatApps", "0,4,2,32,17,18,21,22,24,25,26,27,28,29,3,30,31,33,34,35,36").split(",").filter(r => r.nonEmpty).toList
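+    // NOTE: fuSampleRate, whatLabel and whatApps are parsed above but not yet referenced elsewhere in this job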
+
+    // 2 Read ODPS and table info
+    val odpsOps = env.getODPS(sc)
+
+    // 3 Produce data for each hourly partition in the loop below
+    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+    val partitions = timeRange.map { dt_hh =>
+      val day = dt_hh.substring(0, 8) // renamed from `dt`/`hh` to avoid shadowing the run timestamp above
+      val hour = dt_hh.substring(8, 10)
+      s"dt=$day,hh=$hour"
+    }
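+    // e.g. beginStr "2025021812" becomes the ODPS partition spec "dt=20250218,hh=12"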
+
+    var odpsData: RDD[String] = sc.emptyRDD[String] // start from an empty RDD and union each partition's data into it
+    for (partition <- partitions) {
+      println(s"Reading partition: $partition")
+      val partitionData = odpsOps.readTable(
+          project = project,
+          table = table,
+          partition = partition,
+          transfer = func,
+          numPartition = tablePart)
+        .mapPartitions(p => {
+          SimilarityUtils.init()
+          FestiveUtil.init()
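+          // initialize helpers once per partition (presumably executor-side setup rather than serialized driver state)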
+          p.map(record => {
+            val featureMap = new JSONObject()
+            val vid = if (record.isNull("vid")) "" else record.getString("vid")
+
+            val hh = record.getString("hh").toInt
+
+            // a Video features: each bN JSON column is parsed, defaulting to an empty object when null
+            val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else JSON.parseObject(record.getString("b1_feature"))
+            val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else JSON.parseObject(record.getString("b2_feature"))
+            val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else JSON.parseObject(record.getString("b3_feature"))
+            val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else JSON.parseObject(record.getString("b8_feature"))
+            val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else JSON.parseObject(record.getString("b9_feature"))
+            val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else JSON.parseObject(record.getString("b12_feature"))
+            val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else JSON.parseObject(record.getString("b13_feature"))
+
+            val bFeatureMap = new util.HashMap[String, util.Map[String, Object]]()
+            bFeatureMap.put("b2", b2)
+            bFeatureMap.put("b3", b3)
+            bFeatureMap.put("b8", b8)
+            bFeatureMap.put("b9", b9)
+            bFeatureMap.put("b13", b13)
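+            // b1 and b12 have dedicated handlers below; the rest go through handleB2ToB11AndB13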
+
+            ExtractFeature20250218.handleB1(b1, featureMap)
+            ExtractFeature20250218.handleB2ToB11AndB13(bFeatureMap, featureMap)
+            ExtractFeature20250218.handleB12(b12, featureMap)
+
+            // 4 Build the label map from the record
+            val labels = new JSONObject
+            for (labelKey <- List(
+              "is_share", "share_cnt",
+              "is_return_1", "return_1_uv",
+              "is_return_n", "return_n_uv",
+              "is_return_noself", "is_return_n_noself"
+            )) {
+              if (!record.isNull(labelKey)) {
+                labels.put(labelKey, record.getString(labelKey))
+              }
+            }
+
+            // 5 Build the log-key header
+            val logs = new JSONObject()
+            for (key <- List("vid")) {
+              if (!record.isNull(key)) {
+                logs.put(key, record.getString(key))
+              }
+            }
+            logs.put("hour", hh)
+
+            val logKey = logs.toString()
+            val labelKey = labels.toString()
+            val featureKey = featureMap.toString()
+            // 6 Join the three parts into one line and emit it
+            logKey + "\t" + labelKey + "\t" + featureKey
+
+          })
+        })
+      odpsData = odpsData.union(partitionData)
+    }
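+    // NOTE: unioning per-partition RDDs in a loop grows the lineage with the number of hours processed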
+
+    // 4 Save data to HDFS
+    val hdfsPath = savePath
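+    // safety guard: only delete-and-rewrite paths under the expected model directory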
+    if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+      println("Deleting existing path and writing data to: " + hdfsPath)
+      MyHdfsUtils.delete_hdfs_path(hdfsPath)
+      odpsData.coalesce(repartition, shuffle = true).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+    } else {
+      println("Invalid path, refusing to write: " + hdfsPath)
+    }
+  }
+
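+  // Identity transfer for odpsOps.readTable: pass each ODPS record through unchanged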
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+}
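
Each output line produced by the new job is three JSON objects joined by tabs: the log key, the labels, and the features. A minimal sketch of that record layout with illustrative values (the real feature keys are filled in by ExtractFeature20250218, which is not reproduced here):

import com.alibaba.fastjson.JSONObject

object RecordLayoutDemo {
  def main(args: Array[String]): Unit = {
    val logs = new JSONObject()
    logs.put("vid", "12345") // illustrative vid
    logs.put("hour", 12)     // hour is stored as an Int, unlike the String labels

    val labels = new JSONObject()
    labels.put("is_share", "1") // label values are copied as strings from the ODPS record

    val features = new JSONObject()
    features.put("b1_exp_cnt", 0.5) // placeholder key; real keys come from the extractor

    // one training line: logKey \t labelKey \t featureKey
    println(logs.toString + "\t" + labels.toString + "\t" + features.toString)
  }
}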