
Merge branch 'feature/zhangbo_makedata_v2' of algorithm/recommend-emr-dataprocess into master

zhangbo 4 months ago
parent
commit
84b1272dde

+ 374 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_dssm/makedata_i2i_06_itemPred_20241206.scala

@@ -0,0 +1,374 @@
+package com.aliyun.odps.spark.examples.makedata_dssm
+
+import com.alibaba.fastjson.JSON
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+import com.aliyun.odps.spark.examples.makedata_dssm.makedata_i2i_05_trainData_20241129.{getOnehotValue, getDenseBucketValue}
+import scala.collection.mutable
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import scala.io.Source
+
+object makedata_i2i_06_itemPred_20241206 {
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val dt = param.getOrElse("dt", "20240620")
+    val onehotPath = param.getOrElse("onehotPath", "/dw/recommend/model/53_dssm_i2i_onehot/20241128")
+    val bucketFile = param.getOrElse("bucketFile", "20241128_recsys_i2i_bucket_47_v2.txt")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/56_dssm_i2i_itempredData/")
+    val project = param.getOrElse("project", "loghubods")
+    val repartition = param.getOrElse("repartition", "100").toInt
+
+    // 2 Read the onehot mapping file
+    val onehotMap_br = sc.broadcast(
+      sc.textFile(onehotPath).map(r => {
+        val rList = r.split("\t")
+        (rList(0), rList(1))
+      }).collectAsMap()
+    )
+    // 3 Read the dense bucket file (each non-empty line: feature name \t bucket num \t comma-separated boundaries)
+    val resourceUrlBucket = this.getClass.getClassLoader.getResource(bucketFile)
+    val buckets =
+      if (resourceUrlBucket != null) {
+        val source = Source.fromURL(resourceUrlBucket)
+        val buckets = source.getLines().mkString("\n")
+        source.close()
+        buckets
+      } else {
+        ""
+      }
+    println(buckets)
+    val bucketsMap_br = sc.broadcast(
+      buckets.split("\n")
+        .map(r => r.replace(" ", "").replaceAll("\n", ""))
+        .filter(r => r.nonEmpty)
+        .map(r => {
+          val rList = r.split("\t")
+          (rList(0), (rList(1).toDouble, rList(2).split(",").map(_.toDouble)))
+        }).toMap
+    )
+
+    val odpsOps = env.getODPS(sc)
+    val category1_br = sc.broadcast(
+      odpsOps.readTable(project = project,
+          table = "t_vid_l1_cat_stat_feature",
+          partition = s"dt=$dt",
+          transfer = func,
+          numPartition = tablePart)
+        .map(record => {
+          val category = record.getString("category1")
+          val feature = record.getString("feature")
+          (category, feature)
+        }).collectAsMap()
+    )
+    val category2_br = sc.broadcast(
+      odpsOps.readTable(project = project,
+          table = "t_vid_l2_cat_stat_feature",
+          partition = s"dt=$dt",
+          transfer = func,
+          numPartition = tablePart)
+        .map(record => {
+          val category = record.getString("category2")
+          val feature = record.getString("feature")
+          (category, feature)
+        }).collectAsMap()
+    )
+    // 4 Per-vid features, combined via join
+    val vidStaticFeature = odpsOps.readTable(project = project,
+        table = "t_vid_tag_feature",
+        partition = s"dt=$dt",
+        transfer = func,
+        numPartition = tablePart)
+      .map(record => {
+        val vid = record.getString("vid")
+        val feature = record.getString("feature")
+        (vid, feature)
+      })
+    val vidActionFeature = odpsOps.readTable(project = project,
+        table = "t_vid_stat_feature",
+        partition = s"dt=$dt",
+        transfer = func,
+        numPartition = tablePart)
+      .map(record => {
+        val vid = record.getString("vid")
+        val feature = record.getString("feature")
+        (vid, feature)
+      })
+
+    val data = vidStaticFeature.leftOuterJoin(vidActionFeature).map{
+      case (vid, (feature, Some(feature_action))) =>
+        (vid, (feature, feature_action))
+      case (vid, (feature, None)) =>
+        (vid, (feature, "{}"))
+    }.mapPartitions(row => {
+      val result = new ArrayBuffer[(String, (String, String, String, String))]()
+      val category1 = category1_br.value
+      val category2 = category2_br.value
+      row.foreach {
+        case (vid, (feature, feature_action)) =>
+          val cate1 = JSON.parseObject(feature).getOrDefault("category1", "无").toString
+          val cate2 = JSON.parseObject(feature).getOrDefault("category2_1", "无").toString
+          val feature_cate1 = category1.getOrElse(cate1, "{}")
+          val feature_cate2 = category2.getOrElse(cate2, "{}")
+          result.add((vid, (feature, feature_action, feature_cate1, feature_cate2)))
+      }
+      result.iterator
+    }).mapPartitions(row =>{
+      val result = new ArrayBuffer[String]()
+      val onehotMap = onehotMap_br.value
+      val bucketsMap = bucketsMap_br.value
+      row.foreach {
+        case (vid_left, (feature_left, feature_left_action, feature_left_cate1, feature_left_cate2)) =>
+          val left = new ArrayBuffer[String]()
+          val left_dense1 = new ArrayBuffer[String]()
+          val left_dense2 = new ArrayBuffer[String]()
+          // 1 sparse features (16):
+          // vid cate1 cate2 video_style valid_time captions_color audience_age_group
+          // audience_value_type font_size cover_persons_num audience_gender sentiment_tendency
+          // video_type background_music_type captions has_end_credit_guide
+          left += onehotMap.getOrElse("vid:" + vid_left, "0")
+          var jsonLeft = JSON.parseObject(feature_left)
+          left += getOnehotValue(jsonLeft, onehotMap, "category1", "cate1:")
+          left += getOnehotValue(jsonLeft, onehotMap, "category2_1", "cate2:")
+          left += getOnehotValue(jsonLeft, onehotMap, "video_style", "video_style:")
+          left += getOnehotValue(jsonLeft, onehotMap, "valid_time", "valid_time:")
+          left += getOnehotValue(jsonLeft, onehotMap, "captions_color", "captions_color:")
+          left += getOnehotValue(jsonLeft, onehotMap, "audience_age_group", "audience_age_group:")
+          left += getOnehotValue(jsonLeft, onehotMap, "audience_value_type", "audience_value_type:")
+          left += getOnehotValue(jsonLeft, onehotMap, "font_size", "font_size:")
+          left += getOnehotValue(jsonLeft, onehotMap, "cover_persons_num", "cover_persons_num:")
+          left += getOnehotValue(jsonLeft, onehotMap, "audience_gender", "audience_gender:")
+          left += getOnehotValue(jsonLeft, onehotMap, "sentiment_tendency", "sentiment_tendency:")
+          left += getOnehotValue(jsonLeft, onehotMap, "video_type", "video_type:")
+          left += getOnehotValue(jsonLeft, onehotMap, "background_music_type", "background_music_type:")
+          left += getOnehotValue(jsonLeft, onehotMap, "captions", "captions:")
+          left += getOnehotValue(jsonLeft, onehotMap, "has_end_credit_guide", "has_end_credit_guide:")
+          // 2 dense features converted to sparse via bucketing: 47 * 3 * 2
+          jsonLeft = JSON.parseObject(feature_left_action)
+          var res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day1", "action:str_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day1", "action:rov_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day1", "action:ros_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day7", "action:str_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day7", "action:rov_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day7", "action:ros_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day21", "action:str_day21")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day21", "action:rov_day21")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day21", "action:ros_day21")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day336", "action:str_day336")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day336", "action:rov_day336")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day336", "action:ros_day336")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day7", "action:vovd1_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day21", "action:vovd1_day21")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day336", "action:vovd1_day336")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          //----------------------cate1-----------------------------cate1---------------------------cate1----------------------
+          jsonLeft = JSON.parseObject(feature_left_cate1)
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day1", "cate1:str_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day1", "cate1:rov_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day1", "cate1:ros_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day3", "cate1:str_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day3", "cate1:rov_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day3", "cate1:ros_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day7", "cate1:str_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day7", "cate1:rov_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day7", "cate1:ros_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day30", "cate1:str_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day30", "cate1:rov_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day30", "cate1:ros_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day1", "cate1:vovd1_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day3", "cate1:vovd1_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day7", "cate1:vovd1_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day30", "cate1:vovd1_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          //----------------------cate2-----------------------------cate2---------------------------cate2----------------------
+          jsonLeft = JSON.parseObject(feature_left_cate2)
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day1", "cate2:str_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day1", "cate2:rov_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day1", "cate2:ros_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day3", "cate2:str_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day3", "cate2:rov_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day3", "cate2:ros_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day7", "cate2:str_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day7", "cate2:rov_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day7", "cate2:ros_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "str_day30", "cate2:str_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "rov_day30", "cate2:rov_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "ros_day30", "cate2:ros_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day1", "cate2:vovd1_day1")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day3", "cate2:vovd1_day3")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day7", "cate2:vovd1_day7")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+          res = getDenseBucketValue(jsonLeft, bucketsMap, "vovd1_day30", "cate2:vovd1_day30")
+          left += res._1.toString
+          left_dense1 += res._2.toString
+          left_dense2 += res._3.toString
+
+          // 3 left and right each: 16 + 47*3 = 16 + 141 = 157
+          left ++= left_dense1
+          left ++= left_dense2
+          result.add(
+            (vid_left, left.mkString(",")).productIterator.mkString("\t")
+          )
+      }
+      result.iterator
+    })
+
+
+    val hdfsPath = savePath + "/" + dt
+    if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+      println("删除路径并开始数据写入:" + hdfsPath)
+      MyHdfsUtils.delete_hdfs_path(hdfsPath)
+      data.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+    } else {
+      println("路径不合法,无法写入:" + hdfsPath)
+    }
+  }
+}
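
The per-feature pattern above (bucket id appended to left, the two dense values to left_dense1 and left_dense2) is repeated by hand for every action/cate1/cate2 field. Below is a minimal sketch of the same pattern factored into a loop, assuming getDenseBucketValue keeps the signature and 3-tuple return used in the calls above; the helper name appendDenseBuckets is made up for illustration, and the types reuse the imports already present in this file.

def appendDenseBuckets(json: com.alibaba.fastjson.JSONObject,
                       bucketsMap: Map[String, (Double, Array[Double])],
                       keys: Seq[String],
                       prefix: String,
                       sparse: ArrayBuffer[String],
                       dense1: ArrayBuffer[String],
                       dense2: ArrayBuffer[String]): Unit = {
  for (key <- keys) {
    // same three appends as each hand-written block above
    val res = getDenseBucketValue(json, bucketsMap, key, prefix + key)
    sparse += res._1.toString
    dense1 += res._2.toString
    dense2 += res._3.toString
  }
}

// e.g. the "action:" block would become:
// appendDenseBuckets(JSON.parseObject(feature_left_action), bucketsMap,
//   Seq("str_day1", "rov_day1", "ros_day1", "str_day7", "rov_day7", "ros_day7",
//       "str_day21", "rov_day21", "ros_day21", "str_day336", "rov_day336", "ros_day336",
//       "vovd1_day7", "vovd1_day21", "vovd1_day336"),
//   "action:", left, left_dense1, left_dense2)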

+ 280 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys_r_rate/makedata_recsys_61_originData_20241209.scala

@@ -0,0 +1,280 @@
+package com.aliyun.odps.spark.examples.makedata_recsys_r_rate
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.RankExtractorFeature_20240530
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+import org.xm.Similarity
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+/*
+
+ */
+
+object makedata_recsys_61_originData_20241209 {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val beginStr = param.getOrElse("beginStr", "2023010100")
+    val endStr = param.getOrElse("endStr", "2023010123")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_origin_data/")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "XXXX")
+    val repartition = param.getOrElse("repartition", "32").toInt
+
+    // 2 Get the ODPS handle and table info
+    val odpsOps = env.getODPS(sc)
+
+    // 3 Produce data for each dt/hh partition in a loop
+    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+    for (dt_hh <- timeRange) {
+      val dt = dt_hh.substring(0, 8)
+      val hh = dt_hh.substring(8, 10)
+      val partition = s"dt=$dt,hh=$hh"
+      println("开始执行partiton:" + partition)
+      val odpsData = odpsOps.readTable(project = project,
+          table = table,
+          partition = partition,
+          transfer = func,
+          numPartition = tablePart)
+        .map(record => {
+
+          val featureMap = new JSONObject()
+
+          // a Video features
+          val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b1_feature"))
+          val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b2_feature"))
+          val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b3_feature"))
+          val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b6_feature"))
+          val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b7_feature"))
+
+          val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b8_feature"))
+          val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b9_feature"))
+          val b10: JSONObject = if (record.isNull("b10_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b10_feature"))
+          val b11: JSONObject = if (record.isNull("b11_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b11_feature"))
+          val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b12_feature"))
+          val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b13_feature"))
+          val b17: JSONObject = if (record.isNull("b17_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b17_feature"))
+          val b18: JSONObject = if (record.isNull("b18_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b18_feature"))
+          val b19: JSONObject = if (record.isNull("b19_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b19_feature"))
+
+
+          val origin_data = List(
+            (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+            (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+            (b17, b18, b19, "b171819")
+          )
+          for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+            for (prefix2 <- List(
+              "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+            )) {
+              val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+              val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+              val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
+              val f2 = RankExtractorFeature_20240530.calLog(share)
+              val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
+              val f4 = RankExtractorFeature_20240530.calLog(returns)
+              val f5 = f3 * f4
+              val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
+            }
+          }
+
+          val video_info: JSONObject = if (record.isNull("t_v_info_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("t_v_info_feature"))
+          featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+          featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+          val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("c1_feature"))
+          if (c1.nonEmpty) {
+            featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+            featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+            featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+            featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+          }
+          val c2: JSONObject = if (record.isNull("c2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("c2_feature"))
+          if (c2.nonEmpty) {
+            featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+            featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+            featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+            featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+            featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+            featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+            featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+            featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+          }
+
+          val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+          if (!title.equals("")) {
+            for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
+              val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                JSON.parseObject(record.getString(key_feature))
+              for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+                val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+                if (!tags.equals("")) {
+                  val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
+                  featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
+                  featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
+                  featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+                }
+              }
+            }
+          }
+
+          val vid = if (record.isNull("vid")) "" else record.getString("vid")
+          if (!vid.equals("")) {
+            for (key_feature <- List("c8_feature", "c9_feature")) {
+              val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                JSON.parseObject(record.getString(key_feature))
+              for (key_action <- List("share", "return")) {
+                val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+                if (!cfListStr.equals("")) {
+                  val cfMap = cfListStr.split(",").map(r => {
+                    val rList = r.split(":")
+                    (rList(0), (rList(1), rList(2), rList(3)))
+                  }).toMap
+                  if (cfMap.contains(vid)) {
+                    val (score, num, rank) = cfMap(vid)
+                    featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
+                    featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
+                    featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+                  }
+                }
+              }
+            }
+          }
+
+          val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("d1_feature"))
+          if (d1.nonEmpty) {
+            featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+            featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+            featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+          }
+
+
+          /*
+          Video:
+          exposure uses pv, share uses pv, return uses uv --> 1h 2h 3h 4h 12h 1d 3d 7d
+          STR log(share) ROV log(return) ROV*log(return)
+          40 feature combinations
+          overall, overall-exposure counterpart, recommendation non-coldstart root, recommendation coldstart root, per-province root
+          200 feature values
+
+          Video:
+          duration, bit rate
+
+          User:
+          play count --> 6h 1d 3d 7d --> 4
+          share pv / return uv brought back --> 12h 1d 3d 7d --> 8
+          User + vid-title:
+          play point / return point / share point / cumulative share / cumulative return --> 1d 3d 7d --> match count, max semantic similarity, mean semantic similarity --> 45
+          User + vid-cf:
+          based on share behavior / based on return behavior --> "share cf" + "return-click cf": similarity score, similar count, reciprocal of similarity rank --> 12
+
+          Head videos:
+          exposure, return, ROVn --> 3 features
+
+          Context:
+          hour, weekday, apptype, city, province, pagesource, device model
+           */
+
+
+          // 4 Build label info.
+          val labels = new JSONObject
+          for (labelKey <- List(
+            "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
+            "share_pv", "total_share_uv"
+          )) {
+            if (!record.isNull(labelKey)) {
+              labels.put(labelKey, record.getString(labelKey))
+            }
+          }
+          // 5 Build the log key header.
+          val apptype = record.getString("apptype")
+          val pagesource = record.getString("pagesource")
+          val mid = record.getString("mid")
+          // vid was already extracted above
+          val ts = record.getString("ts")
+          val abcode = record.getString("abcode")
+          val level = if (record.isNull("level")) "0" else record.getString("level")
+          val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
+          val labelKey = labels.toString()
+          val featureKey = featureMap.toString()
+          // 6 Concatenate the fields and emit.
+          logKey + "\t" + labelKey + "\t" + featureKey
+
+        })
+
+      // 4 Save data to HDFS
+      val savePartition = dt + hh
+      val hdfsPath = savePath + "/" + savePartition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      } else {
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+  }
+
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
+    // returns: match count, matched tags, max semantic similarity, mean semantic similarity
+    val tagsList = tags.split(",")
+    var d1 = 0.0
+    val d2 = new ArrayBuffer[String]()
+    var d3 = 0.0
+    var d4 = 0.0
+    for (tag <- tagsList) {
+      if (title.contains(tag)) {
+        d1 = d1 + 1.0
+        d2.add(tag)
+      }
+      val score = Similarity.conceptSimilarity(tag, title)
+      d3 = if (score > d3) score else d3
+      d4 = d4 + score
+    }
+    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
+    (d1, d2.mkString(","), d3, d4)
+  }
+}
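
Each line this job writes is three tab-separated fields: the log key (apptype, pagesource, mid, vid, ts, abcode, level joined by commas), a JSON string of labels, and a JSON string of features. Below is a minimal sketch of reading one of these lines back, assuming only that layout; parseOriginLine is a hypothetical helper name.

import com.alibaba.fastjson.{JSON, JSONObject}

def parseOriginLine(line: String): (Array[String], JSONObject, JSONObject) = {
  // logKey \t labels-json \t features-json, as assembled at the end of the map() above
  val Array(logKey, labelStr, featureStr) = line.split("\t", 3)
  val logKeyParts = logKey.split(",") // apptype, pagesource, mid, vid, ts, abcode, level
  (logKeyParts, JSON.parseObject(labelStr), JSON.parseObject(featureStr))
}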

+ 14 - 4
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本-I2I

@@ -9,10 +9,10 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --class com.aliyun.odps.spark.examples.makedata_dssm.makedata_i2i_02_joinFeatureData_20241128 \
 --master yarn --driver-memory 2G --executor-memory 2G --executor-cores 1 --num-executors 32 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-beginStr:2024112800 endStr:2024112823 \
+beginStr:2024113000 endStr:2024113023 \
 tablePart:64 \
 readPath:/dw/recommend/model/51_dssm_i2i_sample/ \
-savePath:/dw/recommend/model/52_dssm_i2i_joinfeature/ > p52.log 2>&1 &
+savePath:/dw/recommend/model/52_dssm_i2i_joinfeature/ > p52_2.log 2>&1 &
 
 nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
 --class com.aliyun.odps.spark.examples.makedata_dssm.makedata_i2i_03_onehotFile_20241128 \
@@ -56,8 +56,18 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --class com.aliyun.odps.spark.examples.makedata_dssm.makedata_i2i_05_trainData_20241129 \
 --master yarn --driver-memory 2G --executor-memory 4G --executor-cores 1 --num-executors 32 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-beginStr:2024112800 endStr:2024112823 \
+beginStr:2024113000 endStr:2024113023 \
 readPath:/dw/recommend/model/52_dssm_i2i_joinfeature/ \
 savePath:/dw/recommend/model/55_dssm_i2i_traindata/ \
 onehotPath:/dw/recommend/model/53_dssm_i2i_onehot/after_20241201_file \
-bucketFile:20241128_recsys_i2i_bucket_47_v2.txt > p55.log 2>&1 &
+bucketFile:20241128_recsys_i2i_bucket_47_v2.txt > p55_2.log 2>&1 &
+
+nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
+--class com.aliyun.odps.spark.examples.makedata_dssm.makedata_i2i_06_itemPred_20241206 \
+--master yarn --driver-memory 2G --executor-memory 2G --executor-cores 1 --num-executors 16 \
+./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
+onehotPath:/dw/recommend/model/53_dssm_i2i_onehot/after_20241201_file \
+bucketFile:20241128_recsys_i2i_bucket_47_v2.txt repartition:100 \
+dt:20241206 \
+savePath:/dw/recommend/model/56_dssm_i2i_itempredData/ \
+> p56.log 2>&1 &