#19 推荐排序新特征

Merged
zhangbo merged 7 commits from algorithm/feature/zhangbo_makedata_v2 into algorithm/master 2 months ago

+ 589 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_ad/v20240718/makedata_ad_34_bucketDataPrint_20241217.scala

@@ -0,0 +1,589 @@
+package com.aliyun.odps.spark.examples.makedata_ad.v20240718
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.{ExtractorUtils, RankExtractorFeature_20240530}
+import examples.utils.DateTimeUtil
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+import org.xm.Similarity
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+import scala.io.Source
+/*
+   Feature extraction for ad-sample offline/online consistency checking.
+   (Header date 20240608 predates the 20241217 file name; kept for history.)
+ */
+
+object makedata_ad_34_bucketDataPrint_20241217 {
+  /**
+   * Spark entry point. For each hour in [beginStr, endStr] the job:
+   *   1) reads the ODPS ad-sample table partition dt=yyyyMMdd,hh=HH;
+   *   2) recomputes the offline feature map per record (id features plus
+   *      b*/c*/d*/e* statistical and similarity features);
+   *   3) keeps only apptype "4", rows with a non-null metafeaturemap and
+   *      abcodes ab0..ab4, then bucketizes the offline features;
+   *   4) writes one gzip text directory per hour under savePath;
+   * afterwards it re-reads every hourly output of readDate and writes six
+   * comparison extracts (v1..v6) used to diff offline vs online features.
+   */
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1. Parse command-line parameters (defaults suit ad-hoc debugging runs).
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val beginStr = param.getOrElse("beginStr", "2024061500")
+    val endStr = param.getOrElse("endStr", "2024061523")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/17_for_check/")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "alg_recsys_sample_all")
+    val repartition = param.getOrElse("repartition", "32").toInt
+    val readDate = param.getOrElse("readDate", "20240615")
+    val idDefaultValue = param.getOrElse("idDefaultValue", "0.1").toDouble
+    val filterNames = param.getOrElse("filterNames", "").split(",").toSet
+
+
+    // Feature-name whitelist loaded from the classpath; empty string if absent.
+    // NOTE(review): Source.fromURL(...).close() opens a SECOND Source and closes
+    // that one; the Source used by getLines() is never closed (minor leak).
+    val loader = getClass.getClassLoader
+    val resourceUrl = loader.getResource("20240703_ad_feature_name.txt")
+    val content =
+      if (resourceUrl != null) {
+        val content = Source.fromURL(resourceUrl).getLines().mkString("\n")
+        Source.fromURL(resourceUrl).close()
+        content
+      } else {
+        ""
+      }
+    println(content)
+    val contentList = content.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty).toList
+    val contentList_br = sc.broadcast(contentList)
+
+    // Bucket boundaries: each line is "name \t bucketNum \t v1,v2,..." .
+    // Same double-open pattern as above (see NOTE(review)).
+    val resourceUrlBucket = loader.getResource("20240718_ad_bucket_688.txt")
+    val buckets =
+      if (resourceUrlBucket != null) {
+        val buckets = Source.fromURL(resourceUrlBucket).getLines().mkString("\n")
+        Source.fromURL(resourceUrlBucket).close()
+        buckets
+      } else {
+        ""
+      }
+    println(buckets)
+    val bucketsMap = buckets.split("\n")
+      .map(r => r.replace(" ", "").replaceAll("\n", ""))
+      .filter(r => r.nonEmpty)
+      .map(r => {
+        val rList = r.split("\t")
+        (rList(0), (rList(1).toDouble, rList(2).split(",").map(_.toDouble)))
+      }).toMap
+    val bucketsMap_br = sc.broadcast(bucketsMap)
+
+    // 2. Obtain the ODPS reader bound to this SparkContext.
+    val odpsOps = env.getODPS(sc)
+
+    // 3. Produce data hour by hour over the requested time range.
+    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+    for (dt_hh <- timeRange) {
+      val dt = dt_hh.substring(0, 8)
+      val hh = dt_hh.substring(8, 10)
+      val partition = s"dt=$dt,hh=$hh"
+      println("开始执行partiton:" + partition)
+      val odpsData = odpsOps.readTable(project = project,
+        table = table,
+        partition = partition,
+        transfer = func,
+        numPartition = tablePart)
+        .map(record => {
+          val ts = record.getString("ts").toInt
+          val cid = record.getString("cid")
+          val featureMap = new JSONObject()
+          // b1..b9: per-scope JSON feature blobs; missing columns become empty objects.
+          val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b1_feature"))
+          val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b2_feature"))
+          val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b3_feature"))
+          val b4: JSONObject = if (record.isNull("b4_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b4_feature"))
+          val b5: JSONObject = if (record.isNull("b5_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b5_feature"))
+          val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b6_feature"))
+          val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b7_feature"))
+          val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b8_feature"))
+          val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b9_feature"))
+          // Sparse id features: value is the configurable default weight.
+          featureMap.put("cid_" + cid, idDefaultValue)
+          // if (b1.containsKey("adid") && b1.getString("adid").nonEmpty) {
+          //   featureMap.put("adid_" + b1.getString("adid"), idDefaultValue)
+          // }
+          if (b1.containsKey("adverid") && b1.getString("adverid").nonEmpty) {
+            featureMap.put("adverid_" + b1.getString("adverid"), idDefaultValue)
+          }
+          // if (b1.containsKey("targeting_conversion") && b1.getString("targeting_conversion").nonEmpty) {
+          //   featureMap.put("targeting_conversion_" + b1.getString("targeting_conversion"), 1.0)
+          // }
+          // Time-context features derived from the request timestamp.
+          val hour = DateTimeUtil.getHourByTimestamp(ts)
+          featureMap.put("hour_" + hour, 0.1)
+          val dayOfWeek = DateTimeUtil.getDayOrWeekByTimestamp(ts)
+          featureMap.put("dayofweek_" + dayOfWeek, 0.1);
+          if (b1.containsKey("cpa")) {
+            featureMap.put("cpa", b1.getString("cpa").toDouble)
+          }
+          // b2..b5, b8, b9: windowed view/click/conversion/income counters ->
+          // ctr/ctcvr/cvr/conver plus click and two conversion cross features.
+          for ((bn, prefix1) <- List(
+            (b2, "b2"), (b3, "b3"), (b4, "b4"), (b5, "b5"), (b8, "b8"), (b9, "b9")
+          )) {
+            for (prefix2 <- List(
+             "1h","2h" ,"3h", "6h", "12h", "1d", "3d", "7d", "today", "yesterday"
+            )) {
+              val view = if (bn.isEmpty) 0D else bn.getIntValue("ad_view_" + prefix2).toDouble
+              val click = if (bn.isEmpty) 0D else bn.getIntValue("ad_click_" + prefix2).toDouble
+              val conver = if (bn.isEmpty) 0D else bn.getIntValue("ad_conversion_" + prefix2).toDouble
+              val income = if (bn.isEmpty) 0D else bn.getIntValue("ad_income_" + prefix2).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
+              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
+              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
+              val f4 = conver
+              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctr", f1)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctcvr", f2)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "cvr", f3)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver", f4)
+              // featureMap.put(prefix1 + "_" + prefix2 + "_" + "ecpm", f5)
+
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "click", click)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*log(view)", conver * RankExtractorFeature_20240530.calLog(view))
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*ctcvr", conver * f2)
+            }
+          }
+          // b6/b7 only carry the longer 7d/14d windows; same derived features.
+          for ((bn, prefix1) <- List(
+            (b6, "b6"), (b7, "b7")
+          )) {
+            for (prefix2 <- List(
+              "7d", "14d"
+            )) {
+              val view = if (bn.isEmpty) 0D else bn.getIntValue("ad_view_" + prefix2).toDouble
+              val click = if (bn.isEmpty) 0D else bn.getIntValue("ad_click_" + prefix2).toDouble
+              val conver = if (bn.isEmpty) 0D else bn.getIntValue("ad_conversion_" + prefix2).toDouble
+              val income = if (bn.isEmpty) 0D else bn.getIntValue("ad_income_" + prefix2).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
+              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
+              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
+              val f4 = conver
+              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctr", f1)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctcvr", f2)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "cvr", f3)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver", f4)
+              // featureMap.put(prefix1 + "_" + prefix2 + "_" + "ecpm", f5)
+
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "click", click)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*log(view)", conver * RankExtractorFeature_20240530.calLog(view))
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*ctcvr", conver * f2)
+            }
+          }
+          // c1.action: user's ad action history, entries "cid:ts:click:conver:income:title",
+          // sorted by most recent timestamp first.
+          val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("c1_feature"))
+          val midActionList = if (c1.containsKey("action") && c1.getString("action").nonEmpty) {
+            c1.getString("action").split(",").map(r => {
+              val rList = r.split(":")
+              (rList(0), (rList(1).toInt, rList(2).toInt, rList(3).toInt, rList(4).toInt, rList(5)))
+            }).sortBy(-_._2._1).toList
+          } else {
+            new ArrayBuffer[(String, (Int, Int, Int, Int, String))]().toList
+          }
+          // User-level (u) features: totals over the whole action history.
+          val viewAll = midActionList.size.toDouble
+          val clickAll = midActionList.map(_._2._2).sum.toDouble
+          val converAll = midActionList.map(_._2._3).sum.toDouble
+          val incomeAll = midActionList.map(_._2._4).sum.toDouble
+          featureMap.put("viewAll", viewAll)
+          featureMap.put("clickAll", clickAll)
+          featureMap.put("converAll", converAll)
+          featureMap.put("incomeAll", incomeAll)
+          featureMap.put("ctr_all", RankExtractorFeature_20240530.calDiv(clickAll, viewAll))
+          featureMap.put("ctcvr_all", RankExtractorFeature_20240530.calDiv(converAll, viewAll))
+          // NOTE(review): "cvr_all" divides clickAll by converAll — the other cvr
+          // features use conver/click; looks inverted, confirm intent.
+          featureMap.put("cvr_all", RankExtractorFeature_20240530.calDiv(clickAll, converAll))
+          // featureMap.put("ecpm_all", RankExtractorFeature_20240530.calDiv(incomeAll * 1000, viewAll))
+          // User-x-item (ui) features: recency of the most recent view/click/conversion
+          // of each historical cid, expressed as inverse days since it happened.
+          val midTimeDiff = scala.collection.mutable.Map[String, Double]()
+          midActionList.foreach {
+            case (cid, (ts_history, click, conver, income, title)) =>
+              if (!midTimeDiff.contains("timediff_view_" + cid)) {
+                midTimeDiff.put("timediff_view_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
+              }
+              if (!midTimeDiff.contains("timediff_click_" + cid) && click > 0) {
+                midTimeDiff.put("timediff_click_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
+              }
+              if (!midTimeDiff.contains("timediff_conver_" + cid) && conver > 0) {
+                midTimeDiff.put("timediff_conver_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
+              }
+          }
+          // Per-cid accumulated counts over the history.
+          val midActionStatic = scala.collection.mutable.Map[String, Double]()
+          midActionList.foreach {
+            case (cid, (ts_history, click, conver, income, title)) =>
+              midActionStatic.put("actionstatic_view_" + cid, 1.0 + midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0))
+              midActionStatic.put("actionstatic_click_" + cid, click + midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0))
+              midActionStatic.put("actionstatic_conver_" + cid, conver + midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0))
+              midActionStatic.put("actionstatic_income_" + cid, income + midActionStatic.getOrDefault("actionstatic_income_" + cid, 0.0))
+          }
+          // Project the history maps onto the CURRENT record's cid (outer `cid`).
+          if (midTimeDiff.contains("timediff_view_" + cid)) {
+            featureMap.put("timediff_view", midTimeDiff.getOrDefault("timediff_view_" + cid, 0.0))
+          }
+          if (midTimeDiff.contains("timediff_click_" + cid)) {
+            featureMap.put("timediff_click", midTimeDiff.getOrDefault("timediff_click_" + cid, 0.0))
+          }
+          if (midTimeDiff.contains("timediff_conver_" + cid)) {
+            featureMap.put("timediff_conver", midTimeDiff.getOrDefault("timediff_conver_" + cid, 0.0))
+          }
+          if (midActionStatic.contains("actionstatic_view_" + cid)) {
+            featureMap.put("actionstatic_view", midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0))
+          }
+          if (midActionStatic.contains("actionstatic_click_" + cid)) {
+            featureMap.put("actionstatic_click", midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0))
+          }
+          if (midActionStatic.contains("actionstatic_conver_" + cid)) {
+            featureMap.put("actionstatic_conver", midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0))
+          }
+          if (midActionStatic.contains("actionstatic_income_" + cid)) {
+            featureMap.put("actionstatic_income", midActionStatic.getOrDefault("actionstatic_income_" + cid, 0.0))
+          }
+          if (midActionStatic.contains("actionstatic_view_" + cid) && midActionStatic.contains("actionstatic_click_" + cid)) {
+            featureMap.put("actionstatic_ctr", RankExtractorFeature_20240530.calDiv(
+              midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0),
+              midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0)
+            ))
+          }
+          if (midActionStatic.contains("actionstatic_view_" + cid) && midActionStatic.contains("actionstatic_conver_" + cid)) {
+            featureMap.put("actionstatic_ctcvr", RankExtractorFeature_20240530.calDiv(
+              midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0),
+              midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0)
+            ))
+          }
+          if (midActionStatic.contains("actionstatic_conver_" + cid) && midActionStatic.contains("actionstatic_click_" + cid)) {
+            featureMap.put("actionstatic_cvr", RankExtractorFeature_20240530.calDiv(
+              midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0),
+              midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0)
+            ))
+          }
+          // e1/e2: tag features vs the ad title — match count, max and avg
+          // concept-similarity scores (f2, the matched words, is unused here).
+          val e1: JSONObject = if (record.isNull("e1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("e1_feature"))
+          val e2: JSONObject = if (record.isNull("e2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("e2_feature"))
+          val title = b1.getOrDefault("cidtitle", "").toString
+          if (title.nonEmpty) {
+            for ((en, prefix1) <- List((e1, "e1"), (e2, "e2"))) {
+              for (prefix2 <- List("tags_3d", "tags_7d", "tags_14d")) {
+                if (en.nonEmpty && en.containsKey(prefix2) && en.getString(prefix2).nonEmpty) {
+                  val (f1, f2, f3, f4) = funcC34567ForTags(en.getString(prefix2), title)
+                  featureMap.put(prefix1 + "_" + prefix2 + "_matchnum", f1)
+                  featureMap.put(prefix1 + "_" + prefix2 + "_maxscore", f3)
+                  featureMap.put(prefix1 + "_" + prefix2 + "_avgscore", f4)
+
+                }
+              }
+            }
+          }
+          // d1: video-side windowed counters; d2: per-window rank maps; d3: video title.
+          val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("d1_feature"))
+          val d2: JSONObject = if (record.isNull("d2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("d2_feature"))
+          val d3: JSONObject = if (record.isNull("d3_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("d3_feature"))
+          if (d1.nonEmpty) {
+            for (prefix <- List("3h", "6h", "12h", "1d", "3d", "7d")) {
+              val view = if (!d1.containsKey("ad_view_" + prefix)) 0D else d1.getIntValue("ad_view_" + prefix).toDouble
+              val click = if (!d1.containsKey("ad_click_" + prefix)) 0D else d1.getIntValue("ad_click_" + prefix).toDouble
+              val conver = if (!d1.containsKey("ad_conversion_" + prefix)) 0D else d1.getIntValue("ad_conversion_" + prefix).toDouble
+              val income = if (!d1.containsKey("ad_income_" + prefix)) 0D else d1.getIntValue("ad_income_" + prefix).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
+              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
+              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
+              val f4 = conver
+              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view)
+              featureMap.put("d1_feature" + "_" + prefix + "_" + "ctr", f1)
+              featureMap.put("d1_feature" + "_" + prefix + "_" + "ctcvr", f2)
+              featureMap.put("d1_feature" + "_" + prefix + "_" + "cvr", f3)
+              featureMap.put("d1_feature" + "_" + prefix + "_" + "conver", f4)
+              // featureMap.put("d1_feature" + "_" + prefix + "_" + "ecpm", f5)
+            }
+          }
+          // d2 values are "cid:?:rank" comma lists -> map of cid -> rank per window key.
+          val vidRankMaps = scala.collection.mutable.Map[String, scala.collection.immutable.Map[String, Double]]()
+          if (d2.nonEmpty) {
+            d2.foreach(r => {
+              val key = r._1
+              val value = d2.getString(key).split(",").map(r => {
+                val rList = r.split(":")
+                (rList(0), rList(2).toDouble)
+              }).toMap
+              vidRankMaps.put(key, value)
+            })
+          }
+          // Rank features: reciprocal rank of this cid in each window's leaderboard.
+          // for (prefix1 <- List("ctr", "ctcvr", "ecpm")) {
+          for (prefix1 <- List("ctr", "ctcvr")) {
+            for (prefix2 <- List("1d", "3d", "7d", "14d")) {
+              if (vidRankMaps.contains(prefix1 + "_" + prefix2)) {
+                val rank = vidRankMaps(prefix1 + "_" + prefix2).getOrDefault(cid, 0.0)
+                if (rank >= 1.0) {
+                  featureMap.put("vid_rank_" + prefix1 + "_" + prefix2, 1.0 / rank)
+                }
+              }
+            }
+          }
+          // Ad-title vs video-title semantic similarity.
+          if (d3.nonEmpty){
+            val vTitle= d3.getString("title")
+            val score = Similarity.conceptSimilarity(title, vTitle)
+            featureMap.put("ctitle_vtitle_similarity", score);
+          }
+          // flag=true means the row has no online meta feature map; dropped below.
+          val flag = record.isNull("metafeaturemap")
+          val allfeaturemap = if (record.isNull("allfeaturemap")) new JSONObject() else
+            JSON.parseObject(record.getString("allfeaturemap"))
+          val apptype = record.getString("apptype")
+          val label = record.getString("ad_is_conversion")
+          val extend = record.getString("extend")
+          val abcode = JSON.parseObject(extend).getString("abcode")
+          // "pagesource"/"level" are literal placeholders kept for output-schema compatibility.
+          (apptype, "pagesource", "level", label, abcode, allfeaturemap, featureMap, flag)
+        }).filter{
+          case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
+            Set("4").contains(apptype) && !flag &&
+              Set("ab0", "ab1", "ab2", "ab3", "ab4").contains(abcode)
+        }.mapPartitions(row => {
+          // Bucketize: keep only whitelisted/prefixed features, drop names matching
+          // filterNames, then map scores > 1e-8 onto their bucket index.
+          val result = new ArrayBuffer[String]()
+          val bucketsMap = bucketsMap_br.value
+          row.foreach {
+            case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
+              val offlineFeatrueMap = featureMap.filter(r =>
+                bucketsMap.containsKey(r._1) || r._1.startsWith("cid_") || r._1.startsWith("adid_")
+                  || r._1.startsWith("adverid_") || r._1.startsWith("targeting_conversion_")
+                  || r._1.startsWith("hour_") || r._1.startsWith("dayofweek_")
+              ).map(r => {
+                val name = r._1
+                var ifFilter = false
+                if (filterNames.nonEmpty) {
+                  filterNames.foreach(r => if (!ifFilter && name.contains(r)) {
+                    ifFilter = true
+                  })
+                }
+                if (ifFilter) {
+                  ""
+                } else {
+                  val score = r._2.toString.toDouble
+                  if (score > 1E-8) {
+                    if (bucketsMap.contains(name)) {
+                      val (bucketNum, buckets) = bucketsMap(name)
+                      val scoreNew = 1.0 / bucketNum * (ExtractorUtils.findInsertPosition(buckets, score).toDouble + 1.0)
+                      name + ":" + scoreNew.toString
+                    } else {
+                      name + ":" + score.toString
+                    }
+                  } else {
+                    ""
+                  }
+                }
+              }).filter(_.nonEmpty)
+              result.add((apptype, pagesource, level, label, abcode, allfeaturemap.toString, offlineFeatrueMap.iterator.mkString(","))
+              .productIterator.mkString("\t"))
+          }
+          result.iterator
+        })
+
+      // 4. Save the hourly result to HDFS (path must stay under the whitelist prefix).
+      val savePartition = dt + hh
+      val hdfsPath = savePath + "/" + savePartition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      }else{
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+
+    // 5. Read back all hourly outputs of readDate (columns: apptype, pagesource,
+    // level, label, abcode, allfeaturemap JSON, offline "name:score" CSV) and
+    // build four mixed feature views (v3..v6), each taking the listed names
+    // from the offline map and every other name from the online map.
+    val data2 = sc.textFile(savePath + "/" + readDate + "*").mapPartitions(row=>{
+      val result = new ArrayBuffer[(String,
+        Map[String, String], Map[String, String], List[String], List[String], List[String], List[String])]()
+      val contentList = contentList_br.value
+      row.foreach(r=>{
+        val rList = r.split("\t")
+        val label = rList(3)
+        val allfeaturemap = JSON.parseObject(rList(5)).toMap.map(r => (r._1, r._2.toString))
+        val offlineFeatrueMap = rList(6).split(",").map(r => (r.split(":")(0), r.split(":")(1))).toMap
+
+        // v3: b2/b3 windowed stats come from offline; everything else online.
+        val v3 = contentList.map(name =>{
+          val useOfflineNames = Set(
+            "b2_3h_ctr","b2_3h_ctcvr","b2_3h_cvr","b2_3h_conver","b2_3h_ecpm","b2_6h_ctr","b2_6h_ctcvr","b2_6h_cvr",
+            "b2_6h_conver","b2_6h_ecpm","b2_12h_ctr","b2_12h_ctcvr","b2_12h_cvr","b2_12h_conver","b2_12h_ecpm",
+            "b2_1d_ctr","b2_1d_ctcvr","b2_1d_cvr","b2_1d_conver","b2_1d_ecpm","b2_3d_ctr","b2_3d_ctcvr","b2_3d_cvr",
+            "b2_3d_conver","b2_3d_ecpm","b2_7d_ctr","b2_7d_ctcvr","b2_7d_cvr","b2_7d_conver","b2_7d_ecpm",
+            "b3_1h_ctr","b3_1h_ctcvr","b3_1h_cvr","b3_1h_conver","b3_1h_click","b3_1h_conver*log(view)","b3_1h_conver*ctcvr","b3_2h_ctr","b3_2h_ctcvr","b3_2h_cvr","b3_2h_conver","b3_2h_click","b3_2h_conver*log(view)","b3_2h_conver*ctcvr","b3_3h_ctr","b3_3h_ctcvr","b3_3h_cvr","b3_3h_conver","b3_3h_click","b3_3h_conver*log(view)","b3_3h_conver*ctcvr","b3_6h_ctr","b3_6h_ctcvr","b3_6h_cvr","b3_6h_conver","b3_6h_click","b3_6h_conver*log(view)","b3_6h_conver*ctcvr","b3_12h_ctr","b3_12h_ctcvr","b3_12h_cvr","b3_12h_conver","b3_12h_click","b3_12h_conver*log(view)","b3_12h_conver*ctcvr","b3_1d_ctr","b3_1d_ctcvr","b3_1d_cvr","b3_1d_conver","b3_1d_click","b3_1d_conver*log(view)","b3_1d_conver*ctcvr","b3_3d_ctr","b3_3d_ctcvr","b3_3d_cvr","b3_3d_conver","b3_3d_click","b3_3d_conver*log(view)","b3_3d_conver*ctcvr","b3_7d_ctr","b3_7d_ctcvr","b3_7d_cvr","b3_7d_conver","b3_7d_click","b3_7d_conver*log(view)","b3_7d_conver*ctcvr"
+          )
+          if (useOfflineNames.contains(name)){
+            if (offlineFeatrueMap.contains(name)){
+              name + ":" + offlineFeatrueMap(name)
+            }else{
+              ""
+            }
+          }else{
+            if (allfeaturemap.contains(name)) {
+              name + ":" + allfeaturemap(name)
+            } else {
+              ""
+            }
+          }
+        }).filter(_.nonEmpty)
+
+        // v4: e1/e2 tag similarity and two d1 features come from offline.
+        val v4 = contentList.map(name => {
+          val useOfflineNames = Set(
+            "e1_tags_3d_matchnum","e1_tags_3d_maxscore","e1_tags_3d_avgscore","e1_tags_7d_matchnum",
+            "e1_tags_7d_maxscore","e1_tags_7d_avgscore","e1_tags_14d_matchnum","e1_tags_14d_maxscore",
+            "e1_tags_14d_avgscore","e2_tags_3d_matchnum","e2_tags_3d_maxscore","e2_tags_3d_avgscore",
+            "e2_tags_7d_matchnum","e2_tags_7d_maxscore","e2_tags_7d_avgscore","e2_tags_14d_matchnum",
+            "e2_tags_14d_maxscore","e2_tags_14d_avgscore","d1_feature_3h_ctr","d1_feature_3h_ctcvr"
+
+          )
+          if (useOfflineNames.contains(name)) {
+            if (offlineFeatrueMap.contains(name)) {
+              name + ":" + offlineFeatrueMap(name)
+            } else {
+              ""
+            }
+          } else {
+            if (allfeaturemap.contains(name)) {
+              name + ":" + allfeaturemap(name)
+            } else {
+              ""
+            }
+          }
+        }).filter(_.nonEmpty)
+
+        // v5: user-level aggregates come from offline.
+        val v5 = contentList.map(name => {
+          val useOfflineNames = Set(
+            "viewAll","clickAll","converAll","incomeAll","ctr_all","ctcvr_all","cvr_all","ecpm_all"
+          )
+          if (useOfflineNames.contains(name)) {
+            if (offlineFeatrueMap.contains(name)) {
+              name + ":" + offlineFeatrueMap(name)
+            } else {
+              ""
+            }
+          } else {
+            if (allfeaturemap.contains(name)) {
+              name + ":" + allfeaturemap(name)
+            } else {
+              ""
+            }
+          }
+        }).filter(_.nonEmpty)
+
+        // v6: remaining d1 windowed features come from offline.
+        val v6 = contentList.map(name => {
+          val useOfflineNames = Set(
+            "d1_feature_3h_cvr", "d1_feature_3h_conver", "d1_feature_3h_ecpm", "d1_feature_6h_ctr",
+            "d1_feature_6h_ctcvr", "d1_feature_6h_cvr", "d1_feature_6h_conver", "d1_feature_6h_ecpm",
+            "d1_feature_12h_ctr", "d1_feature_12h_ctcvr", "d1_feature_12h_cvr", "d1_feature_12h_conver",
+            "d1_feature_12h_ecpm", "d1_feature_1d_ctr", "d1_feature_1d_ctcvr", "d1_feature_1d_cvr", "d1_feature_1d_conver",
+            "d1_feature_1d_ecpm", "d1_feature_3d_ctr", "d1_feature_3d_ctcvr", "d1_feature_3d_cvr", "d1_feature_3d_conver",
+            "d1_feature_3d_ecpm", "d1_feature_7d_ctr", "d1_feature_7d_ctcvr", "d1_feature_7d_cvr", "d1_feature_7d_conver",
+            "d1_feature_7d_ecpm"          )
+          if (useOfflineNames.contains(name)) {
+            if (offlineFeatrueMap.contains(name)) {
+              name + ":" + offlineFeatrueMap(name)
+            } else {
+              ""
+            }
+          } else {
+            if (allfeaturemap.contains(name)) {
+              name + ":" + allfeaturemap(name)
+            } else {
+              ""
+            }
+          }
+        }).filter(_.nonEmpty)
+
+
+        result.add((label, offlineFeatrueMap, allfeaturemap, v3, v4, v5, v6))
+
+      })
+      result.iterator
+    })
+
+    // 6. Write the six comparison extracts; NOTE(review): data2 is recomputed for
+    // each save below — consider .cache() if this job is run regularly.
+    val saveV1 = "/dw/recommend/model/33_for_check_v1/" + readDate
+    if (saveV1.nonEmpty && saveV1.startsWith("/dw/recommend/model/")) {
+      println("删除路径并开始数据写入:" + saveV1)
+      MyHdfsUtils.delete_hdfs_path(saveV1)
+      data2.map(r => r._1 + "\t" + r._2.map(r=> r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV1, classOf[GzipCodec])
+    } else {
+      println("路径不合法,无法写入:" + saveV1)
+    }
+
+    val saveV2 = "/dw/recommend/model/33_for_check_v2/" + readDate
+    if (saveV2.nonEmpty && saveV2.startsWith("/dw/recommend/model/")) {
+      println("删除路径并开始数据写入:" + saveV2)
+      MyHdfsUtils.delete_hdfs_path(saveV2)
+      data2.map(r => r._1 + "\t" + r._3.map(r=> r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV2, classOf[GzipCodec])
+    } else {
+      println("路径不合法,无法写入:" + saveV2)
+    }
+
+     val saveV3 = "/dw/recommend/model/33_for_check_v3/" + readDate
+     if (saveV3.nonEmpty && saveV3.startsWith("/dw/recommend/model/")) {
+       println("删除路径并开始数据写入:" + saveV3)
+       MyHdfsUtils.delete_hdfs_path(saveV3)
+       data2.map(r => r._1 + "\t" + r._4.mkString("\t")).saveAsTextFile(saveV3, classOf[GzipCodec])
+     } else {
+       println("路径不合法,无法写入:" + saveV3)
+     }
+
+     val saveV4 = "/dw/recommend/model/33_for_check_v4/" + readDate
+     if (saveV4.nonEmpty && saveV4.startsWith("/dw/recommend/model/")) {
+       println("删除路径并开始数据写入:" + saveV4)
+       MyHdfsUtils.delete_hdfs_path(saveV4)
+       data2.map(r => r._1 + "\t" + r._5.mkString("\t")).saveAsTextFile(saveV4, classOf[GzipCodec])
+     } else {
+       println("路径不合法,无法写入:" + saveV4)
+     }
+
+     val saveV5 = "/dw/recommend/model/33_for_check_v5/" + readDate
+     if (saveV5.nonEmpty && saveV5.startsWith("/dw/recommend/model/")) {
+       println("删除路径并开始数据写入:" + saveV5)
+       MyHdfsUtils.delete_hdfs_path(saveV5)
+       data2.map(r => r._1 + "\t" + r._6.mkString("\t")).saveAsTextFile(saveV5, classOf[GzipCodec])
+     } else {
+       println("路径不合法,无法写入:" + saveV5)
+     }
+
+     val saveV6 = "/dw/recommend/model/33_for_check_v6/" + readDate
+     if (saveV6.nonEmpty && saveV6.startsWith("/dw/recommend/model/")) {
+       println("删除路径并开始数据写入:" + saveV6)
+       MyHdfsUtils.delete_hdfs_path(saveV6)
+       data2.map(r => r._1 + "\t" + r._7.mkString("\t")).saveAsTextFile(saveV6, classOf[GzipCodec])
+     } else {
+       println("路径不合法,无法写入:" + saveV6)
+     }
+
+
+
+  }
+
+
+
+  /**
+   * Identity transfer used by odpsOps.readTable: returns the raw ODPS
+   * [[Record]] unchanged; all parsing happens in the downstream map stage.
+   */
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+  /**
+   * Scores a comma-separated tag list against a title.
+   *
+   * @param tags  comma-separated tags, e.g. "tag1,tag2"
+   * @param title ad/video title to match against
+   * @return (matchCount, matchedTags joined by ",", maxSimilarity, avgSimilarity)
+   *         where similarity is Similarity.conceptSimilarity(tag, title)
+   */
+  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
+    // match count / matched words / max semantic similarity / avg semantic similarity
+    val tagsList = tags.split(",")
+    var d1 = 0.0                       // number of tags literally contained in the title
+    val d2 = new ArrayBuffer[String]() // the matched tags themselves
+    var d3 = 0.0                       // running max similarity
+    var d4 = 0.0                       // running sum, averaged below
+    for (tag <- tagsList){
+      if (title.contains(tag)){
+        d1 = d1 + 1.0
+        d2.add(tag)
+      }
+      val score = Similarity.conceptSimilarity(tag, title)
+      d3 = if (score > d3) score else d3
+      d4 = d4 + score
+    }
+    // average; note "".split(",") yields Array(""), so tagsList is never empty
+    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
+    (d1, d2.mkString(","), d3, d4)
+  }
+}

+ 11 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata_dssm/makedata_i2i_06_itemPred_20241206.scala

@@ -32,6 +32,7 @@ object makedata_i2i_06_itemPred_20241206 {
     val savePath = param.getOrElse("savePath", "/dw/recommend/model/56_dssm_i2i_itempredData/")
     val project = param.getOrElse("project", "loghubods")
     val repartition = param.getOrElse("repartition", "100").toInt
+    val ifFilterCate = param.getOrElse("ifFilterCate", "true").toBoolean
 
     // 2 读取onehot文件
     val onehotMap_br = sc.broadcast(
@@ -126,7 +127,16 @@ object makedata_i2i_06_itemPred_20241206 {
           result.add((vid, (feature, feature_action, feature_cate1, feature_cate2)))
       }
       result.iterator
-    }).mapPartitions(row =>{
+    }).filter{
+      case (vid, (feature, feature_action, feature_cate1, feature_cate2)) =>
+        if (ifFilterCate){
+          val cate1 = JSON.parseObject(feature).getOrDefault("category1", "无").toString
+          val cate2 = JSON.parseObject(feature).getOrDefault("category2_1", "无").toString
+          !Set("无", "", "unknown").contains(cate1) || !Set("无", "", "unknown").contains(cate2)
+        }else{
+          true
+        }
+    }.mapPartitions(row =>{
       val result = new ArrayBuffer[String]()
       val onehotMap = onehotMap_br.value
       val bucketsMap = bucketsMap_br.value

+ 281 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata_recsys/makedata_recsys_41_originData_20250218.scala

@@ -0,0 +1,281 @@
+package com.aliyun.odps.spark.examples.makedata_recsys
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.RankExtractorFeature_20240530
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+import org.xm.Similarity
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+/*
+   20250218 提取特征
+ */
+
+// Spark batch job: reads hourly recsys sample partitions from an ODPS table,
+// extracts video/user/scene features into a JSON map, collects labels, and
+// writes "logKey \t labels \t features" rows to HDFS as gzip text.
+object makedata_recsys_41_originData_20250218 {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Parse job parameters (all have defaults so the job can run unattended)
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val beginStr = param.getOrElse("beginStr", "2025021812")
+    val endStr = param.getOrElse("endStr", "2025021812")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "dwd_recsys_alg_sample_all_20250212")
+    val fuSampleRate = param.getOrElse("fuSampleRate", "0.1").toDouble  // sampling rate for samples that are not the only return; NOTE(review): original note said 1/20 = 0.05 but the default is 0.1 — confirm intended rate; also this value is never used in this file — dead parameter?
+    val repartition = param.getOrElse("repartition", "32").toInt
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/41_sample_data/")
+
+    // 2 Obtain the ODPS handle
+    val odpsOps = env.getODPS(sc)
+
+    // 3 Generate data for every hour partition in [beginStr, endStr]
+    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+    for (dt_hh <- timeRange) {
+      val dt = dt_hh.substring(0, 8)
+      val hh = dt_hh.substring(8, 10)
+      val partition = s"dt=$dt,hh=$hh"
+      println("开始执行partiton:" + partition)
+      val odpsData = odpsOps.readTable(project = project,
+          table = table,
+          partition = partition,
+          transfer = func,
+          numPartition = tablePart)
+        .map(record => {
+
+          val featureMap = new JSONObject()
+
+          // a. Video-side features: each b*_feature column is a JSON string;
+          // NULL columns become an empty JSONObject so later lookups default safely.
+          val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b1_feature"))
+          val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b2_feature"))
+          val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b3_feature"))
+          val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b6_feature"))
+          val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b7_feature"))
+
+          val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b8_feature"))
+          val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b9_feature"))
+          val b10: JSONObject = if (record.isNull("b10_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b10_feature"))
+          val b11: JSONObject = if (record.isNull("b11_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b11_feature"))
+          val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b12_feature"))
+          val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b13_feature"))
+          val b17: JSONObject = if (record.isNull("b17_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b17_feature"))
+          val b18: JSONObject = if (record.isNull("b18_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b18_feature"))
+          val b19: JSONObject = if (record.isNull("b19_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("b19_feature"))
+
+
+          // Each triple is (exposure source, share source, return source) plus a feature-name prefix.
+          val origin_data = List(
+            (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+            (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+            (b17, b18, b19, "b171819")
+          )
+          // Derive ratio/log features per (group, time window): STR, log(share), ROV,
+          // log(return), ROV*log(return), ROS. calDiv/calLog are assumed to guard
+          // against zero denominators — NOTE(review): confirm in RankExtractorFeature_20240530.
+          for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+            for (prefix2 <- List(
+              "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+            )) {
+              val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+              val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+              val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
+              val f2 = RankExtractorFeature_20240530.calLog(share)
+              val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
+              val f4 = RankExtractorFeature_20240530.calLog(returns)
+              val f5 = f3 * f4
+              val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
+            }
+          }
+
+          // Static video info: duration and bit rate (0 when absent).
+          val video_info: JSONObject = if (record.isNull("t_v_info_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("t_v_info_feature"))
+          featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+          featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+          // User-side: play counts over 6h/1d/3d/7d.
+          val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("c1_feature"))
+          if (c1.nonEmpty) {
+            featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+            featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+            featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+            featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+          }
+          // User-side: shares brought (pv) and returns brought (uv) over 12h/1d/3d/7d.
+          val c2: JSONObject = if (record.isNull("c2_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("c2_feature"))
+          if (c2.nonEmpty) {
+            featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+            featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+            featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+            featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+            featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+            featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+            featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+            featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+          }
+
+          // User tags vs. title: match count, max and mean semantic similarity
+          // per (c3..c7 feature, 1d/3d/7d window).
+          val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+          if (!title.equals("")) {
+            for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
+              val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                JSON.parseObject(record.getString(key_feature))
+              for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+                val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+                if (!tags.equals("")) {
+                  // NOTE(review): f2 (the matched tag list) is computed but intentionally not emitted.
+                  val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
+                  featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
+                  featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
+                  featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+                }
+              }
+            }
+          }
+
+          // User CF features keyed by vid: entries are "vid:score:num:rank" joined by commas.
+          val vid = if (record.isNull("vid")) "" else record.getString("vid")
+          if (!vid.equals("")) {
+            for (key_feature <- List("c8_feature", "c9_feature")) {
+              val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+                JSON.parseObject(record.getString(key_feature))
+              for (key_action <- List("share", "return")) {
+                val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+                if (!cfListStr.equals("")) {
+                  val cfMap = cfListStr.split(",").map(r => {
+                    val rList = r.split(":")
+                    // NOTE(review): assumes each entry has exactly 4 ":"-separated fields — a
+                    // malformed entry would throw ArrayIndexOutOfBoundsException here.
+                    (rList(0), (rList(1), rList(2), rList(3)))
+                  }).toMap
+                  if (cfMap.contains(vid)) {
+                    val (score, num, rank) = cfMap(vid)
+                    featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
+                    featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
+                    // rank is inverted so that a better (smaller) rank yields a larger feature value.
+                    featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+                  }
+                }
+              }
+            }
+          }
+
+          // Head-video stats: exposure, returns, ROVn.
+          val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
+            JSON.parseObject(record.getString("d1_feature"))
+          if (d1.nonEmpty) {
+            featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+            featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+            featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+          }
+
+
+          /*
+          Feature inventory (translated from the original Chinese notes):
+
+          Video:
+          exposure uses pv, share uses pv, return uses uv --> 1h 2h 3h 4h 12h 1d 3d 7d
+          STR log(share) ROV log(return) ROV*log(return)
+          40 feature combinations
+          overall, overall-exposure-matched, recommended non-coldstart root, recommended coldstart root, per-province root
+          200 feature values
+
+          Video:
+          duration, bit rate
+
+          User:
+          play count --> 6h 1d 3d 7d --> 4 features
+          shares brought (pv) and returns brought (uv) --> 12h 1d 3d 7d --> 8 features
+          User + vid-title:
+          play/return/share points, cumulative share, cumulative return --> 1d 3d 7d
+            --> match count, max similarity, mean similarity --> 45 features
+          User + vid-cf:
+          share-based / return-based --> "share cf" + "return click cf"
+            similarity score, similar count, reciprocal rank --> 12 features
+
+          Head videos:
+          exposure, return, ROVn --> 3 features
+
+          Scene:
+          hour, weekday, apptype, city, province, pagesource, device model
+           */
+
+
+          // 4 Collect label columns into a JSON object (missing columns are skipped).
+          val labels = new JSONObject
+          for (labelKey <- List(
+            "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
+            "share_pv", "total_share_uv"
+          )) {
+            if (!record.isNull(labelKey)) {
+              labels.put(labelKey, record.getString(labelKey))
+            }
+          }
+          // 5 Build the log-key header (comma-separated identity columns).
+          val apptype = record.getString("apptype")
+          val pagesource = record.getString("pagesource")
+          val mid = record.getString("mid")
+          // vid was already extracted above
+          val ts = record.getString("ts")
+          val abcode = record.getString("abcode")
+          val level = if (record.isNull("level")) "0" else record.getString("level")
+          val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
+          val labelKey = labels.toString()
+          val featureKey = featureMap.toString()
+          // 6 Tab-join logKey, labels and features — this string is the output row.
+          logKey + "\t" + labelKey + "\t" + featureKey
+
+        })
+
+      // 4 Save the partition to HDFS as gzip text; the prefix check guards
+      // delete_hdfs_path from ever removing a path outside the model directory.
+      val savePartition = dt + hh
+      val hdfsPath = savePath + "/" + savePartition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      } else {
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+  }
+
+  // Identity transfer: hand the raw ODPS Record through to the map stage unchanged.
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+  // (match count, matched words, max semantic-similarity score, mean semantic-similarity score)
+  // NOTE(review): duplicated verbatim from makedata_ad_34_bucketDataPrint_20241217 — consider a shared util.
+  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
+    val tagsList = tags.split(",")
+    var d1 = 0.0
+    val d2 = new ArrayBuffer[String]()
+    var d3 = 0.0
+    var d4 = 0.0
+    for (tag <- tagsList) {
+      if (title.contains(tag)) {
+        d1 = d1 + 1.0
+        d2.add(tag)
+      }
+      val score = Similarity.conceptSimilarity(tag, title)
+      d3 = if (score > d3) score else d3
+      d4 = d4 + score
+    }
+    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
+    (d1, d2.mkString(","), d3, d4)
+  }
+}

+ 2 - 2
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本-I2I

@@ -67,7 +67,7 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --master yarn --driver-memory 2G --executor-memory 2G --executor-cores 1 --num-executors 16 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
 onehotPath:/dw/recommend/model/53_dssm_i2i_onehot/after_20241201_file \
-bucketFile:20241128_recsys_i2i_bucket_47_v2.txt repartition:100 \
-dt:20241206 \
+bucketFile:20241128_recsys_i2i_bucket_47_v2.txt repartition:10 ifFilterCate:true \
+dt:20241225 \
 savePath:/dw/recommend/model/56_dssm_i2i_itempredData/ \
 > p56.log 2>&1 &

+ 7 - 6
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本-广告

@@ -48,14 +48,15 @@ filterNames:"XXXXXX,adid_,targeting_conversion_,b2_3h_click,b2_3h_conver*log(vie
 
 
 nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
---class com.aliyun.odps.spark.examples.makedata_ad.makedata_ad_33_bucketDataPrint_20240628 \
+--class com.aliyun.odps.spark.examples.makedata_ad.v20240718.makedata_ad_34_bucketDataPrint_20241217 \
 --master yarn --driver-memory 2G --executor-memory 4G --executor-cores 1 --num-executors 16 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-beginStr:2024062908 endStr:2024062923 \
-readDate:20240629 \
-table:alg_recsys_ad_sample_all_new \
-savePath:/dw/recommend/model/33_for_check/ \
-> p33_data_check.log 2>&1 &
+beginStr:2024121708 endStr:2024121708 \
+readDate:20241217 \
+table:alg_recsys_ad_sample_all \
+savePath:/dw/recommend/model/34_for_check/ \
+filterNames:adid_,targeting_conversion_ \
+> p34_data_check.log 2>&1 &
 
 
 /dw/recommend/model/33_for_check_v1/