package com.aliyun.odps.spark.examples.makedata_ad.v20240718

import com.alibaba.fastjson.{JSON, JSONObject}
import com.aliyun.odps.TableSchema
import com.aliyun.odps.data.Record
import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
import examples.extractor.{ExtractorUtils, RankExtractorFeature_20240530}
import examples.utils.DateTimeUtil
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.sql.SparkSession
import org.xm.Similarity

import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import scala.io.Source

/*
  20240608: feature extraction
 */

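// Overview: this job
//   1. parses run parameters and broadcasts two resource files (feature-name list, bucket boundaries);
//   2. for every hour in [beginStr, endStr] reads the ODPS partition dt=YYYYMMDD,hh=HH, rebuilds the
//      offline feature map per sample, keeps apptype=4 / abcode in {ab0..ab4} samples, and writes one
//      gzip text dump per hour under savePath;
//   3. re-reads the dumps of readDate and writes six comparison views (v1..v6) that merge the logged
//      "allfeaturemap" with the recomputed offline features (the *_for_check output paths suggest this
//      is used for feature-consistency checks).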
object makedata_ad_34_bucketDataPrint_20241217 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 Read parameters
    val param = ParamUtils.parseArgs(args)
    val tablePart = param.getOrElse("tablePart", "64").toInt
    val beginStr = param.getOrElse("beginStr", "2024061500")
    val endStr = param.getOrElse("endStr", "2024061523")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/17_for_check/")
    val project = param.getOrElse("project", "loghubods")
    val table = param.getOrElse("table", "alg_recsys_sample_all")
    val repartition = param.getOrElse("repartition", "32").toInt
    val readDate = param.getOrElse("readDate", "20240615")
    val idDefaultValue = param.getOrElse("idDefaultValue", "0.1").toDouble
    // Drop empty entries so that the default "" does not filter out every feature
    // (an empty substring would match every feature name in the filtering step below).
    val filterNames = param.getOrElse("filterNames", "").split(",").filter(_.nonEmpty).toSet

    val loader = getClass.getClassLoader

    // Feature-name list: one feature name per line.
    val resourceUrl = loader.getResource("20240718_ad_feature_name.txt")
    val content =
      if (resourceUrl != null) {
        val source = Source.fromURL(resourceUrl)
        try source.getLines().mkString("\n") finally source.close()
      } else {
        ""
      }
    println(content)
    val contentList = content.split("\n")
      .map(r => r.replace(" ", "").replaceAll("\n", ""))
      .filter(r => r.nonEmpty).toList
    val contentList_br = sc.broadcast(contentList)

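    // Bucket config: each line of 20240718_ad_bucket_688.txt is
    //   <feature name>\t<bucket count>\t<comma-separated bucket boundaries>
    // and is parsed into bucketsMap(name) = (bucketCount, boundaries) below.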
    val resourceUrlBucket = loader.getResource("20240718_ad_bucket_688.txt")
    val buckets =
      if (resourceUrlBucket != null) {
        val source = Source.fromURL(resourceUrlBucket)
        try source.getLines().mkString("\n") finally source.close()
      } else {
        ""
      }
    println(buckets)
    val bucketsMap = buckets.split("\n")
      .map(r => r.replace(" ", "").replaceAll("\n", ""))
      .filter(r => r.nonEmpty)
      .map(r => {
        val rList = r.split("\t")
        (rList(0), (rList(1).toDouble, rList(2).split(",").map(_.toDouble)))
      }).toMap
    val bucketsMap_br = sc.broadcast(bucketsMap)

    // 2 Read ODPS and table info
    val odpsOps = env.getODPS(sc)

    // 3 Produce data hour by hour
    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
    for (dt_hh <- timeRange) {
      val dt = dt_hh.substring(0, 8)
      val hh = dt_hh.substring(8, 10)
      val partition = s"dt=$dt,hh=$hh"
      println("Start processing partition: " + partition)
      val odpsData = odpsOps.readTable(project = project,
        table = table,
        partition = partition,
        transfer = func,
        numPartition = tablePart)
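        // The commented-out pre-filter below is superseded by the .filter after the .map further down,
        // which applies the same apptype / abcode / metafeaturemap conditions.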
//        .filter(record =>{
//          val flag1 = record.isNull("metafeaturemap")
//          val flag2 = record.isNull("extend")
//          if (flag1 || flag2){
//            false
//          }else{
//            val apptype = record.getString("apptype")
//            val extend = record.getString("extend")
//            val abcode = JSON.parseObject(extend).getString("abcode")
//            val scoreMap = record.getString("scoremap")
//            val ctcvr = JSON.parseObject(scoreMap).getString("ctcvrScore").toDouble
//            if (
//              apptype.equals("4")
//                && Set("ab0", "ab1", "ab2", "ab3", "ab4").contains(abcode)
//            ) {
//              true
//            } else {
//              false
//            }
//          }
//        })
        .map(record => {

          val ts = record.getString("ts").toInt
          val cid = record.getString("cid")

          // Recomputed ("offline") feature map for this sample.
          val featureMap = new JSONObject()

          // b1..b9: ad-side feature JSON blobs; missing columns fall back to an empty object.
          val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b1_feature"))
          val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b2_feature"))
          val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b3_feature"))
          val b4: JSONObject = if (record.isNull("b4_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b4_feature"))
          val b5: JSONObject = if (record.isNull("b5_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b5_feature"))
          val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b6_feature"))
          val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b7_feature"))
          val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b8_feature"))
          val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
            JSON.parseObject(record.getString("b9_feature"))

          // Sparse id features.
          featureMap.put("cid_" + cid, idDefaultValue)
          // if (b1.containsKey("adid") && b1.getString("adid").nonEmpty) {
          //   featureMap.put("adid_" + b1.getString("adid"), idDefaultValue)
          // }
          if (b1.containsKey("adverid") && b1.getString("adverid").nonEmpty) {
            featureMap.put("adverid_" + b1.getString("adverid"), idDefaultValue)
          }
          // if (b1.containsKey("targeting_conversion") && b1.getString("targeting_conversion").nonEmpty) {
          //   featureMap.put("targeting_conversion_" + b1.getString("targeting_conversion"), 1.0)
          // }
          val hour = DateTimeUtil.getHourByTimestamp(ts)
          featureMap.put("hour_" + hour, 0.1)

          val dayOfWeek = DateTimeUtil.getDayOrWeekByTimestamp(ts)
          featureMap.put("dayofweek_" + dayOfWeek, 0.1)

          if (b1.containsKey("cpa")) {
            featureMap.put("cpa", b1.getString("cpa").toDouble)
          }

          // Rolling-window stats (ctr/ctcvr/cvr/conver/...) for b2..b5, b8, b9.
          for ((bn, prefix1) <- List(
            (b2, "b2"), (b3, "b3"), (b4, "b4"), (b5, "b5"), (b8, "b8"), (b9, "b9")
          )) {
            for (prefix2 <- List(
              "1h", "2h", "3h", "6h", "12h", "1d", "3d", "7d", "today", "yesterday"
            )) {
              val view = if (bn.isEmpty) 0D else bn.getIntValue("ad_view_" + prefix2).toDouble
              val click = if (bn.isEmpty) 0D else bn.getIntValue("ad_click_" + prefix2).toDouble
              val conver = if (bn.isEmpty) 0D else bn.getIntValue("ad_conversion_" + prefix2).toDouble
              val income = if (bn.isEmpty) 0D else bn.getIntValue("ad_income_" + prefix2).toDouble
              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
              val f4 = conver
              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view) // ecpm, currently not emitted
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctr", f1)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctcvr", f2)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "cvr", f3)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver", f4)
              // featureMap.put(prefix1 + "_" + prefix2 + "_" + "ecpm", f5)

              featureMap.put(prefix1 + "_" + prefix2 + "_" + "click", click)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*log(view)", conver * RankExtractorFeature_20240530.calLog(view))
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*ctcvr", conver * f2)
            }
          }

          // Same stats for b6/b7, which only carry 7d/14d windows.
          for ((bn, prefix1) <- List(
            (b6, "b6"), (b7, "b7")
          )) {
            for (prefix2 <- List(
              "7d", "14d"
            )) {
              val view = if (bn.isEmpty) 0D else bn.getIntValue("ad_view_" + prefix2).toDouble
              val click = if (bn.isEmpty) 0D else bn.getIntValue("ad_click_" + prefix2).toDouble
              val conver = if (bn.isEmpty) 0D else bn.getIntValue("ad_conversion_" + prefix2).toDouble
              val income = if (bn.isEmpty) 0D else bn.getIntValue("ad_income_" + prefix2).toDouble
              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
              val f4 = conver
              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view) // ecpm, currently not emitted
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctr", f1)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ctcvr", f2)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "cvr", f3)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver", f4)
              // featureMap.put(prefix1 + "_" + prefix2 + "_" + "ecpm", f5)

              featureMap.put(prefix1 + "_" + prefix2 + "_" + "click", click)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*log(view)", conver * RankExtractorFeature_20240530.calLog(view))
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "conver*ctcvr", conver * f2)
            }
          }

          val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
            JSON.parseObject(record.getString("c1_feature"))

          // User action history from c1, sorted by timestamp (descending).
          val midActionList = if (c1.containsKey("action") && c1.getString("action").nonEmpty) {
            c1.getString("action").split(",").map(r => {
              val rList = r.split(":")
              (rList(0), (rList(1).toInt, rList(2).toInt, rList(3).toInt, rList(4).toInt, rList(5)))
            }).sortBy(-_._2._1).toList
          } else {
            new ArrayBuffer[(String, (Int, Int, Int, Int, String))]().toList
          }
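          // Each midActionList element is (cid, (ts_history, click, conver, income, title)),
          // parsed from the colon-separated entries in c1.action.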
          // User-level (u) features aggregated over the whole action history.
          val viewAll = midActionList.size.toDouble
          val clickAll = midActionList.map(_._2._2).sum.toDouble
          val converAll = midActionList.map(_._2._3).sum.toDouble
          val incomeAll = midActionList.map(_._2._4).sum.toDouble
          featureMap.put("viewAll", viewAll)
          featureMap.put("clickAll", clickAll)
          featureMap.put("converAll", converAll)
          featureMap.put("incomeAll", incomeAll)
          featureMap.put("ctr_all", RankExtractorFeature_20240530.calDiv(clickAll, viewAll))
          featureMap.put("ctcvr_all", RankExtractorFeature_20240530.calDiv(converAll, viewAll))
          // Note: the argument order here differs from the per-window cvr above (clicks over conversions here).
          featureMap.put("cvr_all", RankExtractorFeature_20240530.calDiv(clickAll, converAll))
          // featureMap.put("ecpm_all", RankExtractorFeature_20240530.calDiv(incomeAll * 1000, viewAll))

          // User-item (ui) features for the current cid.
          val midTimeDiff = scala.collection.mutable.Map[String, Double]()
          midActionList.foreach {
            case (cid, (ts_history, click, conver, income, title)) =>
              if (!midTimeDiff.contains("timediff_view_" + cid)) {
                midTimeDiff.put("timediff_view_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
              }
              if (!midTimeDiff.contains("timediff_click_" + cid) && click > 0) {
                midTimeDiff.put("timediff_click_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
              }
              if (!midTimeDiff.contains("timediff_conver_" + cid) && conver > 0) {
                midTimeDiff.put("timediff_conver_" + cid, 1.0 / ((ts - ts_history).toDouble / 3600.0 / 24.0))
              }
          }

          val midActionStatic = scala.collection.mutable.Map[String, Double]()
          midActionList.foreach {
            case (cid, (ts_history, click, conver, income, title)) =>
              midActionStatic.put("actionstatic_view_" + cid, 1.0 + midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0))
              midActionStatic.put("actionstatic_click_" + cid, click + midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0))
              midActionStatic.put("actionstatic_conver_" + cid, conver + midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0))
              midActionStatic.put("actionstatic_income_" + cid, income + midActionStatic.getOrDefault("actionstatic_income_" + cid, 0.0))
          }

          if (midTimeDiff.contains("timediff_view_" + cid)) {
            featureMap.put("timediff_view", midTimeDiff.getOrDefault("timediff_view_" + cid, 0.0))
          }
          if (midTimeDiff.contains("timediff_click_" + cid)) {
            featureMap.put("timediff_click", midTimeDiff.getOrDefault("timediff_click_" + cid, 0.0))
          }
          if (midTimeDiff.contains("timediff_conver_" + cid)) {
            featureMap.put("timediff_conver", midTimeDiff.getOrDefault("timediff_conver_" + cid, 0.0))
          }
          if (midActionStatic.contains("actionstatic_view_" + cid)) {
            featureMap.put("actionstatic_view", midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0))
          }
          if (midActionStatic.contains("actionstatic_click_" + cid)) {
            featureMap.put("actionstatic_click", midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0))
          }
          if (midActionStatic.contains("actionstatic_conver_" + cid)) {
            featureMap.put("actionstatic_conver", midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0))
          }
          if (midActionStatic.contains("actionstatic_income_" + cid)) {
            featureMap.put("actionstatic_income", midActionStatic.getOrDefault("actionstatic_income_" + cid, 0.0))
          }
          if (midActionStatic.contains("actionstatic_view_" + cid) && midActionStatic.contains("actionstatic_click_" + cid)) {
            featureMap.put("actionstatic_ctr", RankExtractorFeature_20240530.calDiv(
              midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0),
              midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0)
            ))
          }
          if (midActionStatic.contains("actionstatic_view_" + cid) && midActionStatic.contains("actionstatic_conver_" + cid)) {
            featureMap.put("actionstatic_ctcvr", RankExtractorFeature_20240530.calDiv(
              midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0),
              midActionStatic.getOrDefault("actionstatic_view_" + cid, 0.0)
            ))
          }
          if (midActionStatic.contains("actionstatic_conver_" + cid) && midActionStatic.contains("actionstatic_click_" + cid)) {
            featureMap.put("actionstatic_cvr", RankExtractorFeature_20240530.calDiv(
              midActionStatic.getOrDefault("actionstatic_conver_" + cid, 0.0),
              midActionStatic.getOrDefault("actionstatic_click_" + cid, 0.0)
            ))
          }

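          // Title/tag match features: e1/e2 carry tag lists over 3d/7d/14d windows; each list is matched
          // against the ad title via funcC34567ForTags (match count, max and average concept similarity).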
          val e1: JSONObject = if (record.isNull("e1_feature")) new JSONObject() else
            JSON.parseObject(record.getString("e1_feature"))
          val e2: JSONObject = if (record.isNull("e2_feature")) new JSONObject() else
            JSON.parseObject(record.getString("e2_feature"))
          val title = b1.getOrDefault("cidtitle", "").toString
          if (title.nonEmpty) {
            for ((en, prefix1) <- List((e1, "e1"), (e2, "e2"))) {
              for (prefix2 <- List("tags_3d", "tags_7d", "tags_14d")) {
                if (en.nonEmpty && en.containsKey(prefix2) && en.getString(prefix2).nonEmpty) {
                  val (f1, f2, f3, f4) = funcC34567ForTags(en.getString(prefix2), title)
                  featureMap.put(prefix1 + "_" + prefix2 + "_matchnum", f1)
                  featureMap.put(prefix1 + "_" + prefix2 + "_maxscore", f3)
                  featureMap.put(prefix1 + "_" + prefix2 + "_avgscore", f4)
                }
              }
            }
          }

          val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
            JSON.parseObject(record.getString("d1_feature"))
          val d2: JSONObject = if (record.isNull("d2_feature")) new JSONObject() else
            JSON.parseObject(record.getString("d2_feature"))
          val d3: JSONObject = if (record.isNull("d3_feature")) new JSONObject() else
            JSON.parseObject(record.getString("d3_feature"))

          // d1: additional rolling-window ad stats.
          if (d1.nonEmpty) {
            for (prefix <- List("3h", "6h", "12h", "1d", "3d", "7d")) {
              val view = if (!d1.containsKey("ad_view_" + prefix)) 0D else d1.getIntValue("ad_view_" + prefix).toDouble
              val click = if (!d1.containsKey("ad_click_" + prefix)) 0D else d1.getIntValue("ad_click_" + prefix).toDouble
              val conver = if (!d1.containsKey("ad_conversion_" + prefix)) 0D else d1.getIntValue("ad_conversion_" + prefix).toDouble
              val income = if (!d1.containsKey("ad_income_" + prefix)) 0D else d1.getIntValue("ad_income_" + prefix).toDouble
              val f1 = RankExtractorFeature_20240530.calDiv(click, view)
              val f2 = RankExtractorFeature_20240530.calDiv(conver, view)
              val f3 = RankExtractorFeature_20240530.calDiv(conver, click)
              val f4 = conver
              val f5 = RankExtractorFeature_20240530.calDiv(income * 1000, view) // ecpm, currently not emitted
              featureMap.put("d1_feature" + "_" + prefix + "_" + "ctr", f1)
              featureMap.put("d1_feature" + "_" + prefix + "_" + "ctcvr", f2)
              featureMap.put("d1_feature" + "_" + prefix + "_" + "cvr", f3)
              featureMap.put("d1_feature" + "_" + prefix + "_" + "conver", f4)
              // featureMap.put("d1_feature" + "_" + prefix + "_" + "ecpm", f5)
            }
          }

          // d2: per-metric/per-window cid rank lists; each entry is parsed as <cid>:<...>:<rank>.
          val vidRankMaps = scala.collection.mutable.Map[String, scala.collection.immutable.Map[String, Double]]()
          if (d2.nonEmpty) {
            d2.foreach(r => {
              val key = r._1
              val value = d2.getString(key).split(",").map(r => {
                val rList = r.split(":")
                (rList(0), rList(2).toDouble)
              }).toMap
              vidRankMaps.put(key, value)
            })
          }
          // for (prefix1 <- List("ctr", "ctcvr", "ecpm")) {
          for (prefix1 <- List("ctr", "ctcvr")) {
            for (prefix2 <- List("1d", "3d", "7d", "14d")) {
              if (vidRankMaps.contains(prefix1 + "_" + prefix2)) {
                val rank = vidRankMaps(prefix1 + "_" + prefix2).getOrDefault(cid, 0.0)
                if (rank >= 1.0) {
                  featureMap.put("vid_rank_" + prefix1 + "_" + prefix2, 1.0 / rank)
                }
              }
            }
          }

          // d3: similarity between the creative title and the video title (ctitle vs vtitle).
          if (d3.nonEmpty) {
            val vTitle = d3.getString("title")
            val score = Similarity.conceptSimilarity(title, vTitle)
            featureMap.put("ctitle_vtitle_similarity", score)
          }

          // flag marks samples without online meta features; they are dropped in the filter below.
          val flag = record.isNull("metafeaturemap")

          // Feature map logged in the sample ("allfeaturemap"), kept alongside the recomputed features for comparison.
          val allfeaturemap = if (record.isNull("allfeaturemap")) new JSONObject() else
            JSON.parseObject(record.getString("allfeaturemap"))
          val apptype = record.getString("apptype")
          val label = record.getString("ad_is_conversion")
          val extend = record.getString("extend")
          val abcode = JSON.parseObject(extend).getString("abcode")
          (apptype, "pagesource", "level", label, abcode, allfeaturemap, featureMap, flag)
        }).filter {
          case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
            Set("4").contains(apptype) && !flag &&
              Set("ab0", "ab1", "ab2", "ab3", "ab4").contains(abcode)
        }.mapPartitions(row => {
          val result = new ArrayBuffer[String]()
          val bucketsMap = bucketsMap_br.value
          row.foreach {
            case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
              val offlineFeatureMap = featureMap.filter(r =>
                bucketsMap.containsKey(r._1) || r._1.startsWith("cid_") || r._1.startsWith("adid_")
                  || r._1.startsWith("adverid_") || r._1.startsWith("targeting_conversion_")
                  || r._1.startsWith("hour_") || r._1.startsWith("dayofweek_")
              ).map(r => {
                val name = r._1
                var ifFilter = false
                if (filterNames.nonEmpty) {
                  filterNames.foreach(r => if (!ifFilter && name.contains(r)) {
                    ifFilter = true
                  })
                }
                if (ifFilter) {
                  ""
                } else {
                  val score = r._2.toString.toDouble
                  if (score > 1E-8) {
                    if (bucketsMap.contains(name)) {
                      // Bucketize: scoreNew = (insert position + 1) / bucketNum, mapping raw scores into (0, 1].
                      val (bucketNum, buckets) = bucketsMap(name)
                      val scoreNew = 1.0 / bucketNum * (ExtractorUtils.findInsertPosition(buckets, score).toDouble + 1.0)
                      name + ":" + scoreNew.toString
                    } else {
                      name + ":" + score.toString
                    }
                  } else {
                    ""
                  }
                }
              }).filter(_.nonEmpty)
              result.add((apptype, pagesource, level, label, abcode, allfeaturemap.toString, offlineFeatureMap.iterator.mkString(","))
                .productIterator.mkString("\t"))
          }
          result.iterator
        })
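      // Each emitted line is tab-separated:
      //   apptype \t "pagesource" \t "level" \t label \t abcode \t allfeaturemap(JSON) \t name:score,name:score,...
      // ("pagesource" and "level" are literal placeholders); the same layout is parsed back below via rList(3)/rList(5)/rList(6).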

      // 4 Save the hourly dump to HDFS
      val savePartition = dt + hh
      val hdfsPath = savePath + "/" + savePartition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
        println("Deleting existing path and writing data: " + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
      } else {
        println("Invalid path, cannot write: " + hdfsPath)
      }
    }

    // Re-read the hourly dumps of readDate and merge the recomputed offline features with the
    // feature map logged in the sample ("allfeaturemap").
    val data2 = sc.textFile(savePath + "/" + readDate + "*").mapPartitions(row => {
      val result = new ArrayBuffer[(String,
        Map[String, String], Map[String, String], List[String], List[String], List[String], List[String])]()
      val contentList = contentList_br.value
      row.foreach(r => {
        val rList = r.split("\t")
        val label = rList(3)
        val allfeaturemap = JSON.parseObject(rList(5)).toMap.map(r => (r._1, r._2.toString))
        val offlineFeatureMap = rList(6).split(",").map(r => (r.split(":")(0), r.split(":")(1))).toMap

        // v3: b2_*/b3_* window stats taken from the recomputed offline features, everything else from the logged map.
        val v3 = contentList.map(name => {
          val useOfflineNames = Set(
            "b2_3h_ctr", "b2_3h_ctcvr", "b2_3h_cvr", "b2_3h_conver", "b2_3h_ecpm", "b2_6h_ctr", "b2_6h_ctcvr", "b2_6h_cvr",
            "b2_6h_conver", "b2_6h_ecpm", "b2_12h_ctr", "b2_12h_ctcvr", "b2_12h_cvr", "b2_12h_conver", "b2_12h_ecpm",
            "b2_1d_ctr", "b2_1d_ctcvr", "b2_1d_cvr", "b2_1d_conver", "b2_1d_ecpm", "b2_3d_ctr", "b2_3d_ctcvr", "b2_3d_cvr",
            "b2_3d_conver", "b2_3d_ecpm", "b2_7d_ctr", "b2_7d_ctcvr", "b2_7d_cvr", "b2_7d_conver", "b2_7d_ecpm",
            "b3_1h_ctr", "b3_1h_ctcvr", "b3_1h_cvr", "b3_1h_conver", "b3_1h_click", "b3_1h_conver*log(view)", "b3_1h_conver*ctcvr",
            "b3_2h_ctr", "b3_2h_ctcvr", "b3_2h_cvr", "b3_2h_conver", "b3_2h_click", "b3_2h_conver*log(view)", "b3_2h_conver*ctcvr",
            "b3_3h_ctr", "b3_3h_ctcvr", "b3_3h_cvr", "b3_3h_conver", "b3_3h_click", "b3_3h_conver*log(view)", "b3_3h_conver*ctcvr",
            "b3_6h_ctr", "b3_6h_ctcvr", "b3_6h_cvr", "b3_6h_conver", "b3_6h_click", "b3_6h_conver*log(view)", "b3_6h_conver*ctcvr",
            "b3_12h_ctr", "b3_12h_ctcvr", "b3_12h_cvr", "b3_12h_conver", "b3_12h_click", "b3_12h_conver*log(view)", "b3_12h_conver*ctcvr",
            "b3_1d_ctr", "b3_1d_ctcvr", "b3_1d_cvr", "b3_1d_conver", "b3_1d_click", "b3_1d_conver*log(view)", "b3_1d_conver*ctcvr",
            "b3_3d_ctr", "b3_3d_ctcvr", "b3_3d_cvr", "b3_3d_conver", "b3_3d_click", "b3_3d_conver*log(view)", "b3_3d_conver*ctcvr",
            "b3_7d_ctr", "b3_7d_ctcvr", "b3_7d_cvr", "b3_7d_conver", "b3_7d_click", "b3_7d_conver*log(view)", "b3_7d_conver*ctcvr"
          )
          if (useOfflineNames.contains(name)) {
            if (offlineFeatureMap.contains(name)) {
              name + ":" + offlineFeatureMap(name)
            } else {
              ""
            }
          } else {
            if (allfeaturemap.contains(name)) {
              name + ":" + allfeaturemap(name)
            } else {
              ""
            }
          }
        }).filter(_.nonEmpty)

        // v4: e1/e2 tag-match features and d1 3h ctr/ctcvr from the offline features.
        val v4 = contentList.map(name => {
          val useOfflineNames = Set(
            "e1_tags_3d_matchnum", "e1_tags_3d_maxscore", "e1_tags_3d_avgscore", "e1_tags_7d_matchnum",
            "e1_tags_7d_maxscore", "e1_tags_7d_avgscore", "e1_tags_14d_matchnum", "e1_tags_14d_maxscore",
            "e1_tags_14d_avgscore", "e2_tags_3d_matchnum", "e2_tags_3d_maxscore", "e2_tags_3d_avgscore",
            "e2_tags_7d_matchnum", "e2_tags_7d_maxscore", "e2_tags_7d_avgscore", "e2_tags_14d_matchnum",
            "e2_tags_14d_maxscore", "e2_tags_14d_avgscore", "d1_feature_3h_ctr", "d1_feature_3h_ctcvr"
          )
          if (useOfflineNames.contains(name)) {
            if (offlineFeatureMap.contains(name)) {
              name + ":" + offlineFeatureMap(name)
            } else {
              ""
            }
          } else {
            if (allfeaturemap.contains(name)) {
              name + ":" + allfeaturemap(name)
            } else {
              ""
            }
          }
        }).filter(_.nonEmpty)

        // v5: user-level aggregates from the offline features.
        val v5 = contentList.map(name => {
          val useOfflineNames = Set(
            "viewAll", "clickAll", "converAll", "incomeAll", "ctr_all", "ctcvr_all", "cvr_all", "ecpm_all"
          )
          if (useOfflineNames.contains(name)) {
            if (offlineFeatureMap.contains(name)) {
              name + ":" + offlineFeatureMap(name)
            } else {
              ""
            }
          } else {
            if (allfeaturemap.contains(name)) {
              name + ":" + allfeaturemap(name)
            } else {
              ""
            }
          }
        }).filter(_.nonEmpty)

        // v6: the remaining d1_feature_* window stats from the offline features.
        val v6 = contentList.map(name => {
          val useOfflineNames = Set(
            "d1_feature_3h_cvr", "d1_feature_3h_conver", "d1_feature_3h_ecpm", "d1_feature_6h_ctr",
            "d1_feature_6h_ctcvr", "d1_feature_6h_cvr", "d1_feature_6h_conver", "d1_feature_6h_ecpm",
            "d1_feature_12h_ctr", "d1_feature_12h_ctcvr", "d1_feature_12h_cvr", "d1_feature_12h_conver",
            "d1_feature_12h_ecpm", "d1_feature_1d_ctr", "d1_feature_1d_ctcvr", "d1_feature_1d_cvr", "d1_feature_1d_conver",
            "d1_feature_1d_ecpm", "d1_feature_3d_ctr", "d1_feature_3d_ctcvr", "d1_feature_3d_cvr", "d1_feature_3d_conver",
            "d1_feature_3d_ecpm", "d1_feature_7d_ctr", "d1_feature_7d_ctcvr", "d1_feature_7d_cvr", "d1_feature_7d_conver",
            "d1_feature_7d_ecpm"
          )
          if (useOfflineNames.contains(name)) {
            if (offlineFeatureMap.contains(name)) {
              name + ":" + offlineFeatureMap(name)
            } else {
              ""
            }
          } else {
            if (allfeaturemap.contains(name)) {
              name + ":" + allfeaturemap(name)
            } else {
              ""
            }
          }
        }).filter(_.nonEmpty)

        result.add((label, offlineFeatureMap, allfeaturemap, v3, v4, v5, v6))
      })
      result.iterator
    })
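    // Six check outputs, each line keyed by label:
    //   v1: label + the recomputed offline feature map      v2: label + the logged allfeaturemap
    //   v3..v6: label + the merged name:score lists built above (b2/b3 stats, tag-match + d1 3h,
    //   user aggregates, remaining d1_feature_* stats).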

    val saveV1 = "/dw/recommend/model/33_for_check_v1/" + readDate
    if (saveV1.nonEmpty && saveV1.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV1)
      MyHdfsUtils.delete_hdfs_path(saveV1)
      data2.map(r => r._1 + "\t" + r._2.map(r => r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV1, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV1)
    }

    val saveV2 = "/dw/recommend/model/33_for_check_v2/" + readDate
    if (saveV2.nonEmpty && saveV2.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV2)
      MyHdfsUtils.delete_hdfs_path(saveV2)
      data2.map(r => r._1 + "\t" + r._3.map(r => r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV2, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV2)
    }

    val saveV3 = "/dw/recommend/model/33_for_check_v3/" + readDate
    if (saveV3.nonEmpty && saveV3.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV3)
      MyHdfsUtils.delete_hdfs_path(saveV3)
      data2.map(r => r._1 + "\t" + r._4.mkString("\t")).saveAsTextFile(saveV3, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV3)
    }

    val saveV4 = "/dw/recommend/model/33_for_check_v4/" + readDate
    if (saveV4.nonEmpty && saveV4.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV4)
      MyHdfsUtils.delete_hdfs_path(saveV4)
      data2.map(r => r._1 + "\t" + r._5.mkString("\t")).saveAsTextFile(saveV4, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV4)
    }

    val saveV5 = "/dw/recommend/model/33_for_check_v5/" + readDate
    if (saveV5.nonEmpty && saveV5.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV5)
      MyHdfsUtils.delete_hdfs_path(saveV5)
      data2.map(r => r._1 + "\t" + r._6.mkString("\t")).saveAsTextFile(saveV5, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV5)
    }

    val saveV6 = "/dw/recommend/model/33_for_check_v6/" + readDate
    if (saveV6.nonEmpty && saveV6.startsWith("/dw/recommend/model/")) {
      println("Deleting existing path and writing data: " + saveV6)
      MyHdfsUtils.delete_hdfs_path(saveV6)
      data2.map(r => r._1 + "\t" + r._7.mkString("\t")).saveAsTextFile(saveV6, classOf[GzipCodec])
    } else {
      println("Invalid path, cannot write: " + saveV6)
    }

  }

  // Identity transfer function for odpsOps.readTable: keep the ODPS record as-is.
  def func(record: Record, schema: TableSchema): Record = {
    record
  }

  // Returns (match count, matched tags, max concept-similarity score, average concept-similarity score)
  // for a comma-separated tag list matched against a title.
  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
    val tagsList = tags.split(",")
    var d1 = 0.0
    val d2 = new ArrayBuffer[String]()
    var d3 = 0.0
    var d4 = 0.0
    for (tag <- tagsList) {
      if (title.contains(tag)) {
        d1 = d1 + 1.0
        d2.add(tag)
      }
      val score = Similarity.conceptSimilarity(tag, title)
      d3 = if (score > d3) score else d3
      d4 = d4 + score
    }
    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
    (d1, d2.mkString(","), d3, d4)
  }
}