@@ -75,199 +75,200 @@ object makedata_17_bucketDataPrint_20240617 {
val odpsOps = env.getODPS(sc)
+
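+ // Iterate hourly partitions (yyyyMMddHH, from the dt/hh substrings below) between beginStr and endStr; each pass reads one ODPS partition and writes one HDFS directory.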
+ val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+ for (dt_hh <- timeRange) {
+ val dt = dt_hh.substring(0, 8)
+ val hh = dt_hh.substring(8, 10)
+ val partition = s"dt=$dt,hh=$hh"
+ println("Processing partition: " + partition)
+ val odpsData = odpsOps.readTable(project = project,
+ table = table,
+ partition = partition,
+ transfer = func,
+ numPartition = tablePart)
+ .map(record => {
+ val featureMap = new JSONObject()
+
+
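+ // Parse each b*_feature column as JSON; a null column becomes an empty JSONObject so the derived features below default to 0.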
+ val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b1_feature"))
+ val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b2_feature"))
+ val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b3_feature"))
+ val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b6_feature"))
+ val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b7_feature"))
+
+ val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b8_feature"))
+ val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b9_feature"))
+ val b10: JSONObject = if (record.isNull("b10_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b10_feature"))
+ val b11: JSONObject = if (record.isNull("b11_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b11_feature"))
+ val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b12_feature"))
+ val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b13_feature"))
+ val b17: JSONObject = if (record.isNull("b17_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b17_feature"))
+ val b18: JSONObject = if (record.isNull("b18_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b18_feature"))
+ val b19: JSONObject = if (record.isNull("b19_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("b19_feature"))
+
+
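+ // Each triple supplies (exposure, share, return) sources for one feature group; per time window we derive the share-through rate (STR), return rate (ROV), their logs, and the ROV*log(return) cross feature.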
+ val origin_data = List(
+ (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+ (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+ (b17, b18, b19, "b171819")
+ )
+ for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+ for (prefix2 <- List(
+ "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+ )) {
+ val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+ val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+ val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+ val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
+ val f2 = RankExtractorFeature_20240530.calLog(share)
+ val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
+ val f4 = RankExtractorFeature_20240530.calLog(returns)
+ val f5 = f3 * f4
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+ }
+ }
+
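+ // Static video info (duration, bit rate) plus c1 (play counts) and c2 (share/return counts) copied through as raw count features.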
+ val video_info: JSONObject = if (record.isNull("t_v_info_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("t_v_info_feature"))
+ featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+ featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+ val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("c1_feature"))
+ if (c1.nonEmpty) {
+ featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+ featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+ featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+ featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+ }
+ val c2: JSONObject = if (record.isNull("c2_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("c2_feature"))
+ if (c2.nonEmpty) {
+ featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+ featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+ featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+ featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+ featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+ featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+ featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+ featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+ }
+
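+ // Match the video title against the tag lists in c3..c7 per time window, keeping match count plus max/avg match scores (the second return value of funcC34567ForTags is unused).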
+ val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+ if (!title.equals("")) {
+ for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
+ val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+ JSON.parseObject(record.getString(key_feature))
+ for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+ val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+ if (!tags.equals("")) {
+ val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
+ featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
+ featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
+ featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+ }
+ }
+ }
+ }
+
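+ // c8/c9 hold CF-style lists serialized as "vid:score:num:rank,..."; look up the current vid and expose its score, count, and reciprocal rank.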
+ val vid = if (record.isNull("vid")) "" else record.getString("vid")
+ if (!vid.equals("")) {
+ for (key_feature <- List("c8_feature", "c9_feature")) {
+ val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+ JSON.parseObject(record.getString(key_feature))
+ for (key_action <- List("share", "return")) {
+ val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+ if (!cfListStr.equals("")) {
+ val cfMap = cfListStr.split(",").map(r => {
+ val rList = r.split(":")
+ (rList(0), (rList(1), rList(2), rList(3)))
+ }).toMap
+ if (cfMap.contains(vid)) {
+ val (score, num, rank) = cfMap(vid)
+ featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
+ featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
+ featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+ }
+ }
+ }
+ }
+ }
+
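+ // d1 carries precomputed exposure / return / rovn statistics stored as strings.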
+ val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
+ JSON.parseObject(record.getString("d1_feature"))
+ if (d1.nonEmpty) {
+ featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+ featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+ featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+ }
+
+
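+ // flag marks rows whose online feature snapshot (allfeaturemap) is missing; such rows are dropped in the filter below.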
+ val flag = record.isNull("allfeaturemap")
+
+ val allfeaturemap = if (record.isNull("allfeaturemap")) new JSONObject() else
+ JSON.parseObject(record.getString("allfeaturemap"))
+
+ val apptype = record.getString("apptype")
+ val pagesource = record.getString("pagesource")
+ val abcode = record.getString("abcode")
+ val level = if (record.isNull("level")) "0" else record.getString("level")
+ val label = record.getString("is_return")
+ (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag)
+ }).filter {
+ case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
+ apptype.equals("3") && pagesource.endsWith("recommend") &&
+ Set("ab0", "ab1", "ab2", "ab3").contains(abcode) && level.equals("0") && !flag
+ }.mapPartitions(row => {
+ val result = new ArrayBuffer[String]()
+ val bucketsMap = bucketsMap_br.value
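+ // bucketsMap: feature name -> (bucket count, bucket boundaries); each positive score is discretized to its bucket index and rescaled into (0, 1].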
+ row.foreach {
+ case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap, flag) =>
+ val offlineFeatureMap = featureMap.filter(r => bucketsMap.containsKey(r._1)).map(r => {
+ val score = r._2.toString.toDouble
+ if (score > 1E-8) {
+ val (bucketNum, buckets) = bucketsMap(r._1)
+ val scoreNew = 1.0 / bucketNum * (ExtractorUtils.findInsertPosition(buckets, score).toDouble + 1.0)
+ r._1 + ":" + scoreNew.toString
+ } else {
+ ""
+ }
+ }).filter(_.nonEmpty)
+ result.add((apptype, pagesource, level, label, abcode, allfeaturemap.toString, offlineFeatureMap.iterator.mkString(","))
+ .productIterator.mkString("\t"))
+ }
+ result.iterator
+ })
+
+
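+ // Guarded overwrite: only delete-and-write when the target sits under the expected model root path.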
+ val savePartition = dt + hh
+ val hdfsPath = savePath + "/" + savePartition
+ if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + hdfsPath)
+ MyHdfsUtils.delete_hdfs_path(hdfsPath)
+ odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + hdfsPath)
+ }
+ }

val data2 = sc.textFile(savePath + "/" + readDate + "*").mapPartitions(row=>{
val result = new ArrayBuffer[(String,
@@ -355,50 +356,50 @@ object makedata_17_bucketDataPrint_20240617 {
result.iterator
})
- val saveV5 = "/dw/recommend/model/17_for_check_v5/" + readDate
- if (saveV5.nonEmpty && saveV5.startsWith("/dw/recommend/model/")) {
- println("Deleting path and starting data write: " + saveV5)
- MyHdfsUtils.delete_hdfs_path(saveV5)
- data2.map(r => r._1 + "\t" + r._6.mkString("\t")).saveAsTextFile(saveV5, classOf[GzipCodec])
- } else {
- println("Invalid path, cannot write: " + saveV5)
- }
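+ // Write five parallel check outputs (v1-v5), one tuple field of data2 per path; v1/v2 serialize key:value pairs, v3-v5 plain value sequences, all behind the same path guard.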
+ val saveV1 = "/dw/recommend/model/17_for_check_v1/" + readDate
+ if (saveV1.nonEmpty && saveV1.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + saveV1)
+ MyHdfsUtils.delete_hdfs_path(saveV1)
+ data2.map(r => r._1 + "\t" + r._2.map(r => r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV1, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + saveV1)
+ }
+
+ val saveV2 = "/dw/recommend/model/17_for_check_v2/" + readDate
+ if (saveV2.nonEmpty && saveV2.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + saveV2)
+ MyHdfsUtils.delete_hdfs_path(saveV2)
+ data2.map(r => r._1 + "\t" + r._3.map(r => r._1 + ":" + r._2).mkString("\t")).saveAsTextFile(saveV2, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + saveV2)
+ }
+
+ val saveV3 = "/dw/recommend/model/17_for_check_v3/" + readDate
+ if (saveV3.nonEmpty && saveV3.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + saveV3)
+ MyHdfsUtils.delete_hdfs_path(saveV3)
+ data2.map(r => r._1 + "\t" + r._4.mkString("\t")).saveAsTextFile(saveV3, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + saveV3)
+ }
+
+ val saveV4 = "/dw/recommend/model/17_for_check_v4/" + readDate
+ if (saveV4.nonEmpty && saveV4.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + saveV4)
+ MyHdfsUtils.delete_hdfs_path(saveV4)
+ data2.map(r => r._1 + "\t" + r._5.mkString("\t")).saveAsTextFile(saveV4, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + saveV4)
+ }
+
+ val saveV5 = "/dw/recommend/model/17_for_check_v5/" + readDate
+ if (saveV5.nonEmpty && saveV5.startsWith("/dw/recommend/model/")) {
+ println("Deleting path and starting data write: " + saveV5)
+ MyHdfsUtils.delete_hdfs_path(saveV5)
+ data2.map(r => r._1 + "\t" + r._6.mkString("\t")).saveAsTextFile(saveV5, classOf[GzipCodec])
+ } else {
+ println("Invalid path, cannot write: " + saveV5)
+ }

val saveV6 = "/dw/recommend/model/17_for_check_v6/" + readDate
if (saveV6.nonEmpty && saveV6.startsWith("/dw/recommend/model/")) {