|
@@ -0,0 +1,256 @@
|
|
|
+package com.aliyun.odps.spark.examples.makedata
|
|
|
+
|
|
|
+import com.alibaba.fastjson.{JSON, JSONObject}
|
|
|
+import com.aliyun.odps.TableSchema
|
|
|
+import com.aliyun.odps.data.Record
|
|
|
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
|
|
|
+import examples.extractor.RankExtractorFeature_20240530
|
|
|
+import org.apache.hadoop.io.compress.GzipCodec
|
|
|
+import org.apache.spark.sql.SparkSession
|
|
|
+import org.xm.Similarity
|
|
|
+
|
|
|
+import scala.collection.JavaConversions._
|
|
|
+import scala.collection.mutable.ArrayBuffer
|
|
|
+/*
|
|
|
+ 20240608: feature extraction job (提取特征)
|
|
|
+ */
|
|
|
+
|
|
|
/**
 * Feature-extraction job (2024-06-08 "check" variant).
 *
 * For every hourly partition in [beginStr, endStr], reads the ODPS source table,
 * extracts video / user / collaborative-filtering features and label columns from
 * each record, and writes one "logKey \t labels \t features" line per record to
 * HDFS as gzip-compressed text.
 */
object makedata_13_originData_20240529_check {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 read job parameters (with defaults suitable for ad-hoc runs)
    val param = ParamUtils.parseArgs(args)
    val tablePart = param.getOrElse("tablePart", "64").toInt
    val beginStr = param.getOrElse("beginStr", "2023010100")
    val endStr = param.getOrElse("endStr", "2023010123")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/13_sample_data/")
    val project = param.getOrElse("project", "loghubods")
    val table = param.getOrElse("table", "XXXX")
    val repartition = param.getOrElse("repartition", "100").toInt

    // 2 ODPS handle
    val odpsOps = env.getODPS(sc)

    // 3 produce data for every dt/hh partition in the requested time range
    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
    for (dt_hh <- timeRange) {
      val dt = dt_hh.substring(0, 8)
      val hh = dt_hh.substring(8, 10)
      val partition = s"dt=$dt,hh=$hh"
      println("开始执行partiton:" + partition)
      val odpsData = odpsOps.readTable(project = project,
        table = table,
        partition = partition,
        transfer = func,
        numPartition = tablePart)
        .map(record_ => {

          // Raw meta-feature column: empty object when the column is NULL.
          val record = if (record_.isNull("metafeaturemap")) new JSONObject() else
            JSON.parseObject(record_.getString("metafeaturemap"))

          val featureMap = new JSONObject()

          // a) video statistics, several scopes (all / feed-noflow / feed-flow / province).
          // BUGFIX: the original condition was inverted — it returned an empty object
          // when the key WAS present and tried to parse a null string when it was
          // absent (fastjson JSON.parseObject(null) yields null, NPE-ing downstream).
          // subJson parses the sub-object only when the key exists.
          val b1: JSONObject = subJson(record, "alg_vid_feature_all_exp")
          val b2: JSONObject = subJson(record, "alg_vid_feature_all_share")
          val b3: JSONObject = subJson(record, "alg_vid_feature_all_return")
          val b6: JSONObject = subJson(record, "alg_vid_feature_exp2share")
          val b7: JSONObject = subJson(record, "alg_vid_feature_share2return")

          val b8: JSONObject = subJson(record, "alg_vid_feature_feed_noflow_exp")
          val b9: JSONObject = subJson(record, "alg_vid_feature_feed_noflow_root_share")
          val b10: JSONObject = subJson(record, "alg_vid_feature_feed_noflow_root_return")
          val b11: JSONObject = subJson(record, "alg_vid_feature_feed_flow_exp")
          val b12: JSONObject = subJson(record, "alg_vid_feature_feed_flow_root_share")
          val b13: JSONObject = subJson(record, "alg_vid_feature_feed_flow_root_return")
          val b17: JSONObject = subJson(record, "alg_vid_feature_feed_province_exp")
          val b18: JSONObject = subJson(record, "alg_vid_feature_feed_province_root_share")
          val b19: JSONObject = subJson(record, "alg_vid_feature_feed_province_root_return")

          // (exposure, share, return) triples, one per scope, tagged with a prefix.
          val origin_data = List(
            (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
            (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
            (b17, b18, b19, "b171819")
          )
          for ((b_1, b_2, b_3, prefix1) <- origin_data) {
            for (prefix2 <- List(
              "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
            )) {
              val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
              val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
              val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
              val f1 = RankExtractorFeature_20240530.calDiv(share, exp)    // share-through rate
              val f2 = RankExtractorFeature_20240530.calLog(share)
              val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)  // return-over-view rate
              val f4 = RankExtractorFeature_20240530.calLog(returns)
              val f5 = f3 * f4
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
            }
          }

          // Basic video info: duration and bit rate (0 when absent).
          val video_info: JSONObject = subJson(record, "alg_vid_feature_basic_info")
          featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
          featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)

          // c1) per-user play counters over several windows.
          val c1: JSONObject = subJson(record, "alg_mid_feature_play")
          if (c1.nonEmpty) {
            featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
            featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
            featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
            featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
          }
          // c2) per-user share/return counters.
          val c2: JSONObject = subJson(record, "alg_mid_feature_share_and_return")
          if (c2.nonEmpty) {
            featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
            featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
            featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
            featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
            featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
            featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
            featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
            featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
          }

          // c3..c7) title-vs-user-tags similarity features.
          // NOTE(review): all five entries read the SAME source key
          // "alg_mid_feature_play_tags"; sibling makedata jobs use a distinct
          // tag table per cX — confirm whether this is intentional here.
          val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
          if (!title.equals("")) {
            for (key_feature <- List(("c3_feature", "alg_mid_feature_play_tags"),
              ("c4_feature", "alg_mid_feature_play_tags"),
              ("c5_feature", "alg_mid_feature_play_tags"),
              ("c6_feature", "alg_mid_feature_play_tags"),
              ("c7_feature", "alg_mid_feature_play_tags"))) {
              val c34567: JSONObject = subJson(record, key_feature._2)
              for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
                val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
                if (!tags.equals("")) {
                  // f2 (the matched words themselves) is intentionally not emitted.
                  val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
                  featureMap.put(key_feature._1 + "_" + key_time + "_matchnum", f1)
                  featureMap.put(key_feature._1 + "_" + key_time + "_maxscore", f3)
                  featureMap.put(key_feature._1 + "_" + key_time + "_avgscore", f4)
                }
              }
            }
          }

          // c8/c9) collaborative-filtering lookups keyed by this vid.
          // cf list format: "vid:score:num:rank,vid:score:num:rank,..."
          val vid = if (record_.isNull("vid")) "" else record_.getString("vid")
          if (!vid.equals("")) {
            for (key_feature <- List(("c8_feature", "alg_mid_feature_sharecf"), ("c9_feature", "alg_mid_feature_returncf"))) {
              val c89: JSONObject = subJson(record, key_feature._2)
              for (key_action <- List("share", "return")) {
                val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
                if (!cfListStr.equals("")) {
                  val cfMap = cfListStr.split(",").map(r => {
                    val rList = r.split(":")
                    (rList(0), (rList(1), rList(2), rList(3)))
                  }).toMap
                  if (cfMap.contains(vid)) {
                    val (score, num, rank) = cfMap(vid)
                    featureMap.put(key_feature._1 + "_" + key_action + "_score", score.toDouble)
                    featureMap.put(key_feature._1 + "_" + key_action + "_num", num.toDouble)
                    // rank is inverted so that rank 1 yields the largest feature value
                    featureMap.put(key_feature._1 + "_" + key_action + "_rank", 1.0 / rank.toDouble)
                  }
                }
              }
            }
          }

          // d1) item-to-item CF statistics.
          val d1: JSONObject = subJson(record, "alg_recsys_feature_cf_i2i_new")
          if (d1.nonEmpty) {
            featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
            featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
            featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
          }

          // 4 label columns (copied through as strings when present)
          val labels = new JSONObject
          for (labelKey <- List(
            "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
            "share_pv", "total_share_uv"
          )) {
            if (!record_.isNull(labelKey)) {
              labels.put(labelKey, record_.getString(labelKey))
            }
          }

          // 5 log-key header: identifies the sample (apptype,pagesource,mid,vid,ts,abcode,level)
          val apptype = record_.getString("apptype")
          val pagesource = record_.getString("pagesource")
          val mid = record_.getString("mid")
          // vid was extracted above
          val ts = record_.getString("ts")
          val abcode = record_.getString("abcode")
          val level = if (record_.isNull("level")) "0" else record_.getString("level")
          val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
          val labelKey = labels.toString()
          val featureKey = featureMap.toString()

          // 6 final sample line
          logKey + "\t" + labelKey + "\t" + featureKey
        })

      // 4 persist to HDFS; only delete-and-write under the expected model root
      val savePartition = dt + hh
      val hdfsPath = savePath + "/" + savePartition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
        println("删除路径并开始数据写入:" + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
      } else {
        println("路径不合法,无法写入:" + hdfsPath)
      }
    }
  }

  /** ODPS read transfer function: identity pass-through of each record. */
  def func(record: Record, schema: TableSchema): Record = {
    record
  }

  /**
   * Parse a nested JSON sub-object stored as a string under `key`.
   * Returns an empty JSONObject when the key is absent, so callers can
   * safely probe with containsKey / isEmpty without null checks.
   */
  private def subJson(record: JSONObject, key: String): JSONObject =
    if (record.containsKey(key)) JSON.parseObject(record.getString(key)) else new JSONObject()

  /**
   * Match a comma-separated tag list against a title.
   *
   * @return (match count, matched tags joined by ",",
   *         max semantic similarity, mean semantic similarity over all tags)
   */
  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
    val tagsList = tags.split(",")
    var d1 = 0.0
    val d2 = new ArrayBuffer[String]()
    var d3 = 0.0
    var d4 = 0.0
    for (tag <- tagsList) {
      if (title.contains(tag)) {
        d1 = d1 + 1.0
        d2 += tag
      }
      val score = Similarity.conceptSimilarity(tag, title)
      d3 = if (score > d3) score else d3
      d4 = d4 + score
    }
    // average over every tag (not only the matched ones)
    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
    (d1, d2.mkString(","), d3, d4)
  }
}
|