@@ -5,14 +5,16 @@ import com.aliyun.odps.TableSchema
import com.aliyun.odps.data.Record
import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
import examples.extractor.RankExtractorFeature_20240530
+import examples.utils.SimilarityUtils
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.sql.SparkSession
-import org.xm.Similarity

import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
-/*
+import scala.util.Random

+/*
+ 20241211 feature extraction
*/

object makedata_recsys_61_originData_20241209 {
@@ -25,15 +27,18 @@ object makedata_recsys_61_originData_20241209 {

// 1 Read parameters
val param = ParamUtils.parseArgs(args)
+
+ val beginStr = param.getOrElse("beginStr", "2024120912")
+ val endStr = param.getOrElse("endStr", "2024120912")
+ val project = param.getOrElse("project", "loghubods")
+ val table = param.getOrElse("table", "alg_recsys_sample_all_v2")
val tablePart = param.getOrElse("tablePart", "64").toInt
- val beginStr = param.getOrElse("beginStr", "2023010100")
- val endStr = param.getOrElse("endStr", "2023010123")
val savePath = param.getOrElse("savePath", "/dw/recommend/model/61_origin_data/")
- val project = param.getOrElse("project", "loghubods")
- val table = param.getOrElse("table", "XXXX")
val repartition = param.getOrElse("repartition", "32").toInt
+ val whatLabel = param.getOrElse("whatLabel", "is_share")
+ val fuSampleRate = param.getOrElse("fuSampleRate", "0.1").toDouble

- // 2 Read ODPS table info
+ // 2 ODPS
val odpsOps = env.getODPS(sc)

// 3 Run data production in a loop
@@ -48,197 +53,274 @@ object makedata_recsys_61_originData_20241209 {
partition = partition,
transfer = func,
numPartition = tablePart)
- .map(record => {
-
- val featureMap = new JSONObject()
-
- // a video features
- val b1: JSONObject = if (record.isNull("b1_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b1_feature"))
- val b2: JSONObject = if (record.isNull("b2_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b2_feature"))
- val b3: JSONObject = if (record.isNull("b3_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b3_feature"))
- val b6: JSONObject = if (record.isNull("b6_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b6_feature"))
- val b7: JSONObject = if (record.isNull("b7_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b7_feature"))
-
- val b8: JSONObject = if (record.isNull("b8_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b8_feature"))
- val b9: JSONObject = if (record.isNull("b9_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b9_feature"))
- val b10: JSONObject = if (record.isNull("b10_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b10_feature"))
- val b11: JSONObject = if (record.isNull("b11_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b11_feature"))
- val b12: JSONObject = if (record.isNull("b12_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b12_feature"))
- val b13: JSONObject = if (record.isNull("b13_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b13_feature"))
- val b17: JSONObject = if (record.isNull("b17_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b17_feature"))
- val b18: JSONObject = if (record.isNull("b18_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b18_feature"))
- val b19: JSONObject = if (record.isNull("b19_feature")) new JSONObject() else
- JSON.parseObject(record.getString("b19_feature"))
-
-
- val origin_data = List(
- (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
- (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
- (b17, b18, b19, "b171819")
- )
- for ((b_1, b_2, b_3, prefix1) <- origin_data) {
- for (prefix2 <- List(
- "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
- )) {
- val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
- val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
- val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
- val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
- val f2 = RankExtractorFeature_20240530.calLog(share)
- val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
- val f4 = RankExtractorFeature_20240530.calLog(returns)
- val f5 = f3 * f4
- val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
- featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
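+ // Downsample negatives: keep every record whose whatLabel column is "1", plus a random ~fuSampleRate share of the rest.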
+ .filter(record => {
+ val label = if (record.isNull(whatLabel)) "0" else record.getString(whatLabel)
+ "1".equals(label) || new Random().nextDouble() <= fuSampleRate
+ })
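+ // Initialize SimilarityUtils once per partition (not once per record) before the word2vec similarity calls below.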
+ .mapPartitions(p => {
+ SimilarityUtils.init()
+ p.map(record => {
+ val featureMap = new JSONObject()
+
+ // a video features
+ val b1: JSONObject = getJsonObject(record, "b1_feature")
+ val b2: JSONObject = getJsonObject(record, "b2_feature")
+ val b3: JSONObject = getJsonObject(record, "b3_feature")
+ val b6: JSONObject = getJsonObject(record, "b6_feature")
+ val b7: JSONObject = getJsonObject(record, "b7_feature")
+
+ val b8: JSONObject = getJsonObject(record, "b8_feature")
+ val b9: JSONObject = getJsonObject(record, "b9_feature")
+ val b10: JSONObject = getJsonObject(record, "b10_feature")
+ val b11: JSONObject = getJsonObject(record, "b11_feature")
+ val b12: JSONObject = getJsonObject(record, "b12_feature")
+ val b13: JSONObject = getJsonObject(record, "b13_feature")
+ val b17: JSONObject = getJsonObject(record, "b17_feature")
+ val b18: JSONObject = getJsonObject(record, "b18_feature")
+ val b19: JSONObject = getJsonObject(record, "b19_feature")
+
+ val origin_data = List(
+ (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+ (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+ (b17, b18, b19, "b171819")
+ )
+ for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+ for (prefix2 <- List(
+ "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+ )) {
+ val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+ val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+ val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+ val f1 = RankExtractorFeature_20240530.calDiv(share, exp)
+ val f2 = RankExtractorFeature_20240530.calLog(share)
+ val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)
+ val f4 = RankExtractorFeature_20240530.calLog(returns)
+ val f5 = f3 * f4
+ val f6 = RankExtractorFeature_20240530.calDiv(returns, share)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+ featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
+ }
}
- }

- val video_info: JSONObject = if (record.isNull("t_v_info_feature")) new JSONObject() else
- JSON.parseObject(record.getString("t_v_info_feature"))
- featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
- featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
-
- val c1: JSONObject = if (record.isNull("c1_feature")) new JSONObject() else
- JSON.parseObject(record.getString("c1_feature"))
- if (c1.nonEmpty) {
- featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
- featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
- featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
- featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
- }
- val c2: JSONObject = if (record.isNull("c2_feature")) new JSONObject() else
- JSON.parseObject(record.getString("c2_feature"))
- if (c2.nonEmpty) {
- featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
- featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
- featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
- featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
- featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
- featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
- featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
- featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
- }
+ val video_info: JSONObject = getJsonObject(record, "t_v_info_feature")
+ featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+ featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+ val c1: JSONObject = getJsonObject(record, "c1_feature")
+ if (c1.nonEmpty) {
+ featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+ featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+ featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+ featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+ }
+ val c2: JSONObject = getJsonObject(record, "c2_feature")
+ if (c2.nonEmpty) {
+ featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+ featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+ featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+ featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+ featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+ featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+ featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+ featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+ }

- val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
- if (!title.equals("")) {
- for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
- val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
- JSON.parseObject(record.getString(key_feature))
- for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
- val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
- if (!tags.equals("")) {
- val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
- featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
- featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
- featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+ val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+ if (!title.equals("")) {
+ for (key_feature <- List("c3_feature", "c4_feature", "c5_feature", "c6_feature", "c7_feature")) {
+ val c34567: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+ JSON.parseObject(record.getString(key_feature))
+ for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+ val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+ if (!tags.equals("")) {
+ val (f1, f2, f3, f4) = funcC34567ForTagsW2V(tags, title)
+ featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
+ featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
+ featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
+ }
}
}
}
- }

- val vid = if (record.isNull("vid")) "" else record.getString("vid")
- if (!vid.equals("")) {
- for (key_feature <- List("c8_feature", "c9_feature")) {
- val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
- JSON.parseObject(record.getString(key_feature))
- for (key_action <- List("share", "return")) {
- val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
- if (!cfListStr.equals("")) {
- val cfMap = cfListStr.split(",").map(r => {
- val rList = r.split(":")
- (rList(0), (rList(1), rList(2), rList(3)))
- }).toMap
- if (cfMap.contains(vid)) {
- val (score, num, rank) = cfMap(vid)
- featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
- featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
- featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+ val vid = if (record.isNull("vid")) "" else record.getString("vid")
+ if (!vid.equals("")) {
+ for (key_feature <- List("c8_feature", "c9_feature")) {
+ val c89: JSONObject = if (record.isNull(key_feature)) new JSONObject() else
+ JSON.parseObject(record.getString(key_feature))
+ for (key_action <- List("share", "return")) {
+ val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+ if (!cfListStr.equals("")) {
+ val cfMap = cfListStr.split(",").map(r => {
+ val rList = r.split(":")
+ (rList(0), (rList(1), rList(2), rList(3)))
+ }).toMap
+ if (cfMap.contains(vid)) {
+ val (score, num, rank) = cfMap(vid)
+ featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
+ featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
+ featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+ }
}
}
}
}
- }

- val d1: JSONObject = if (record.isNull("d1_feature")) new JSONObject() else
- JSON.parseObject(record.getString("d1_feature"))
- if (d1.nonEmpty) {
- featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
- featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
- featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
- }
+ val d1: JSONObject = getJsonObject(record, "d1_feature")
+ if (d1.nonEmpty) {
+ featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+ featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+ featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+ }

+ // ************* new feature *************
+ val shortPeriod = List("1h", "2h", "4h", "6h", "12h", "24h", "7d")
+ val middlePeriod = List("14d", "30d")
+ val longPeriod = List("7d", "35d", "90d", "365d")
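+ // Each entry: (feature prefix, stat periods, parsed stats JSON); the trailing comment names the source column.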
+ val vidStatFeat = List(
+ ("b20", shortPeriod, getJsonObject(record, "b20_feature")), // cate2_feature
+ ("b21", shortPeriod, getJsonObject(record, "b21_feature")), // cate1_feature
+ ("b22", shortPeriod, getJsonObject(record, "b22_feature")), // source_feature
+ ("b28", shortPeriod, getJsonObject(record, "b28_feature")), // sence_type_feature
+ ("b23", middlePeriod, getJsonObject(record, "b23_feature")), // cate2_feature_day
+ ("b24", middlePeriod, getJsonObject(record, "b24_feature")), // cate1_feature_day
+ ("b25", middlePeriod, getJsonObject(record, "b25_feature")), // source_feature_day
+ ("b26", longPeriod, getJsonObject(record, "b26_feature")), // unionid_feature_day
+ ("b27", longPeriod, getJsonObject(record, "b27_feature")) // vid_feature_day
+ )
+ for ((featType, featPeriod, featData) <- vidStatFeat) {
+ for (period <- featPeriod) {
+ // val view = if (featData.isEmpty) 0D else featData.getDoubleValue("view_" + period)
+ val share = if (featData.isEmpty) 0D else featData.getDoubleValue("share_" + period)
+ val return_ = if (featData.isEmpty) 0D else featData.getDoubleValue("return_" + period)
+ val view_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("view_hasreturn_" + period)
+ val share_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("share_hasreturn_" + period)
+ val ros = if (featData.isEmpty) 0D else featData.getDoubleValue("ros_" + period)
+ val rov = if (featData.isEmpty) 0D else featData.getDoubleValue("rov_" + period)
+ val r_cnt = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt_" + period)
+ val r_rate = if (featData.isEmpty) 0D else featData.getDoubleValue("r_rate_" + period)
+ val r_cnt4s = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt4s_" + period)
+ val str = if (featData.isEmpty) 0D else featData.getDoubleValue("str_" + period)
+ // scale
+ // val view_s = RankExtractorFeature_20240530.calLog(view)
+ val share_s = RankExtractorFeature_20240530.calLog(share)
+ val return_s = RankExtractorFeature_20240530.calLog(return_)
+ val view_hasreturn_s = RankExtractorFeature_20240530.calLog(view_hasreturn)
+ val share_hasreturn_s = RankExtractorFeature_20240530.calLog(share_hasreturn)

- /*
+ // featureMap.put(featType + "_" + period + "_" + "view", view_s)
+ featureMap.put(featType + "_" + period + "_" + "share", share_s)
+ featureMap.put(featType + "_" + period + "_" + "return", return_s)
+ featureMap.put(featType + "_" + period + "_" + "view_hasreturn", view_hasreturn_s)
+ featureMap.put(featType + "_" + period + "_" + "share_hasreturn", share_hasreturn_s)
+ featureMap.put(featType + "_" + period + "_" + "ros", ros)
+ featureMap.put(featType + "_" + period + "_" + "rov", rov)
+ featureMap.put(featType + "_" + period + "_" + "r_cnt", r_cnt)
+ featureMap.put(featType + "_" + period + "_" + "r_rate", r_rate)
+ featureMap.put(featType + "_" + period + "_" + "r_cnt4s", r_cnt4s)
+ featureMap.put(featType + "_" + period + "_" + "str", str)
+ }
+ }

+ // new cf
+ val d2345Data = List(
+ ("d2", "rosn", getJsonObject(record, "d2_feature")),
+ ("d3", "rosn", getJsonObject(record, "d3_feature")),
+ ("d4", "rovn", getJsonObject(record, "d4_feature")),
+ ("d5", "rovn", getJsonObject(record, "d5_feature"))
+ )
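+ // For each non-empty dX block: log-scale exp and return_n, and copy its rosn/rovn value straight into the feature map.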
+ for ((featType, valType, featData) <- d2345Data) {
+ if (featData.nonEmpty) {
+ val exp = if (featData.containsKey("exp")) featData.getString("exp").toDouble else 0D
+ val return_n = if (featData.containsKey("return_n")) featData.getString("return_n").toDouble else 0D
+ val value = if (featData.containsKey(valType)) featData.getString(valType).toDouble else 0D
+ // scale
+ val exp_s = RankExtractorFeature_20240530.calLog(exp)
+ val return_n_s = RankExtractorFeature_20240530.calLog(return_n)
+ featureMap.put(featType + "_exp", exp_s)
+ featureMap.put(featType + "_return_n", return_n_s)
+ featureMap.put(featType + "_" + valType, value)
+ }
+ }

- Video:
- exposure uses PV, share uses PV, return uses UV --> 1h 2h 3h 4h 12h 1d 3d 7d
- STR log(share) ROV log(return) ROV*log(return)
- 40 feature combinations
- overall, overall exposure counterpart, recommended non-cold-start root, recommended cold-start root, per-province root
- 200 feature values
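+ // d6_feature stores parallel "vids"/"scores" lists; look up the score of the current vid (see getJsonObject(obj, keyName, valueName) below).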
+ if (!vid.equals("")) {
+ val idScoreObj = getJsonObject(getJsonObject(record, "d6_feature"), "vids", "scores")
+ if (idScoreObj.nonEmpty && idScoreObj.containsKey(vid)) {
+ val score = idScoreObj.getString(vid).toDouble
+ featureMap.put("d6", score)
+ }
+ }

- Video:
- video duration, bit rate
+ // head video & rank video
+ val headVideo = getJsonObject(record, "v2_feature")
+ val rankVideo = getJsonObject(record, "v1_feature")
+ if (headVideo.nonEmpty && rankVideo.nonEmpty) {
+ val videoAttrs = List("title", "topic", "keywords", "cate1_list", "cate2", "cate2_list", "style", "theme", "user_value")
+ for (attr <- videoAttrs) {
+ val headAttr = if (headVideo.containsKey(attr)) headVideo.getString(attr) else ""
+ val rankAttr = if (rankVideo.containsKey(attr)) rankVideo.getString(attr) else ""
+ if (!headAttr.equals("") && !rankAttr.equals("")) {
+ val simScore = SimilarityUtils.word2VecSimilarity(headAttr, rankAttr)
+ featureMap.put("video_sim_" + attr, simScore)
+ }
+ }
+ }

- User:
- play count --> 6h 1d 3d 7d --> 4 features
- share PV that brought returns, return UV --> 12h 1d 3d 7d --> 8 features
- User + vid title:
- played / returned / shared / cumulative shares / cumulative returns --> 1d 3d 7d --> match count, max semantic similarity, average semantic similarity --> 45 features
- User + vid CF
- based on share behavior / based on return behavior --> "share CF" + "return-click CF": similarity score, similar count, reciprocal of similarity rank --> 12 features
+ /*
+ Video features: 5*6*8 = 240
+ exposure uses PV, share uses PV, return uses UV --> 1h 2h 3h 4h 12h 1d 3d 7d
+ STR log(share) ROV log(return) ROV*log(return) ROS
+ overall, overall exposure counterpart, recommended non-cold-start root, recommended cold-start root, per-province root
+ Video basics: 2 (video duration, bit rate)
+ User: 4+8 = 12
+ play count --> 6h 1d 3d 7d --> 4 features
+ share PV that brought returns, return UV --> 12h 1d 3d 7d --> 8 features
+ User + vid title: 5*3*3 = 45
+ played / returned / shared / cumulative shares / cumulative returns --> 1d 3d 7d --> match count, max semantic similarity, average semantic similarity --> 45 features
+ User + vid CF: 2*2*3 = 12
+ based on share behavior / based on return behavior --> "share CF" + "return-click CF": similarity score, similar count, reciprocal of similarity rank --> 12 features
+ Head video: 3
+ exposure, return, ROVn --> 3 features
+ Context: hour, weekday, apptype, city, province, pagesource, device model
+ Total: 240+2+12+45+12+3 = 314
+ ---------------------------------------------------------------
+ Video features: (4*7+3*2+2*4)*10 = 420
+ CF: 13
+ Video similarity features: 9

- Head video:
- exposure, return, ROVn: 3 features

- Context:
- hour, weekday, apptype, city, province, pagesource, device model
- */
+ */

- //4 Build the label info.
- val labels = new JSONObject
- for (labelKey <- List(
- "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
- "share_pv", "total_share_uv"
- )) {
- if (!record.isNull(labelKey)) {
- labels.put(labelKey, record.getString(labelKey))
+ //4 Build the label info.
+ val labels = new JSONObject
+ for (labelKey <- List(
+ "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
+ "share_pv", "total_share_uv", "view_24h", "total_return_uv_new"
+ )) {
+ if (!record.isNull(labelKey)) {
+ labels.put(labelKey, record.getString(labelKey))
+ }
}
- }
- //5 Build the log key header.
- val apptype = record.getString("apptype")
- val pagesource = record.getString("pagesource")
- val mid = record.getString("mid")
- // vid was already extracted above
- val ts = record.getString("ts")
- val abcode = record.getString("abcode")
- val level = if (record.isNull("level")) "0" else record.getString("level")
- val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
- val labelKey = labels.toString()
- val featureKey = featureMap.toString()
- //6 Concatenate the pieces and save.
- logKey + "\t" + labelKey + "\t" + featureKey
+ //5 Build the log key header.
+ val apptype = record.getString("apptype")
+ val pagesource = record.getString("pagesource")
+ val mid = record.getString("mid")
+ // vid was already extracted above
+ val ts = record.getString("ts")
+ val abcode = record.getString("abcode")
+ val level = if (record.isNull("level")) "0" else record.getString("level")
+ val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
+ val labelKey = labels.toString()
+ // val featureKey = featureMap.toString()
+ val featureKey = truncateDecimal(featureMap).toString()
+ //6 Concatenate the pieces and save.
+ logKey + "\t" + labelKey + "\t" + featureKey

+ })
})

// 4 Save data to HDFS
@@ -247,7 +329,7 @@ object makedata_recsys_61_originData_20241209 {
if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
println("删除路径并开始数据写入:" + hdfsPath)
MyHdfsUtils.delete_hdfs_path(hdfsPath)
- odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+ odpsData.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
} else {
println("路径不合法,无法写入:" + hdfsPath)
}

@@ -258,7 +340,55 @@ object makedata_recsys_61_originData_20241209 {
record
}

- def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
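+ // Parse a string column into a JSONObject; entries whose value is null are dropped.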
+ def getJsonObject(record: Record, key: String): JSONObject = {
+ val data = if (record.isNull(key)) new JSONObject() else JSON.parseObject(record.getString(key))
+ val data2 = new JSONObject()
+ data.foreach(r => {
+ if (r._2 != null) {
+ data2.put(r._1, r._2)
+ }
+ })
+ data2
+ }
+
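+ // Zip two comma-separated parallel lists stored under keyName/valueName (e.g. "vids" and "scores") into key -> value pairs.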
+ def getJsonObject(obj: JSONObject, keyName: String, valueName: String): JSONObject = {
+ val map = new JSONObject()
+ if (obj.nonEmpty) {
+ val keys = if (obj.containsKey(keyName)) obj.getString(keyName) else ""
+ val values = if (obj.containsKey(valueName)) obj.getString(valueName) else ""
+ if (!keys.equals("") && !values.equals("")) {
+ val key_list = keys.split(",")
+ val value_list = values.split(",")
+ if (key_list.length == value_list.length) {
+ for (index <- 0 until key_list.length) {
+ map.put(key_list(index), value_list(index))
+ }
+ }
+ }
+ }
+ map
+ }
+
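+ // Keep integral values as-is, round fractional values to scale (default 6) decimal places; keys whose value cannot be read as a double are logged and skipped.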
+ def truncateDecimal(obj: JSONObject, scale: Int = 6): JSONObject = {
+ val data = new JSONObject()
+ for (key <- obj.keySet()) {
+ try {
+ val value = obj.getDoubleValue(key)
+ if (value == value.floor) {
+ data.put(key, value)
+ } else {
+ val newValue = BigDecimal(value).setScale(scale, BigDecimal.RoundingMode.HALF_UP).toDouble
+ data.put(key, newValue)
+ }
+ } catch {
+ case e: Exception => System.err.println(e.getMessage)
+ }
+ }
+ data
+ }
+
+
+ def funcC34567ForTagsW2V(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
// match count, matched words, max semantic similarity score, average semantic similarity score
val tagsList = tags.split(",")
var d1 = 0.0
@@ -270,7 +400,7 @@ object makedata_recsys_61_originData_20241209 {
d1 = d1 + 1.0
d2.add(tag)
}
- val score = Similarity.conceptSimilarity(tag, title)
+ val score = SimilarityUtils.word2VecSimilarity(tag, title)
d3 = if (score > d3) score else d3
d4 = d4 + score
}