|
@@ -1,492 +0,0 @@
|
|
|
-package com.aliyun.odps.spark.examples.makedata_recsys_r_rate
|
|
|
-
|
|
|
-import com.alibaba.fastjson.{JSON, JSONObject}
|
|
|
-import com.aliyun.odps.TableSchema
|
|
|
-import com.aliyun.odps.data.Record
|
|
|
-import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
|
|
|
-import examples.extractor.RankExtractorFeature_20240530
|
|
|
-import examples.utils.SimilarityUtils
|
|
|
-import org.apache.hadoop.io.compress.GzipCodec
|
|
|
-import org.apache.spark.sql.SparkSession
|
|
|
-
|
|
|
-import java.util.Calendar
|
|
|
-import scala.collection.JavaConversions._
|
|
|
-import scala.collection.mutable.ArrayBuffer
|
|
|
-import scala.util.Random
|
|
|
-
|
|
|
-/*
|
|
|
- 20250109 提取特征
|
|
|
- */
|
|
|
-
|
|
|
-object makedata_recsys_72_originData_20250109 {
|
|
|
  /**
   * Entry point: extracts recommendation ranking features ("20250109" schema)
   * from an ODPS sample table, one dt/hh partition at a time, and writes one
   * gzip-compressed text dataset per hour to HDFS.
   *
   * Output line format: logKey \t labels-json \t scoresMap-json \t features-json
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 read job parameters (all defaulted so the job can run standalone)
    val param = ParamUtils.parseArgs(args)
    val project = param.getOrElse("project", "loghubods")
    val table = param.getOrElse("table", "alg_recsys_sample_tmp_20250109")
    val tablePart = param.getOrElse("tablePart", "64").toInt
    val beginStr = param.getOrElse("beginStr", "2025010723")
    val endStr = param.getOrElse("endStr", "2025010723")
    val whatLabel = param.getOrElse("whatLabel", "is_return_1")
    val fuSampleRate = param.getOrElse("fuSampleRate", "0.1").toDouble
    val repartition = param.getOrElse("repartition", "32").toInt
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/72_origin_data/")

    // 2 odps handle
    val odpsOps = env.getODPS(sc)

    // 3 produce data for every hourly partition in [beginStr, endStr]
    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
    for (dt_hh <- timeRange) {
      val dt = dt_hh.substring(0, 8)
      val hh = dt_hh.substring(8, 10)
      val partition = s"dt=$dt,hh=$hh"
      println("开始执行partiton:" + partition)
      val odpsData = odpsOps.readTable(project = project,
        table = table,
        partition = partition,
        transfer = func,
        numPartition = tablePart)
        .filter(record => {
          // keep every positive sample; down-sample negatives at fuSampleRate
          val label = getStringValue(record, whatLabel, "0")
          "1".equals(label) || new Random().nextDouble() <= fuSampleRate
        })
        .mapPartitions(p => {
          // word2vec model must be initialised once per partition (executor side)
          SimilarityUtils.init()
          p.map(record => {
            val featureMap = new JSONObject()
            val metaData = getJsonObject(record, "metafeaturemap")
            // a. video stat features: online meta map wins, offline column is the fallback
            val b1: JSONObject = getJsonObject(metaData, "alg_vid_feature_all_exp_v2", record, "b1_feature")
            val b2: JSONObject = getJsonObject(metaData, "alg_vid_feature_all_share", record, "b2_feature")
            val b3: JSONObject = getJsonObject(metaData, "alg_vid_feature_all_return", record, "b3_feature")
            val b6: JSONObject = getJsonObject(metaData, "alg_vid_feature_exp2share_v2", record, "b6_feature")
            val b7: JSONObject = getJsonObject(metaData, "alg_vid_feature_share2return", record, "b7_feature")

            val b8: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_noflow_exp_v2", record, "b8_feature")
            val b9: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_noflow_root_share_v2", record, "b9_feature")
            val b10: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_noflow_root_return_v2", record, "b10_feature")
            val b11: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_flow_exp_v2", record, "b11_feature")
            val b12: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_flow_root_share_v2", record, "b12_feature")
            val b13: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_flow_root_return_v2", record, "b13_feature")
            val b17: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_province_exp_v2", record, "b17_feature")
            val b18: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_province_root_share_v2", record, "b18_feature")
            val b19: JSONObject = getJsonObject(metaData, "alg_vid_feature_feed_province_root_return_v2", record, "b19_feature")

            // (exposure, share, return) triples; prefix names the group
            val origin_data = List(
              (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
              (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
              (b17, b18, b19, "b171819")
            )
            // for each group and each time window emit 6 derived stats
            for ((b_1, b_2, b_3, prefix1) <- origin_data) {
              for (prefix2 <- List(
                "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
              )) {
                val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
                val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
                val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
                val f1 = RankExtractorFeature_20240530.calDiv(share, exp)      // STR = share / exposure
                val f2 = RankExtractorFeature_20240530.calLog(share)
                val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)    // ROV = return / exposure
                val f4 = RankExtractorFeature_20240530.calLog(returns)
                val f5 = f3 * f4
                val f6 = RankExtractorFeature_20240530.calDiv(returns, share)  // ROS = return / share
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
                featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROS", f6)
              }
            }

            // video basics: duration and bit rate
            val video_info: JSONObject = getJsonObject(metaData, "alg_vid_feature_basic_info", record, "t_v_info_feature")
            featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
            featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)

            // c1: user play counts per window
            val c1: JSONObject = getJsonObject(metaData, "alg_mid_feature_play", record, "c1_feature")
            if (c1.nonEmpty) {
              featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
              featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
              featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
              featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
            }
            // c2: user share pv / return uv per window
            val c2: JSONObject = getJsonObject(metaData, "alg_mid_feature_share_and_return", record, "c2_feature")
            if (c2.nonEmpty) {
              featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
              featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
              featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
              featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
              featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
              featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
              featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
              featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
            }

            // c3..c7: user-tag vs video-title semantic similarity (skipped when title is empty)
            val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
            if (!title.equals("")) {
              val c34567List = List(
                ("c3_feature", getJsonObject(metaData, "alg_mid_feature_play_tags")),
                ("c4_feature", getJsonObject(metaData, "alg_mid_feature_return_tags")),
                ("c5_feature", getJsonObject(metaData, "alg_mid_feature_share_tags")),
                ("c6_feature", getJsonObject(metaData, "alg_mid_feature_feed_exp_share_tags_v2")),
                ("c7_feature", getJsonObject(metaData, "alg_mid_feature_feed_exp_return_tags_v2"))
              )
              for ((key_feature, c34567) <- c34567List) {
                for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
                  val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
                  if (!tags.equals("")) {
                    // f2 (the matched words themselves) is deliberately not emitted as a feature
                    val (f1, f2, f3, f4) = funcC34567ForTagsW2V(tags, title)
                    featureMap.put(key_feature + "_" + key_time + "_matchnum", f1)
                    featureMap.put(key_feature + "_" + key_time + "_maxscore", f3)
                    featureMap.put(key_feature + "_" + key_time + "_avgscore", f4)
                  }
                }
              }
            }

            // c8/c9: item-CF lists looked up by the current vid
            val vid = if (record.isNull("vid")) "" else record.getString("vid")
            if (!vid.equals("")) {
              val c89List = List(
                ("c8_feature", getJsonObject(metaData, "alg_mid_feature_sharecf")),
                ("c9_feature", getJsonObject(metaData, "alg_mid_feature_returncf"))
              )
              for ((key_feature, c89) <- c89List) {
                for (key_action <- List("share", "return")) {
                  val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
                  if (!cfListStr.equals("")) {
                    // list format: "vid:score:num:rank,vid:score:num:rank,..."
                    val cfMap = cfListStr.split(",").map(r => {
                      val rList = r.split(":")
                      (rList(0), (rList(1), rList(2), rList(3)))
                    }).toMap
                    if (cfMap.contains(vid)) {
                      val (score, num, rank) = cfMap(vid)
                      featureMap.put(key_feature + "_" + key_action + "_score", score.toDouble)
                      featureMap.put(key_feature + "_" + key_action + "_num", num.toDouble)
                      // reciprocal rank so that a better (smaller) rank yields a larger feature
                      featureMap.put(key_feature + "_" + key_action + "_rank", 1.0 / rank.toDouble)
                    }
                  }
                }
              }
            }

            // d1: i2i CF aggregate stats (stored as strings)
            val d1: JSONObject = getJsonObject(metaData, "alg_recsys_feature_cf_i2i_new_v2", record, "d1_feature")
            if (d1.nonEmpty) {
              featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
              featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
              featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
            }

            // ************* new feature *************
            val shortPeriod = List("1h", "2h", "4h", "6h", "12h", "24h", "7d")
            val middlePeriod = List("14d", "30d")
            val longPeriod = List("7d", "35d", "90d", "365d")
            val vidStatFeat = List(
              ("b20", shortPeriod, getJsonObject(record, "b20_feature")), // cate2_feature
              ("b21", shortPeriod, getJsonObject(record, "b21_feature")), // cate1_feature
              ("b22", shortPeriod, getJsonObject(record, "b22_feature")), // source_feature
              ("b28", shortPeriod, getJsonObject(record, "b28_feature")), // sence_type_feature
              ("b29", shortPeriod, getJsonObject(record, "b29_feature")), // alg_videoid_feature
              ("b23", middlePeriod, getJsonObject(record, "b23_feature")), // cate2_feature_day
              ("b24", middlePeriod, getJsonObject(record, "b24_feature")), // cate1_feature_day
              ("b25", middlePeriod, getJsonObject(record, "b25_feature")), // source_feature_day
              ("b26", longPeriod, getJsonObject(record, "b26_feature")), // unionid_feature_day
              ("b27", longPeriod, getJsonObject(record, "b27_feature")) // vid_feature_day
            )
            for ((featType, featPeriod, featData) <- vidStatFeat) {
              for (period <- featPeriod) {
                // val view = if (featData.isEmpty) 0D else featData.getDoubleValue("view_" + period)
                val share = if (featData.isEmpty) 0D else featData.getDoubleValue("share_" + period)
                val return_ = if (featData.isEmpty) 0D else featData.getDoubleValue("return_" + period)
                val view_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("view_hasreturn_" + period)
                val share_hasreturn = if (featData.isEmpty) 0D else featData.getDoubleValue("share_hasreturn_" + period)
                val ros = if (featData.isEmpty) 0D else featData.getDoubleValue("ros_" + period)
                val rov = if (featData.isEmpty) 0D else featData.getDoubleValue("rov_" + period)
                val r_cnt = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt_" + period)
                val r_rate = if (featData.isEmpty) 0D else featData.getDoubleValue("r_rate_" + period)
                val r_cnt4s = if (featData.isEmpty) 0D else featData.getDoubleValue("r_cnt4s_" + period)
                val str = if (featData.isEmpty) 0D else featData.getDoubleValue("str_" + period)
                // scale: log-compress the raw counts; ratios stay as-is
                // val view_s = RankExtractorFeature_20240530.calLog(view)
                val share_s = RankExtractorFeature_20240530.calLog(share)
                val return_s = RankExtractorFeature_20240530.calLog(return_)
                val view_hasreturn_s = RankExtractorFeature_20240530.calLog(view_hasreturn)
                val share_hasreturn_s = RankExtractorFeature_20240530.calLog(share_hasreturn)

                // featureMap.put(featType + "_" + period + "_" + "view", view_s)
                featureMap.put(featType + "_" + period + "_" + "share", share_s)
                featureMap.put(featType + "_" + period + "_" + "return", return_s)
                featureMap.put(featType + "_" + period + "_" + "view_hasreturn", view_hasreturn_s)
                featureMap.put(featType + "_" + period + "_" + "share_hasreturn", share_hasreturn_s)
                featureMap.put(featType + "_" + period + "_" + "ros", ros)
                featureMap.put(featType + "_" + period + "_" + "rov", rov)
                featureMap.put(featType + "_" + period + "_" + "r_cnt", r_cnt)
                featureMap.put(featType + "_" + period + "_" + "r_rate", r_rate)
                featureMap.put(featType + "_" + period + "_" + "r_cnt4s", r_cnt4s)
                featureMap.put(featType + "_" + period + "_" + "str", str)
              }
            }

            // new cf: d2/d3 are share-denominated (rosn), d4/d5 exposure-denominated (rovn)
            val d2345Data = List(
              ("d2", "rosn", getJsonObject(record, "d2_feature")),
              ("d3", "rosn", getJsonObject(record, "d3_feature")),
              ("d4", "rovn", getJsonObject(record, "d4_feature")),
              ("d5", "rovn", getJsonObject(record, "d5_feature"))
            )
            for ((featType, valType, featData) <- d2345Data) {
              if (featData.nonEmpty) {
                val expKey = if (valType.equals("rosn")) "share" else "exp"
                val exp = if (featData.containsKey(expKey)) featData.getString(expKey).toDouble else 0D
                val return_n = if (featData.containsKey("return_n")) featData.getString("return_n").toDouble else 0D
                val value = if (featData.containsKey(valType)) featData.getString(valType).toDouble else 0D
                // scale
                val exp_s = RankExtractorFeature_20240530.calLog(exp)
                val return_n_s = RankExtractorFeature_20240530.calLog(return_n)
                featureMap.put(featType + "_exp", exp_s)
                featureMap.put(featType + "_return_n", return_n_s)
                featureMap.put(featType + "_" + valType, value)
              }
            }

            // d6: per-vid score from the parallel "vids"/"scores" CSV pair
            if (!vid.equals("")) {
              val idScoreObj = getJsonObject(getJsonObject(record, "d6_feature"), "vids", "scores")
              if (idScoreObj.nonEmpty && idScoreObj.containsKey(vid)) {
                val score = idScoreObj.getString(vid).toDouble
                featureMap.put("d6", score)
              }
            }

            // head video & rank video: w2v similarity over shared textual attributes
            val headVideo = getJsonObject(record, "v2_feature")
            val rankVideo = video_info
            if (headVideo.nonEmpty && rankVideo.nonEmpty) {
              val videoAttrs = List("title", "topic", "keywords", "cate1_list", "cate2", "cate2_list", "style", "theme", "user_value")
              for (attr <- videoAttrs) {
                val headAttr = if (headVideo.containsKey(attr)) headVideo.getString(attr) else ""
                val rankAttr = if (rankVideo.containsKey(attr)) rankVideo.getString(attr) else ""
                if (headAttr.nonEmpty && !headAttr.equals("unknown") && rankAttr.nonEmpty && !rankAttr.equals("unknown")) {
                  val simScore = SimilarityUtils.word2VecSimilarity(headAttr, rankAttr)
                  featureMap.put("video_sim_" + attr, simScore)
                }
              }
            }

            // time features (only emitted when ts parses into the sanity window)
            val ts = record.getString("ts")
            val calendar = tsToCalendar(ts)
            if (null != calendar) {
              val week = calendar.get(Calendar.DAY_OF_WEEK)
              val hour = calendar.get(Calendar.HOUR_OF_DAY) + 1 // shift to 1..24 so 0 stays a "missing" value
              featureMap.put("week", week)
              featureMap.put("hour", hour)
            }

            /*
             Feature inventory (translated from the original Chinese notes):
             Video features: 5 groups * 8 windows * 6 stats = 240
               exposure uses pv, share uses pv, return uses uv --> 1h 2h 3h 4h 12h 1d 3d 7d
               STR log(share) ROV log(return) ROV*log(return) ROS
               groups: overall, overall exposure-matched, feed non-cold-start root,
                       feed cold-start root, per-province root
             Video basics: 2 (duration, bit rate)
             User: 4 + 8 = 12
               play counts --> 6h 1d 3d 7d --> 4
               brought-back share pv / return uv --> 12h 1d 3d 7d --> 8
             User + vid-title: 5*3*3 = 45
               play/return/share/accumulated-share/accumulated-return tags --> 1d 3d 7d
               --> match count, max semantic similarity, mean semantic similarity --> 45
             User + vid-cf: 2*3*3 = 12
               share-based / return-based --> "share cf" + "return-click cf"
               --> similarity score, similar count, reciprocal rank --> 12
             Head video: 3 (exposure, return, ROVn)
             Context: hour, weekday, apptype, city, province, pagesource, device model
             Total: 240 + 2 + 12 + 45 + 12 + 3 = 314
             ---------------------------------------------------------------
             New video stat features: (4*7 + 3*2 + 2*4) * 10 = 420
             CF: 13
             Video similarity features: 9
             */

            // 4 collect label columns into a JSON payload
            val labels = new JSONObject
            for (labelKey <- List("is_share", "share_cnt", "is_return_1", "return_1_uv", "is_return_n", "return_n_uv", "is_return_noself", "return_1_uv_noself")) {
              if (!record.isNull(labelKey)) {
                labels.put(labelKey, record.getString(labelKey))
              }
            }
            // 5 build the log-key header (comma-joined, fixed field order)
            val apptype = record.getString("apptype")
            val page = getStringValue(record, "page")
            val pagesource = getStringValue(record, "pagesource")
            val recommendpagetype = getStringValue(record, "recommendpagetype")
            val flowpool = getStringValue(record, "flowpool")
            val abcode = record.getString("abcode")
            val mid = record.getString("mid")
            val level = getStringValue(record, "level", "0")
            val logKey = (apptype, page, pagesource, recommendpagetype, flowpool, abcode, mid, vid, level, ts).productIterator.mkString(",")
            val labelKey = labels.toString()
            val featureKey = truncateDecimal(featureMap).toString()
            val scoresMap = getSubJson(record, "extend_alg", "scoresMap").toString()
            // 6 assemble the tab-separated output line
            logKey + "\t" + labelKey + "\t" + scoresMap + "\t" + featureKey
          })
        })

      // 4 save to hdfs; the path prefix is whitelisted to guard the delete
      val savePartition = dt + hh
      val hdfsPath = savePath + "/" + savePartition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
        println("删除路径并开始数据写入:" + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        odpsData.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
      } else {
        println("路径不合法,无法写入:" + hdfsPath)
      }
    }
  }
|
|
|
-
|
|
|
- def func(record: Record, schema: TableSchema): Record = {
|
|
|
- record
|
|
|
- }
|
|
|
-
|
|
|
- def getJsonObject(record: Record, key: String): JSONObject = {
|
|
|
- val data = if (record.isNull(key)) new JSONObject() else JSON.parseObject(record.getString(key))
|
|
|
- val data2 = new JSONObject()
|
|
|
- data.foreach(r => {
|
|
|
- if (r._2 != null) {
|
|
|
- data2.put(r._1, r._2)
|
|
|
- }
|
|
|
- })
|
|
|
- data2
|
|
|
- }
|
|
|
-
|
|
|
- def getJsonObject(obj: JSONObject, key: String): JSONObject = {
|
|
|
- if (obj.nonEmpty) {
|
|
|
- if (obj.containsKey(key)) {
|
|
|
- val data = obj.getJSONObject(key)
|
|
|
- val validData = new JSONObject()
|
|
|
- data.foreach(r => {
|
|
|
- if (r._2 != null) {
|
|
|
- validData.put(r._1, r._2)
|
|
|
- }
|
|
|
- })
|
|
|
- return validData
|
|
|
- }
|
|
|
- }
|
|
|
- new JSONObject()
|
|
|
- }
|
|
|
-
|
|
|
- def getJsonObject(obj: JSONObject, onlineKey: String, record: Record, offlineKey: String): JSONObject = {
|
|
|
- var data: JSONObject = null
|
|
|
- if (obj.nonEmpty) {
|
|
|
- if (obj.containsKey(onlineKey)) {
|
|
|
- data = obj.getJSONObject(onlineKey)
|
|
|
- }
|
|
|
- } else {
|
|
|
- if (!record.isNull(offlineKey)) {
|
|
|
- data = JSON.parseObject(record.getString(offlineKey))
|
|
|
- }
|
|
|
- }
|
|
|
- if (null != data && data.nonEmpty) {
|
|
|
- val validData = new JSONObject()
|
|
|
- data.foreach(r => {
|
|
|
- if (r._2 != null) {
|
|
|
- validData.put(r._1, r._2)
|
|
|
- }
|
|
|
- })
|
|
|
- return validData
|
|
|
- }
|
|
|
- new JSONObject()
|
|
|
- }
|
|
|
-
|
|
|
- def getJsonObject(obj: JSONObject, keyName: String, valueName: String): JSONObject = {
|
|
|
- val map = new JSONObject()
|
|
|
- if (obj.nonEmpty) {
|
|
|
- val keys = if (obj.containsKey(keyName)) obj.getString(keyName) else ""
|
|
|
- val values = if (obj.containsKey(valueName)) obj.getString(valueName) else ""
|
|
|
- if (!keys.equals("") && !values.equals("")) {
|
|
|
- val key_list = keys.split(",")
|
|
|
- val value_list = values.split(",")
|
|
|
- if (key_list.length == value_list.length) {
|
|
|
- for (index <- 0 until key_list.length) {
|
|
|
- map.put(key_list(index), value_list(index))
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
- map
|
|
|
- }
|
|
|
-
|
|
|
- def truncateDecimal(obj: JSONObject, scale: Int = 6): JSONObject = {
|
|
|
- val data = new JSONObject()
|
|
|
- for (key <- obj.keySet()) {
|
|
|
- try {
|
|
|
- val value = obj.getDoubleValue(key)
|
|
|
- if (value == value.floor) {
|
|
|
- data.put(key, value)
|
|
|
- } else {
|
|
|
- val newValue = BigDecimal(value).setScale(scale, BigDecimal.RoundingMode.HALF_UP).toDouble
|
|
|
- data.put(key, newValue)
|
|
|
- }
|
|
|
- } catch {
|
|
|
- case e: Exception => System.err.println(e.getMessage)
|
|
|
- }
|
|
|
- }
|
|
|
- data
|
|
|
- }
|
|
|
-
|
|
|
- def funcC34567ForTagsW2V(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
|
|
|
- // 匹配数量 匹配词 语义最高相似度分 语义平均相似度分
|
|
|
- val tagsList = tags.split(",")
|
|
|
- var d1 = 0.0
|
|
|
- val d2 = new ArrayBuffer[String]()
|
|
|
- var d3 = 0.0
|
|
|
- var d4 = 0.0
|
|
|
- for (tag <- tagsList) {
|
|
|
- if (title.contains(tag)) {
|
|
|
- d1 = d1 + 1.0
|
|
|
- d2.add(tag)
|
|
|
- }
|
|
|
- val score = SimilarityUtils.word2VecSimilarity(tag, title)
|
|
|
- d3 = if (score > d3) score else d3
|
|
|
- d4 = d4 + score
|
|
|
- }
|
|
|
- d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
|
|
|
- (d1, d2.mkString(","), d3, d4)
|
|
|
- }
|
|
|
-
|
|
|
- def getSubJson(record: Record, key1: String, key2: String): JSONObject = {
|
|
|
- if (!record.isNull(key1)) {
|
|
|
- val obj = JSON.parseObject(record.getString(key1))
|
|
|
- if (obj.nonEmpty && obj.containsKey(key2)) {
|
|
|
- val data = obj.getString(key2)
|
|
|
- return JSON.parseObject(data.replace("\\", ""))
|
|
|
- }
|
|
|
- }
|
|
|
- new JSONObject()
|
|
|
- }
|
|
|
-
|
|
|
- def getStringValue(record: Record, key: String, default: String = ""): String = {
|
|
|
- if (!record.isNull(key)) {
|
|
|
- val value = record.getString(key)
|
|
|
- return value.trim
|
|
|
- }
|
|
|
- default
|
|
|
- }
|
|
|
-
|
|
|
- def tsToCalendar(ts: String): Calendar = {
|
|
|
- if (null != ts && ts.nonEmpty) {
|
|
|
- val ms = ts.toLong * 1000L
|
|
|
- if (ms > 1107658247000L && ms < 2685495047000L) {
|
|
|
- val calendar = Calendar.getInstance
|
|
|
- calendar.setTimeInMillis(ms)
|
|
|
- return calendar
|
|
|
- }
|
|
|
- }
|
|
|
- null
|
|
|
- }
|
|
|
-}
|