|
@@ -0,0 +1,244 @@
|
|
|
|
+package com.aliyun.odps.spark.examples.makedata
|
|
|
|
+
|
|
|
|
+import com.alibaba.fastjson.JSONObject
|
|
|
|
+import com.aliyun.odps.TableSchema
|
|
|
|
+import com.aliyun.odps.data.Record
|
|
|
|
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
|
|
|
|
+import examples.extractor.{RankExtractorItemFeature, RankExtractorUserFeature}
|
|
|
|
+import org.apache.hadoop.io.compress.GzipCodec
|
|
|
|
+import org.apache.spark.sql.SparkSession
|
|
|
|
+
|
|
|
|
+import java.util
|
|
|
|
+import java.util.{HashMap, Map}
|
|
|
|
+import scala.collection.JavaConversions._
|
|
|
|
+import scala.collection.mutable
|
|
|
|
+
|
|
|
|
/*
  所有获取不到的特征,给默认值0.
  (Any feature that cannot be fetched defaults to 0 — handled inside the extractors.)
 */
|
|
|
|
+
|
|
|
|
object makedata_10_originData_v3 {
  /**
   * Daily training-sample builder (v3).
   *
   * For every date in [beginStr, endStr] this job reads the matching partition of an
   * ODPS table, extracts raw + derived features from each record, and writes lines of
   *   logKey \t labelJson \t featureJson
   * to HDFS as gzip-compressed text. Features that cannot be fetched default to 0
   * (handled inside the Rank extractor utilities).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 Job parameters (all optional, with defaults).
    val param = ParamUtils.parseArgs(args)
    val tablePart = param.getOrElse("tablePart", "32").toInt
    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
    val beginStr = param.getOrElse("beginStr", "20230101")
    val endStr = param.getOrElse("endStr", "20230101")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/10_sample_data_v3/")
    val project = param.getOrElse("project", "loghubods")
    val table = param.getOrElse("table", "alg_recsys_view_sample_v3")

    // 2 ODPS handle.
    val odpsOps = env.getODPS(sc)

    // The four name sets below are loop-invariant; they are built once here instead of
    // once per record inside the map closure (Set[String] serializes fine to executors).

    // Raw columns copied straight from the ODPS record (absent columns => absent keys).
    val originFeatureName = Set(
      "apptype", "mid", "uid", "videoid", "logtimestamp", "ctx_day", "ctx_week", "ctx_hour", "clientip", "ctx_region",
      "ctx_city", "pagesource", "recommend_page_type", "pagesource_change", "abcode",
      // ---------- label-related columns ----------
      "playtime", "is_play", "share_cnt_pv", "is_share", "share_ts_list", "return_cnt_pv", "return_cnt_uv", "return_mid_ts_list", "is_return",
      // ---------- user-side columns ----------
      "gender", "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_sdkversion", "machineinfo_system", "machineinfo_wechatversion", "gmt_create_user",
      "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
      "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
      "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
      "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt",
      // ---------- item-side columns ----------
      "title", "distrubute_title", "gmt_create_video", "tags", "existence_days", "total_time", "play_count", "play_count_total", "video_recommend",
      "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
      "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
      "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
      "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt"
    )

    // Realtime item columns; each value is a "key:cnt,key:cnt,..." encoded series.
    val realtimeFeatureName = Set(
      "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day", "share_pv_list_1day",
      "share_uv_list_1day", "return_uv_list_1day", "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
      "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
      // ----------
      "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
      "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h"
    )

    // Whitelist of keys emitted into the feature JSON.
    val outputNames = Set(
      "apptype", "mid", "uid", "videoid", "logtimestamp", "ctx_day", "ctx_week", "ctx_hour", "clientip", "ctx_region",
      "ctx_city", "pagesource", "recommend_page_type", "pagesource_change", "abcode",
      // ----------
      "playtime", "is_play", "share_cnt_pv", "is_share", "share_ts_list", "return_cnt_pv", "return_cnt_uv", "return_mid_ts_list", "is_return",
      // ----------
      "gender", "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_sdkversion", "machineinfo_system", "machineinfo_wechatversion", "gmt_create_user",
      "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
      "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
      "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
      "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt",
      // ----------
      "title", "distrubute_title", "gmt_create_video", "tags", "existence_days", "total_time", "play_count", "play_count_total", "video_recommend",
      "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
      "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
      "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
      "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt",
      // ---------- day-level rate features (f2/f4) ----------
      "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
      "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
      "u_7day_ctr", "u_7day_str", "u_7day_rov", "u_7day_ros",
      "u_3month_ctr", "u_3month_str", "u_3month_rov", "u_3month_ros",
      // ----------
      "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
      "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
      "i_7day_ctr", "i_7day_str", "i_7day_rov", "i_7day_ros",
      "i_3month_ctr", "i_3month_str", "i_3month_rov", "i_3month_ros",
      // ---------- realtime trend features (f6) ----------
      "share_uv_list_1day_6_avg", "share_uv_list_1day_6_var", "share_uv_list_1day_diff_6_avg", "share_uv_list_1day_diff_6_var",
      "return_uv_list_1day_6_avg", "return_uv_list_1day_6_var", "return_uv_list_1day_diff_6_avg", "return_uv_list_1day_diff_6_var",
      "share_uv_list_1h_6_avg", "share_uv_list_1h_6_var", "share_uv_list_1h_diff_6_avg", "share_uv_list_1h_diff_6_var",
      "return_uv_list_1h_6_avg", "return_uv_list_1h_6_var", "return_uv_list_1h_diff_6_avg", "return_uv_list_1h_diff_6_var",
      // ---------- realtime count features (f7) ----------
      "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
      "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
      "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
      "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
      // ----------
      "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
      "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h",
      // ---------- realtime rate features (f8) ----------
      "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
      "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
    )

    // Label columns copied into the label JSON.
    val labelNames = Set(
      "pagesource", "recommend_page_type", "pagesource_change",
      "abcode",
      "is_play", "playtime",
      "is_share", "share_cnt_pv", "share_ts_list",
      "is_return", "return_cnt_pv", "return_cnt_uv", "return_mid_ts_list"
    )

    // 3 Build and save one partition per date.
    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
    for (date <- dateRange) {
      val partition = partitionPrefix + date
      println("开始执行partiton:" + partition)
      val odpsData = odpsOps.readTable(project = project,
        table = table,
        partition = partition,
        transfer = func,
        numPartition = tablePart)
        .map(record => {

          // 1 All raw features into a map; if the column is NULL the key is absent.
          val originFeatureMap = getFeatureFromSet(originFeatureName, record)

          // 2 Day-level rate features.
          val f2 = RankExtractorUserFeature.getUserRateFeature(originFeatureMap)
          val f4 = RankExtractorItemFeature.getItemRateFeature(originFeatureMap)

          // 3 Item realtime features: first parse the "k:v,k:v" encoding, then compute
          //   trend (f6), count (f7) and rate (f8) features.
          val itemRealtimeFeatureMap = getFeatureFromSet(realtimeFeatureName, record).map(kv => {
            val parsed = new java.util.HashMap[String, Double]()
            kv._2.split(",").foreach(pair => {
              parsed.put(pair.split(":")(0), pair.split(":")(1).toDouble)
            })
            (kv._1, parsed)
          })
          // Re-box into java.util maps because the extractors take Java collections.
          val javaMap = new HashMap[String, Map[String, java.lang.Double]]()
          itemRealtimeFeatureMap.foreach { case (key, value) =>
            val javaValue = new HashMap[String, java.lang.Double]()
            value.foreach { case (innerKey, innerValue) =>
              javaValue.put(innerKey, java.lang.Double.valueOf(innerValue))
            }
            javaMap.put(key, javaValue)
          }
          val ctxDay = originFeatureMap.getOrElse("ctx_day", "")
          val ctxHour = originFeatureMap.getOrElse("ctx_hour", "")
          val f6 = RankExtractorItemFeature.getItemRealtimeTrend(javaMap, ctxDay, ctxHour)
          val f7 = RankExtractorItemFeature.getItemRealtimeCnt(javaMap,
            // same 22 realtime columns; JavaConversions adapts the Scala Set to a java Collection
            new util.HashSet[String](realtimeFeatureName),
            ctxDay, ctxHour)
          val f8 = RankExtractorItemFeature.getItemRealtimeRate(javaMap, ctxDay, ctxHour)

          // Merge every feature family, then keep only whitelisted keys in the JSON.
          val result = new util.HashMap[String, String]()
          result ++= originFeatureMap
          result ++= f2
          result ++= f4
          result ++= f6
          result ++= f7
          result ++= f8
          val resultNew = new JSONObject
          outputNames.foreach(name => {
            if (result.containsKey(name)) {
              resultNew.put(name, result.get(name))
            }
          })

          // 4 Label info.
          val labelNew = new JSONObject
          val labelMap = getFeatureFromSet(labelNames, record)
          labelNames.foreach(name => {
            if (labelMap.containsKey(name)) {
              labelNew.put(name, labelMap(name))
            }
          })

          // 5 Log-key header columns.
          val mid = record.getString("mid")
          val videoid = record.getString("videoid")
          val logtimestamp = record.getString("logtimestamp")
          val apptype = record.getString("apptype")
          val pagesource_change = record.getString("pagesource_change")
          val abcode = record.getString("abcode")
          // BUG FIX: the original condition was inverted — it read the column only when
          // it was NULL (yielding null) and used the "111" fallback when a value existed.
          val video_recommend = if (!record.isNull("video_recommend")) record.getString("video_recommend") else "111"

          val logKey = (mid, videoid, logtimestamp, apptype, pagesource_change, abcode, video_recommend).productIterator.mkString(":")
          val labelKey = labelNew.toString()
          val featureKey = resultNew.toString()
          // 6 One output line per record.
          logKey + "\t" + labelKey + "\t" + featureKey

        })

      // 4 Save to HDFS; path prefix check guards against deleting unrelated directories.
      val hdfsPath = savePath + "/" + partition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
        println("删除路径并开始数据写入:" + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        odpsData.saveAsTextFile(hdfsPath, classOf[GzipCodec])
      }else{
        println("路径不合法,无法写入:" + hdfsPath)
      }
    }
  }
|
|
|
|
+
|
|
|
|
+ def func(record: Record, schema: TableSchema): Record = {
|
|
|
|
+ record
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ def getFeatureFromSet(set: Set[String], record: Record): mutable.HashMap[String, String] = {
|
|
|
|
+ val result = mutable.HashMap[String, String]()
|
|
|
|
+ set.foreach(r =>{
|
|
|
|
+ if (!record.isNull(r)){
|
|
|
|
+ try{
|
|
|
|
+ result.put(r, record.getString(r))
|
|
|
|
+ }catch {
|
|
|
|
+ case _ => result.put(r, String.valueOf(record.getBigint(r)))
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ })
|
|
|
|
+ result
|
|
|
|
+ }
|
|
|
|
+}
|