zhangbo 1 year ago
parent commit 79e5b41d50

+ 256 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_06_originData_v2.scala

@@ -0,0 +1,256 @@
+package com.aliyun.odps.spark.examples.makedata
+
+import com.alibaba.fastjson.JSONObject
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.{RankExtractorItemFeature, RankExtractorUserFeature}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import java.util
+import java.util.{HashMap, Map}
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+
+/*
+   Note: for every constructed feature, a raw value of 0.0 is treated as meaningless and is not kept; a 0.0 produced by a "change" transform is kept.
+ */
+
+object makedata_06_originData_v2 {
+  def main(args: Array[String]) {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read the job parameters
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "32").toInt
+    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
+    val beginStr = param.getOrElse("beginStr", "20230101")
+    val endStr = param.getOrElse("endStr", "20230101")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/00_sample_data/")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "alg_recsys_view_sample_v2")
+
+
+    // 2 Get the ODPS handle and table info
+    val odpsOps = env.getODPS(sc)
+
+    // 3 Loop over the date range and produce the data
+    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
+    for (date <- dateRange) {
+      val partition = partitionPrefix + date
+      println("执行partiton:" + partition)
+      val odpsData = odpsOps.readTable(project = project,
+        table = table,
+        partition = partition,
+        transfer = func,
+        numPartition = tablePart)
+        .map(record => {
+
+          val originFeatureName = Set(
+            "apptype", "logtimestamp", "clientip", "ctx_day", "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
+
+            "gender", "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_sdkversion",
+            "machineinfo_system", "machineinfo_wechatversion", "gmt_create_user",
+            "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+            "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+            "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
+            "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt",
+
+            "title", "tags", "total_time", "play_count_total",
+            "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+            "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+            "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
+            "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt"
+          )
+          val originFeatureMap = getFeatureFromSet(originFeatureName, record)
+
+          val itemRealtimeFeatureMap = getFeatureFromSet(Set(
+            "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
+            "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
+            "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
+            "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
+
+            "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
+            "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h"
+          ), record).map(r => {
+            val m = new java.util.HashMap[String, Double]()
+            r._2.split(",").foreach(r => {
+              m.put(r.split(":")(0), r.split(":")(1).toDouble)
+            })
+            (r._1, m)
+          })
+          val javaMap = new HashMap[String, Map[String, java.lang.Double]]()
+          itemRealtimeFeatureMap.foreach { case (key, value) =>
+            val javaValue = new HashMap[String, java.lang.Double]()
+            value.foreach { case (innerKey, innerValue) =>
+              javaValue.put(innerKey, innerValue.asInstanceOf[java.lang.Double])
+            }
+            javaMap.put(key, javaValue)
+          }
+
+          val f1 = getFeatureFromSet(Set(
+            "apptype", "logtimestamp", "clientip", "ctx_day", "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
+            "gender", "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_sdkversion",
+            "machineinfo_system", "machineinfo_wechatversion", "gmt_create_user",
+            "title", "tags"
+          ), record)
+          val f2 = RankExtractorUserFeature.getUserRateFeature(originFeatureMap)
+          val f3 = RankExtractorUserFeature.cntFeatureChange(originFeatureMap,
+            new util.HashSet[String](util.Arrays.asList(
+              "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+              "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+              "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
+              "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt"))
+          )
+          val f4 = RankExtractorItemFeature.getItemRateFeature(originFeatureMap)
+          val f5 = RankExtractorItemFeature.cntFeatureChange(originFeatureMap,
+            new util.HashSet[String](util.Arrays.asList(
+              "total_time", "play_count_total",
+              "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+              "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+              "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
+              "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt")))
+          val f6 = RankExtractorItemFeature.getItemRealtimeTrend(javaMap,
+            originFeatureMap.getOrElse("ctx_day", ""),
+            originFeatureMap.getOrElse("ctx_hour", ""))
+          val f7 = RankExtractorItemFeature.getItemRealtimeCnt(javaMap,
+            new util.HashSet[String](util.Arrays.asList(
+              "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
+              "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
+              "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
+              "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
+
+              "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
+              "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h"
+            )),
+            originFeatureMap.getOrElse("ctx_day", ""),
+            originFeatureMap.getOrElse("ctx_hour", "")
+          )
+          val f8 = RankExtractorItemFeature.getItemRealtimeRate(javaMap,
+            originFeatureMap.getOrElse("ctx_day", ""),
+            originFeatureMap.getOrElse("ctx_hour", "")
+          )
+
+          // 1: aggregate all features into one map
+          val result = new util.HashMap[String, String]()
+          result ++= f1
+          result ++= f2
+          result ++= f3
+          result ++= f4
+          result ++= f5
+          result ++= f6
+          result ++= f7
+          result ++= f8
+          val names = Set(
+            "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
+
+            "gender", "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_sdkversion",
+            "machineinfo_system", "machineinfo_wechatversion",
+
+            "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
+            "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
+            "u_7day_exp_cnt", "u_7day_click_cnt", "u_7day_share_cnt", "u_7day_return_cnt",
+            "u_3month_exp_cnt", "u_3month_click_cnt", "u_3month_share_cnt", "u_3month_return_cnt",
+
+            "title", "tags", "total_time", "play_count_total",
+            "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
+            "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
+            "i_7day_exp_cnt", "i_7day_click_cnt", "i_7day_share_cnt", "i_7day_return_cnt",
+            "i_3month_exp_cnt", "i_3month_click_cnt", "i_3month_share_cnt", "i_3month_return_cnt",
+
+            "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
+            "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
+            "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
+            "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",
+
+            "u_7day_ctr", "u_7day_str", "u_7day_rov", "u_7day_ros",
+            "u_3month_ctr", "u_3month_str", "u_3month_rov", "u_3month_ros",
+            "i_7day_ctr", "i_7day_str", "i_7day_rov", "i_7day_ros",
+            "i_3month_ctr", "i_3month_str", "i_3month_rov", "i_3month_ros",
+
+            "share_uv_list_1day_6_avg", "share_uv_list_1day_6_var", "share_uv_list_1day_diff_6_avg", "share_uv_list_1day_diff_6_var",
+            "return_uv_list_1day_6_avg", "return_uv_list_1day_6_var", "return_uv_list_1day_diff_6_avg", "return_uv_list_1day_diff_6_var",
+            "share_uv_list_1h_6_avg", "share_uv_list_1h_6_var", "share_uv_list_1h_diff_6_avg", "share_uv_list_1h_diff_6_var",
+            "return_uv_list_1h_6_avg", "return_uv_list_1h_6_var", "return_uv_list_1h_diff_6_avg", "return_uv_list_1h_diff_6_var",
+
+            "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
+            "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
+            "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
+            "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",
+
+            "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
+            "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h",
+
+            "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
+            "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
+
+
+          )
+          val resultNew = new JSONObject
+          names.foreach(r => {
+            if (result.containsKey(r)){
+              resultNew.put(r, result.get(r))
+            }
+          })
+          // 2: aggregate the labels into a map
+          val labels = Set(
+            "is_share", "is_return", "playtime",
+            "is_play",
+            "share_ts", "share_ts_list", "return_mid_ts_list"
+          )
+          val labelNew = new JSONObject
+          val labelMap = getFeatureFromSet(labels, record)
+          labels.foreach(r => {
+            if (labelMap.containsKey(r)) {
+              labelNew.put(r, labelMap.get(r).get)
+            }
+          })
+          // 3: build the unique log key
+          val mid = record.getString("mid")
+          val videoid = record.getString("videoid")
+          val logtimestamp = record.getString("logtimestamp")
+          val sessionid = record.getString("sessionid")
+
+          val logKey = (mid, videoid, logtimestamp, sessionid).productIterator.mkString(":")
+          val labelKey = labelNew.toString()
+          val featureKey = resultNew.toString()
+
+          logKey + "\t" + labelKey + "\t" + featureKey
+        })
+
+
+      // 4 Save the data to HDFS
+      val hdfsPath = savePath + "/" + partition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
+        println("删除路径并开始数据写入:" + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        odpsData.saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      }else{
+        println("路径不合法,无法写入:" + hdfsPath)
+      }
+    }
+  }
+
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+  def getFeatureFromSet(set: Set[String], record: Record): mutable.HashMap[String, String] = {
+    val result = mutable.HashMap[String, String]()
+    set.foreach(r =>{
+      if (!record.isNull(r)){
+        try{
+          // most columns are strings; fall back to bigint for numeric columns
+          result.put(r, record.getString(r))
+        }catch {
+          case _: Exception => result.put(r, String.valueOf(record.getBigint(r)))
+        }
+      }
+    })
+    result
+  }
+}
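
Not part of the commit: a minimal downstream sketch of how one output line of makedata_06_originData_v2 could be parsed. The line layout (logKey, label JSON, and feature JSON separated by tabs, with logKey = mid:videoid:logtimestamp:sessionid, written as gzip text files) is taken from the code above; the sample values and variable names are illustrative assumptions.

import com.alibaba.fastjson.JSON

// One text line as written by saveAsTextFile above (the sample values are made up).
val line = "mid123:vid456:1700000000000:sess789\t{\"is_share\":\"1\"}\t{\"ctx_hour\":\"21\"}"
val Array(logKey, labelStr, featureStr) = line.split("\t", 3)
val Array(mid, videoid, logtimestamp, sessionid) = logKey.split(":", 4)
val labels = JSON.parseObject(labelStr)     // e.g. labels.getString("is_share")
val features = JSON.parseObject(featureStr) // e.g. features.getString("ctx_hour")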

+ 5 - 0
zhangbo/50_delete_hdfs.sh

@@ -1,5 +1,10 @@
 #!/bin/sh
 
+export SPARK_HOME=/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8
+export PATH=$SPARK_HOME/bin:$PATH
+export HADOOP_CONF_DIR=/etc/taihao-apps/hadoop-conf
+export JAVA_HOME=/usr/lib/jvm/java-1.8.0
+
 DATE="$(date -d '8 days ago' +%Y%m%d)"
 HADOOP="/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop"