zhangbo 1 năm trước
mục cha
commit
e1812decfc

+ 1 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_01_rosHdfsFromTable.scala → src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_04_rosHdfsFromTablev1.scala

@@ -9,7 +9,7 @@ import org.apache.spark.sql.SparkSession
 
 import scala.collection.JavaConversions._
 
-object makedata_01_rosHdfsFromTable {
+object makedata_04_rosHdfsFromTablev1 {
   def main(args: Array[String]) {
     val spark = SparkSession
       .builder()

+ 106 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_04_rosHdfsFromTablev2.scala

@@ -0,0 +1,106 @@
+package com.aliyun.odps.spark.examples.makedata
+
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.dataloader.{OfflineVlogShareLRFeatureExtractor, RequestContextOffline}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+
object makedata_04_rosHdfsFromTablev2 {

  /**
   * Builds ROS (share / return) LR training samples from an ODPS table and
   * writes them to HDFS as gzip-compressed text, one output directory per
   * date partition.
   *
   * Args (key=value pairs, parsed by ParamUtils):
   *   tablePart       - number of Spark partitions per ODPS read (default 64)
   *   partitionPrefix - ODPS partition spec prefix (default "dt=")
   *   beginStr/endStr - inclusive yyyyMMdd date range
   *   savePath        - HDFS output root; must live under /dw/recommend/model/
   *   project/table   - ODPS source table location
   */
  def main(args: Array[String]) {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 read job arguments
    val param = ParamUtils.parseArgs(args)
    val tablePart = param.getOrElse("tablePart", "64").toInt
    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
    val beginStr = param.getOrElse("beginStr", "20230101")
    val endStr = param.getOrElse("endStr", "20230101")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/ros_sample_v2/")
    val project = param.getOrElse("project", "loghubods")
    val table = param.getOrElse("table", "alg_recsys_view_sample")


    // 2 obtain the ODPS handle
    val odpsOps = env.getODPS(sc)

    // 3 generate samples for every date in the range
    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
    for (date <- dateRange) {
      val partition = partitionPrefix + date
      println("执行partiton:" + partition)
      val odpsData = odpsOps.readTable(project = project,
        table = table,
        partition = partition,
        transfer = func,
        numPartition = tablePart)
        .filter{
          case record =>
            // keep only records where share_ornot == "0" (shared views,
            // judging by the field name — TODO confirm the flag semantics)
            val not_share: String = record.getString("share_ornot")
            "0".equals(not_share)
        }
        .flatMap(record =>{
          // BUG FIX: the original `ArrayBuffer()` inferred ArrayBuffer[Nothing],
          // which cannot hold (Record, String) tuples. Give it an explicit
          // element type, and append with idiomatic `+=` instead of the
          // JavaConversions-backed `.add`.
          val res = ArrayBuffer[(Record, String)]()
          val hour = record.getString("ctx_hour").toInt
          hour match {
            // hour 23 yields no samples — presumably because the 1-hour
            // return window would cross into the next partition; confirm.
            case 23 => res
            case _ =>
              // every kept record contributes one negative sample
              res += ((record, "0"))
              val label_return = record.getString("return_ornot")
              val expTs = record.getString("view_logtimestamp").toLong / 1000
              if ("0".equals(label_return)) {
                if (!record.isNull("machinecode_clienttimestamp")) {
                  // NOTE(review): one positive sample is appended PER return
                  // timestamp within the hour window — confirm duplicates are
                  // intended (they act as implicit sample weighting).
                  record.getString("machinecode_clienttimestamp").split(",")
                    .map(r => r.split(":")(1).toLong / 1000)
                    .foreach(ts=>{
                      if (ts - expTs < 3600){
                        res += ((record, "1"))
                      }
                    })
                }
              }
              res
          }
        })
        .map{
          case (record, label) =>
            singleParse(record, label)
      }

      // 4 write to HDFS; path prefix guard prevents deleting arbitrary dirs
      val hdfsPath = savePath + "/" + partition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")){
        println("删除路径并开始数据写入:" + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        // cache so count() below does not recompute the whole pipeline
        odpsData.cache()
        odpsData.saveAsTextFile(hdfsPath, classOf[GzipCodec])
        println("写入数据量:" + odpsData.count())
        odpsData.unpersist()
      }else{
        println("路径不合法,无法写入:" + hdfsPath)
      }
    }
  }

  /** Identity transfer function: hands the raw ODPS Record through unchanged. */
  def func(record: Record, schema: TableSchema): Record = {
    record
  }

  /**
   * Extracts user/item/scene features for one record and renders a single
   * training line: "<label>\t<featureId>:1\t<featureId>:1...".
   */
  def singleParse(record: Record, label: String): String = {
    // 2 build the feature context from the record
    val reqContext: RequestContextOffline = new RequestContextOffline()
    reqContext.putUserFeature(record)
    reqContext.putItemFeature(record)
    reqContext.putSceneFeature(record)
    val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractor()
    bytesFeatureExtractor.makeFeature(reqContext.featureMap)
    val featureMap = bytesFeatureExtractor.featureMap
    label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
  }
}