@@ -0,0 +1,119 @@
+package com.aliyun.odps.spark.examples.makedata_dssm
+
+import com.alibaba.fastjson.JSON
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
+
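+/**
+ * Builds the i2i onehot id file: loads the previous feature-to-id mapping from
+ * readPath (if any), scans the dt partition of t_vid_tag_feature for feature
+ * values not yet in the mapping, assigns them fresh enum ids per feature
+ * prefix, and writes the merged mapping to savePath.
+ */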
+object makedata_i2i_03_onehotFile_20241128 {
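+  // Identity transfer for odpsOps.readTable: keep the raw Record and
+  // extract the needed columns downstream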
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Parse job arguments
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val dt = param.getOrElse("dt", "20240620")
+    val readPath = param.getOrElse("readPath", "/dw/recommend/model/53_dssm_i2i_onehot/20240101")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/53_dssm_i2i_onehot/20250101")
+    val project = param.getOrElse("project", "loghubods")
+    val repartition = param.getOrElse("repartition", "100").toInt
+    val ifDebug = param.getOrElse("ifDebug", "false").toBoolean
+
+    // 2 Set up ODPS access for reading the feature table
+    val odpsOps = env.getODPS(sc)
+
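+    // Each line of the onehot file is "<prefix>:<raw value>\t<enum id>";
+    // e.g. a hypothetical line "cate1:Food\t3" maps value Food of cate1 to id 3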
+    // 3 Sparse vid features: load the existing onehot mapping from HDFS, if present
+    val onehotMap = if (MyHdfsUtils.hdfs_exits(readPath)) {
+      val onehot = sc.textFile(readPath).map(r => {
+        val rList = r.split("\t")
+        (rList(0), rList(1))
+      })
+      val onehotMapOldStatic = onehot.map(_._1.split(":")(0)).map(r => (r, 1)).reduceByKey(_ + _).collect().sortBy(-_._2)
+      println(s"Read path: $readPath\nRecord count: " + onehot.count())
+      println("Enum-value count per feature prefix:")
+      onehotMapOldStatic.foreach(r => println(r.productIterator.mkString("\t")))
+      mutable.Map(onehot.collectAsMap().toSeq: _*)
+    } else {
+      mutable.Map[String, String]()
+    }
+
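+    // Read the dt partition of t_vid_tag_feature and flatten each vid's JSON
+    // feature blob into "prefix:value" strings, one per recognized tag field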
+    val vidStaticFeature = odpsOps.readTable(project = project,
+      table = "t_vid_tag_feature",
+      partition = s"dt=$dt",
+      transfer = func,
+      numPartition = tablePart)
+      .map(record => {
+        val vid = record.getString("vid")
+        val feature = record.getString("feature")
+        (vid, feature)
+      }).flatMap {
+        case (vid, feature) =>
+          val result = new ArrayBuffer[String]()
+          result += "vid:" + vid
+          JSON.parseObject(feature).foreach(r => {
+            // "无" ("none") is the placeholder written for null tag values
+            val value = if (r._2 == null) "无" else r._2.toString
+            r._1 match {
+              case "category1" => result += "cate1:" + value
+              case "category2_1" => result += "cate2:" + value
+              case "category2_2" => result += "cate2:" + value
+              case "category2_3" => result += "cate2:" + value
+              case "valid_time" => result += "valid_time:" + value
+              case "timeliness" => result += "timeliness:" + value
+              case "sentiment_tendency" => result += "sentiment_tendency:" + value
+              case "has_end_credit_guide" => result += "has_end_credit_guide:" + value
+              case "background_music_type" => result += "background_music_type:" + value
+              case "cover_persons_num" => result += "cover_persons_num:" + value
+              case "captions" => result += "captions:" + value
+              case "captions_color" => result += "captions_color:" + value
+              case "audience_value_type" => result += "audience_value_type:" + value
+              case "audience_gender" => result += "audience_gender:" + value
+              case "audience_age_group" => result += "audience_age_group:" + value
+              case "font_size" => result += "font_size:" + value
+              case "video_type" => result += "video_type:" + value
+              case "video_style" => result += "video_style:" + value
+              case _ =>
+            }
+          })
+          result.distinct
+      }.distinct().collect()
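+    // 4 Seed maxEnumMap with the current max enum id per feature prefix, then
+    // assign the next free id to any feature string missing from onehotMap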
+    val maxEnumMap = mutable.Map[String, Int]()
+    onehotMap.foreach { case (key, value) =>
+      val prefix = key.split(":")(0)
+      val currentMax = maxEnumMap.getOrElse(prefix, 0)
+      maxEnumMap(prefix) = Math.max(currentMax, value.toInt)
+    }
+    vidStaticFeature.foreach { feature =>
+      val prefix = feature.split(":")(0)
+      if (!onehotMap.contains(feature)) {
+        val newEnumValue = maxEnumMap.getOrElse(prefix, 0) + 1
+        maxEnumMap(prefix) = newEnumValue
+        onehotMap(feature) = newEnumValue.toString
+      }
+    }
+
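+    // 5 Persist the merged mapping as "<feature>\t<enum id>" lines, gzip-compressed;
+    // the path-prefix check guards against deleting an arbitrary HDFS path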
+    val dataRdd = sc.parallelize(onehotMap.toSeq.map(_.productIterator.mkString("\t")))
+    val hdfsPath = savePath
+    if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+      println("Deleting existing path and writing data: " + hdfsPath)
+      MyHdfsUtils.delete_hdfs_path(hdfsPath)
+      dataRdd.repartition(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+    } else {
+      println("Invalid path, refusing to write: " + hdfsPath)
+    }
+  }
+}