package com.aliyun.odps.spark.examples.makedata

import com.aliyun.odps.TableSchema
import com.aliyun.odps.data.Record
import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils}
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.sql.SparkSession

import java.util
import scala.collection.JavaConversions._
import com.alibaba.fastjson.JSON
import examples.dataloader.OfflineVlogShareLRFeatureExtractorV2
import examples.dataloader.OfflineVlogShareLRFeatureExtractor

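/**
 * Reads sampled rows ("key \t label-JSON \t feature-JSON") from readPath for each
 * date partition, keeps a whitelist of context/user/item features, feeds them to
 * the v1 or v2 LR feature extractor, and writes gzip-compressed
 * "label \t featureId:1 ..." lines to savePath.
 */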
object makedata_07_strData {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .getOrCreate()
    val sc = spark.sparkContext

    // 1 Read job parameters
    val param = ParamUtils.parseArgs(args)
    val partitionPrefix = param.getOrElse("partitionPrefix", "dt=")
    val beginStr = param.getOrElse("beginStr", "20230101")
    val endStr = param.getOrElse("endStr", "20230101")
    val readPath = param.getOrElse("readPath", "/dw/recommend/model/00_sample_data/")
    val savePath = param.getOrElse("savePath", "/dw/recommend/model/01_str_data/")
    val featureVersion = param.getOrElse("featureVersion", "v2")
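
    // Hypothetical launch, for illustration only; the "key:value" argument
    // format is an assumption, the real contract lives in ParamUtils.parseArgs:
    //   spark-submit --class com.aliyun.odps.spark.examples.makedata.makedata_07_strData \
    //     <assembly jar> beginStr:20230601 endStr:20230607 featureVersion:v2 \
    //     readPath:/dw/recommend/model/00_sample_data/ savePath:/dw/recommend/model/01_str_data/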

    // 3 Loop over the date range and produce data for each partition
    val dateRange = MyDateUtils.getDateRange(beginStr, endStr)
    for (date <- dateRange) {
      val partition = partitionPrefix + date
      println("Processing partition: " + partition)
      var hdfsPath = readPath + "/" + partition
      val data = sc.textFile(hdfsPath).map(r => {
        // Fields are tab-separated; field 1 holds the label JSON, field 2 the feature JSON
        val rList = r.split("\t")
        val labelStr = rList(1)
        val feaStr = rList(2)
        val labelJson = JSON.parseObject(labelStr)
        // "is_share" is the binary label; default to "0" when absent
        val label = if (labelJson.containsKey("is_share")) labelJson.getString("is_share") else "0"
        val feaJson = JSON.parseObject(feaStr)
        // Whitelist of features to forward to the extractor
        val feaSet = Set(
          "ctx_week", "ctx_hour", "ctx_region", "ctx_city",
          "machineinfo_brand", "machineinfo_model", "machineinfo_platform", "machineinfo_system",
          "u_1day_exp_cnt", "u_1day_click_cnt", "u_1day_share_cnt", "u_1day_return_cnt",
          "u_3day_exp_cnt", "u_3day_click_cnt", "u_3day_share_cnt", "u_3day_return_cnt",
          "total_time", "play_count_total",
          "i_1day_exp_cnt", "i_1day_click_cnt", "i_1day_share_cnt", "i_1day_return_cnt",
          "i_3day_exp_cnt", "i_3day_click_cnt", "i_3day_share_cnt", "i_3day_return_cnt",
          "u_1day_ctr", "u_1day_str", "u_1day_rov", "u_1day_ros",
          "u_3day_ctr", "u_3day_str", "u_3day_rov", "u_3day_ros",
          "i_1day_ctr", "i_1day_str", "i_1day_rov", "i_1day_ros",
          "i_3day_ctr", "i_3day_str", "i_3day_rov", "i_3day_ros",

          "share_uv_list_1day_6_avg", "share_uv_list_1day_6_var", "share_uv_list_1day_diff_6_avg", "share_uv_list_1day_diff_6_var",
          "return_uv_list_1day_6_avg", "return_uv_list_1day_6_var", "return_uv_list_1day_diff_6_avg", "return_uv_list_1day_diff_6_var",
          "share_uv_list_1h_6_avg", "share_uv_list_1h_6_var", "share_uv_list_1h_diff_6_avg", "share_uv_list_1h_diff_6_var",
          "return_uv_list_1h_6_avg", "return_uv_list_1h_6_var", "return_uv_list_1h_diff_6_avg", "return_uv_list_1h_diff_6_var",

          "view_pv_list_1day", "view_uv_list_1day", "play_pv_list_1day", "play_uv_list_1day",
          "share_pv_list_1day", "share_uv_list_1day", "return_uv_list_1day",
          "p_view_uv_list_1day", "p_view_pv_list_1day", "p_return_uv_list_1day",
          "share_uv_list_2day", "share_pv_list_2day", "share_uv_list_3day", "share_pv_list_3day",

          "view_uv_list_1h", "view_pv_list_1h", "play_uv_list_1h", "play_pv_list_1h",
          "share_uv_list_1h", "share_pv_list_1h", "return_uv_list_1h", "p_return_uv_list_1h",

          "i_1day_ctr_rt", "i_1day_str_rt", "i_1day_ros_rt", "i_1day_rov_rt",
          "i_1h_ctr_rt", "i_1h_str_rt", "i_1h_ros_rt", "i_1h_rov_rt"
        )
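        // Note: this Set literal is rebuilt for every record; hoisting it to a
        // val outside the map (or the date loop) would avoid that without
        // changing behavior.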
        // Copy the whitelisted features present in this record
        val feaMap = new util.HashMap[String, String]()
        feaSet.foreach(r => {
          if (feaJson.containsKey(r)) {
            feaMap.put(r, feaJson.getString(r))
          }
        })
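
        // OfflineVlogShareLRFeatureExtractor (v1) and OfflineVlogShareLRFeatureExtractorV2
        // (v2) turn the raw feature map into LR feature identifiers; v2 is the default.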
        if ("v1".equals(featureVersion)) {
          val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractor()
          bytesFeatureExtractor.makeFeature4String(feaMap)
          val featureMap = bytesFeatureExtractor.featureMap
          label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
        } else if ("v2".equals(featureVersion)) {
          val bytesFeatureExtractor = new OfflineVlogShareLRFeatureExtractorV2()
          bytesFeatureExtractor.makeFeature4String(feaMap)
          val featureMap = bytesFeatureExtractor.featureMap
          label + "\t" + featureMap.entries().map(r => r.getValue.getIdentifier + ":1").mkString("\t")
        } else {
          // Without this branch the if-expression types as Any and unknown
          // versions would be written out as "()"; fail fast instead.
          throw new IllegalArgumentException("Unsupported featureVersion: " + featureVersion)
        }
      })
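      // Each emitted line is "<label>\t<id>:1\t<id>:1...": sparse binary
      // features keyed by the extractor's identifiers.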
      // 4 Save the data to HDFS
      hdfsPath = savePath + "/" + partition
      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
        println("Deleting existing path before writing: " + hdfsPath)
        MyHdfsUtils.delete_hdfs_path(hdfsPath)
        data.saveAsTextFile(hdfsPath, classOf[GzipCodec])
      } else {
        println("Invalid path, refusing to write: " + hdfsPath)
      }
    }
  }

  // Pass-through record transform (currently unused in this job)
  def func(record: Record, schema: TableSchema): Record = {
    record
  }

}