
Check the data using metafeaturemap

zhangbo committed 11 months ago
cb79f5f980

+ 256 - 0
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_13_originData_20240529_check.scala

@@ -0,0 +1,256 @@
+package com.aliyun.odps.spark.examples.makedata
+
+import com.alibaba.fastjson.{JSON, JSONObject}
+import com.aliyun.odps.TableSchema
+import com.aliyun.odps.data.Record
+import com.aliyun.odps.spark.examples.myUtils.{MyDateUtils, MyHdfsUtils, ParamUtils, env}
+import examples.extractor.RankExtractorFeature_20240530
+import org.apache.hadoop.io.compress.GzipCodec
+import org.apache.spark.sql.SparkSession
+import org.xm.Similarity
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.ArrayBuffer
+/*
+   20240608 Feature extraction
+ */
+
+object makedata_13_originData_20240529_check {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession
+      .builder()
+      .appName(this.getClass.getName)
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    // 1 Read parameters
+    val param = ParamUtils.parseArgs(args)
+    val tablePart = param.getOrElse("tablePart", "64").toInt
+    val beginStr = param.getOrElse("beginStr", "2023010100")
+    val endStr = param.getOrElse("endStr", "2023010123")
+    val savePath = param.getOrElse("savePath", "/dw/recommend/model/13_sample_data/")
+    val project = param.getOrElse("project", "loghubods")
+    val table = param.getOrElse("table", "XXXX")
+    val repartition = param.getOrElse("repartition", "100").toInt
+
+    // 2 Read ODPS and table info
+    val odpsOps = env.getODPS(sc)
+
+    // 3 Loop over hourly partitions and produce data
+    val timeRange = MyDateUtils.getDateHourRange(beginStr, endStr)
+    for (dt_hh <- timeRange) {
+      val dt = dt_hh.substring(0, 8)
+      val hh = dt_hh.substring(8, 10)
+      val partition = s"dt=$dt,hh=$hh"
+      println("开始执行partiton:" + partition)
+      val odpsData = odpsOps.readTable(project = project,
+        table = table,
+        partition = partition,
+        transfer = func,
+        numPartition = tablePart)
+        .map(record_ => {
+          // Parse the metafeaturemap column (a JSON map of sub-feature JSON strings);
+          // fall back to an empty object when the column is null.
+          val record = if (record_.isNull("metafeaturemap")) new JSONObject() else
+            JSON.parseObject(record_.getString("metafeaturemap"))
+
+          val featureMap = new JSONObject()
+
+          // a Video features: parse each stats JSON when present, else use an empty object.
+          // (The original branches were inverted: a present key yielded an empty object
+          // and a missing key was parsed, losing features or NPE-ing downstream.)
+          val b1: JSONObject = if (record.containsKey("alg_vid_feature_all_exp"))
+            JSON.parseObject(record.getString("alg_vid_feature_all_exp")) else new JSONObject()
+          val b2: JSONObject = if (record.containsKey("alg_vid_feature_all_share"))
+            JSON.parseObject(record.getString("alg_vid_feature_all_share")) else new JSONObject()
+          val b3: JSONObject = if (record.containsKey("alg_vid_feature_all_return"))
+            JSON.parseObject(record.getString("alg_vid_feature_all_return")) else new JSONObject()
+          val b6: JSONObject = if (record.containsKey("alg_vid_feature_exp2share"))
+            JSON.parseObject(record.getString("alg_vid_feature_exp2share")) else new JSONObject()
+          val b7: JSONObject = if (record.containsKey("alg_vid_feature_share2return"))
+            JSON.parseObject(record.getString("alg_vid_feature_share2return")) else new JSONObject()
+
+          val b8: JSONObject = if (record.containsKey("alg_vid_feature_feed_noflow_exp"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_noflow_exp")) else new JSONObject()
+          val b9: JSONObject = if (record.containsKey("alg_vid_feature_feed_noflow_root_share"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_noflow_root_share")) else new JSONObject()
+          val b10: JSONObject = if (record.containsKey("alg_vid_feature_feed_noflow_root_return"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_noflow_root_return")) else new JSONObject()
+          val b11: JSONObject = if (record.containsKey("alg_vid_feature_feed_flow_exp"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_flow_exp")) else new JSONObject()
+          val b12: JSONObject = if (record.containsKey("alg_vid_feature_feed_flow_root_share"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_flow_root_share")) else new JSONObject()
+          val b13: JSONObject = if (record.containsKey("alg_vid_feature_feed_flow_root_return"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_flow_root_return")) else new JSONObject()
+          val b17: JSONObject = if (record.containsKey("alg_vid_feature_feed_province_exp"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_province_exp")) else new JSONObject()
+          val b18: JSONObject = if (record.containsKey("alg_vid_feature_feed_province_root_share"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_province_root_share")) else new JSONObject()
+          val b19: JSONObject = if (record.containsKey("alg_vid_feature_feed_province_root_return"))
+            JSON.parseObject(record.getString("alg_vid_feature_feed_province_root_return")) else new JSONObject()
+
+
+          val origin_data = List(
+            (b1, b2, b3, "b123"), (b1, b6, b7, "b167"),
+            (b8, b9, b10, "b8910"), (b11, b12, b13, "b111213"),
+            (b17, b18, b19, "b171819")
+          )
+          for ((b_1, b_2, b_3, prefix1) <- origin_data) {
+            for (prefix2 <- List(
+              "1h", "2h", "3h", "4h", "12h", "1d", "3d", "7d"
+            )) {
+              val exp = if (b_1.isEmpty) 0D else b_1.getIntValue("exp_pv_" + prefix2).toDouble
+              val share = if (b_2.isEmpty) 0D else b_2.getIntValue("share_pv_" + prefix2).toDouble
+              val returns = if (b_3.isEmpty) 0D else b_3.getIntValue("return_uv_" + prefix2).toDouble
+              val f1 = RankExtractorFeature_20240530.calDiv(share, exp)    // STR: shares per exposure
+              val f2 = RankExtractorFeature_20240530.calLog(share)         // log(share)
+              val f3 = RankExtractorFeature_20240530.calDiv(returns, exp)  // ROV: returns per exposure
+              val f4 = RankExtractorFeature_20240530.calLog(returns)       // log(return)
+              val f5 = f3 * f4                                             // ROV * log(return)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "STR", f1)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(share)", f2)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV", f3)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "log(return)", f4)
+              featureMap.put(prefix1 + "_" + prefix2 + "_" + "ROV*log(return)", f5)
+            }
+          }
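+          // Worked example with hypothetical numbers, assuming calDiv(a, b) is a
+          // zero-guarded a / b: exp=100, share=10, returns=5 for ("b123", "1h")
+          // gives b123_1h_STR=0.1, b123_1h_ROV=0.05, and
+          // b123_1h_ROV*log(return) = 0.05 * calLog(5).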
+
+          val video_info: JSONObject = if (record.containsKey("alg_vid_feature_basic_info"))
+            JSON.parseObject(record.getString("alg_vid_feature_basic_info")) else new JSONObject()
+          featureMap.put("total_time", if (video_info.containsKey("total_time")) video_info.getIntValue("total_time").toDouble else 0D)
+          featureMap.put("bit_rate", if (video_info.containsKey("bit_rate")) video_info.getIntValue("bit_rate").toDouble else 0D)
+
+          val c1: JSONObject = if (record.containsKey("alg_mid_feature_play"))
+            JSON.parseObject(record.getString("alg_mid_feature_play")) else new JSONObject()
+          if (c1.nonEmpty) {
+            featureMap.put("playcnt_6h", if (c1.containsKey("playcnt_6h")) c1.getIntValue("playcnt_6h").toDouble else 0D)
+            featureMap.put("playcnt_1d", if (c1.containsKey("playcnt_1d")) c1.getIntValue("playcnt_1d").toDouble else 0D)
+            featureMap.put("playcnt_3d", if (c1.containsKey("playcnt_3d")) c1.getIntValue("playcnt_3d").toDouble else 0D)
+            featureMap.put("playcnt_7d", if (c1.containsKey("playcnt_7d")) c1.getIntValue("playcnt_7d").toDouble else 0D)
+          }
+          val c2: JSONObject = if (record.containsKey("alg_mid_feature_share_and_return"))
+            JSON.parseObject(record.getString("alg_mid_feature_share_and_return")) else new JSONObject()
+          if (c2.nonEmpty) {
+            featureMap.put("share_pv_12h", if (c2.containsKey("share_pv_12h")) c2.getIntValue("share_pv_12h").toDouble else 0D)
+            featureMap.put("share_pv_1d", if (c2.containsKey("share_pv_1d")) c2.getIntValue("share_pv_1d").toDouble else 0D)
+            featureMap.put("share_pv_3d", if (c2.containsKey("share_pv_3d")) c2.getIntValue("share_pv_3d").toDouble else 0D)
+            featureMap.put("share_pv_7d", if (c2.containsKey("share_pv_7d")) c2.getIntValue("share_pv_7d").toDouble else 0D)
+            featureMap.put("return_uv_12h", if (c2.containsKey("return_uv_12h")) c2.getIntValue("return_uv_12h").toDouble else 0D)
+            featureMap.put("return_uv_1d", if (c2.containsKey("return_uv_1d")) c2.getIntValue("return_uv_1d").toDouble else 0D)
+            featureMap.put("return_uv_3d", if (c2.containsKey("return_uv_3d")) c2.getIntValue("return_uv_3d").toDouble else 0D)
+            featureMap.put("return_uv_7d", if (c2.containsKey("return_uv_7d")) c2.getIntValue("return_uv_7d").toDouble else 0D)
+          }
+
+          val title = if (video_info.containsKey("title")) video_info.getString("title") else ""
+          if (!title.equals("")) {
+            for (key_feature <- List(("c3_feature", "alg_mid_feature_play_tags"),
+              ("c4_feature", "alg_mid_feature_play_tags"),
+              ("c5_feature", "alg_mid_feature_play_tags"),
+              ("c6_feature", "alg_mid_feature_play_tags"),
+              ("c7_feature", "alg_mid_feature_play_tags"))) {
+              val c34567: JSONObject = if (record.containsKey(key_feature._2))
+                JSON.parseObject(record.getString(key_feature._2)) else new JSONObject()
+              for (key_time <- List("tags_1d", "tags_3d", "tags_7d")) {
+                val tags = if (c34567.containsKey(key_time)) c34567.getString(key_time) else ""
+                if (!tags.equals("")) {
+                  val (f1, f2, f3, f4) = funcC34567ForTags(tags, title)
+                  featureMap.put(key_feature._1 + "_" + key_time + "_matchnum", f1)
+                  featureMap.put(key_feature._1 + "_" + key_time + "_maxscore", f3)
+                  featureMap.put(key_feature._1 + "_" + key_time + "_avgscore", f4)
+                }
+              }
+            }
+          }
+
+          val vid = if (record_.isNull("vid")) "" else record_.getString("vid")
+          if (!vid.equals("")) {
+            for (key_feature <- List(("c8_feature", "alg_mid_feature_sharecf"), ("c9_feature", "alg_mid_feature_returncf"))) {
+              val c89: JSONObject = if (record.containsKey(key_feature._2))
+                JSON.parseObject(record.getString(key_feature._2)) else new JSONObject()
+              for (key_action <- List("share", "return")) {
+                val cfListStr = if (c89.containsKey(key_action)) c89.getString(key_action) else ""
+                if (!cfListStr.equals("")) {
+                  // Each entry is "vid:score:num:rank"; build vid -> (score, num, rank).
+                  val cfMap = cfListStr.split(",").map(r => {
+                    val rList = r.split(":")
+                    (rList(0), (rList(1), rList(2), rList(3)))
+                  }).toMap
+                  if (cfMap.contains(vid)) {
+                    val (score, num, rank) = cfMap(vid)
+                    featureMap.put(key_feature._1 + "_" + key_action + "_score", score.toDouble)
+                    featureMap.put(key_feature._1 + "_" + key_action + "_num", num.toDouble)
+                    featureMap.put(key_feature._1 + "_" + key_action + "_rank", 1.0 / rank.toDouble)
+                  }
+                }
+              }
+            }
+          }
+
+          val d1: JSONObject = if (record.containsKey("alg_recsys_feature_cf_i2i_new"))
+            JSON.parseObject(record.getString("alg_recsys_feature_cf_i2i_new")) else new JSONObject()
+          if (d1.nonEmpty) {
+            featureMap.put("d1_exp", if (d1.containsKey("exp")) d1.getString("exp").toDouble else 0D)
+            featureMap.put("d1_return_n", if (d1.containsKey("return_n")) d1.getString("return_n").toDouble else 0D)
+            featureMap.put("d1_rovn", if (d1.containsKey("rovn")) d1.getString("rovn").toDouble else 0D)
+          }
+
+
+          // 4 Collect the label fields that are present.
+          val labels = new JSONObject
+          for (labelKey <- List(
+            "is_play", "is_share", "is_return", "noself_is_return", "return_uv", "noself_return_uv", "total_return_uv",
+            "share_pv", "total_share_uv"
+          )){
+            if (!record_.isNull(labelKey)){
+              labels.put(labelKey, record_.getString(labelKey))
+            }
+          }
+          // 5 Build the log-key header.
+          val apptype = record_.getString("apptype")
+          val pagesource = record_.getString("pagesource")
+          val mid = record_.getString("mid")
+          // vid was already extracted above
+          val ts = record_.getString("ts")
+          val abcode = record_.getString("abcode")
+          val level = if (record_.isNull("level")) "0" else record_.getString("level")
+          val logKey = (apptype, pagesource, mid, vid, ts, abcode, level).productIterator.mkString(",")
+          val labelKey = labels.toString()
+          val featureKey = featureMap.toString()
+          // 6 Join log key, labels, and features into one TSV line
+          //   (see the parsing sketch after this file).
+          logKey + "\t" + labelKey + "\t" + featureKey
+
+        })
+
+      // 4 Save the data to HDFS
+      val savePartition = dt + hh
+      val hdfsPath = savePath + "/" + savePartition
+      if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
+        println("Deleting existing path before writing: " + hdfsPath)
+        MyHdfsUtils.delete_hdfs_path(hdfsPath)
+        odpsData.coalesce(repartition).saveAsTextFile(hdfsPath, classOf[GzipCodec])
+      } else {
+        println("Invalid path, skipping write: " + hdfsPath)
+      }
+    }
+  }
+
+  // Identity transfer: hand each ODPS record through unchanged.
+  def func(record: Record, schema: TableSchema): Record = {
+    record
+  }
+  def funcC34567ForTags(tags: String, title: String): Tuple4[Double, String, Double, Double] = {
+    // Returns (match count, matched tags, max semantic similarity, mean semantic similarity).
+    val tagsList = tags.split(",")
+    var d1 = 0.0
+    val d2 = new ArrayBuffer[String]()
+    var d3 = 0.0
+    var d4 = 0.0
+    for (tag <- tagsList){
+      if (title.contains(tag)){
+        d1 = d1 + 1.0
+        d2 += tag
+      }
+      val score = Similarity.conceptSimilarity(tag, title)
+      d3 = if (score > d3) score else d3
+      d4 = d4 + score
+    }
+    d4 = if (tagsList.nonEmpty) d4 / tagsList.size else d4
+    (d1, d2.mkString(","), d3, d4)
+  }
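+  // Hypothetical example: funcC34567ForTags("cat,dog", "funny cat clip") yields
+  // (1.0, "cat", maxScore, avgScore): "cat" appears in the title, and the two
+  // scores are the max and mean of Similarity.conceptSimilarity(tag, title) over both tags.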
+}
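
Each line the job emits is logKey + "\t" + labels + "\t" + features: logKey is the
comma-joined (apptype, pagesource, mid, vid, ts, abcode, level) tuple, and the other two
parts are JSON strings. Below is a minimal sketch of how a downstream consumer might split
a line back apart; the object name and sample values are illustrative, not part of this commit.

package com.aliyun.odps.spark.examples.makedata

import com.alibaba.fastjson.JSON

object parseCheckLineSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical sample line in the format written by the job above.
    val line = "4,h5,mid123,vid456,1718000000,ab0,0" + "\t" +
      """{"is_share":"1","is_return":"0"}""" + "\t" +
      """{"total_time":12.0,"bit_rate":800.0}"""
    // Split into the three tab-separated parts, then decode each.
    val Array(logKey, labelStr, featureStr) = line.split("\t", -1)
    val Array(apptype, pagesource, mid, vid, ts, abcode, level) = logKey.split(",")
    val labels = JSON.parseObject(labelStr)
    val features = JSON.parseObject(featureStr)
    println(s"vid=$vid is_share=${labels.getString("is_share")} " +
      s"total_time=${features.getDoubleValue("total_time")}")
  }
}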

+ 9 - 6
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本

@@ -72,17 +72,20 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --master yarn --driver-memory 1G --executor-memory 2G --executor-cores 1 --num-executors 16 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
 tablePart:64 repartition:32 \
-beginStr:2024060816 endStr:2024060823 \
-table:alg_recsys_sample_all \
-> p13_2024060816.log 2>&1 &
+beginStr:2024061500 endStr:2024061523 \
+savePath:/dw/recommend/model/13_sample_data_check/ \
+table:alg_recsys_sample_all_new \
+> p13_2024061500_check.log 2>&1 &
 
 
 nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
 --class com.aliyun.odps.spark.examples.makedata.makedata_14_valueData_20240608 \
 --master yarn --driver-memory 1G --executor-memory 3G --executor-cores 1 --num-executors 32 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-beginStr:20240607 endStr:20240607 repartition:1000 \
-> p14_data.log 2>&1 &
+readPath:/dw/recommend/model/13_sample_data_check/ \
+savePath:/dw/recommend/model/14_feature_data_check/ \
+beginStr:20240615 endStr:20240615 repartition:1000 \
+> p14_data_check.log 2>&1 &
 
 
 nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
@@ -99,7 +102,7 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --class com.aliyun.odps.spark.examples.makedata.makedata_16_bucketData_20240609 \
 --master yarn --driver-memory 2G --executor-memory 4G --executor-cores 1 --num-executors 16 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-beginStr:20240607 endStr:20240607 repartition:1000 \
+beginStr:20240614 endStr:20240614 repartition:1000 \
 > p16_data.log 2>&1 &
 
 

+ 1 - 1
zhangbo/02_train_go.sh

@@ -22,4 +22,4 @@ while [[ "$current_date" != "$end_date" ]]; do
     current_date=$(date -d "$current_date + 1 day" +%Y%m%d)
 done
 
-# nohup sh 02_train_go.sh 20240607 20240608 model_aka0 /dw/recommend/model/16_train_data/ 1,1,0 >p2_model_aka0.log 2>&1 &
+# nohup sh 02_train_go.sh 20240614 20240615 model_aka8 /dw/recommend/model/16_train_data/ 1,1,8 >p2_model_aka8.log 2>&1 &

+ 8 - 4
zhangbo/03_predict.sh

@@ -11,8 +11,12 @@ HADOOP="/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop"
 $HADOOP fs -text ${train_path}/${day}/* | /root/sunmingze/alphaFM/bin/fm_predict -m model/$model_name -dim ${bias} -core 8 -out predict/${output_file}_$day.txt
 cat predict/${output_file}_$day.txt | /root/sunmingze/AUC/AUC
 
-# nohup sh 03_predict.sh 20240607 /dw/recommend/model/16_train_data/ model_fuck_20240606.txt model_fuck_20240606 8 >p3_model_fuck.log 2>&1 &
 
-# nohup sh 03_predict.sh 20240607 /dw/recommend/model/16_train_data/ model_aka0_20240606.txt model_aka0_20240606 0 >p3_model_aka0.log 2>&1 &
-# nohup sh 03_predict.sh 20240607 /dw/recommend/model/16_train_data/ model_aka4_20240606.txt model_aka4_20240606 4 >p3_model_aka4.log 2>&1 &
-# nohup sh 03_predict.sh 20240607 /dw/recommend/model/16_train_data/ model_aka8_20240606.txt model_aka8_20240606 8 >p3_model_aka8.log 2>&1 &
+# nohup sh 03_predict.sh 20240611 /dw/recommend/model/16_train_data/ model_aka0_20240610.txt model_aka0_20240610 0 >p3_model_aka0.log 2>&1 &
+# nohup sh 03_predict.sh 20240611 /dw/recommend/model/16_train_data/ model_aka4_20240610.txt model_aka4_20240610 4 >p3_model_aka4.log 2>&1 &
+# nohup sh 03_predict.sh 20240613 /dw/recommend/model/16_train_data/ model_aka8_20240612.txt model_aka8_20240612 8 >p3_model_aka8_12.log 2>&1 &
+
+
+
+
+# cat tmpfile | /root/sunmingze/alphaFM/bin/fm_predict -m model/model_aka8_20240608.txt -dim 8 -core 1 -out tmpfile_out.txt

+ 21 - 25
zhangbo/04_upload.sh

@@ -2,28 +2,24 @@
 cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240313.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240313_change.txt
 dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240313_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/video_str_model/model_str_mid.txt
 
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_sharev2_20231220.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_sharev2_20231220_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_sharev2_20240107_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/video_str_model/model_sharev2_20231220.txt
-
-
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20231220.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20231220_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20231220_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/video_str_model/
-
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20240106.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20240106_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_ros_v2_20240106_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/video_str_model/model_ros_v2_20231220_change.txt
-
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240112.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240112_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_str_mid_20240112_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/video_str_model/model_str_mid.txt
-
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_tom_20240225.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_tom_20240225_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_tom_20240225_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model_tom.txt
-
-cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_jerry_20240225.txt | sed '1d' | awk -F " " '{if($2!="0") print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_jerry_20240225_change.txt
-dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_jerry_20240225_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model_jerry.txt
-
-
+cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka0_20240608.txt | awk -F " " '{print $1"\t"$2}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka0_20240608_change.txt
+dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka0_20240608_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model_aka0.txt
+
+
+
+cat /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka8_20240608.txt |
+awk -F " " '{
+    if (NR == 1) {
+        # Header line: keep the first two columns.
+        print $1"\t"$2
+    } else {
+        # Keep at most the first 10 fields (feature name, linear weight, and
+        # the eight latent factors of the dim-8 model), joined by tabs.
+        line = ""
+        for (i = 1; i <= 10 && i <= NF; i++) {
+            line = (line ? line "\t" : "") $i
+        }
+        print line
+    }
+}' > /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka8_20240608_change.txt
+
+dfs -put /root/zhangbo/recommend-emr-dataprocess/zhangbo/model/model_aka8_20240608_change.txt oss://art-recommend.oss-cn-hangzhou.aliyuncs.com/zhangbo/model_aka8.txt