@@ -51,7 +51,8 @@ object pred_recsys_61_xgb_nor_hdfsfile_20241209 {
       DataTypes.createStructField("label", DataTypes.DoubleType, true)
     ) ++ features.map(f => DataTypes.createStructField(f, DataTypes.DoubleType, true))
     fields = fields ++ Array(
-      DataTypes.createStructField("logKey", DataTypes.StringType, true)
+      DataTypes.createStructField("logKey", DataTypes.StringType, true),
+      DataTypes.createStructField("scoresMap", DataTypes.StringType, true)
     )
 
     val schema = DataTypes.createStructType(fields)
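With scoresMap appended to the schema, every Row fed to spark.createDataFrame(testData, schema) now needs features.length + 3 values, ordered label, <features...>, logKey, scoresMap. A minimal sketch of a matching Row (the two feature values and all literals are made up):

    import org.apache.spark.sql.Row

    // Hypothetical two-feature row: label, f0, f1, logKey, scoresMap.
    val row = Row(1.0, 0.5, 2.0, "key-0001", "ctr:0.12")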
@@ -66,13 +67,13 @@ object pred_recsys_61_xgb_nor_hdfsfile_20241209 {
     )
 
     val testDataSet = spark.createDataFrame(testData, schema)
-    val testDataSetTrans = vectorAssembler.transform(testDataSet).select("features", "label", "logKey")
+    val testDataSetTrans = vectorAssembler.transform(testDataSet).select("features", "label", "logKey", "scoresMap")
     val predictions = model.transform(testDataSetTrans)
     val clipPrediction = getClipData(spark, predictions).persist()
 
-    val saveData = clipPrediction.select("label", "prediction", "clipPrediction", "logKey").rdd
+    val saveData = clipPrediction.select("label", "prediction", "clipPrediction", "logKey", "scoresMap").rdd
       .map(r => {
-        (r.get(0), r.get(1), r.get(2), r.get(3)).productIterator.mkString("\t")
+        (r.get(0), r.get(1), r.get(2), r.get(3), r.get(4)).productIterator.mkString("\t")
       })
     val hdfsPath = savePath
     if (hdfsPath.nonEmpty && hdfsPath.startsWith("/dw/recommend/model/")) {
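Each saved line is now five tab-separated fields: label, prediction, clipPrediction, logKey, scoresMap. A sketch of the mkString output with made-up values (only the tuple arity and separator match the job):

    val line = (1.0, 0.8374, 0.8374, "key-0001", "ctr:0.12")
      .productIterator.mkString("\t")
    // => "1.0\t0.8374\t0.8374\tkey-0001\tctr:0.12"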
@@ -109,36 +110,39 @@ object pred_recsys_61_xgb_nor_hdfsfile_20241209 {
       val line: Array[String] = StringUtils.split(r, '\t')
       val logKey = line(0)
       val label: Double = NumberUtils.toDouble(line(1))
+      val scoresMap = line(2)
       val map: util.Map[String, Double] = new util.HashMap[String, Double]
-      for (i <- 2 until line.length) {
+      for (i <- 3 until line.length) {
         val fv: Array[String] = StringUtils.split(line(i), ':')
         map.put(fv(0), NumberUtils.toDouble(fv(1), 0.0))
       }
 
-      val v: Array[Any] = new Array[Any](features.length + 2)
+      val v: Array[Any] = new Array[Any](features.length + 3)
       v(0) = label
       for (i <- 0 until features.length) {
         v(i + 1) = map.getOrDefault(features(i), 0.0d)
       }
       v(features.length + 1) = logKey
+      v(features.length + 2) = scoresMap
       Row(v: _*)
     })
   }
 
   def getClipData(spark: SparkSession, df: DataFrame): DataFrame = {
     import spark.implicits._
-    df.select("label", "prediction", "logKey").rdd
+    df.select("label", "prediction", "logKey", "scoresMap").rdd
       .map(row => {
         val label = row.getAs[Double]("label")
         val prediction = row.getAs[Double]("prediction")
         val logKey = row.getAs[String]("logKey")
+        val scoresMap = row.getAs[String]("scoresMap")
         if (prediction < 1E-8) {
-          (label, prediction, 0d, logKey)
+          (label, prediction, 0d, logKey, scoresMap)
         } else {
-          (label, prediction, prediction, logKey)
+          (label, prediction, prediction, logKey, scoresMap)
        }
       }
-      ).toDF("label", "prediction", "clipPrediction", "logKey")
+      ).toDF("label", "prediction", "clipPrediction", "logKey", "scoresMap")
   }
 
   def calMAPE(evalRdd: RDD[Row]): Double = {
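The reader side assumes input lines gained scoresMap as the third tab-separated column, so feature name:value pairs shift from index 2 to index 3. A self-contained sketch of the assumed layout (the sample line is made up):

    import org.apache.commons.lang3.StringUtils
    import org.apache.commons.lang3.math.NumberUtils

    // Assumed layout: logKey \t label \t scoresMap \t name:value \t name:value ...
    val line = StringUtils.split("key-0001\t1.0\tctr:0.12\tf0:0.5\tf1:2.0", '\t')
    val (logKey, label, scoresMap) = (line(0), NumberUtils.toDouble(line(1)), line(2))
    val pairs = (3 until line.length).map { i =>
      val fv = StringUtils.split(line(i), ':')
      fv(0) -> NumberUtils.toDouble(fv(1), 0.0) // value defaults to 0.0 if unparsable
    }.toMap
    // pairs == Map("f0" -> 0.5, "f1" -> 2.0)

getClipData itself only threads scoresMap through unchanged; the clipping rule is untouched: predictions below 1E-8 are floored to 0.0 in clipPrediction, everything else passes through. In isolation:

    // Per-row clipping rule from getClipData.
    def clip(prediction: Double): Double =
      if (prediction < 1e-8) 0d else prediction
    // clip(3e-9) == 0.0; clip(0.42) == 0.42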