zhangbo, 10 months ago
Parent commit: fbbfe39cfa

+ 0 - 1
src/main/scala/com/aliyun/odps/spark/examples/makedata/makedata_17_bucketDataPrint_20240617.scala

@@ -237,7 +237,6 @@ object makedata_17_bucketDataPrint_20240617 {
             Set("ab0", "ab1", "ab2", "ab3").contains(abcode) && level.equals("0")
         }.mapPartitions(row => {
           val result = new ArrayBuffer[String]()
-          val contentList = contentList_br.value
           val bucketsMap = bucketsMap_br.value
           row.foreach {
             case (apptype, pagesource, level, label, abcode, allfeaturemap, featureMap) =>
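
Side note on the deletion above: contentList_br is a Spark broadcast variable, and the removed line dereferenced it into a local (contentList) that the partition loop never read. Dropping the unused .value call is safe, and it also means the closure no longer captures contentList_br, so that broadcast no longer has to be fetched by these tasks. A minimal, runnable sketch of the pattern (all names except bucketsMap_br are hypothetical, assuming Spark 2.4 as in the launch commands below):

import org.apache.spark.sql.SparkSession

object BroadcastSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("BroadcastSketch").master("local[*]").getOrCreate()
    val sc = spark.sparkContext

    // A broadcast is shipped once per executor and read with .value inside closures.
    val bucketsMap_br = sc.broadcast(Map("feat" -> Array(0.1, 0.5, 0.9)))

    val out = sc.parallelize(Seq("feat", "other")).mapPartitions { rows =>
      val bucketsMap = bucketsMap_br.value // used below, so this read stays
      // val contentList = contentList_br.value  // unused afterwards: the kind of read this commit removes
      rows.map(k => s"$k -> ${bucketsMap.getOrElse(k, Array.empty[Double]).mkString(",")}")
    }
    out.collect().foreach(println)
    spark.stop()
  }
}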

+ 7 - 1
src/main/scala/com/aliyun/odps/spark/examples/临时记录的脚本 (file name is Chinese for "temporary script notes")

@@ -150,4 +150,10 @@ nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.s
 --class com.aliyun.odps.spark.examples.makedata.makedata_18_mergehour2day_20240617 \
 --master yarn --driver-memory 2G --executor-memory 4G --executor-cores 1 --num-executors 16 \
 ./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
-> p18_data_check.log 2>&1 &
+> p18_data_check.log 2>&1 &
+
+nohup /opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
+--class com.aliyun.odps.spark.examples.makedata.makedata_17_bucketDataPrint_20240617 \
+--master yarn --driver-memory 2G --executor-memory 4G --executor-cores 1 --num-executors 16 \
+./target/spark-examples-1.0.0-SNAPSHOT-shaded.jar \
+> p17_data_check.log 2>&1 &