sunmingze, 1 year ago
Parent commit
f581ab7323

+ 5 - 9
pom.xml

@@ -41,16 +41,17 @@
 
         <dependency>
             <groupId>com.tzld.piaoquan</groupId>
-            <artifactId>ad-engine-commons</artifactId>
-            <version>1.0.0</version>
+            <artifactId>recommend-server-client</artifactId>
+            <version>1.0.1</version>
         </dependency>
 
         <dependency>
             <groupId>com.tzld.piaoquan</groupId>
-            <artifactId>recommend-server-client</artifactId>
-            <version>1.0.1</version>
+            <artifactId>ad-engine-commons</artifactId>
+            <version>1.0.0</version>
         </dependency>
 
+
         <dependency>
             <groupId>org.apache.spark</groupId>
             <artifactId>spark-core_${scala.binary.version}</artifactId>
@@ -148,17 +149,12 @@
                     <artifactId>aliyun-sdk-mns</artifactId>
                 </exclusion>
             </exclusions>
-
         </dependency>
-
-
         <dependency>
             <groupId>com.aliyun.emr</groupId>
             <artifactId>emr-maxcompute_2.11</artifactId>
             <version>${emr.version}</version>
         </dependency>
-
-
     </dependencies>
 
     <build>

+ 3 - 3
src/main/java/examples/dataloader/AdSampleConstructor.java

@@ -34,9 +34,9 @@ public class AdSampleConstructor {
         requestContext.setMachineinfoModel(record.getString("machineinfo_model"));
         requestContext.setMachineinfoSdkversion(record.getString("machineinfo_sdkversion"));
         requestContext.setMachineinfoSdkversion(record.getString("machineinfo_wechatversion"));
-        requestContext.setDay(record.getString("day"));
-        requestContext.setWeek(record.getString("week"));
-        requestContext.setHour(record.getString("hour"));
+//        requestContext.setDay(record.getString("day"));
+//        requestContext.setWeek(record.getString("week"));
+//        requestContext.setHour(record.getString("hour"));
         requestContext.setRegion(record.getString("province"));
         requestContext.setCity(record.getString("city"));
         return requestContext;

+ 2 - 2
src/main/java/examples/sparksql/SparkAdCTRSampleLoader.java

@@ -30,7 +30,7 @@ public class SparkAdCTRSampleLoader {
         String odpsUrl = "http://service.odps.aliyun.com/api";
         String tunnelUrl = "http://dt.cn-hangzhou.maxcompute.aliyun-inc.com";
         String project = "loghubods";
-        String table = "alg_recsys_view_sample";
+        String table = "alg_ad_view_sample";
         String hdfsPath = "/dw/recommend/model/ad_ctr_samples/" + partition;
 
         SparkConf sparkConf = new SparkConf().setAppName("E-MapReduce Demo 3-2: Spark MaxCompute Demo (Java)");
@@ -38,7 +38,7 @@ public class SparkAdCTRSampleLoader {
         OdpsOps odpsOps = new OdpsOps(jsc.sc(), accessId, accessKey, odpsUrl, tunnelUrl);
         System.out.println("Read odps table...");
 
-        JavaRDD<String> readData = odpsOps.readTableWithJava(project, table, partition, new RecordsToSamples(), Integer.valueOf(50));
+        JavaRDD<String> readData = odpsOps.readTableWithJava(project, table, partition, new RecordsToSamples(), Integer.valueOf(30));
         readData.saveAsTextFile(hdfsPath);
     }