|
|
@@ -189,7 +189,6 @@ public class DataDashboardService {
|
|
|
return ReturnT.SUCCESS;
|
|
|
}
|
|
|
|
|
|
-
|
|
|
private void exportFeishuNewSortStrategy(List<String> dateStrList, String articleType, Integer filter,
|
|
|
String sheetToken, String sheetId, List<String> delDateStrList) {
|
|
|
String minDate = dateStrList.stream().min(String::compareTo).orElse("");
|
|
|
@@ -1058,6 +1057,340 @@ public class DataDashboardService {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
+ public void exportFWH(String beginDate, String endDate) {
|
|
|
+ List<String> dateStrList = DateUtils.getBeforeDays(beginDate, endDate, 5);
|
|
|
+ // 滚动删除,仅保留最近14天内容
|
|
|
+ List<String> delDateStrList = DateUtils.getBeforeDays(null, null, 14);
|
|
|
+ exportFeishuNewSortFWHStrategy(dateStrList, ArticleTypeEnum.QUNFA.getVal(), StatusEnum.ZERO.getCode(),
|
|
|
+ dailyDetailSheetToken, "NBZPVI", delDateStrList);
|
|
|
+ }
|
|
|
+
|
|
|
+ @XxlJob("scheduledExportFWH")
|
|
|
+ public ReturnT<String> scheduledExportFWH(String param) {
|
|
|
+ List<String> dateStrList = DateUtils.getBeforeDays(null, null, 5);
|
|
|
+ // 滚动删除,仅保留最近14天内容
|
|
|
+ List<String> delDateStrList = DateUtils.getBeforeDays(null, null, 14);
|
|
|
+ exportFeishuNewSortFWHStrategy(dateStrList, ArticleTypeEnum.QUNFA.getVal(), StatusEnum.ZERO.getCode(),
|
|
|
+ dailyDetailSheetToken, "NBZPVI", delDateStrList);
|
|
|
+ return ReturnT.SUCCESS;
|
|
|
+ }
|
|
|
+
|
|
|
+ private void exportFeishuNewSortFWHStrategy(List<String> dateStrList, String articleType, Integer filter,
|
|
|
+ String sheetToken, String sheetId, List<String> delDateStrList) {
|
|
|
+ String minDate = dateStrList.stream().min(String::compareTo).orElse("");
|
|
|
+ String maxDate = dateStrList.stream().max(String::compareTo).orElse("");
|
|
|
+ List<NewSortStrategyExport> newContentsYesData = newSortStrategyFWHData(minDate, maxDate, articleType, filter);
|
|
|
+ if (CollectionUtil.isEmpty(newContentsYesData)) {
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ int rowNum = newContentsYesData.size();
|
|
|
+ List<List<Object>> rows = new ArrayList<>();
|
|
|
+ Field[] fields = NewSortStrategyExport.class.getDeclaredFields();
|
|
|
+ for (NewSortStrategyExport datum : newContentsYesData) {
|
|
|
+ List<Object> rowDatas = new ArrayList<>();
|
|
|
+ rows.add(rowDatas);
|
|
|
+
|
|
|
+ for (Field field : fields) {
|
|
|
+ field.setAccessible(true);
|
|
|
+ try {
|
|
|
+ rowDatas.add(field.get(datum));
|
|
|
+ } catch (IllegalAccessException e) {
|
|
|
+ log.error("获取值出错:{}", field.getName());
|
|
|
+ } catch (Exception e) {
|
|
|
+ throw new RuntimeException(e.getMessage());
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ List<Pair<String, String>> styles = Arrays
|
|
|
+ .asList(
|
|
|
+ Pair.of("J", "0.00%"),
|
|
|
+ Pair.of("K", "0.00%"),
|
|
|
+ Pair.of("L", "0.00%"),
|
|
|
+ Pair.of("M", "0.00%"),
|
|
|
+ Pair.of("N", "0.00%"),
|
|
|
+ Pair.of("O", "0.00%"),
|
|
|
+ Pair.of("P", "0.00%"),
|
|
|
+ Pair.of("Q", "0.00%"),
|
|
|
+ Pair.of("R", "0.00%"),
|
|
|
+ Pair.of("S", "0.00%"),
|
|
|
+ Pair.of("T", "0.00%")
|
|
|
+ );
|
|
|
+ doSendFeishuSheet(dateStrList, sheetToken, sheetId, rowNum, rows, 2, styles, delDateStrList, null);
|
|
|
+ }
|
|
|
+
|
|
|
    /**
     * Builds the new-sort-strategy export rows for FWH (batch group-send service account)
     * articles published within [beginDate, endDate].
     *
     * <p>Pipeline: load articles for the account set, join them against avg-read info, sort
     * logs, AIGC publish/produce/crawler metadata and historical publications, assemble one
     * {@code DatastatSortStrategy} per article, optionally filter to mini-program rows,
     * aggregate batch group-send duplicates, sort, and convert to export DTOs.
     *
     * @param beginDate   inclusive start date, yyyyMMdd
     * @param endDate     inclusive end date, yyyyMMdd
     * @param articleType article type to export (see ArticleTypeEnum)
     * @param filter      when equal to StatusEnum.ONE, only rows with published mini-programs are kept
     * @return export rows; empty list when no matching publish content exists
     */
    public List<NewSortStrategyExport> newSortStrategyFWHData(String beginDate, String endDate,
                                                              String articleType, Integer filter) {
        long beginTimestamp = DateUtils.dateStrToTimestamp(beginDate, "yyyyMMdd");
        // +86400 (one day) makes endDate inclusive — assumes second-resolution timestamps; TODO confirm
        long endTimestamp = DateUtils.dateStrToTimestamp(endDate, "yyyyMMdd") + 86400;
        // All batch group-send service accounts, plus a fixed allowlist of extra gh_ids.
        List<String> ghIds = aigcBaseMapper.getAllBatchGroupSendAccount();
        ghIds.addAll(Arrays.asList("gh_61a72b720de3","gh_efaf7da157f5","gh_b3ffc1ca3a04","gh_5e543853d8f0","gh_b32125c73861","gh_9cf3b7ff486b","gh_b8baac4296cb","gh_45beb952dc74","gh_ecb21c0453af","gh_761976bb98a6","gh_5855bed97938","gh_84e744b16b3a","gh_05a0318105be","gh_006929c2305f","gh_2d764faf83f6"));
        List<AccountAvgInfo> accountAvgInfoList = accountAvgInfoRepository.findAll();
        log.info("newSortStrategyFWHData accountAvgInfoList finish");

        List<Article> articleList = articleRepository.getByGhIdInAndPublishTimestampBetweenAndTypeEquals(ghIds,
                beginTimestamp, endTimestamp, articleType);
        // ghId -> appMsgId -> itemIndex -> article
        Map<String, Map<String, Map<Integer, Article>>> articleMap = articleList.stream().collect(Collectors.groupingBy(Article::getGhId,
                Collectors.groupingBy(Article::getAppMsgId, Collectors.toMap(Article::getItemIndex, o -> o))));
        log.info("newSortStrategyFWHData articleList finish");
        Set<String> snList = articleList.stream().map(Article::getWxSn).collect(Collectors.toSet());
        List<ArticleDetailInfo> articleDetailInfoList = new ArrayList<>();
        // Fetch detail info in partitions of 1000 sns to keep each IN-clause bounded.
        for (List<String> partitions : Lists.partition(new ArrayList<>(snList), 1000)) {
            articleDetailInfoList.addAll(articleDetailInfoRepository.getByWxSnIn(partitions));
        }
        log.info("newSortStrategyFWHData articleDetailInfoList finish");
        Map<String, List<ArticleDetailInfo>> articleDetailInfoMap = articleDetailInfoList.stream()
                .collect(Collectors.groupingBy(ArticleDetailInfo::getWxSn));
        List<PublishSortLog> sortLogList = publishSortLogRepository.findByGhIdInAndDateStrBetween(ghIds, beginDate, endDate);
        log.info("newSortStrategyFWHData sortLogList finish");
        // ghId -> dateStr -> title -> sort log (first entry wins on duplicate titles)
        Map<String, Map<String, Map<String, PublishSortLog>>> sortStrategyMap = sortLogList.stream()
                .collect(Collectors.groupingBy(PublishSortLog::getGhId,
                        Collectors.groupingBy(PublishSortLog::getDateStr, Collectors.toMap(PublishSortLog::getTitle,
                                Function.identity(), (existing, replacement) -> existing))));
        // ghId -> updateTime -> position -> avg info; only accounts with readAvg > 0 and fans > 1000
        Map<String, Map<String, Map<String, AccountAvgInfo>>> accountAvgInfoIndexMap = accountAvgInfoList.stream()
                .filter(o -> Objects.nonNull(o.getReadAvg()) && o.getReadAvg() > 0 && o.getFans() > 1000)
                .collect(Collectors.groupingBy(AccountAvgInfo::getGhId, Collectors.groupingBy(AccountAvgInfo::getUpdateTime,
                        Collectors.toMap(AccountAvgInfo::getPosition, o -> o))));
        // Publishing accounts
        List<PublishAccount> publishAccountList = publishAccountRepository.getAllByGhIdIn(ghIds);
        log.info("newSortStrategyFWHData publishAccountList finish");
        Map<String, PublishAccount> publishAccountMap = publishAccountList.stream().collect(Collectors.toMap(PublishAccount::getGhId, o -> o));
        // Published contents, looked up by (title, publish account id)
        List<PublishContentParam> publishContentParamList = articleList.stream().map(article -> {
            PublishContentParam item = new PublishContentParam();
            item.setTitle(article.getTitle());
            PublishAccount account = publishAccountMap.get(article.getGhId());
            if (Objects.nonNull(account)) {
                item.setPublishAccountId(account.getId());
                return item;
            }
            // Articles whose publish account is unknown are dropped from the lookup.
            return null;
        }).filter(Objects::nonNull).collect(Collectors.toList());
        List<PublishContentDTO> publishContents = new ArrayList<>();
        for (List<PublishContentParam> partitions : Lists.partition(publishContentParamList, 100)) {
            publishContents.addAll(publishContentMapper.getPublishContentByTitle(partitions));
        }
        if (CollectionUtils.isEmpty(publishContents)) {
            return new ArrayList<>();
        }
        log.info("newSortStrategyFWHData publishContents finish");
        // publishAccountId -> title -> publishTimestamp -> content (latest wins on duplicates)
        Map<String, Map<String, Map<Long, PublishContentDTO>>> publishContentMap = publishContents.stream()
                .filter(o -> Objects.nonNull(o.getPublishTimestamp()))
                .sorted(Comparator.comparingLong(PublishContentDTO::getPublishTimestamp)).collect(
                        Collectors.groupingBy(PublishContentDTO::getPublishAccountId,
                                Collectors.groupingBy(PublishContentDTO::getTitle,
                                        Collectors.toMap(PublishContentDTO::getPublishTimestamp, o -> o,
                                                (existing, replacement) -> replacement))));
        // Published content layouts
        List<String> publishContentIds = publishContents.stream().map(PublishContentDTO::getId).collect(Collectors.toList());
        List<PublishContentLayout> publishContentLayoutList = new ArrayList<>();
        for (List<String> partitions : Lists.partition(publishContentIds, 1000)) {
            publishContentLayoutList.addAll(publishContentLayOutRepository.findByPublishContentIdIn(partitions));
        }
        log.info("newSortStrategyFWHData publishContentLayoutList finish");
        Map<String, PublishContentLayout> publishContentLayoutMap = publishContentLayoutList.stream()
                .collect(Collectors.toMap(PublishContentLayout::getPublishContentId, o -> o,
                        (existing, replacement) -> replacement));
        // Publish plans
        List<String> publishPlanIds = publishContents.stream().map(PublishContentDTO::getPlanId).distinct()
                .collect(Collectors.toList());
        List<PublishPlan> publishPlanList = publishPlanRepository.findByIdIn(publishPlanIds);
        log.info("newSortStrategyFWHData publishPlanList finish");
        Map<String, PublishPlan> publishPlanMap = publishPlanList.stream()
                .collect(Collectors.toMap(PublishPlan::getId, o -> o));
        // Produce-plan execution records (only for contents with source type 1 or 2)
        List<String> contentSourceIds = publishContents.stream()
                .filter(o -> Arrays.asList(1, 2).contains(o.getSourceType()))
                .map(PublishContentDTO::getSourceId).distinct().collect(Collectors.toList());
        List<ProducePlanExeRecord> planExeRecordList = new ArrayList<>();
        for (List<String> partitions : Lists.partition(contentSourceIds, 1000)) {
            planExeRecordList.addAll(producePlanExeRecordRepository.getByPlanExeIdIn(partitions));
        }
        log.info("newSortStrategyFWHData planExeRecordList finish");
        // Video-pool sources (long-article video pool source type)
        List<String> videoPoolSourceIds = publishContents.stream()
                .filter(o -> o.getSourceType().equals(PublishPlanInputSourceTypesEnum.longArticleVideoPoolSource.getVal()))
                .map(PublishContentDTO::getSourceId).distinct().collect(Collectors.toList());
        List<PublishSingleVideoSource> videoPoolList = videoPoolRepository.getByContentTraceIdIn(videoPoolSourceIds);
        Map<String, PublishSingleVideoSource> videoPoolSourceMap = videoPoolList.stream()
                .collect(Collectors.toMap(PublishSingleVideoSource::getContentTraceId, o -> o));
        log.info("newSortStrategyFWHData videoPoolList finish");
        Map<String, ProducePlanExeRecord> planExeRecordMap = planExeRecordList.stream()
                .collect(Collectors.toMap(ProducePlanExeRecord::getPlanExeId, o -> o));
        // Produce plans
        List<String> producePlanIds = planExeRecordList.stream().map(ProducePlanExeRecord::getPlanId).distinct()
                .collect(Collectors.toList());
        List<ProducePlan> producePlanList = producePlanRepository.findByIdIn(producePlanIds);
        log.info("newSortStrategyFWHData producePlanList finish");
        Map<String, ProducePlan> producePlanMap = producePlanList.stream()
                .collect(Collectors.toMap(ProducePlan::getId, o -> o));
        // Produce-plan input sources
        List<ProducePlanInputSource> inputSourceList = producePlanInputSourceRepository.findByPlanIdIn(producePlanIds);
        log.info("newSortStrategyFWHData inputSourceList finish");
        Map<String, List<ProducePlanInputSource>> inputSourceMap = inputSourceList.stream()
                .collect(Collectors.groupingBy(ProducePlanInputSource::getPlanId));
        // Crawler content relations
        List<String> crawlerChannelContentIds = publishContents.stream().map(PublishContentDTO::getCrawlerChannelContentId)
                .distinct().collect(Collectors.toList());
        List<CrawlerPlanResultRel> resultRelList = aigcBaseMapper.getCrawlerPlanRelByChannelContentIds(crawlerChannelContentIds);
        log.info("newSortStrategyFWHData resultRelList finish");
        Map<String, List<CrawlerPlanResultRel>> resultRelMap = resultRelList.stream()
                .collect(Collectors.groupingBy(CrawlerPlanResultRel::getChannelSourceId));
        // Crawler plans
        List<String> crawlerPlanIds = resultRelList.stream().map(CrawlerPlanResultRel::getPlanId).distinct()
                .collect(Collectors.toList());
        List<CrawlerPlan> crawlerPlanList = aigcBaseMapper.getCrawlerPlanByPlanIds(crawlerPlanIds);
        log.info("newSortStrategyFWHData crawlerPlanList finish");
        Map<String, CrawlerPlan> crawlerPlanMap = crawlerPlanList.stream()
                .collect(Collectors.toMap(CrawlerPlan::getId, o -> o));
        // Mini-program tasks, de-duplicated by concatenated (planId, publishAccountId) key
        List<MiniprogramTaskParam> miniprogramTaskParamList = new ArrayList<>();
        Set<String> distinct = new HashSet<>();
        for (PublishContentDTO publishContent : publishContents) {
            String key = publishContent.getPlanId() + publishContent.getPublishAccountId();
            if (distinct.contains(key)) {
                continue;
            } else {
                distinct.add(key);
            }
            MiniprogramTaskParam param = new MiniprogramTaskParam();
            param.setAccountId(publishContent.getPublishAccountId());
            param.setPlanId(publishContent.getPlanId());
            miniprogramTaskParamList.add(param);
        }
        List<PublishPlanMiniprogramTask> miniprogramTaskList = new ArrayList<>();
        for (List<MiniprogramTaskParam> partitions : Lists.partition(miniprogramTaskParamList, 1000)) {
            miniprogramTaskList.addAll(publishContentMapper.getMiniProgramTask(partitions));
        }
        log.info("newSortStrategyFWHData miniprogramTaskList finish");
        Map<String, List<PublishPlanMiniprogramTask>> miniprogramTaskMap = miniprogramTaskList.stream()
                .collect(Collectors.groupingBy(PublishPlanMiniprogramTask::getPlanId));
        // Source produce plan per title
        List<String> titleList = articleList.stream().map(Article::getTitle).distinct().collect(Collectors.toList());
        Map<String, ProducePlan> sourceTitlePlanMap = getTitleSourceProducePlanMap(titleList);
        log.info("newSortStrategyFWHData sourceTitlePlan finish");
        // Historical publications of the same titles (status == 1)
        List<String> titleMd5List = articleList.stream().map(Article::getTitleMd5).distinct().collect(Collectors.toList());
        List<Article> hisArticleList = new ArrayList<>();
        List<List<String>> titleMd5Partition = Lists.partition(new ArrayList<>(titleMd5List), 1000);
        for (List<String> titleMd5s : titleMd5Partition) {
            hisArticleList.addAll(articleRepository.getByTitleMd5InAndTypeEqualsAndStatusEquals(titleMd5s, articleType, 1));
        }
        log.info("newSortStrategyFWHData hisArticleList finish");
        Map<String, List<Article>> hisArticleMap = hisArticleList.stream().collect(Collectors.groupingBy(Article::getTitle));
        Set<String> hisWxSnList = hisArticleList.stream().map(Article::getWxSn).collect(Collectors.toSet());
        List<ArticleDetailInfo> hisArticleDetailInfoList = new ArrayList<>();
        List<List<String>> hisSnPartition = Lists.partition(new ArrayList<>(hisWxSnList), 1000);
        for (List<String> sns : hisSnPartition) {
            hisArticleDetailInfoList.addAll(articleDetailInfoRepository.getByWxSnIn(sns));
        }
        log.info("newSortStrategyFWHData hisArticleDetailInfoList finish");
        Map<String, List<ArticleDetailInfo>> hisArticleDetailInfoMap = hisArticleDetailInfoList.stream()
                .collect(Collectors.groupingBy(ArticleDetailInfo::getWxSn));

        // Assemble one DatastatSortStrategy row per article from the joined lookups above.
        List<DatastatSortStrategy> saveList = new ArrayList<>();
        for (Article article : articleList) {
            DatastatSortStrategy obj = new DatastatSortStrategy();
            obj.setType(articleType);
            List<ArticleDetailInfo> articleDetailInfos = articleDetailInfoMap.get(article.getWxSn());
            if (CollectionUtils.isEmpty(articleDetailInfos)) {
                // Skip articles that have no detail stats yet.
                continue;
            }
            setObjArticleDetailInfo(article, obj, articleDetailInfos);
            // The position-1 article of the same app message supplies first-position metrics.
            Article firstArticle = articleMap.get(article.getGhId()).get(article.getAppMsgId()).get(1);
            Map<String, Map<String, PublishSortLog>> dateStrategy = sortStrategyMap.get(article.getGhId());
            Map<String, AccountAvgInfo> indexAvgInfoMap = getDateAccountAvgInfo(accountAvgInfoIndexMap, article.getGhId(),
                    article.getPublishTimestamp(), article.getItemIndex());
            AccountAvgInfo firstAvgInfo = getAccountAvgInfo(accountAvgInfoIndexMap, article.getGhId(),
                    article.getPublishTimestamp(), 1);
            String date = DateUtils.timestampToYMDStr(article.getPublishTimestamp(), "yyyyMMdd");
            setObjBaseInfo(article, obj, date);
            if (Objects.nonNull(firstArticle)) {
                obj.setFirstViewCount(firstArticle.getShowViewCount());
            }
            if (Objects.nonNull(firstAvgInfo)) {
                obj.setFirstAvgViewCount(firstAvgInfo.getReadAvg());
            }
            if (Objects.nonNull(dateStrategy)) {
                Map<String, PublishSortLog> titleStrategyMap = dateStrategy.get(date);
                if (Objects.nonNull(titleStrategyMap)) {
                    PublishSortLog sortLog = titleStrategyMap.get(article.getTitle());
                    if (Objects.nonNull(sortLog)) {
                        String strategy = sortLog.getStrategy();
                        if (!StringUtils.hasText(sortLog.getStrategy())) {
                            // Fallback: borrow the strategy of any sort log recorded that day.
                            for (Map.Entry<String, PublishSortLog> entry : titleStrategyMap.entrySet()) {
                                strategy = entry.getValue().getStrategy();
                                break;
                            }
                        }
                        obj.setSourceType(sortLog.getSourceType());
                        obj.setSourceId(sortLog.getSourceId());
                        obj.setStrategy(strategy);
                    }
                }
            }
            setObjSortInfo(article, obj, date);
            setObjAvgInfo(article, obj, indexAvgInfoMap);
            setObjHisRateInfo(article, obj, hisArticleMap, accountAvgInfoIndexMap, hisArticleDetailInfoMap);
            // AIGC-side enrichment (plans, layouts, crawler relations, mini-program tasks, ...)
            setObjAigcInfo(article, obj, date, publishAccountMap, publishContentMap, publishContentLayoutMap,
                    publishPlanMap, miniprogramTaskMap, planExeRecordMap, producePlanMap, inputSourceMap,
                    resultRelMap, crawlerPlanMap, sourceTitlePlanMap, videoPoolSourceMap);
            saveList.add(obj);
        }
        log.info("newSortStrategyFWHData buildData finish");

        if (CollectionUtil.isNotEmpty(saveList) && filter == StatusEnum.ONE.getCode()) {
            // filter == 1: keep only rows that published at least one mini-program.
            saveList = saveList.stream()
                    .filter(o -> Objects.nonNull(o.getPublishMiniProgramNum()) && o.getPublishMiniProgramNum() > 0)
                    .collect(Collectors.toList());
        }
        saveList = fwhBatchGroupSendAggregation(saveList);
        // Newest date first, then stable by account and position.
        saveList.sort(Comparator.comparing(DatastatSortStrategy::getDateStr).reversed()
                .thenComparing(DatastatSortStrategy::getGhId).thenComparing(DatastatSortStrategy::getPosition));
        log.info("newSortStrategyFWHData finish");
        return NewSortStrategyExport.dbObjToExportObj(saveList);
    }
|
|
|
+
|
|
|
+ private List<DatastatSortStrategy> fwhBatchGroupSendAggregation(List<DatastatSortStrategy> saveList) {
|
|
|
+ if (CollectionUtils.isEmpty(saveList)) {
|
|
|
+ return saveList;
|
|
|
+ }
|
|
|
+ Map<String, Map<String, Map<Integer, List<DatastatSortStrategy>>>> dateGhIdPositionListMap = saveList.stream()
|
|
|
+ .collect(Collectors.groupingBy(DatastatSortStrategy::getDateStr,
|
|
|
+ Collectors.groupingBy(DatastatSortStrategy::getGhId,
|
|
|
+ Collectors.groupingBy(DatastatSortStrategy::getPosition, Collectors.toList()))));
|
|
|
+ List<DatastatSortStrategy> res = new ArrayList<>();
|
|
|
+ for (Map.Entry<String, Map<String, Map<Integer, List<DatastatSortStrategy>>>> dateEntry : dateGhIdPositionListMap.entrySet()) {
|
|
|
+ for (Map.Entry<String, Map<Integer, List<DatastatSortStrategy>>> ghIdEntry : dateEntry.getValue().entrySet()) {
|
|
|
+ for (Map.Entry<Integer, List<DatastatSortStrategy>> positionEntry : ghIdEntry.getValue().entrySet()) {
|
|
|
+ List<DatastatSortStrategy> list = positionEntry.getValue();
|
|
|
+ DatastatSortStrategy resItem = new DatastatSortStrategy();
|
|
|
+ BeanUtils.copyProperties(list.get(0), resItem);
|
|
|
+ resItem.setViewCount(0);
|
|
|
+ resItem.setAvgViewCount(resItem.getAvgViewCount() * list.size());
|
|
|
+ resItem.setFirstViewCount(0);
|
|
|
+ resItem.setFirstAvgViewCount(resItem.getFirstAvgViewCount() * list.size());
|
|
|
+ for (DatastatSortStrategy item : list) {
|
|
|
+ resItem.setViewCount(resItem.getViewCount() + item.getViewCount());
|
|
|
+ resItem.setFirstViewCount(resItem.getFirstViewCount() + item.getFirstViewCount());
|
|
|
+ }
|
|
|
+ resItem.setReadRate((resItem.getViewCount() * 1.0) / resItem.getAvgViewCount());
|
|
|
+ resItem.setReadFansRate((resItem.getViewCount() * 1.0) / resItem.getFans());
|
|
|
+ resItem.setFirstReadRate((resItem.getFirstViewCount() * 1.0) / resItem.getFirstAvgViewCount());
|
|
|
+ resItem.setFission0ReadAvgRate((resItem.getFission0() * 1.0) / resItem.getAvgViewCount());
|
|
|
+ resItem.setFission0ReadAvg100Rate((resItem.getFission0() * 1.0) / (resItem.getAvgViewCount() + 100));
|
|
|
+ resItem.setFission0ReadAvg500Rate((resItem.getFission0() * 1.0) / (resItem.getAvgViewCount() + 500));
|
|
|
+ resItem.setFission0ReadAvg1000Rate((resItem.getFission0() * 1.0) / (resItem.getAvgViewCount() + 1000));
|
|
|
+ res.add(resItem);
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return res;
|
|
|
+ }
|
|
|
+
|
|
|
@XxlJob("scheduleExportIntermediateIndicators")
|
|
|
public ReturnT<String> scheduleIntermediateIndicatorsExport(String param) {
|
|
|
List<String> dateStrList = DateUtils.getBeforeDays(null, null, 3);
|