@@ -119,6 +119,7 @@ public class ArticlePromotionService {
             return;
         }
         try {
+            String level = pos.equals("【1】") ? contentPoolType.get(0) : contentPoolType.get(1);
             String produceId = produceConfig.get(accountNickName).get(pos).get(way).trim();
             List<ProduceContentListItemVO> contentList = getProduceContentList(accountNickName, pos, way);
             // Fetch the titles and URLs that have already been visited
@@ -144,7 +145,6 @@ public class ArticlePromotionService {
                 filterUrlList.add(url);
                 // Call the crawler detail endpoint and save the data
                 WxContentDetailResponse detail = getArticleDetail(url);
-                String level = pos.equals("【1】") ? contentPoolType.get(0) : contentPoolType.get(1);
                 if (detail != null && StringUtils.hasText(detail.getChannelContentId())) {
                     saveArticlePoolPromotionSource(detail.getChannelContentId(), wxSn, title, level);
                 } else {
@@ -159,15 +159,15 @@ public class ArticlePromotionService {
                 log.info("url_list empty: " + accountNickName + ", " + pos + ", " + way);
                 return;
             }
-            int urlLen = filterUrlList.size();
-            String planName = String.format("%d_%s_%s_%s【%s】_%s", urlLen, today, accountNickName, pos, way, today);
-            log.info("url_len: " + list.size() + ", " + urlLen);
+            String planName = String.format("%d_%s_%s_%s【%s】_%s", filterUrlList.size(), today, accountNickName, pos, way, today);
+            log.info("url_len: " + list.size() + ", " + filterUrlList.size());
             IdNameVO<String> planInfo = aigcCrawlerPlanSaveService.createArticleUrlPlan(planName, filterUrlList, tag, CrawlerModeEnum.ContentIDs.getVal());
             if (StringUtils.hasText(produceId)) {
                 articleAddDependPlan(produceId, planInfo.getId(), planInfo.getName());
             }
             log.info("{}, {}, produce plan not exist: {}, {}, {}", planInfo.getName(), planInfo.getId(), accountNickName, pos, way);
             if (CollectionUtils.isNotEmpty(publishContentIds)) {
+                planName = String.format("%d_%s_%s_%s【%s】_%s", publishContentIds.size(), today, accountNickName, pos, way, today);
                 planInfo = aigcCrawlerPlanSaveService.createArticleUrlPlan(planName, publishContentIds, tag, CrawlerModeEnum.PublishContentIds.getVal());
                 if (StringUtils.hasText(produceId)) {
                     articleAddDependPlan(produceId, planInfo.getId(), planInfo.getName());
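
For reference, both added `planName` lines follow the same naming convention: the plan's own item count plus date, account, position, and way, formatted as `count_date_account_pos【way】_date`. A minimal sketch of that pattern as a helper method, assuming a `buildPlanName` name and signature that are not part of the original service:

```java
// Hypothetical helper sketching the plan-naming pattern used in the diff above;
// the method name and signature are illustrative assumptions, not existing code.
private static String buildPlanName(int itemCount, String today, String accountNickName,
                                    String pos, String way) {
    // Same format string as the diff: count_date_account_pos【way】_date
    return String.format("%d_%s_%s_%s【%s】_%s", itemCount, today, accountNickName, pos, way, today);
}
```

With such a helper, the URL plan would be named via `buildPlanName(filterUrlList.size(), today, accountNickName, pos, way)` and the publish-content plan via `buildPlanName(publishContentIds.size(), today, accountNickName, pos, way)`, which is what the two `+` lines above do inline.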