@@ -17,6 +17,7 @@ import com.tzld.longarticle.recommend.server.mapper.longArticle.LongArticleBaseM
 import com.tzld.longarticle.recommend.server.model.dto.AccountTypeFansDTO;
 import com.tzld.longarticle.recommend.server.model.dto.NotPublishPlan;
 import com.tzld.longarticle.recommend.server.model.dto.PublishPlanAccountNotifyDTO;
+import com.tzld.longarticle.recommend.server.model.entity.aigc.CrawlerPlan;
 import com.tzld.longarticle.recommend.server.model.entity.aigc.PublishAccount;
 import com.tzld.longarticle.recommend.server.model.entity.crawler.AccountAvgInfo;
 import com.tzld.longarticle.recommend.server.model.entity.crawler.GetOffVideoCrawler;
@@ -201,6 +202,48 @@ public class XxlJobService {
         return ReturnT.SUCCESS;
     }

+    @XxlJob("checkColdCrawlerPlan")
+    public ReturnT<String> checkColdCrawlerPlan(String param) {
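+        // Default to the day-start timestamp from one day back; an explicit yyyyMMdd param overrides it.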
+        long timeStamp = DateUtils.getBeforeDayStart(1);
+        if (StringUtils.hasText(param)) {
+            timeStamp = DateUtils.getStartOfDay(param, "yyyyMMdd");
+        }
+        String dateStr = DateUtils.timestampToYMDStr(timeStamp, "yyyyMMdd");
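+        // Scale the timestamp by 1000 and query a one-day window of plans matching the cold-start tags.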
+        timeStamp = timeStamp * 1000;
+        List<String> planTags = Arrays.asList("账号联想_v1", "文章联想_v1", "品类冷启动");
+        List<CrawlerPlan> crawlerPlanList = aigcBaseMapper.getColdCrawlerPlan(timeStamp, timeStamp + 86400000, planTags);
+        if (CollectionUtil.isNotEmpty(crawlerPlanList)) {
+            for (CrawlerPlan crawlerPlan : crawlerPlanList) {
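+                // Skip plans whose crawler mode config is missing or has no input source values.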
+                JSONObject crawlerModeValue = JSONObject.parseObject(crawlerPlan.getCrawlerModeValue());
+                if (crawlerModeValue == null) {
+                    continue;
+                }
+                JSONObject sourceModeValues = crawlerModeValue.getJSONObject("sourceModeValues");
+                if (sourceModeValues == null) {
+                    continue;
+                }
+                JSONArray inputModeValues = sourceModeValues.getJSONArray("inputModeValues");
+                if (inputModeValues == null) {
+                    continue;
+                }
+                log.info("checkColdCrawlerPlan crawlerPlan: {}", JSONObject.toJSONString(crawlerPlan));
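+                // Push the planned vs. actual crawl counts for this plan to the Feishu group.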
+                FeishuMessageSender.sendWebHookMessage(FeishuRobotIdEnum.RECOMMEND.getRobotId(),
+                        dateStr + "冷启计划抓取数量\n"
+                                + "计划ID: " + crawlerPlan.getId() + "\n"
+                                + "计划名称: " + crawlerPlan.getName() + "\n"
+                                + "计划标签: " + crawlerPlan.getPlanTag() + "\n"
+                                + "计划抓取数量: " + inputModeValues.size() + "\n"
+                                + "实际抓取数量: " + crawlerPlan.getCrawlerTotalNum() + "\n"
+                                + "<at user_id=\"all\">所有人</at> ");
+            }
+        }
+        return ReturnT.SUCCESS;
+    }
+
     @XxlJob("migrateCrawlerRootSourceId")
     public ReturnT<String> migrateCrawlerRootSourceId(String param) {
         try {
|