luojunhui 1 week ago
parent
commit
984cfbc4ad
1 changed file with 28 additions and 35 deletions

+ 28 - 35
tasks/publish_tasks/top_article_generalize.py

@@ -125,40 +125,33 @@ class TopArticleGeneralizeFromArticlePool(TopArticleGeneralize):
         title_obj_list = self.fetch_distinct_top_titles()
         publishing_article_list = []
         for title_obj in title_obj_list:
-            if (
-                title_obj["title"]
-                == "母亲去世136天后,女子回到家,在锅盖上留下一句话,瞬间泪崩!"
-            ):
-                if self.get_title_read_info_detail(title_obj["title"]):
-
-                    temp = []
-                    keys = self.get_keys_by_ai(title_obj)
-                    for key in keys:
-                        candidate_articles = self.get_candidate_articles(key)
-                        temp += candidate_articles
-
-                    if temp:
-                        print(title_obj["title"])
-                        title_list = [i["title"] for i in temp]
-                        # 相关性排序
-                        similarity_array = similarity_between_title_list(
-                            title_list, [title_obj["title"]]
-                        )
-                        print(similarity_array)
-                        print(title_list)
-                        response_with_similarity_list = []
-                        for index, item in enumerate(temp):
-                            item["similarity"] = similarity_array[index][0]
-                            response_with_similarity_list.append(item)
-
-                        sorted_response_with_similarity_list = sorted(
-                            response_with_similarity_list,
-                            key=lambda k: k["similarity"],
-                            reverse=True,
-                        )
-                        publishing_article_list += sorted_response_with_similarity_list[
-                            :10
-                        ]
+            if self.get_title_read_info_detail(title_obj["title"]):
+
+                temp = []
+                keys = self.get_keys_by_ai(title_obj)
+                for key in keys:
+                    candidate_articles = self.get_candidate_articles(key)
+                    temp += candidate_articles
+
+                if temp:
+                    title_list = [i["title"] for i in temp]
+                    # 相关性排序
+                    similarity_array = similarity_between_title_list(
+                        title_list, [title_obj["title"]]
+                    )
+                    response_with_similarity_list = []
+                    for index, item in enumerate(temp):
+                        item["similarity"] = similarity_array[index][0]
+                        response_with_similarity_list.append(item)
+
+                    sorted_response_with_similarity_list = sorted(
+                        response_with_similarity_list,
+                        key=lambda k: k["similarity"],
+                        reverse=True,
+                    )
+                    publishing_article_list += sorted_response_with_similarity_list[
+                        :10
+                    ]
 
         url_list = [i["link"] for i in publishing_article_list]
         if url_list:
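The added block ranks each top title's candidate articles by title similarity before queuing them for publication (the 相关性排序 comment marks this relevance-sorting step) and keeps the ten best matches per top title. A minimal sketch of that ranking step, assuming similarity_between_title_list(titles, [target]) is available in this module and returns one row per candidate with a single score column (inferred from the similarity_array[index][0] indexing); the helper name rank_candidates_by_similarity is hypothetical:

    def rank_candidates_by_similarity(candidates, target_title, top_n=10):
        # Score every candidate title against the target top title.
        title_list = [c["title"] for c in candidates]
        similarity_array = similarity_between_title_list(title_list, [target_title])
        # Attach the single score column to each candidate dict.
        for index, item in enumerate(candidates):
            item["similarity"] = similarity_array[index][0]
        # Highest similarity first; keep only the best top_n for publishing.
        return sorted(candidates, key=lambda k: k["similarity"], reverse=True)[:top_n]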
@@ -196,5 +189,5 @@ class TopArticleGeneralizeFromArticlePool(TopArticleGeneralize):
                 generate_task_id="20250703081329508785665",
             )
             # change article status
-            article_id_list = [i["article_id"] for i in generate_plan_response]
+            article_id_list = [i["article_id"] for i in publishing_article_list]
             self.change_article_status_while_publishing(article_id_list=article_id_list)
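The second hunk sources the ids for the status update from publishing_article_list, the same list that was just submitted to the generation plan, rather than from generate_plan_response. A hedged sketch of that status flip, assuming each entry of publishing_article_list carries the "article_id" field used above; the .get() guard and the empty-list check are illustrative additions, not part of the commit:

    # Flip publishing status for the articles that were actually sent to the plan.
    article_id_list = [
        i["article_id"] for i in publishing_article_list if i.get("article_id")
    ]
    if article_id_list:
        self.change_article_status_while_publishing(article_id_list=article_id_list)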