#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
build_match_graph.py

Build the post/persona node-edge graph from matching results.

Inputs:
1. Matching result files under the filtered_results directory
2. 节点列表.json (node list)
3. 边关系.json (edge relations)

Output:
1. Node/edge graph files under the match_graph directory
"""
import json
import sys
from pathlib import Path
from typing import Dict, List, Set, Any, Optional

# Add the project root to the import path
project_root = Path(__file__).parent.parent.parent
sys.path.insert(0, str(project_root))

from script.data_processing.path_config import PathConfig


def build_post_node_id(dimension: str, node_type: str, name: str) -> str:
    """Build a post node ID."""
    return f"帖子_{dimension}_{node_type}_{name}"


def build_persona_node_id(dimension: str, node_type: str, name: str) -> str:
    """Build a persona node ID."""
    return f"{dimension}_{node_type}_{name}"
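
# Illustrative example of the two ID formats above (placeholder values only):
#   build_post_node_id("灵感点", "标签", "某特征")    -> "帖子_灵感点_标签_某特征"
#   build_persona_node_id("灵感点", "标签", "某特征") -> "灵感点_标签_某特征"
# The only difference is the "帖子_" prefix, which keeps post-side tag nodes
# distinct from the persona graph's own tag nodes.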


def extract_matched_nodes_and_edges(filtered_data: Dict) -> tuple:
    """
    Extract post nodes, persona nodes and match edges from a matching result.

    Args:
        filtered_data: matching result data

    Returns:
        (post node list, persona node ID set, match edge list)
    """
    post_nodes = []
    persona_node_ids = set()
    match_edges = []

    how_result = filtered_data.get("how解构结果", {})

    # Dimension mapping: list key -> dimension name
    dimension_mapping = {
        "灵感点列表": "灵感点",
        "目的点列表": "目的点",
        "关键点列表": "关键点"
    }

    for list_key, dimension in dimension_mapping.items():
        points = how_result.get(list_key, [])
        for point in points:
            # Walk the "how" step list
            how_steps = point.get("how步骤列表", [])
            for step in how_steps:
                features = step.get("特征列表", [])
                for feature in features:
                    feature_name = feature.get("特征名称", "")
                    weight = feature.get("权重", 0)
                    match_results = feature.get("匹配结果", [])
                    if not feature_name:
                        continue

                    # Only features with match results produce a post node and match edges
                    if match_results:
                        # Create the post node (tag type)
                        post_node_id = build_post_node_id(dimension, "标签", feature_name)
                        post_node = {
                            "节点ID": post_node_id,
                            "节点名称": feature_name,
                            "节点类型": "标签",
                            "节点层级": dimension,
                            "权重": weight,
                            "source": "帖子"
                        }
                        # Avoid duplicates
                        if not any(n["节点ID"] == post_node_id for n in post_nodes):
                            post_nodes.append(post_node)

                        # Handle each match result
                        for match in match_results:
                            persona_name = match.get("人设特征名称", "")
                            persona_dimension = match.get("人设特征层级", "")
                            persona_type = match.get("特征类型", "标签")
                            match_detail = match.get("匹配结果", {})
                            if not persona_name or not persona_dimension:
                                continue

                            # Build the persona node ID
                            persona_node_id = build_persona_node_id(
                                persona_dimension, persona_type, persona_name
                            )
                            persona_node_ids.add(persona_node_id)

                            # Create the match edge
                            match_edge = {
                                "源节点ID": post_node_id,
                                "目标节点ID": persona_node_id,
                                "边类型": "匹配",
                                "边详情": {
                                    "相似度": match_detail.get("相似度", 0),
                                    "说明": match_detail.get("说明", "")
                                }
                            }
                            match_edges.append(match_edge)

    return post_nodes, persona_node_ids, match_edges
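
# Assumed layout of filtered_data, inferred from the keys read above and in
# process_filtered_result below (a sketch; all values are placeholders):
# {
#   "帖子id": "...",
#   "帖子详情": {"title": "..."},
#   "how解构结果": {
#     "灵感点列表" / "目的点列表" / "关键点列表": [
#       {"how步骤列表": [
#         {"特征列表": [
#           {"特征名称": "...", "权重": 0,
#            "匹配结果": [
#              {"人设特征名称": "...", "人设特征层级": "...", "特征类型": "标签",
#               "匹配结果": {"相似度": 0, "说明": "..."}}
#            ]}
#         ]}
#       ]}
#     ]
#   }
# }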


def get_persona_nodes_details(
    persona_node_ids: Set[str],
    nodes_data: Dict
) -> List[Dict]:
    """
    Look up the details of persona nodes in the node list.

    Args:
        persona_node_ids: set of persona node IDs
        nodes_data: node list data

    Returns:
        list of persona node details
    """
    persona_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in persona_node_ids:
            persona_nodes.append(node)
    return persona_nodes


def get_edges_between_nodes(
    node_ids: Set[str],
    edges_data: Dict
) -> List[Dict]:
    """
    Get the edges whose endpoints both belong to the given node set.

    Args:
        node_ids: set of node IDs
        edges_data: edge relation data

    Returns:
        list of edges between the nodes
    """
    edges_between = []
    all_edges = edges_data.get("边列表", [])
    for edge in all_edges:
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]
        # Both endpoints are in the set
        if source_id in node_ids and target_id in node_ids:
            edges_between.append(edge)
    return edges_between
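
# Assumed layout of 节点列表.json and 边关系.json, inferred from the keys the
# two helpers above read (a sketch; extra fields are passed through untouched):
#   nodes_data = {"节点列表": [{"节点ID": "...", ...}, ...]}
#   edges_data = {"边列表": [{"源节点ID": "...", "目标节点ID": "...", "边类型": "...", ...}, ...]}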


def create_mirrored_post_edges(
    match_edges: List[Dict],
    persona_edges: List[Dict]
) -> List[Dict]:
    """
    Create mirrored edges between post nodes based on edges between persona nodes.

    Logic: if persona nodes A and B are connected, post node X matches A and
    post node Y matches B, then create a mirrored edge between X and Y.

    Args:
        match_edges: match edges (post node -> persona node)
        persona_edges: edges between persona nodes

    Returns:
        list of mirrored edges between post nodes
    """
    # Build the reverse mapping persona node -> post nodes
    # persona_id -> [post_id1, post_id2, ...]
    persona_to_posts = {}
    for edge in match_edges:
        post_id = edge["源节点ID"]
        persona_id = edge["目标节点ID"]
        if persona_id not in persona_to_posts:
            persona_to_posts[persona_id] = []
        if post_id not in persona_to_posts[persona_id]:
            persona_to_posts[persona_id].append(post_id)

    # Create mirrored post edges from the persona edges
    post_edges = []
    seen_edges = set()
    for persona_edge in persona_edges:
        source_persona = persona_edge["源节点ID"]
        target_persona = persona_edge["目标节点ID"]
        edge_type = persona_edge["边类型"]

        # Post nodes matched to the two persona nodes
        source_posts = persona_to_posts.get(source_persona, [])
        target_posts = persona_to_posts.get(target_persona, [])

        # Create a mirrored edge for every pair of post nodes
        for src_post in source_posts:
            for tgt_post in target_posts:
                if src_post == tgt_post:
                    continue
                # Use a sorted key so A-B and B-A count as the same edge
                edge_key = tuple(sorted([src_post, tgt_post])) + (edge_type,)
                if edge_key in seen_edges:
                    continue
                seen_edges.add(edge_key)
                post_edge = {
                    "源节点ID": src_post,
                    "目标节点ID": tgt_post,
                    "边类型": f"镜像_{edge_type}",  # mark as a mirrored edge
                    "边详情": {
                        "原始边类型": edge_type,
                        "源人设节点": source_persona,
                        "目标人设节点": target_persona
                    }
                }
                post_edges.append(post_edge)
    return post_edges
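
# Minimal worked example of the mirroring above (hypothetical IDs and edge
# type): with match edges 帖子X -> 人设A and 帖子Y -> 人设B, and a persona edge
# 人设A -[相关]-> 人设B, one mirrored edge 帖子X -[镜像_相关]-> 帖子Y is emitted,
# recording the original edge type and both persona endpoints in 边详情.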


def expand_one_layer(
    node_ids: Set[str],
    edges_data: Dict,
    nodes_data: Dict,
    edge_types: List[str] = None,
    direction: str = "both"
) -> tuple:
    """
    Expand one hop from the given nodes, collecting neighbouring nodes and the connecting edges.

    Args:
        node_ids: starting node ID set
        edges_data: edge relation data
        nodes_data: node list data
        edge_types: edge types to follow; None means all types
        direction: expansion direction
            - "outgoing": follow outgoing edges only (source in the set, expand to the target)
            - "incoming": follow incoming edges only (target in the set, expand to the source)
            - "both": expand in both directions

    Returns:
        (expanded node list, expanded edge list, expanded node ID set)
    """
    expanded_node_ids = set()
    expanded_edges = []
    all_edges = edges_data.get("边列表", [])

    # Find every edge and node adjacent to the starting nodes
    for edge in all_edges:
        # Filter by edge type
        if edge_types and edge["边类型"] not in edge_types:
            continue
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]

        # Outgoing: source in the set, expand to the target
        if direction in ["outgoing", "both"]:
            if source_id in node_ids and target_id not in node_ids:
                expanded_node_ids.add(target_id)
                expanded_edges.append(edge)

        # Incoming: target in the set, expand to the source
        if direction in ["incoming", "both"]:
            if target_id in node_ids and source_id not in node_ids:
                expanded_node_ids.add(source_id)
                expanded_edges.append(edge)

    # Fetch the details of the expanded nodes
    expanded_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in expanded_node_ids:
            # Mark as an expanded node
            node_copy = node.copy()
            node_copy["是否扩展"] = True
            node_copy["source"] = "人设"
            expanded_nodes.append(node_copy)
    return expanded_nodes, expanded_edges, expanded_node_ids
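
# Example call (matching how process_filtered_result below uses this helper):
# expand outwards only, following "属于" edges from tag nodes to categories:
#   expanded_nodes, expanded_edges, _ = expand_one_layer(
#       tag_persona_ids, edges_data, nodes_data,
#       edge_types=["属于"], direction="outgoing")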


def expand_and_filter_useful_nodes(
    matched_persona_ids: Set[str],
    match_edges: List[Dict],
    edges_data: Dict,
    nodes_data: Dict,
    exclude_edge_types: List[str] = None
) -> tuple:
    """
    Expand the persona nodes one hop, keeping only the expanded nodes that can
    produce new post-to-post links.

    Logic: if an expanded node E is connected to two or more matched persona
    nodes, then E can bridge new post-to-post links, so keep E.

    Args:
        matched_persona_ids: set of matched persona node IDs
        match_edges: match edge list
        edges_data: edge relation data
        nodes_data: node list data
        exclude_edge_types: edge types to exclude

    Returns:
        (useful expanded node list, expanded edge list, post mirror edges via expanded nodes)
    """
    if exclude_edge_types is None:
        exclude_edge_types = []
    all_edges = edges_data.get("边列表", [])

    # Build the mapping persona node -> post nodes
    persona_to_posts = {}
    for edge in match_edges:
        post_id = edge["源节点ID"]
        persona_id = edge["目标节点ID"]
        if persona_id not in persona_to_posts:
            persona_to_posts[persona_id] = []
        if post_id not in persona_to_posts[persona_id]:
            persona_to_posts[persona_id].append(post_id)

    # Collect every expansion candidate and the matched persona nodes it touches
    # expanded_node_id -> [(matched_persona_id, edge), ...]
    expanded_connections = {}
    for edge in all_edges:
        # Skip excluded edge types
        if edge["边类型"] in exclude_edge_types:
            continue
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]

        # Source is matched, target is an expansion candidate
        if source_id in matched_persona_ids and target_id not in matched_persona_ids:
            if target_id not in expanded_connections:
                expanded_connections[target_id] = []
            expanded_connections[target_id].append((source_id, edge))

        # Target is matched, source is an expansion candidate
        if target_id in matched_persona_ids and source_id not in matched_persona_ids:
            if source_id not in expanded_connections:
                expanded_connections[source_id] = []
            expanded_connections[source_id].append((target_id, edge))

    # Filter: keep only expanded nodes connected to at least two matched persona nodes
    useful_expanded_ids = set()
    useful_edges = []
    post_mirror_edges = []
    seen_mirror_edges = set()

    for expanded_id, connections in expanded_connections.items():
        connected_personas = list(set([c[0] for c in connections]))
        if len(connected_personas) >= 2:
            useful_expanded_ids.add(expanded_id)
            # Collect the edges
            for persona_id, edge in connections:
                useful_edges.append(edge)

            # For every pair of persona nodes bridged by this expanded node,
            # create a post mirror edge
            for i, p1 in enumerate(connected_personas):
                for p2 in connected_personas[i + 1:]:
                    posts1 = persona_to_posts.get(p1, [])
                    posts2 = persona_to_posts.get(p2, [])
                    # Edge types connecting p1 and p2 to this expanded node
                    edge_types_p1 = [c[1]["边类型"] for c in connections if c[0] == p1]
                    edge_types_p2 = [c[1]["边类型"] for c in connections if c[0] == p2]
                    # Use the first edge type as the representative
                    edge_type = edge_types_p1[0] if edge_types_p1 else (edge_types_p2[0] if edge_types_p2 else "扩展")
                    for post1 in posts1:
                        for post2 in posts2:
                            if post1 == post2:
                                continue
                            # Avoid duplicates
                            edge_key = tuple(sorted([post1, post2])) + (f"二阶_{edge_type}",)
                            if edge_key in seen_mirror_edges:
                                continue
                            seen_mirror_edges.add(edge_key)
                            post_mirror_edges.append({
                                "源节点ID": post1,
                                "目标节点ID": post2,
                                "边类型": f"二阶_{edge_type}",
                                "边详情": {
                                    "原始边类型": edge_type,
                                    "扩展节点": expanded_id,
                                    "源人设节点": p1,
                                    "目标人设节点": p2
                                }
                            })

    # Fetch the details of the useful expanded nodes
    useful_expanded_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in useful_expanded_ids:
            node_copy = node.copy()
            node_copy["是否扩展"] = True
            useful_expanded_nodes.append(node_copy)

    # Deduplicate the edges
    seen_edges = set()
    unique_edges = []
    for edge in useful_edges:
        edge_key = (edge["源节点ID"], edge["目标节点ID"], edge["边类型"])
        if edge_key not in seen_edges:
            seen_edges.add(edge_key)
            unique_edges.append(edge)

    return useful_expanded_nodes, unique_edges, post_mirror_edges
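
# Note: expand_and_filter_useful_nodes is not called elsewhere in this script;
# process_filtered_result below inlines a tag -> category variant of the same
# "expand one hop, then keep only the bridging nodes" idea.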


def process_filtered_result(
    filtered_file: Path,
    nodes_data: Dict,
    edges_data: Dict,
    output_dir: Path
) -> Dict:
    """
    Process a single matching result file.

    Args:
        filtered_file: path to the matching result file
        nodes_data: node list data
        edges_data: edge relation data
        output_dir: output directory

    Returns:
        processing statistics
    """
    # Read the matching result
    with open(filtered_file, "r", encoding="utf-8") as f:
        filtered_data = json.load(f)

    post_id = filtered_data.get("帖子id", "")
    post_detail = filtered_data.get("帖子详情", {})
    post_title = post_detail.get("title", "")

    # Extract nodes and edges
    post_nodes, persona_node_ids, match_edges = extract_matched_nodes_and_edges(filtered_data)

    # Fetch details of the directly matched persona nodes (marked as not expanded)
    persona_nodes = get_persona_nodes_details(persona_node_ids, nodes_data)
    for node in persona_nodes:
        node["是否扩展"] = False
        node["source"] = "人设"

    # Edges between persona nodes
    persona_edges = get_edges_between_nodes(persona_node_ids, edges_data)

    # Mirrored edges between post nodes (projection of the direct persona edges)
    post_edges = create_mirrored_post_edges(match_edges, persona_edges)

    # Expand the persona nodes one hop: only tag nodes expand to categories via
    # "属于" edges (only tags can "belong to" a category)
    tag_persona_ids = {pid for pid in persona_node_ids if "_标签_" in pid}
    expanded_nodes, expanded_edges, _ = expand_one_layer(
        tag_persona_ids, edges_data, nodes_data,
        edge_types=["属于"],
        direction="outgoing"  # expand outwards only: tag -> category
    )

    # Post mirror edges via expanded nodes.
    # Logic: post -> tag -> category; if two categories are connected, the
    # corresponding posts get a second-order edge.

    # 1. Build the mapping tag -> posts
    tag_to_posts = {}
    for edge in match_edges:
        post_node_id = edge["源节点ID"]
        tag_id = edge["目标节点ID"]
        if tag_id not in tag_to_posts:
            tag_to_posts[tag_id] = []
        if post_node_id not in tag_to_posts[tag_id]:
            tag_to_posts[tag_id].append(post_node_id)

    # 2. Build the mapping category -> tags (via "属于" edges)
    expanded_node_ids = set(n["节点ID"] for n in expanded_nodes)
    category_to_tags = {}  # category -> [connected tags]
    for edge in expanded_edges:
        src, tgt = edge["源节点ID"], edge["目标节点ID"]
        # "属于" edge: tag -> category
        if tgt in expanded_node_ids and src in persona_node_ids:
            if tgt not in category_to_tags:
                category_to_tags[tgt] = []
            if src not in category_to_tags[tgt]:
                category_to_tags[tgt].append(src)

    # 3. Edges between the expanded nodes (categories)
    category_edges = []
    for edge in edges_data.get("边列表", []):
        src, tgt = edge["源节点ID"], edge["目标节点ID"]
        # Both endpoints are expanded nodes (categories)
        if src in expanded_node_ids and tgt in expanded_node_ids:
            category_edges.append(edge)

    # 4. Generate second-order mirror edges between posts from the category edges
    post_edges_via_expanded = []
    seen_mirror = set()
    for cat_edge in category_edges:
        cat1, cat2 = cat_edge["源节点ID"], cat_edge["目标节点ID"]
        edge_type = cat_edge["边类型"]
        # Tags attached to the two categories
        tags1 = category_to_tags.get(cat1, [])
        tags2 = category_to_tags.get(cat2, [])
        # Follow the tags back to their posts and emit second-order edges
        for tag1 in tags1:
            for tag2 in tags2:
                posts1 = tag_to_posts.get(tag1, [])
                posts2 = tag_to_posts.get(tag2, [])
                for post1 in posts1:
                    for post2 in posts2:
                        if post1 == post2:
                            continue
                        edge_key = tuple(sorted([post1, post2])) + (f"二阶_{edge_type}",)
                        if edge_key in seen_mirror:
                            continue
                        seen_mirror.add(edge_key)
                        post_edges_via_expanded.append({
                            "源节点ID": post1,
                            "目标节点ID": post2,
                            "边类型": f"二阶_{edge_type}",
                            "边详情": {
                                "原始边类型": edge_type,
                                "分类节点1": cat1,
                                "分类节点2": cat2,
                                "标签节点1": tag1,
                                "标签节点2": tag2
                            }
                        })

    # Keep only the expanded nodes and edges that actually help connect posts.
    # 1. Expanded nodes (categories) that produced second-order post edges
    useful_expanded_ids = set()
    for edge in post_edges_via_expanded:
        cat1 = edge.get("边详情", {}).get("分类节点1")
        cat2 = edge.get("边详情", {}).get("分类节点2")
        if cat1:
            useful_expanded_ids.add(cat1)
        if cat2:
            useful_expanded_ids.add(cat2)

    # 2. Keep only the useful expanded nodes
    useful_expanded_nodes = [n for n in expanded_nodes if n["节点ID"] in useful_expanded_ids]

    # 3. Keep only the "属于" edges attached to a useful expanded node
    useful_expanded_edges = [
        e for e in expanded_edges
        if e["目标节点ID"] in useful_expanded_ids or e["源节点ID"] in useful_expanded_ids
    ]

    # 4. Keep only the category edges that produced second-order post edges
    useful_category_edges = [
        e for e in category_edges
        if e["源节点ID"] in useful_expanded_ids and e["目标节点ID"] in useful_expanded_ids
    ]

    # Merge the node lists
    all_nodes = post_nodes + persona_nodes + useful_expanded_nodes

    # Merge the edge lists
    all_edges = (match_edges + persona_edges + post_edges
                 + useful_expanded_edges + useful_category_edges + post_edges_via_expanded)

    # Deduplicate the edges
    seen_edges = set()
    unique_edges = []
    for edge in all_edges:
        edge_key = (edge["源节点ID"], edge["目标节点ID"], edge["边类型"])
        if edge_key not in seen_edges:
            seen_edges.add(edge_key)
            unique_edges.append(edge)
    all_edges = unique_edges

    # Build the per-node edge index
    edges_by_node = {}
    for edge in all_edges:
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]
        edge_type = edge["边类型"]
        if source_id not in edges_by_node:
            edges_by_node[source_id] = {}
        if edge_type not in edges_by_node[source_id]:
            edges_by_node[source_id][edge_type] = {}
        edges_by_node[source_id][edge_type][target_id] = edge

    # Assemble the output data
    output_data = {
        "说明": {
            "帖子ID": post_id,
            "帖子标题": post_title,
            "描述": "帖子与人设的节点匹配关系",
            "统计": {
                "帖子节点数": len(post_nodes),
                "人设节点数(直接匹配)": len(persona_nodes),
                "扩展节点数(有效)": len(useful_expanded_nodes),
                "匹配边数": len(match_edges),
                "人设节点间边数": len(persona_edges),
                "扩展边数(有效)": len(useful_expanded_edges),
                "帖子镜像边数(直接)": len(post_edges),
                "帖子镜像边数(二阶)": len(post_edges_via_expanded),
                "总节点数": len(all_nodes),
                "总边数": len(all_edges)
            }
        },
        "帖子节点列表": post_nodes,
        "人设节点列表": persona_nodes,
        "扩展节点列表": useful_expanded_nodes,
        "匹配边列表": match_edges,
        "人设节点间边列表": persona_edges,
        "扩展边列表": useful_expanded_edges,
        "帖子镜像边列表(直接)": post_edges,
        "帖子镜像边列表(二阶)": post_edges_via_expanded,
        "节点列表": all_nodes,
        "边列表": all_edges,
        "节点边索引": edges_by_node
    }

    # Save the output file
    output_file = output_dir / f"{post_id}_match_graph.json"
    with open(output_file, "w", encoding="utf-8") as f:
        json.dump(output_data, f, ensure_ascii=False, indent=2)

    return {
        "帖子ID": post_id,
        "帖子节点数": len(post_nodes),
        "人设节点数": len(persona_nodes),
        "扩展节点数": len(useful_expanded_nodes),
        "匹配边数": len(match_edges),
        "人设边数": len(persona_edges),
        "扩展边数": len(useful_expanded_edges),
        "帖子边数(直接)": len(post_edges),
        "帖子边数(二阶)": len(post_edges_via_expanded),
        "总节点数": len(all_nodes),
        "总边数": len(all_edges),
        "输出文件": str(output_file)
    }
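
# Shape of the 节点边索引 built above (placeholder IDs):
#   {源节点ID: {边类型: {目标节点ID: <full edge dict>}}}
# e.g. edges_by_node["帖子_灵感点_标签_某特征"]["匹配"]["灵感点_标签_某特征"]
# returns the corresponding match edge.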


def main():
    # Use the path configuration
    config = PathConfig()
    config.ensure_dirs()
    print(f"账号: {config.account_name}")
    print(f"输出版本: {config.output_version}")
    print()

    # Input files/directories
    filtered_results_dir = config.intermediate_dir / "filtered_results"
    nodes_file = config.intermediate_dir / "节点列表.json"
    edges_file = config.intermediate_dir / "边关系.json"

    # Output directory
    output_dir = config.intermediate_dir / "match_graph"
    output_dir.mkdir(parents=True, exist_ok=True)

    print(f"输入:")
    print(f" 匹配结果目录: {filtered_results_dir}")
    print(f" 节点列表: {nodes_file}")
    print(f" 边关系: {edges_file}")
    print(f"\n输出目录: {output_dir}")
    print()

    # Read the node and edge data
    print("正在读取节点列表...")
    with open(nodes_file, "r", encoding="utf-8") as f:
        nodes_data = json.load(f)
    print(f" 共 {len(nodes_data.get('节点列表', []))} 个节点")

    print("正在读取边关系...")
    with open(edges_file, "r", encoding="utf-8") as f:
        edges_data = json.load(f)
    print(f" 共 {len(edges_data.get('边列表', []))} 条边")

    # Process all matching result files
    print("\n" + "="*60)
    print("处理匹配结果文件...")
    filtered_files = list(filtered_results_dir.glob("*_filtered.json"))
    print(f"找到 {len(filtered_files)} 个匹配结果文件")

    results = []
    for i, filtered_file in enumerate(filtered_files, 1):
        print(f"\n[{i}/{len(filtered_files)}] 处理: {filtered_file.name}")
        result = process_filtered_result(filtered_file, nodes_data, edges_data, output_dir)
        results.append(result)
        print(f" 帖子节点: {result['帖子节点数']}, 人设节点: {result['人设节点数']}, 扩展节点: {result['扩展节点数']}")
        print(f" 匹配边: {result['匹配边数']}, 人设边: {result['人设边数']}, 扩展边: {result['扩展边数']}")
        print(f" 帖子边(直接): {result['帖子边数(直接)']}, 帖子边(二阶): {result['帖子边数(二阶)']}")

    # Summary statistics
    print("\n" + "="*60)
    print("处理完成!")
    print(f"\n汇总:")
    print(f" 处理文件数: {len(results)}")
    total_post = sum(r['帖子节点数'] for r in results)
    total_persona = sum(r['人设节点数'] for r in results)
    total_expanded = sum(r['扩展节点数'] for r in results)
    total_match = sum(r['匹配边数'] for r in results)
    total_persona_edges = sum(r['人设边数'] for r in results)
    total_expanded_edges = sum(r['扩展边数'] for r in results)
    total_post_edges_direct = sum(r['帖子边数(直接)'] for r in results)
    total_post_edges_2hop = sum(r['帖子边数(二阶)'] for r in results)
    print(f" 总帖子节点: {total_post}")
    print(f" 总人设节点: {total_persona}")
    print(f" 总扩展节点: {total_expanded}")
    print(f" 总匹配边: {total_match}")
    print(f" 总人设边: {total_persona_edges}")
    print(f" 总扩展边: {total_expanded_edges}")
    print(f" 总帖子边(直接): {total_post_edges_direct}")
    print(f" 总帖子边(二阶): {total_post_edges_2hop}")
    print(f"\n输出目录: {output_dir}")


if __name__ == "__main__":
    main()
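
# Typical invocation (the exact path is an assumption based on the
# script.data_processing.path_config import and the three-level project_root
# computed above):
#   python script/data_processing/build_match_graph.py
# Account name and output version are resolved by PathConfig itself, as shown
# by the prints at the start of main().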