#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""build_match_graph.py

Build the post/persona node-edge match graph from the filtered match results.

Inputs:
1. Match result files under the filtered_results directory
2. 节点列表.json (node list)
3. 边关系.json (edge relations)

Output:
1. Node/edge graph files under the match_graph directory
"""
import json
import sys
from pathlib import Path
from typing import Dict, List, Set, Any, Optional

# Add the project root to the import path
project_root = Path(__file__).parent.parent.parent
sys.path.insert(0, str(project_root))

from script.data_processing.path_config import PathConfig

def build_post_node_id(dimension: str, node_type: str, name: str) -> str:
    """Build a post-side node ID.

    Args:
        dimension: dimension (灵感点 / 关键点 / 目的点)
        node_type: node type (点 / 标签)
        name: node name
    """
    return f"帖子_{dimension}_{node_type}_{name}"


def build_persona_node_id(dimension: str, node_type: str, name: str) -> str:
    """Build a persona-side node ID."""
    return f"{dimension}_{node_type}_{name}"
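
# Illustrative only: with hypothetical inputs, the two ID builders above produce
# strings of the following shape (the names are made up, not taken from real data):
#
#   build_post_node_id("灵感点", "标签", "手绘插画")    -> "帖子_灵感点_标签_手绘插画"
#   build_persona_node_id("灵感点", "标签", "手绘插画") -> "灵感点_标签_手绘插画"
#
# i.e. post-side IDs carry a "帖子_" prefix so they can never collide with persona IDs.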

def extract_matched_nodes_and_edges(filtered_data: Dict) -> tuple:
    """
    Extract post nodes (points + tags), persona node IDs and edges from one match result.

    Args:
        filtered_data: match result data

    Returns:
        (post node list, persona node ID set, edge list)

    Post nodes include point nodes (灵感点/关键点/目的点) and tag nodes.
    Edges include "属于" edges (tag → point) and "匹配" edges (tag → persona).
    """
    post_nodes = []
    persona_node_ids = set()
    edges = []  # holds both "属于" (belongs-to) and "匹配" (match) edges

    how_result = filtered_data.get("how解构结果", {})

    # Dimension mapping: list key in the input -> dimension name
    dimension_mapping = {
        "灵感点列表": "灵感点",
        "目的点列表": "目的点",
        "关键点列表": "关键点"
    }

    for list_key, dimension in dimension_mapping.items():
        points = how_result.get(list_key, [])
        for point in points:
            point_name = point.get("名称", "")
            point_desc = point.get("描述", "")
            if not point_name:
                continue

            # Create the post point node
            point_node_id = build_post_node_id(dimension, "点", point_name)
            point_node = {
                "节点ID": point_node_id,
                "节点名称": point_name,
                "节点类型": "点",
                "节点层级": dimension,
                "描述": point_desc,
                "source": "帖子"
            }
            # Avoid duplicate point nodes
            if not any(n["节点ID"] == point_node_id for n in post_nodes):
                post_nodes.append(point_node)

            # Walk the how-step list and extract tag nodes
            how_steps = point.get("how步骤列表", [])
            for step in how_steps:
                features = step.get("特征列表", [])
                for feature in features:
                    feature_name = feature.get("特征名称", "")
                    weight = feature.get("权重", 0)
                    match_results = feature.get("匹配结果", [])
                    if not feature_name:
                        continue

                    # Create the post tag node (whether or not it has matches)
                    tag_node_id = build_post_node_id(dimension, "标签", feature_name)
                    tag_node = {
                        "节点ID": tag_node_id,
                        "节点名称": feature_name,
                        "节点类型": "标签",
                        "节点层级": dimension,
                        "权重": weight,
                        "source": "帖子",
                        "已匹配": len(match_results) > 0  # flag: has at least one match
                    }
                    # Avoid duplicate tag nodes
                    if not any(n["节点ID"] == tag_node_id for n in post_nodes):
                        post_nodes.append(tag_node)

                    # Create the tag → point "属于" (belongs-to) edge
                    belong_edge = {
                        "源节点ID": tag_node_id,
                        "目标节点ID": point_node_id,
                        "边类型": "属于",
                        "边详情": {
                            "说明": f"标签「{feature_name}」属于点「{point_name}」"
                        }
                    }
                    # Avoid duplicate belongs-to edges
                    edge_key = (tag_node_id, point_node_id, "属于")
                    if not any((e["源节点ID"], e["目标节点ID"], e["边类型"]) == edge_key for e in edges):
                        edges.append(belong_edge)

                    # If there are match results, create match edges
                    if match_results:
                        for match in match_results:
                            persona_name = match.get("人设特征名称", "")
                            persona_dimension = match.get("人设特征层级", "")
                            persona_type = match.get("特征类型", "标签")
                            match_detail = match.get("匹配结果", {})
                            if not persona_name or not persona_dimension:
                                continue

                            # Build the persona node ID
                            persona_node_id = build_persona_node_id(
                                persona_dimension, persona_type, persona_name
                            )
                            persona_node_ids.add(persona_node_id)

                            # Create the match edge
                            match_edge = {
                                "源节点ID": tag_node_id,
                                "目标节点ID": persona_node_id,
                                "边类型": "匹配",
                                "边详情": {
                                    "相似度": match_detail.get("相似度", 0),
                                    "说明": match_detail.get("说明", "")
                                }
                            }
                            edges.append(match_edge)

    return post_nodes, persona_node_ids, edges
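
# A minimal sketch of the input shape this function expects, reconstructed from the
# .get() calls above; the field values here are hypothetical placeholders:
#
# filtered_data = {
#     "how解构结果": {
#         "灵感点列表": [{
#             "名称": "示例点",
#             "描述": "…",
#             "how步骤列表": [{
#                 "特征列表": [{
#                     "特征名称": "示例标签",
#                     "权重": 0.8,
#                     "匹配结果": [{
#                         "人设特征名称": "示例人设标签",
#                         "人设特征层级": "灵感点",
#                         "特征类型": "标签",
#                         "匹配结果": {"相似度": 0.9, "说明": "…"}
#                     }]
#                 }]
#             }]
#         }],
#         "目的点列表": [],
#         "关键点列表": []
#     }
# }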

def get_persona_nodes_details(
    persona_node_ids: Set[str],
    nodes_data: Dict
) -> List[Dict]:
    """
    Look up the full persona node records from the node list.

    Args:
        persona_node_ids: set of persona node IDs
        nodes_data: node list data

    Returns:
        list of persona node records
    """
    persona_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in persona_node_ids:
            persona_nodes.append(node)
    return persona_nodes

def get_edges_between_nodes(
    node_ids: Set[str],
    edges_data: Dict
) -> List[Dict]:
    """
    Get the edges whose two endpoints are both in the given node set.

    Args:
        node_ids: set of node IDs
        edges_data: edge relation data

    Returns:
        list of edges between the nodes
    """
    edges_between = []
    all_edges = edges_data.get("边列表", [])
    for edge in all_edges:
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]
        # Both endpoints are in the set
        if source_id in node_ids and target_id in node_ids:
            edges_between.append(edge)
    return edges_between

def create_mirrored_post_edges(
    match_edges: List[Dict],
    persona_edges: List[Dict]
) -> List[Dict]:
    """
    Mirror persona-to-persona edges onto the post side.

    Logic: if persona nodes A and B are connected, post node X matches A,
    and post node Y matches B, then create a mirrored edge between X and Y.

    Args:
        match_edges: match edges (post node -> persona node)
        persona_edges: edges between persona nodes

    Returns:
        list of mirrored edges between post nodes
    """
    # Build the reverse mapping from persona node to post nodes:
    # persona_id -> [post_id1, post_id2, ...]
    persona_to_posts = {}
    for edge in match_edges:
        post_id = edge["源节点ID"]
        persona_id = edge["目标节点ID"]
        if persona_id not in persona_to_posts:
            persona_to_posts[persona_id] = []
        if post_id not in persona_to_posts[persona_id]:
            persona_to_posts[persona_id].append(post_id)

    # Mirror each persona edge onto the post side
    post_edges = []
    seen_edges = set()
    for persona_edge in persona_edges:
        source_persona = persona_edge["源节点ID"]
        target_persona = persona_edge["目标节点ID"]
        edge_type = persona_edge["边类型"]

        # Post nodes matched to the two persona endpoints
        source_posts = persona_to_posts.get(source_persona, [])
        target_posts = persona_to_posts.get(target_persona, [])

        # Create a mirrored edge for each pair of post nodes
        for src_post in source_posts:
            for tgt_post in target_posts:
                if src_post == tgt_post:
                    continue
                # Use a sorted key so A-B and B-A count as the same edge
                edge_key = tuple(sorted([src_post, tgt_post])) + (edge_type,)
                if edge_key in seen_edges:
                    continue
                seen_edges.add(edge_key)

                post_edge = {
                    "源节点ID": src_post,
                    "目标节点ID": tgt_post,
                    "边类型": f"镜像_{edge_type}",  # mark as a mirrored edge
                    "边详情": {
                        "原始边类型": edge_type,
                        "源人设节点": source_persona,
                        "目标人设节点": target_persona
                    }
                }
                post_edges.append(post_edge)
    return post_edges
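
# Illustrative example of the mirroring rule, with hypothetical IDs and a
# hypothetical edge type: if post tags X and Y match persona nodes A and B
# respectively, and the persona graph holds an edge A --支撑--> B, the projection
# adds X --镜像_支撑--> Y on the post side. The sorted (src, tgt) key means X→Y
# and Y→X are emitted only once per edge type.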

def expand_one_layer(
    node_ids: Set[str],
    edges_data: Dict,
    nodes_data: Dict,
    edge_types: List[str] = None,
    direction: str = "both"
) -> tuple:
    """
    Expand one hop from the given nodes, collecting neighbouring nodes and connecting edges.

    Args:
        node_ids: starting node ID set
        edges_data: edge relation data
        nodes_data: node list data
        edge_types: edge types to follow; None means all types
        direction: expansion direction
            - "outgoing": follow outgoing edges only (source in set, expand to target)
            - "incoming": follow incoming edges only (target in set, expand to source)
            - "both": both directions

    Returns:
        (expanded node list, expanded edge list, expanded node ID set)
    """
    expanded_node_ids = set()
    expanded_edges = []
    all_edges = edges_data.get("边列表", [])

    # Find every edge and node adjacent to the starting nodes
    for edge in all_edges:
        # Filter by edge type
        if edge_types and edge["边类型"] not in edge_types:
            continue
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]

        # Outgoing: the source is in the set, expand to the target
        if direction in ["outgoing", "both"]:
            if source_id in node_ids and target_id not in node_ids:
                expanded_node_ids.add(target_id)
                expanded_edges.append(edge)

        # Incoming: the target is in the set, expand to the source
        if direction in ["incoming", "both"]:
            if target_id in node_ids and source_id not in node_ids:
                expanded_node_ids.add(source_id)
                expanded_edges.append(edge)

    # Fetch details for the expanded nodes
    expanded_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in expanded_node_ids:
            # Mark as an expanded node
            node_copy = node.copy()
            node_copy["是否扩展"] = True
            node_copy["source"] = "人设"
            expanded_nodes.append(node_copy)

    return expanded_nodes, expanded_edges, expanded_node_ids
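
# A quick sketch of the direction parameter, under the assumption (relied on by
# process_filtered_result below) that the persona graph stores "属于" edges as
# 标签 -> 分类:
#
#   expand_one_layer({tag_id}, edges_data, nodes_data,
#                    edge_types=["属于"], direction="outgoing")
#
# follows tag -> category edges only, so the expansion returns the categories a
# matched tag belongs to, never unrelated tags hanging off the same category.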

def expand_and_filter_useful_nodes(
    matched_persona_ids: Set[str],
    match_edges: List[Dict],
    edges_data: Dict,
    nodes_data: Dict,
    exclude_edge_types: List[str] = None
) -> tuple:
    """
    Expand the persona nodes one hop and keep only expanded nodes that create new post links.

    Logic: if an expanded node E connects two or more matched persona nodes,
    then E can produce new post-to-post links, so E is kept.

    Args:
        matched_persona_ids: matched persona node ID set
        match_edges: match edge list
        edges_data: edge relation data
        nodes_data: node list data
        exclude_edge_types: edge types to exclude

    Returns:
        (useful expanded node list, expanded edge list, post mirror edges via expanded nodes)
    """
    if exclude_edge_types is None:
        exclude_edge_types = []
    all_edges = edges_data.get("边列表", [])

    # Build the persona node -> post nodes mapping
    persona_to_posts = {}
    for edge in match_edges:
        post_id = edge["源节点ID"]
        persona_id = edge["目标节点ID"]
        if persona_id not in persona_to_posts:
            persona_to_posts[persona_id] = []
        if post_id not in persona_to_posts[persona_id]:
            persona_to_posts[persona_id].append(post_id)

    # Collect every expansion candidate and the matched persona nodes it touches:
    # expanded_node_id -> [(matched_persona_id, edge), ...]
    expanded_connections = {}
    for edge in all_edges:
        # Skip excluded edge types
        if edge["边类型"] in exclude_edge_types:
            continue
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]

        # The source is matched, the target is an expansion candidate
        if source_id in matched_persona_ids and target_id not in matched_persona_ids:
            if target_id not in expanded_connections:
                expanded_connections[target_id] = []
            expanded_connections[target_id].append((source_id, edge))

        # The target is matched, the source is an expansion candidate
        if target_id in matched_persona_ids and source_id not in matched_persona_ids:
            if source_id not in expanded_connections:
                expanded_connections[source_id] = []
            expanded_connections[source_id].append((target_id, edge))

    # Filter: keep only expanded nodes connected to two or more matched persona nodes
    useful_expanded_ids = set()
    useful_edges = []
    post_mirror_edges = []
    seen_mirror_edges = set()

    for expanded_id, connections in expanded_connections.items():
        connected_personas = list(set([c[0] for c in connections]))
        if len(connected_personas) >= 2:
            useful_expanded_ids.add(expanded_id)
            # Collect the connecting edges
            for persona_id, edge in connections:
                useful_edges.append(edge)

            # For each pair of persona nodes bridged by this expanded node,
            # create a second-order post mirror edge
            for i, p1 in enumerate(connected_personas):
                for p2 in connected_personas[i + 1:]:
                    posts1 = persona_to_posts.get(p1, [])
                    posts2 = persona_to_posts.get(p2, [])
                    # Edge types linking p1 and p2 to this expanded node
                    edge_types_p1 = [c[1]["边类型"] for c in connections if c[0] == p1]
                    edge_types_p2 = [c[1]["边类型"] for c in connections if c[0] == p2]
                    # Use the first edge type as the representative
                    edge_type = edge_types_p1[0] if edge_types_p1 else (edge_types_p2[0] if edge_types_p2 else "扩展")

                    for post1 in posts1:
                        for post2 in posts2:
                            if post1 == post2:
                                continue
                            # Avoid duplicates
                            edge_key = tuple(sorted([post1, post2])) + (f"二阶_{edge_type}",)
                            if edge_key in seen_mirror_edges:
                                continue
                            seen_mirror_edges.add(edge_key)
                            post_mirror_edges.append({
                                "源节点ID": post1,
                                "目标节点ID": post2,
                                "边类型": f"二阶_{edge_type}",
                                "边详情": {
                                    "原始边类型": edge_type,
                                    "扩展节点": expanded_id,
                                    "源人设节点": p1,
                                    "目标人设节点": p2
                                }
                            })

    # Fetch details for the useful expanded nodes
    useful_expanded_nodes = []
    all_nodes = nodes_data.get("节点列表", [])
    for node in all_nodes:
        if node["节点ID"] in useful_expanded_ids:
            node_copy = node.copy()
            node_copy["是否扩展"] = True
            useful_expanded_nodes.append(node_copy)

    # Deduplicate the edges
    seen_edges = set()
    unique_edges = []
    for edge in useful_edges:
        edge_key = (edge["源节点ID"], edge["目标节点ID"], edge["边类型"])
        if edge_key not in seen_edges:
            seen_edges.add(edge_key)
            unique_edges.append(edge)

    return useful_expanded_nodes, unique_edges, post_mirror_edges
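
# Worked example of the "keep only useful expansions" rule (hypothetical IDs): if an
# expansion candidate E is reached from matched persona nodes A and B, it bridges a
# pair and is kept, and every (post of A, post of B) pair gets a "二阶_<edge type>"
# mirror edge through E; a candidate touching only A is dropped, because it cannot
# create any new post-to-post link.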

def process_filtered_result(
    filtered_file: Path,
    nodes_data: Dict,
    edges_data: Dict,
    output_dir: Path
) -> Dict:
    """
    Process a single match result file.

    Args:
        filtered_file: match result file path
        nodes_data: node list data
        edges_data: edge relation data
        output_dir: output directory

    Returns:
        processing statistics
    """
    # Read the match result
    with open(filtered_file, "r", encoding="utf-8") as f:
        filtered_data = json.load(f)

    post_id = filtered_data.get("帖子id", "")
    post_detail = filtered_data.get("帖子详情", {})
    post_title = post_detail.get("title", "")

    # Extract nodes and edges (post point nodes, tag nodes, belongs-to edges and match edges)
    post_nodes, persona_node_ids, post_edges_raw = extract_matched_nodes_and_edges(filtered_data)

    # Split the post-side edges: "属于" (tag → point) and "匹配" (tag → persona)
    post_belong_edges = [e for e in post_edges_raw if e["边类型"] == "属于"]
    match_edges = [e for e in post_edges_raw if e["边类型"] == "匹配"]

    # Split post point nodes and tag nodes
    post_point_nodes = [n for n in post_nodes if n["节点类型"] == "点"]
    post_tag_nodes = [n for n in post_nodes if n["节点类型"] == "标签"]

    # Get details of the directly matched persona nodes (marked as non-expanded)
    persona_nodes = get_persona_nodes_details(persona_node_ids, nodes_data)
    for node in persona_nodes:
        node["是否扩展"] = False
        node["source"] = "人设"

    # Edges between the persona nodes
    persona_edges = get_edges_between_nodes(persona_node_ids, edges_data)

    # Mirrored edges between post nodes (projection of the direct persona edges)
    post_edges = create_mirrored_post_edges(match_edges, persona_edges)

    # Expand the persona nodes one hop: only tag-type persona nodes can reach a
    # category through a "属于" edge, so filter to tag-type IDs first
    tag_persona_ids = {pid for pid in persona_node_ids if "_标签_" in pid}
    expanded_nodes, expanded_edges, _ = expand_one_layer(
        tag_persona_ids, edges_data, nodes_data,
        edge_types=["属于"],
        direction="outgoing"  # expand outward only: tag -> category
    )

    # Build post mirror edges that go through expanded nodes.
    # Logic: post -> tag -> category; if two categories are connected,
    # the corresponding posts get a second-order edge.

    # 1. Build the tag -> posts mapping
    #    (key = matched persona tag node, value = post-side tag nodes that matched it)
    tag_to_posts = {}
    for edge in match_edges:
        post_node_id = edge["源节点ID"]
        tag_id = edge["目标节点ID"]
        if tag_id not in tag_to_posts:
            tag_to_posts[tag_id] = []
        if post_node_id not in tag_to_posts[tag_id]:
            tag_to_posts[tag_id].append(post_node_id)

    # 2. Build the category -> tags mapping (via "属于" edges)
    expanded_node_ids = set(n["节点ID"] for n in expanded_nodes)
    category_to_tags = {}  # category -> [connected tags]
    for edge in expanded_edges:
        src, tgt = edge["源节点ID"], edge["目标节点ID"]
        # "属于" edge: tag -> category
        if tgt in expanded_node_ids and src in persona_node_ids:
            if tgt not in category_to_tags:
                category_to_tags[tgt] = []
            if src not in category_to_tags[tgt]:
                category_to_tags[tgt].append(src)

    # 3. Collect edges between expanded (category) nodes
    category_edges = []
    for edge in edges_data.get("边列表", []):
        src, tgt = edge["源节点ID"], edge["目标节点ID"]
        # Both endpoints are expanded (category) nodes
        if src in expanded_node_ids and tgt in expanded_node_ids:
            category_edges.append(edge)

    # 4. Generate second-order post mirror edges from the category-to-category edges
    post_edges_via_expanded = []
    seen_mirror = set()
    for cat_edge in category_edges:
        cat1, cat2 = cat_edge["源节点ID"], cat_edge["目标节点ID"]
        edge_type = cat_edge["边类型"]
        # Tags attached to the two categories
        tags1 = category_to_tags.get(cat1, [])
        tags2 = category_to_tags.get(cat2, [])
        # Walk from the tags back to the posts to produce second-order edges
        for tag1 in tags1:
            for tag2 in tags2:
                posts1 = tag_to_posts.get(tag1, [])
                posts2 = tag_to_posts.get(tag2, [])
                for post1 in posts1:
                    for post2 in posts2:
                        if post1 == post2:
                            continue
                        edge_key = tuple(sorted([post1, post2])) + (f"二阶_{edge_type}",)
                        if edge_key in seen_mirror:
                            continue
                        seen_mirror.add(edge_key)
                        post_edges_via_expanded.append({
                            "源节点ID": post1,
                            "目标节点ID": post2,
                            "边类型": f"二阶_{edge_type}",
                            "边详情": {
                                "原始边类型": edge_type,
                                "分类节点1": cat1,
                                "分类节点2": cat2,
                                "标签节点1": tag1,
                                "标签节点2": tag2
                            }
                        })

    # Keep only expanded nodes and edges that actually help connect posts.
    # 1. Expanded (category) nodes that produced second-order post edges
    useful_expanded_ids = set()
    for edge in post_edges_via_expanded:
        cat1 = edge.get("边详情", {}).get("分类节点1")
        cat2 = edge.get("边详情", {}).get("分类节点2")
        if cat1:
            useful_expanded_ids.add(cat1)
        if cat2:
            useful_expanded_ids.add(cat2)

    # 2. Keep only the useful expanded nodes
    useful_expanded_nodes = [n for n in expanded_nodes if n["节点ID"] in useful_expanded_ids]

    # 3. Keep only "属于" edges touching a useful expanded node
    useful_expanded_edges = [e for e in expanded_edges
                             if e["目标节点ID"] in useful_expanded_ids or e["源节点ID"] in useful_expanded_ids]

    # 4. Keep only category-to-category edges that produced second-order post edges
    useful_category_edges = [e for e in category_edges
                             if e["源节点ID"] in useful_expanded_ids and e["目标节点ID"] in useful_expanded_ids]

    # Merge the node lists
    all_nodes = post_nodes + persona_nodes + useful_expanded_nodes

    # Merge the edge lists (including the post-internal "属于" edges)
    all_edges = (post_belong_edges + match_edges + persona_edges + post_edges
                 + useful_expanded_edges + useful_category_edges + post_edges_via_expanded)

    # Deduplicate the edges
    seen_edges = set()
    unique_edges = []
    for edge in all_edges:
        edge_key = (edge["源节点ID"], edge["目标节点ID"], edge["边类型"])
        if edge_key not in seen_edges:
            seen_edges.add(edge_key)
            unique_edges.append(edge)
    all_edges = unique_edges

    # Build a per-node edge index
    edges_by_node = {}
    for edge in all_edges:
        source_id = edge["源节点ID"]
        target_id = edge["目标节点ID"]
        edge_type = edge["边类型"]
        if source_id not in edges_by_node:
            edges_by_node[source_id] = {}
        if edge_type not in edges_by_node[source_id]:
            edges_by_node[source_id][edge_type] = {}
        edges_by_node[source_id][edge_type][target_id] = edge

    # Assemble the output data
    output_data = {
        "说明": {
            "帖子ID": post_id,
            "帖子标题": post_title,
            "描述": "帖子与人设的节点匹配关系",
            "统计": {
                "帖子点节点数": len(post_point_nodes),
                "帖子标签节点数": len(post_tag_nodes),
                "帖子节点总数": len(post_nodes),
                "人设节点数(直接匹配)": len(persona_nodes),
                "扩展节点数(有效)": len(useful_expanded_nodes),
                "帖子属于边数": len(post_belong_edges),
                "匹配边数": len(match_edges),
                "人设节点间边数": len(persona_edges),
                "扩展边数(有效)": len(useful_expanded_edges),
                "帖子镜像边数(直接)": len(post_edges),
                "帖子镜像边数(二阶)": len(post_edges_via_expanded),
                "总节点数": len(all_nodes),
                "总边数": len(all_edges)
            }
        },
        "帖子点节点列表": post_point_nodes,
        "帖子标签节点列表": post_tag_nodes,
        "帖子节点列表": post_nodes,
        "人设节点列表": persona_nodes,
        "扩展节点列表": useful_expanded_nodes,
        "帖子属于边列表": post_belong_edges,
        "匹配边列表": match_edges,
        "人设节点间边列表": persona_edges,
        "扩展边列表": useful_expanded_edges,
        "帖子镜像边列表(直接)": post_edges,
        "帖子镜像边列表(二阶)": post_edges_via_expanded,
        "节点列表": all_nodes,
        "边列表": all_edges,
        "节点边索引": edges_by_node
    }

    # Save the output file
    output_file = output_dir / f"{post_id}_match_graph.json"
    with open(output_file, "w", encoding="utf-8") as f:
        json.dump(output_data, f, ensure_ascii=False, indent=2)

    return {
        "帖子ID": post_id,
        "帖子点节点数": len(post_point_nodes),
        "帖子标签节点数": len(post_tag_nodes),
        "帖子节点数": len(post_nodes),
        "人设节点数": len(persona_nodes),
        "扩展节点数": len(useful_expanded_nodes),
        "帖子属于边数": len(post_belong_edges),
        "匹配边数": len(match_edges),
        "人设边数": len(persona_edges),
        "扩展边数": len(useful_expanded_edges),
        "帖子边数(直接)": len(post_edges),
        "帖子边数(二阶)": len(post_edges_via_expanded),
        "总节点数": len(all_nodes),
        "总边数": len(all_edges),
        "输出文件": str(output_file)
    }
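
# Standalone usage sketch (the paths here are hypothetical; main() below wires the
# real ones from PathConfig):
#
#   nodes_data = json.load(open("节点列表.json", encoding="utf-8"))
#   edges_data = json.load(open("边关系.json", encoding="utf-8"))
#   stats = process_filtered_result(Path("demo_filtered.json"),
#                                   nodes_data, edges_data, Path("match_graph"))
#   print(stats["总节点数"], stats["总边数"])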

def main():
    # Load the path configuration
    config = PathConfig()
    config.ensure_dirs()
    print(f"Account: {config.account_name}")
    print(f"Output version: {config.output_version}")
    print()

    # Input files / directories
    filtered_results_dir = config.intermediate_dir / "filtered_results"
    nodes_file = config.intermediate_dir / "节点列表.json"
    edges_file = config.intermediate_dir / "边关系.json"

    # Output directory
    output_dir = config.intermediate_dir / "match_graph"
    output_dir.mkdir(parents=True, exist_ok=True)

    print("Inputs:")
    print(f"  Match results dir: {filtered_results_dir}")
    print(f"  Node list: {nodes_file}")
    print(f"  Edge relations: {edges_file}")
    print(f"\nOutput dir: {output_dir}")
    print()

    # Read the node and edge data
    print("Reading node list...")
    with open(nodes_file, "r", encoding="utf-8") as f:
        nodes_data = json.load(f)
    print(f"  {len(nodes_data.get('节点列表', []))} nodes")

    print("Reading edge relations...")
    with open(edges_file, "r", encoding="utf-8") as f:
        edges_data = json.load(f)
    print(f"  {len(edges_data.get('边列表', []))} edges")

    # Process every match result file
    print("\n" + "=" * 60)
    print("Processing match result files...")
    filtered_files = list(filtered_results_dir.glob("*_filtered.json"))
    print(f"Found {len(filtered_files)} match result files")

    results = []
    for i, filtered_file in enumerate(filtered_files, 1):
        print(f"\n[{i}/{len(filtered_files)}] Processing: {filtered_file.name}")
        result = process_filtered_result(filtered_file, nodes_data, edges_data, output_dir)
        results.append(result)
        print(f"  Post nodes: {result['帖子节点数']}, persona nodes: {result['人设节点数']}, expanded nodes: {result['扩展节点数']}")
        print(f"  Match edges: {result['匹配边数']}, persona edges: {result['人设边数']}, expanded edges: {result['扩展边数']}")
        print(f"  Post edges (direct): {result['帖子边数(直接)']}, post edges (second-order): {result['帖子边数(二阶)']}")

    # Summary statistics
    print("\n" + "=" * 60)
    print("Done!")
    print("\nSummary:")
    print(f"  Files processed: {len(results)}")
    total_post = sum(r['帖子节点数'] for r in results)
    total_persona = sum(r['人设节点数'] for r in results)
    total_expanded = sum(r['扩展节点数'] for r in results)
    total_match = sum(r['匹配边数'] for r in results)
    total_persona_edges = sum(r['人设边数'] for r in results)
    total_expanded_edges = sum(r['扩展边数'] for r in results)
    total_post_edges_direct = sum(r['帖子边数(直接)'] for r in results)
    total_post_edges_2hop = sum(r['帖子边数(二阶)'] for r in results)
    print(f"  Total post nodes: {total_post}")
    print(f"  Total persona nodes: {total_persona}")
    print(f"  Total expanded nodes: {total_expanded}")
    print(f"  Total match edges: {total_match}")
    print(f"  Total persona edges: {total_persona_edges}")
    print(f"  Total expanded edges: {total_expanded_edges}")
    print(f"  Total post edges (direct): {total_post_edges_direct}")
    print(f"  Total post edges (second-order): {total_post_edges_2hop}")
    print(f"\nOutput dir: {output_dir}")


if __name__ == "__main__":
    main()