|
|
@@ -136,7 +136,9 @@ class WebhookService:
|
|
|
|
|
|
for output in outputs:
|
|
|
raw_path_pattern = output.get("path", "")
|
|
|
- match_pattern = output.get("pattern", "*")
|
|
|
+ # Support both string and list for pattern and exclude
|
|
|
+ patterns = output.get("pattern", "*")
|
|
|
+ excludes = output.get("exclude")
|
|
|
|
|
|
path_pattern = normalize_path(raw_path_pattern)
|
|
|
is_dir = is_directory_pattern(raw_path_pattern)
|
|
|
@@ -144,15 +146,21 @@ class WebhookService:
|
|
|
if is_dir:
|
|
|
# Directory pattern: fetch only this directory's files
|
|
|
dir_path = path_pattern.rstrip("/")
|
|
|
- logger.info(f"Fetching directory: {dir_path} with pattern: {match_pattern}")
|
|
|
+ logger.info(f"Fetching directory: {dir_path} with patterns: {patterns}, excludes: {excludes}")
|
|
|
|
|
|
files = await self.gogs.get_directory_tree(owner, repo_name, commit_id, dir_path)
|
|
|
|
|
|
for file_info in files:
|
|
|
file_path = file_info.get("path")
|
|
|
- # Apply pattern matching
|
|
|
- rel_name = file_path[len(dir_path) + 1:] if file_path.startswith(dir_path + "/") else file_path
|
|
|
- if fnmatch.fnmatch(rel_name, match_pattern):
|
|
|
+ # Calculate name relative to the watched directory
|
|
|
+ # e.g. dir_path="a", file_path="a/b.txt" -> rel_name="b.txt"
|
|
|
+ rel_name = (
|
|
|
+ file_path[len(dir_path) + 1 :]
|
|
|
+ if file_path.startswith(dir_path + "/")
|
|
|
+ else file_path
|
|
|
+ )
|
|
|
+
|
|
|
+ if self._match_patterns(rel_name, patterns, excludes):
|
|
|
try:
|
|
|
changed = await self.storage.process_file_with_sha(
|
|
|
version, file_path, file_info.get("sha"), owner, repo_name
|
|
|
@@ -167,15 +175,45 @@ class WebhookService:
|
|
|
|
|
|
file_info = await self.gogs.get_file_info(owner, repo_name, commit_id, path_pattern)
|
|
|
if file_info:
|
|
|
- try:
|
|
|
- changed = await self.storage.process_file_with_sha(
|
|
|
- version, path_pattern, file_info.get("sha"), owner, repo_name
|
|
|
- )
|
|
|
- if changed:
|
|
|
- has_changes = True
|
|
|
- except Exception as e:
|
|
|
- logger.error(f"Failed to process file {path_pattern}: {e}")
|
|
|
+ # Apply pattern matching to the filename for consistency
|
|
|
+            # (repo paths always use "/", so take the last path segment directly)
|
|
|
+            filename = path_pattern.rsplit("/", 1)[-1]
|
|
|
+ if self._match_patterns(filename, patterns, excludes):
|
|
|
+ try:
|
|
|
+ changed = await self.storage.process_file_with_sha(
|
|
|
+ version, path_pattern, file_info.get("sha"), owner, repo_name
|
|
|
+ )
|
|
|
+ if changed:
|
|
|
+ has_changes = True
|
|
|
+ except Exception as e:
|
|
|
+ logger.error(f"Failed to process file {path_pattern}: {e}")
|
|
|
else:
|
|
|
logger.warning(f"File not found: {path_pattern}")
|
|
|
|
|
|
return has_changes
|
|
|
+
|
|
|
+ def _match_patterns(
|
|
|
+ self,
|
|
|
+ filename: str,
|
|
|
+ include_patterns: str | list[str],
|
|
|
+ exclude_patterns: str | list[str] | None = None,
|
|
|
+ ) -> bool:
|
|
|
+ """Helper to match filename against multiple include and exclude glob patterns."""
|
|
|
+ # Normalize to lists
|
|
|
+ includes = (
|
|
|
+ [include_patterns] if isinstance(include_patterns, str) else include_patterns
|
|
|
+ )
|
|
|
+ excludes = []
|
|
|
+ if exclude_patterns:
|
|
|
+ excludes = (
|
|
|
+ [exclude_patterns] if isinstance(exclude_patterns, str) else exclude_patterns
|
|
|
+ )
|
|
|
+
|
|
|
+ # 1. Check if it matches ANY include pattern (OR logic)
|
|
|
+ is_included = any(fnmatch.fnmatch(filename, p) for p in includes)
|
|
|
+ if not is_included:
|
|
|
+ return False
|
|
|
+
|
|
|
+ # 2. Check if it matches ANY exclude pattern (OR logic: any match means reject)
|
|
|
+ is_excluded = any(fnmatch.fnmatch(filename, p) for p in excludes)
|
|
|
+ return not is_excluded
|