swarm repositories / source
aboutsummaryrefslogtreecommitdiff
path: root/check.py
diff options
context:
space:
mode:
authormain <main@swarm.moe>2026-03-20 23:19:33 -0400
committermain <main@swarm.moe>2026-03-20 23:19:33 -0400
commiteb0f0f73b7da9d76ff6833757fd265725d3e4b14 (patch)
tree38d64a437cac0518caf2cca5aa4bff5984e64515 /check.py
parentae809af85f6687ae21d7e2f7140aa88354c446cc (diff)
downloadfidget_spinner-eb0f0f73b7da9d76ff6833757fd265725d3e4b14.zip
Polish metric slices and MCP time projections
Diffstat (limited to 'check.py')
-rw-r--r--check.py131
1 file changed, 117 insertions, 14 deletions
diff --git a/check.py b/check.py
index 23b558e..50e8ceb 100644
--- a/check.py
+++ b/check.py
@@ -2,34 +2,88 @@
from __future__ import annotations
import argparse
+import os
import subprocess
import tomllib
+from dataclasses import dataclass
from pathlib import Path
+from pathlib import PurePosixPath
ROOT = Path(__file__).resolve().parent
WORKSPACE_MANIFEST = ROOT / "Cargo.toml"
+DEFAULT_MAX_SOURCE_FILE_LINES = 2500
+DEFAULT_SOURCE_FILE_INCLUDE = ("*.rs", "**/*.rs")
+IGNORED_SOURCE_DIRS = frozenset(
+ {".direnv", ".git", ".hg", ".jj", ".svn", "__pycache__", "node_modules", "target", "vendor"}
+)
-def load_commands() -> dict[str, list[str]]:
+@dataclass(frozen=True, slots=True)
+class SourceFilePolicy:
+ max_lines: int
+ include: tuple[str, ...]
+ exclude: tuple[str, ...]
+
+
+def load_workspace_metadata() -> dict[str, object]:
workspace = tomllib.loads(WORKSPACE_MANIFEST.read_text(encoding="utf-8"))
- metadata = workspace["workspace"]["metadata"]["rust-starter"]
+ return workspace["workspace"]["metadata"]["rust-starter"]
+
+
+def load_commands(metadata: dict[str, object]) -> dict[str, list[str]]:
commands: dict[str, list[str]] = {}
- for key in (
- "format_command",
- "clippy_command",
- "test_command",
- "doc_command",
- "fix_command",
- ):
+ for key in ("format_command", "clippy_command", "test_command", "doc_command", "fix_command"):
value = metadata.get(key)
- if isinstance(value, list) and value and all(
- isinstance(part, str) for part in value
- ):
+ if isinstance(value, list) and value and all(isinstance(part, str) for part in value):
commands[key] = value
return commands
+def load_patterns(
+ value: object,
+ *,
+ default: tuple[str, ...],
+ key_path: str,
+ allow_empty: bool,
+) -> tuple[str, ...]:
+ if value is None:
+ return default
+ if not isinstance(value, list) or not all(isinstance(pattern, str) and pattern for pattern in value):
+ raise SystemExit(f"[check] invalid {key_path}: expected a string list")
+ if not allow_empty and not value:
+ raise SystemExit(f"[check] invalid {key_path}: expected at least one pattern")
+ return tuple(value)
+
+
+def load_source_file_policy(metadata: dict[str, object]) -> SourceFilePolicy:
+ raw_policy = metadata.get("source_files")
+ if raw_policy is None:
+ return SourceFilePolicy(DEFAULT_MAX_SOURCE_FILE_LINES, DEFAULT_SOURCE_FILE_INCLUDE, ())
+ if not isinstance(raw_policy, dict):
+ raise SystemExit("[check] invalid workspace.metadata.rust-starter.source_files: expected a table")
+
+ max_lines = raw_policy.get("max_lines", DEFAULT_MAX_SOURCE_FILE_LINES)
+ if not isinstance(max_lines, int) or max_lines <= 0:
+ raise SystemExit(
+ "[check] invalid workspace.metadata.rust-starter.source_files.max_lines: expected a positive integer"
+ )
+
+ include = load_patterns(
+ raw_policy.get("include"),
+ default=DEFAULT_SOURCE_FILE_INCLUDE,
+ key_path="workspace.metadata.rust-starter.source_files.include",
+ allow_empty=False,
+ )
+ exclude = load_patterns(
+ raw_policy.get("exclude"),
+ default=(),
+ key_path="workspace.metadata.rust-starter.source_files.exclude",
+ allow_empty=True,
+ )
+ return SourceFilePolicy(max_lines, include, exclude)
+
+
def run(name: str, argv: list[str]) -> None:
print(f"[check] {name}: {' '.join(argv)}", flush=True)
proc = subprocess.run(argv, cwd=ROOT, check=False)
@@ -49,14 +103,64 @@ def parse_args() -> argparse.Namespace:
return parser.parse_args()
+def matches_pattern(path: PurePosixPath, pattern: str) -> bool:
+ if path.match(pattern):
+ return True
+ prefix = "**/"
+ return pattern.startswith(prefix) and path.match(pattern.removeprefix(prefix))
+
+
+def iter_source_files(policy: SourceFilePolicy) -> list[Path]:
+ paths: list[Path] = []
+ for current_root, dirnames, filenames in os.walk(ROOT):
+ dirnames[:] = sorted(name for name in dirnames if name not in IGNORED_SOURCE_DIRS)
+ current = Path(current_root)
+ for filename in filenames:
+ path = current / filename
+ relative_path = PurePosixPath(path.relative_to(ROOT).as_posix())
+ if not any(matches_pattern(relative_path, pattern) for pattern in policy.include):
+ continue
+ if any(matches_pattern(relative_path, pattern) for pattern in policy.exclude):
+ continue
+ paths.append(path)
+ return sorted(paths)
+
+
+def line_count(path: Path) -> int:
+ return len(path.read_text(encoding="utf-8").splitlines())
+
+
+def enforce_source_file_policy(policy: SourceFilePolicy) -> None:
+ paths = iter_source_files(policy)
+ print(f"[check] source-files: max {policy.max_lines} lines", flush=True)
+ violations: list[tuple[str, int]] = []
+ for path in paths:
+ lines = line_count(path)
+ if lines > policy.max_lines:
+ violations.append((path.relative_to(ROOT).as_posix(), lines))
+ if not violations:
+ return
+
+ print(
+ f"[check] source-files: {len(violations)} file(s) exceed the configured limit",
+ flush=True,
+ )
+ for relative_path, lines in violations:
+ print(f"[check] source-files: {relative_path}: {lines} lines", flush=True)
+ raise SystemExit(1)
+
+
def main() -> None:
- commands = load_commands()
+ metadata = load_workspace_metadata()
+ commands = load_commands(metadata)
+ source_file_policy = load_source_file_policy(metadata)
args = parse_args()
if args.mode == "fix":
run("fix", commands["fix_command"])
return
+ enforce_source_file_policy(source_file_policy)
run("fmt", commands["format_command"])
run("clippy", commands["clippy_command"])
run("test", commands["test_command"])
@@ -70,4 +174,3 @@ if __name__ == "__main__":
main()
except KeyboardInterrupt:
raise SystemExit(130)
-