Skip to content

Commit dbe7098

Browse files
sbryngelson and claude committed
Fix review findings: phase labels, unused params, FileNotFoundError guard
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 110e00c commit dbe7098

File tree

3 files changed

+20
-16
lines changed

3 files changed

+20
-16
lines changed

.github/workflows/test.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ jobs:
111111
group: phoenix
112112
labels: gt
113113
permissions:
114-
contents: write
114+
contents: write # Required for Commit Cache to Master on push events
115115
steps:
116116
- name: Clone
117117
uses: actions/checkout@v4

toolchain/mfc/test/coverage.py

Lines changed: 18 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ def _get_gcov_version(gcov_binary: str) -> str:
7171
return "unknown"
7272

7373

74-
def find_gcov_binary(_root_dir: str = "") -> str: # pylint: disable=unused-argument
74+
def find_gcov_binary() -> str:
7575
"""
7676
Find a GNU gcov binary compatible with the system gfortran.
7777
@@ -368,25 +368,25 @@ def _prepare_test(case, root_dir: str) -> dict: # pylint: disable=unused-argume
368368
}
369369

370370

371-
def build_coverage_cache( # pylint: disable=unused-argument,too-many-locals,too-many-statements
372-
root_dir: str, cases: list, extra_args: list = None, n_jobs: int = None,
371+
def build_coverage_cache( # pylint: disable=too-many-locals,too-many-statements
372+
root_dir: str, cases: list, n_jobs: int = None,
373373
) -> None:
374374
"""
375375
Build the file-level coverage cache by running tests in parallel.
376376
377-
Phase 0 — Prepare all tests: generate .inp files and resolve binary paths.
377+
Phase 1 — Prepare all tests: generate .inp files and resolve binary paths.
378378
This happens single-threaded so the parallel phase has zero Python overhead.
379379
380-
Phase 1 — Run all tests concurrently. Each worker invokes Fortran binaries
380+
Phase 2 — Run all tests concurrently. Each worker invokes Fortran binaries
381381
directly (no ``./mfc.sh run``, no shell scripts). Each test's GCOV_PREFIX
382382
points to an isolated directory so .gcda files don't collide.
383383
384-
Phase 2 — For each test, copy its .gcda tree into the real build directory,
384+
Phase 3 — For each test, copy its .gcda tree into the real build directory,
385385
run gcov to collect which .fpp files had coverage, then remove the .gcda files.
386386
387387
Requires a prior ``--gcov`` build: ``./mfc.sh build --gcov -j 8``
388388
"""
389-
gcov_bin = find_gcov_binary(root_dir)
389+
gcov_bin = find_gcov_binary()
390390
gcno_files = find_gcno_files(root_dir)
391391
strip = _compute_gcov_prefix_strip(root_dir)
392392

@@ -402,8 +402,8 @@ def build_coverage_cache( # pylint: disable=unused-argument,too-many-locals,too
402402
cons.print(f"[dim]GCOV_PREFIX_STRIP={strip}[/dim]")
403403
cons.print()
404404

405-
# Phase 0: Prepare all tests (single-threaded, ~30s for 555 tests).
406-
cons.print("[bold]Phase 0/2: Preparing tests...[/bold]")
405+
# Phase 1: Prepare all tests (single-threaded, ~30s for 555 tests).
406+
cons.print("[bold]Phase 1/3: Preparing tests...[/bold]")
407407
test_infos = []
408408
for i, case in enumerate(cases):
409409
test_infos.append(_prepare_test(case, root_dir))
@@ -413,8 +413,8 @@ def build_coverage_cache( # pylint: disable=unused-argument,too-many-locals,too
413413

414414
gcda_dir = tempfile.mkdtemp(prefix="mfc_gcov_")
415415
try:
416-
# Phase 1: Run all tests in parallel via direct binary invocation.
417-
cons.print("[bold]Phase 1/2: Running tests...[/bold]")
416+
# Phase 2: Run all tests in parallel via direct binary invocation.
417+
cons.print("[bold]Phase 2/3: Running tests...[/bold]")
418418
test_results: dict = {}
419419
all_failures: dict = {}
420420
with ThreadPoolExecutor(max_workers=phase1_jobs) as pool:
@@ -453,10 +453,10 @@ def build_coverage_cache( # pylint: disable=unused-argument,too-many-locals,too
453453
cons.print(f"[yellow]Sample test {sample_uuid}: "
454454
f"no build/ dir in {sample_gcda}[/yellow]")
455455

456-
# Phase 2: Collect gcov coverage from each test's isolated .gcda directory.
456+
# Phase 3: Collect gcov coverage from each test's isolated .gcda directory.
457457
# .gcno files are temporarily copied alongside .gcda files, then removed.
458458
cons.print()
459-
cons.print("[bold]Phase 2/2: Collecting coverage...[/bold]")
459+
cons.print("[bold]Phase 3/3: Collecting coverage...[/bold]")
460460
cache: dict = {}
461461
completed = 0
462462
with ThreadPoolExecutor(max_workers=n_jobs) as pool:
@@ -535,7 +535,11 @@ def load_coverage_cache(root_dir: str) -> Optional[dict]:
535535
return None
536536

537537
cases_py = Path(root_dir) / "toolchain/mfc/test/cases.py"
538-
current_hash = hashlib.sha256(cases_py.read_bytes()).hexdigest()
538+
try:
539+
current_hash = hashlib.sha256(cases_py.read_bytes()).hexdigest()
540+
except FileNotFoundError:
541+
cons.print("[yellow]Warning: cases.py not found; cannot verify cache staleness.[/yellow]")
542+
return None
539543
stored_hash = cache.get("_meta", {}).get("cases_hash", "")
540544

541545
if current_hash != stored_hash:

toolchain/mfc/test/test.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -238,7 +238,7 @@ def test():
238238
unique_builds.add(slug)
239239

240240
build_coverage_cache(common.MFC_ROOT_DIR, all_cases,
241-
extra_args=ARG("--"), n_jobs=int(ARG("jobs")))
241+
n_jobs=int(ARG("jobs")))
242242
return
243243

244244
cases, skipped_cases = __filter(cases)

0 commit comments

Comments (0)