diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 5d8a312..81b9a41 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -1,23 +1,42 @@
 name: Build, validate & Release
+# Usage:
+# - For PRs: this workflow runs automatically to validate the package builds and installs correctly on multiple Python versions. No artifacts are published for PRs.
+# - For releases: when you push a tag like v1.2.3, this workflow runs the full matrix validation, then builds the release artifacts, and finally publishes to PyPI if all checks pass.
+
 on:
+  # Release pipeline: run only when pushing a version-like tag (e.g. v1.2.3)
   push:
-    tags: [ 'v*.*.*' ]
+    tags:
+      - "v*.*.*"
+
+  # Validation pipeline: run on PRs targeting main/master (no publishing)
   pull_request:
-    branches: [ main, master ]
-    types: [ labeled, opened, edited, synchronize, reopened ]
+    branches: [main, master]
+    types: [opened, edited, synchronize, reopened]
+
+# This workflow only needs to read repo contents
+permissions:
+  contents: read
 
 jobs:
-  test:
-    name: Test / smoke (matrix)
+  test_matrix:
+    # PR + tag validation: ensure the project builds and installs on multiple Pythons
+    name: Test install & smoke (Py ${{ matrix.python-version }})
     runs-on: ubuntu-latest
     strategy:
+      # Run all versions even if one fails (helps spot version-specific issues)
       fail-fast: false
       matrix:
-        python-version: [ "3.10", "3.11", "3.12" ]
+        python-version: ["3.10", "3.11", "3.12"]
+
     steps:
-      - uses: actions/checkout@v6
-      - uses: actions/setup-python@v6
+      # Fetch repository sources so we can build/test
+      - name: Checkout sources
+        uses: actions/checkout@v6
+
+      - name: Set up Python
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
@@ -31,82 +50,100 @@ jobs:
             libxkbcommon-x11-0 \
             libxcb-cursor0
 
-      - name: Install tools
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install build twine wheel "packaging>=24.2"
+      # Install packaging toolchain:
+      # - build: creates wheel + sdist
+      # - twine: validates metadata and can upload (upload only happens in publish job)
+      - name: Install build tools
+        run: python -m pip install -U pip build twine
 
-      - name: Build distributions (sdist + wheel)
+      # Build distributions just to verify packaging config works on this Python
+      - name: Build (for validation only)
         run: python -m build
 
-      - name: Inspect dist
-        run: |
-          ls -lah dist/
-          echo "sdist contents (first ~200 entries):"
-          tar -tf dist/*.tar.gz | sed -n '1,200p'
-
-      - name: Twine metadata & README check
+      # Validate dist metadata (README rendering, required fields, etc.)
+      - name: Twine check
         run: python -m twine check dist/*
 
+      # Smoke test: install the built wheel and verify the package imports
       - name: Install from wheel & smoke test
         run: |
-          python -m pip install dist/*.whl
-          python - <<'PY'
-          import importlib
-          pkg_name = "dlclivegui"
-          m = importlib.import_module(pkg_name)
-          print("Imported:", m.__name__, "version:", getattr(m, "__version__", "n/a"))
-          PY
-
-          if ! command -v dlclivegui >/dev/null 2>&1; then
-            echo "CLI entry point 'dlclivegui' not found in PATH; skipping CLI smoke test."
-          else
-            if command -v dlclivegui >/dev/null 2>&1; then
-              echo "Running 'dlclivegui --help' smoke test..."
-              if ! dlclivegui --help >/dev/null 2>&1; then
-                echo "::error::'dlclivegui --help' failed; this indicates a problem with the installed CLI package."
-                exit 1
-              fi
-
-  build:
-    name: Build release artifacts (single)
+          WHEEL=$(ls -1 dist/*.whl | head -n 1)
+          echo "Using wheel: $WHEEL"
+          python -m pip install \
+            --extra-index-url https://download.pytorch.org/whl/cpu \
+            "deeplabcut-live-gui[pytorch] @ file://$(pwd)/${WHEEL}"
+          python -c "import dlclivegui; print('Imported dlclivegui OK')"
+          QT_QPA_PLATFORM=offscreen dlclivegui --help
+
+  build_release:
+    # Tag-only build: produce the "official" release artifacts once matrix passed
+    name: Build release artifacts
     runs-on: ubuntu-latest
-    needs: test
-    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
+    needs: test_matrix
+    # Safety gate: only run for version tags, never for PRs/branches
+    if: startsWith(github.ref, 'refs/tags/v')
+
     steps:
-      - uses: actions/checkout@v6
-      - uses: actions/setup-python@v6
+      # Fetch sources for the tagged revision
+      - name: Checkout sources
+        uses: actions/checkout@v6
+
+      # Use a single, modern Python for the canonical release build
+      - name: Set up Python (release build)
+        uses: actions/setup-python@v6
         with:
           python-version: "3.12"
 
-      - name: Build distributions (sdist + wheel)
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install build twine wheel "packaging>=24.2"
-          python -m build
-          python -m twine check dist/*
+      # Install build + validation tooling
+      - name: Install build tools
+        run: python -m pip install -U pip build twine
+
+      # Produce both sdist and wheel in dist/
+      - name: Build distributions
+        run: python -m build
+
+      # Re-check metadata on the final artifacts we intend to publish
+      - name: Twine check
+        run: python -m twine check dist/*
 
+      # Store dist/ outputs so the publish job uploads exactly what we built here
       - name: Upload dist artifacts
         uses: actions/upload-artifact@v4
         with:
           name: dist
           path: dist/*
           if-no-files-found: error
 
   publish:
-    name: Publish to PyPI (OIDC)
+    # Tag-only publish: download built artifacts and upload them to PyPI
+    name: Publish to PyPI (API token)
     runs-on: ubuntu-latest
-    needs: build
-    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
-    environment: pypi
-    permissions:
-      id-token: write
+    needs: build_release
+    # Safety gate: only run for version tags
+    if: startsWith(github.ref, 'refs/tags/v')
+
     steps:
+      # Retrieve the exact distributions produced in build_release
       - name: Download dist artifacts
         uses: actions/download-artifact@v4
         with:
           name: dist
           path: dist
 
+      # Set up Python (only needed to run Twine)
+      - name: Set up Python (publish)
+        uses: actions/setup-python@v6
+        with:
+          python-version: "3.x"
+
+      # Install twine for uploading
+      - name: Install Twine
+        run: python -m pip install -U twine
+
+      # Upload to PyPI using an API token stored in repo secrets.
+      # --skip-existing avoids failing if you re-run a workflow for the same version.
       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@release/v1
+        env:
+          TWINE_USERNAME: __token__
+          TWINE_PASSWORD: ${{ secrets.TWINE_API_KEY }}
+        run: python -m twine upload --non-interactive --verbose --skip-existing dist/*
diff --git a/.github/workflows/testing-ci.yml b/.github/workflows/testing-ci.yml
index 07c8af0..5a06fbb 100644
--- a/.github/workflows/testing-ci.yml
+++ b/.github/workflows/testing-ci.yml
@@ -55,23 +55,27 @@ jobs:
 
       - name: Run tests (exclude hardware) with coverage via tox
+        # Explicit bash shell so the step runs with `-eo pipefail`; otherwise
+        # the default `bash -e` would let `tee` mask a failing `tox` exit code.
+        shell: bash
         run: |
-          tox -q
+          tox -q | tee tox-output.log
+
       - name: Append Coverage Summary to Job
-        if: always()
+        if: matrix.os == 'ubuntu-latest' && matrix.python == '3.12'
         shell: bash
         run: |
-          python -m pip install -U coverage
           echo "## Coverage Summary" >> "$GITHUB_STEP_SUMMARY"
-          echo "" >> "$GITHUB_STEP_SUMMARY"
           echo '```text' >> "$GITHUB_STEP_SUMMARY"
-          python -m coverage report -m >> "$GITHUB_STEP_SUMMARY" || true
+          awk '
+            /^Name[[:space:]]+Stmts[[:space:]]+Miss/ {p=1}
+            p==1 {print}
+            /^Coverage XML written to file/ {exit}
+          ' tox-output.log >> "$GITHUB_STEP_SUMMARY" || true
           echo '```' >> "$GITHUB_STEP_SUMMARY"
 
-      - name: Upload coverage to Codecov # NOTE : may need to disable this if token is not setup
-        if: github.event_name == 'pull_request' && (github.base_ref == 'main' || github.base_ref == 'master')
+      - name: Upload coverage to Codecov
+        if: matrix.os == 'ubuntu-latest' && matrix.python == '3.12' && github.event_name == 'pull_request' && (github.base_ref == 'main' || github.base_ref == 'master')
         uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
-          files: ./coverage.xml
-          fail_ci_if_error: false
+          files: ./.coverage.py312.xml
+          fail_ci_if_error: true
 
diff --git a/pyproject.toml b/pyproject.toml
index bb84eac..02e8a96 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -139,6 +139,7 @@ omit = [
 branch = true
 omit = [
     "*/__pycache__/*",
-    "*/site-packages/*",
+    # "*/site-packages/*",
+    # breaks CI coverage reporting as it excludes our own installed package
 ]
 source = [ "dlclivegui", "tests" ]
diff --git a/tox.ini b/tox.ini
index 3344818..d6f8d86 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,10 +11,6 @@ description = Unit + smoke tests (exclude hardware) with coverage
 package = wheel
 extras = test
 
-# Keep behavior aligned with your GitHub Actions job:
-commands =
-    pytest -m "not hardware" --maxfail=1 --disable-warnings \
-        --cov=dlclivegui --cov-report=xml --cov-report=term-missing {posargs}
 
 # Helpful defaults for headless CI runs (Qt/OpenCV):
 setenv =
@@ -23,6 +19,15 @@ setenv =
     QT_OPENGL = software
     # Can help avoid some Windows/OpenCV capture backend flakiness when tests touch video I/O:
     OPENCV_VIDEOIO_PRIORITY_MSMF = 0
+    COVERAGE_FILE = {toxinidir}/.coverage.{envname}
+
+# Keep behavior aligned with your GitHub Actions job:
+commands =
+    pytest -m "not hardware" --maxfail=1 --disable-warnings \
+        --cov={envsitepackagesdir}/dlclivegui \
+        --cov-report=xml:{toxinidir}/.coverage.{envname}.xml \
+        --cov-report=term-missing \
+        {posargs}
 
 # Let CI variables pass through (useful for debugging and some GUI/headless setups):
 passenv =