diff --git a/.github/workflows/coverage-nightly.yaml b/.github/workflows/coverage-nightly.yaml
new file mode 100644
index 000000000000..dfa896867bbf
--- /dev/null
+++ b/.github/workflows/coverage-nightly.yaml
@@ -0,0 +1,179 @@
+name: Coverage (Nightly)
+
+on:
+ schedule:
+ # Run at 2 AM UTC every day
+ - cron: '0 2 * * *'
+ # Allow manual triggers for testing
+ workflow_dispatch:
+ # Allow being called from other workflows
+ workflow_call:
+
+concurrency:
+ group: coverage-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ compile:
+ name: Build with Coverage
+ runs-on: ubuntu-22.04
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Install dependencies
+ run: bash -x .github/scripts/setup.sh
+
+ - name: Build with coverage instrumentation
+ run: |
+ ./configure --enable-debugbuild --enable-coverage CC=clang
+ uv run make -j $(nproc) testpack.tar.bz2
+
+ - name: Upload build artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: cln-coverage-build
+ path: testpack.tar.bz2
+
+ test:
+ name: Test (${{ matrix.name }})
+ runs-on: ubuntu-22.04
+ needs: compile
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - name: sqlite
+ db: sqlite3
+ pytest_par: 10
+ - name: postgres
+ db: postgres
+ pytest_par: 10
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Install dependencies
+ run: bash -x .github/scripts/setup.sh
+
+ - name: Install Bitcoin Core
+ run: bash -x .github/scripts/install-bitcoind.sh
+
+ - name: Download build artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: cln-coverage-build
+
+ - name: Unpack build
+ run: tar -xaf testpack.tar.bz2
+
+ - name: Run tests with coverage
+ env:
+ CLN_COVERAGE_DIR: ${{ github.workspace }}/coverage-raw
+ TEST_DB_PROVIDER: ${{ matrix.db }}
+ PYTEST_PAR: ${{ matrix.pytest_par }}
+ SLOW_MACHINE: 1
+ TIMEOUT: 900
+ run: |
+ mkdir -p "$CLN_COVERAGE_DIR"
+ uv run eatmydata pytest tests/ -n ${PYTEST_PAR} -vvv
+
+ - name: Upload coverage data
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: coverage-raw-${{ matrix.name }}
+ path: coverage-raw/*.profraw
+ if-no-files-found: error
+
+ report:
+ name: Generate Coverage Report
+ runs-on: ubuntu-22.04
+ needs: test
+ if: always()
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Install LLVM tools
+ run: |
+ wget https://apt.llvm.org/llvm.sh
+ chmod +x llvm.sh
+ sudo ./llvm.sh 18
+ sudo ln -sf /usr/bin/llvm-profdata-18 /usr/bin/llvm-profdata
+ sudo ln -sf /usr/bin/llvm-cov-18 /usr/bin/llvm-cov
+
+ - name: Download build artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: cln-coverage-build
+
+ - name: Unpack build
+ run: tar -xaf testpack.tar.bz2
+
+ - name: Download all coverage artifacts
+ uses: actions/download-artifact@v4
+ with:
+ pattern: coverage-raw-*
+ path: coverage-artifacts
+
+ - name: Merge coverage data
+ run: |
+ mkdir -p coverage-raw coverage
+ find coverage-artifacts -name "*.profraw" -exec cp {} coverage-raw/ \;
+ PROFRAW_COUNT=$(ls -1 coverage-raw/*.profraw 2>/dev/null | wc -l)
+ echo "Found $PROFRAW_COUNT profile files"
+ if [ "$PROFRAW_COUNT" -eq 0 ]; then
+ echo "ERROR: No coverage data found"
+ exit 1
+ fi
+ chmod +x contrib/coverage/collect-coverage.sh
+ CLN_COVERAGE_DIR=coverage-raw ./contrib/coverage/collect-coverage.sh
+
+ - name: Generate HTML report
+ run: |
+ chmod +x contrib/coverage/generate-coverage-report.sh
+ ./contrib/coverage/generate-coverage-report.sh
+
+ - name: Upload to Codecov
+ uses: codecov/codecov-action@v4
+ with:
+ # NOTE(review): Codecov cannot ingest raw llvm .profdata directly; export
+ # to lcov/text first (e.g. `llvm-cov export -format=lcov`) or confirm the
+ # action accepts this format before relying on the upload.
+ files: coverage/merged.profdata
+ flags: integration-tests
+ name: cln-nightly-coverage
+ token: ${{ secrets.CODECOV_TOKEN }}
+ fail_ci_if_error: false
+
+ - name: Upload HTML report
+ uses: actions/upload-artifact@v4
+ with:
+ name: coverage-html-report
+ path: coverage/html
+ retention-days: 90
+
+ - name: Add summary to job
+ run: |
+ echo "## Coverage Summary" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ cat coverage/summary.txt >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "📊 Download detailed HTML report from workflow artifacts" >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/docs-nightly.yaml b/.github/workflows/docs-nightly.yaml
new file mode 100644
index 000000000000..ed2f3d7536a9
--- /dev/null
+++ b/.github/workflows/docs-nightly.yaml
@@ -0,0 +1,100 @@
+name: Documentation (Nightly)
+
+on:
+ schedule:
+ # Run at 4 AM UTC every day
+ - cron: '0 4 * * *'
+ # Allow manual triggers for testing
+ workflow_dispatch:
+ # Allow being called from other workflows
+ workflow_call:
+
+concurrency:
+ group: docs-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ generate-docs:
+ name: Generate Project Documentation
+ runs-on: ubuntu-22.04
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Prepare documentation directory
+ run: |
+ mkdir -p docs-output
+ cp -r doc/* docs-output/
+
+ # Create a simple index.html for the documentation
+ cat > docs-output/index.html <<'EOF'
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <meta charset="utf-8">
+ <title>Core Lightning Documentation</title>
+ </head>
+ <body>
+ <h1>Core Lightning Documentation</h1>
+ <p>Welcome to the Core Lightning documentation site.</p>
+ <h2>Available Documentation</h2>
+ <p>This site contains the complete documentation for Core Lightning.</p>
+ </body>
+ </html>
+ EOF
+
+ - name: Upload documentation artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: project-docs
+ path: docs-output
+ retention-days: 90
+
+ - name: Add summary to job
+ run: |
+ echo "## Project Documentation Generated" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "📖 Documentation files have been collected and prepared." >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "Download the artifact to view the documentation." >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/publish-site.yaml b/.github/workflows/publish-site.yaml
new file mode 100644
index 000000000000..376bb6f5b02a
--- /dev/null
+++ b/.github/workflows/publish-site.yaml
@@ -0,0 +1,314 @@
+name: Publish Documentation Site
+
+on:
+ schedule:
+ # Run at 5 AM UTC every day, after other workflows
+ - cron: '0 5 * * *'
+ # Allow manual triggers for testing
+ workflow_dispatch:
+
+# Sets permissions for GitHub Pages deployment
+permissions:
+ contents: read
+ pages: write
+ id-token: write
+
+# Allow only one concurrent deployment
+concurrency:
+ group: pages
+ cancel-in-progress: false
+
+jobs:
+ # Generate coverage reports
+ coverage:
+ name: Generate Coverage Reports
+ uses: ./.github/workflows/coverage-nightly.yaml
+
+ # Generate Python API documentation
+ python-docs:
+ name: Generate Python API Documentation
+ uses: ./.github/workflows/python-docs-nightly.yaml
+
+ # Generate general documentation
+ docs:
+ name: Generate Project Documentation
+ uses: ./.github/workflows/docs-nightly.yaml
+
+ # Combine all documentation and deploy to GitHub Pages
+ deploy:
+ name: Deploy to GitHub Pages
+ runs-on: ubuntu-22.04
+ needs: [coverage, python-docs, docs]
+ if: always() # Run even if some jobs fail
+
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Download coverage artifact
+ uses: actions/download-artifact@v4
+ continue-on-error: true
+ with:
+ name: coverage-html-report
+ path: site-staging/coverage
+
+ - name: Download Python docs artifact
+ uses: actions/download-artifact@v4
+ continue-on-error: true
+ with:
+ name: python-api-docs
+ path: site-staging/python
+
+ - name: Download project docs artifact
+ uses: actions/download-artifact@v4
+ continue-on-error: true
+ with:
+ name: project-docs
+ path: site-staging/docs
+
+ - name: Create site index
+ run: |
+ cat > site-staging/index.html <<'EOF'
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <meta charset="utf-8">
+ <title>Core Lightning Documentation Hub</title>
+ </head>
+ <body>
+ <h1>Core Lightning Documentation Hub</h1>
+ <p>Welcome to the Core Lightning documentation portal. Choose a category below to explore.</p>
+ <ul>
+ <li><a href="docs/">Project Documentation</a></li>
+ <li><a href="python/">Python API Reference</a></li>
+ <li><a href="coverage/">Code Coverage Reports</a></li>
+ </ul>
+ <!-- TIMESTAMP is replaced by the sed step below; keep it literal here -->
+ <p>Last updated: TIMESTAMP</p>
+ </body>
+ </html>
+ EOF
+
+ # Update timestamp
+ TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M UTC")
+ sed -i "s/TIMESTAMP/$TIMESTAMP/" site-staging/index.html
+
+ - name: Add .nojekyll to prevent Jekyll processing
+ run: |
+ touch site-staging/.nojekyll
+
+ - name: Create 404 page
+ run: |
+ cat > site-staging/404.html <<'EOF'
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <meta charset="utf-8">
+ <title>404 - Page Not Found</title>
+ </head>
+ <body>
+ <h1>404 - Page Not Found</h1>
+ <!-- NOTE(review): absolute "/" is wrong if the site is served under a
+ project-pages subpath - confirm the deployed base URL -->
+ <p><a href="/">Return to the documentation home page</a></p>
+ </body>
+ </html>
+ EOF
+
+ - name: Setup Pages
+ uses: actions/configure-pages@v5
+
+ - name: Upload artifact
+ uses: actions/upload-pages-artifact@v3
+ with:
+ path: site-staging
+
+ - name: Deploy to GitHub Pages
+ id: deployment
+ uses: actions/deploy-pages@v4
+
+ - name: Add summary
+ run: |
+ echo "## 🚀 Documentation Site Deployed" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "The complete documentation site has been deployed to GitHub Pages." >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### Included Sections:" >> $GITHUB_STEP_SUMMARY
+ echo "- 📖 Project Documentation" >> $GITHUB_STEP_SUMMARY
+ echo "- 🐍 Python API Reference" >> $GITHUB_STEP_SUMMARY
+ echo "- 📊 Code Coverage Reports" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "🔗 **Site URL:** ${{ steps.deployment.outputs.page_url }}" >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/python-docs-nightly.yaml b/.github/workflows/python-docs-nightly.yaml
new file mode 100644
index 000000000000..aecff245bdfc
--- /dev/null
+++ b/.github/workflows/python-docs-nightly.yaml
@@ -0,0 +1,60 @@
+name: Python API Docs (Nightly)
+
+on:
+ schedule:
+ # Run at 3 AM UTC every day
+ - cron: '0 3 * * *'
+ # Allow manual triggers for testing
+ workflow_dispatch:
+ # Allow being called from other workflows
+ workflow_call:
+
+concurrency:
+ group: python-docs-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ generate-docs:
+ name: Generate Python API Documentation
+ runs-on: ubuntu-22.04
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Install dependencies
+ run: |
+ uv sync --all-extras
+
+ - name: Generate documentation
+ run: |
+ make python-docs
+
+ - name: Upload documentation artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-api-docs
+ path: docs/python
+ retention-days: 90
+
+ - name: Add summary to job
+ run: |
+ echo "## Python API Documentation Generated" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "📚 Documentation has been generated for the following packages:" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "- pyln.client - Client library and plugin library" >> $GITHUB_STEP_SUMMARY
+ echo "- pyln.proto - Lightning Network protocol implementation" >> $GITHUB_STEP_SUMMARY
+ echo "- pyln.grpc - gRPC protocol definitions" >> $GITHUB_STEP_SUMMARY
+ echo "- pyln.testing - Testing utilities" >> $GITHUB_STEP_SUMMARY
+ echo "- pyln.spec.bolt7 - BOLT #7 specification implementation" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "Download the artifact to view the complete API documentation." >> $GITHUB_STEP_SUMMARY
diff --git a/.gitignore b/.gitignore
index 05b25838064b..c54cf77954ac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,11 @@ gen_*.h
wire/gen_*_csv
cli/lightning-cli
coverage
+# Coverage profiling data files
+*.profraw
+*.profdata
+# Generated Python API documentation
+docs/python
ccan/config.h
__pycache__
config.vars
diff --git a/Makefile b/Makefile
index d48b75b447ec..3c7d7831c0d9 100644
--- a/Makefile
+++ b/Makefile
@@ -674,6 +674,30 @@ coverage/coverage.info: check pytest
coverage: coverage/coverage.info
genhtml coverage/coverage.info --output-directory coverage
+# Clang coverage targets (source-based coverage)
+coverage-clang-collect:
+ @./contrib/coverage/collect-coverage.sh "$(CLN_COVERAGE_DIR)" coverage/merged.profdata
+
+coverage-clang-report: coverage/merged.profdata
+ @./contrib/coverage/generate-coverage-report.sh coverage/merged.profdata coverage/html
+
+coverage-clang: coverage-clang-collect coverage-clang-report
+ @echo "Coverage report: coverage/html/index.html"
+
+coverage-clang-clean:
+ rm -rf coverage/ "$(CLN_COVERAGE_DIR)"
+
+.PHONY: coverage-clang-collect coverage-clang-report coverage-clang coverage-clang-clean
+
+# Python API documentation targets
+python-docs:
+ @./contrib/api/generate-python-docs.py
+
+python-docs-clean:
+ rm -rf docs/python
+
+.PHONY: python-docs python-docs-clean
+
# We make libwallycore.la a dependency, so that it gets built normally, without ncc.
# Ncc can't handle the libwally source code (yet).
ncc: ${TARGET_DIR}/libwally-core-build/src/libwallycore.la
diff --git a/Taskfile.yml b/Taskfile.yml
new file mode 100644
index 000000000000..0daf83fb62a7
--- /dev/null
+++ b/Taskfile.yml
@@ -0,0 +1,85 @@
+version: '3'
+
+vars:
+ PYTEST_PAR: 4
+
+tasks:
+ build:
+ cmds:
+ - uv run make cln-grpc/proto/node.proto
+ - uv run make default -j {{ .PYTEST_PAR }}
+ test:
+ dir: '.'
+ deps:
+ - build
+ cmds:
+ - uv run pytest --force-flaky -vvv -n {{ .PYTEST_PAR }} tests {{ .CLI_ARGS }}
+
+ test-liquid:
+ env:
+ TEST_NETWORK: "liquid-regtest"
+ cmds:
+ - sed -i 's/TEST_NETWORK=regtest/TEST_NETWORK=liquid-regtest/g' config.vars
+ - uv run make cln-grpc/proto/node.proto
+ - uv run make default -j {{ .PYTEST_PAR }}
+ - uv run pytest --color=yes --force-flaky -vvv -n {{ .PYTEST_PAR }} tests {{ .CLI_ARGS }}
+
+ clean:
+ cmds:
+ # Use uv like the other top-level tasks (poetry is only used inside the
+ # docker test image, see the in-docker-* tasks)
+ - uv run make distclean
+
+
+ tester-docker-image:
+ cmds:
+ # NOTE(review): this command was garbled in the patch (it ended with
+ # "-t cln-tester - =1.8,<2'"); build context reconstructed as the repo
+ # root - confirm the intended Dockerfile/context before merging.
+ - docker build --build-arg DOCKER_USER=$(whoami) --build-arg UID=$(id -u) --build-arg GID=$(id -g) --network=host -t cln-tester .
+
+ in-docker-test:
+ # Just the counterpart called by `isotest` to actually initialize,
+ # build and test CLN.
+ dir: '/test'
+ deps:
+ - in-docker-init
+ - in-docker-build-deps
+ cmds:
+ # This way of copying allows us to copy the dirty tree, without
+ # triggering any of the potentially configured hooks which might
+ # not be available in the docker image.
+ - (cd /repo && git archive --format tar $(git stash create)) | tar -xvf -
+ # Yes, this is not that smart, but the `Makefile` relies on
+ # `git` being able to tell us about the version.
+ - cp -R /repo/.git /test
+ - git submodule update --init --recursive
+ - python3 -m pip install poetry
+ - poetry run make distclean
+ - poetry install --with=dev
+ - poetry run ./configure --disable-valgrind CC='clang'
+ - poetry run make -j 4
+ - poetry run pytest --color=yes -vvv -n {{ .PYTEST_PAR }} tests {{ .CLI_ARGS }}
+
+ kill:
+ cmds:
+ - killall -v bitcoind || true
+ - killall -v elementsd || true
+ - killall -v valgrind.bin || true
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 000000000000..de1b505f8010
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,31 @@
+coverage:
+ status:
+ project:
+ default:
+ # Coverage can decrease by up to 1% and still pass
+ target: auto
+ threshold: 1%
+ patch:
+ default:
+ # New code should maintain coverage
+ target: auto
+
+comment:
+ # Post coverage comments on PRs (if we add PR coverage later)
+ behavior: default
+ layout: "header, diff, files"
+ require_changes: false
+
+# Ignore files that shouldn't affect coverage metrics
+ignore:
+ - "external/**"
+ - "ccan/**"
+ - "*/test/**"
+ - "tools/**"
+ - "contrib/**"
+ - "doc/**"
+ - "devtools/**"
+
+# Don't fail if coverage data is incomplete
+codecov:
+ require_ci_to_pass: false
diff --git a/contrib/api/generate-python-docs.py b/contrib/api/generate-python-docs.py
new file mode 100755
index 000000000000..bfbf664967c7
--- /dev/null
+++ b/contrib/api/generate-python-docs.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+"""
+Generate Python API documentation for all workspace packages using pdoc3.
+
+This script generates HTML documentation for all Python packages in the
+Core Lightning workspace and creates an index page linking to all of them.
+"""
+
+import os
+import subprocess
+import sys
+from datetime import datetime
+from pathlib import Path
+
+# Define packages to document (module name -> source directory)
+# Only includes packages that are in the workspace and can be imported
+PACKAGES = {
+ "pyln.client": "contrib/pyln-client",
+ "pyln.proto": "contrib/pyln-proto",
+ "pyln.grpc": "contrib/pyln-grpc-proto",
+ "pyln.testing": "contrib/pyln-testing",
+ "pyln.spec.bolt7": "contrib/pyln-spec/bolt7",
+}
+
+# NOTE(review): the HTML below was reconstructed after the original template
+# was stripped by extraction. Link paths assume pdoc3 writes one
+# pyln/<pkg>/ subdirectory per package under the output dir - confirm
+# against the actual pdoc3 output layout. Only {timestamp} is a format
+# placeholder; the template must stay free of other braces because it is
+# rendered with str.format().
+INDEX_HTML_TEMPLATE = """<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<title>Core Lightning Python Packages Documentation</title>
+</head>
+<body>
+<h1>Core Lightning Python Packages Documentation</h1>
+<p>This page provides links to the API documentation for all Python
+packages in the Core Lightning workspace.</p>
+<ul>
+<li><a href="pyln/client/index.html">pyln.client</a> -
+Client library and plugin library for Core Lightning</li>
+<li><a href="pyln/proto/index.html">pyln.proto</a> -
+Lightning Network protocol implementation</li>
+<li><a href="pyln/grpc/index.html">pyln.grpc</a> -
+gRPC protocol definitions for Core Lightning</li>
+<li><a href="pyln/testing/index.html">pyln.testing</a> -
+Testing utilities for Core Lightning</li>
+<li><a href="pyln/spec/bolt7/index.html">pyln.spec.bolt7</a> -
+BOLT #7 specification implementation</li>
+</ul>
+<p><em>Generated on {timestamp}</em></p>
+</body>
+</html>
+"""
+
+
+def generate_docs(output_dir: Path, repo_root: Path):
+ """Generate documentation for all packages."""
+    print("Generating Python documentation for all workspace packages...")
+ print(f"Output directory: {output_dir}")
+
+ # Clean and create output directory
+ if output_dir.exists():
+ import shutil
+ shutil.rmtree(output_dir)
+ output_dir.mkdir(parents=True)
+
+ # Change to repo root for imports to work correctly
+ os.chdir(repo_root)
+
+ # Generate documentation for each package
+ for package, source_dir in PACKAGES.items():
+ print(f"Generating docs for {package} (from {source_dir})...")
+
+ try:
+ # Use pdoc3 to generate HTML documentation
+ subprocess.run(
+ [
+ "uv", "run", "pdoc3",
+ "--html",
+ "--output-dir", str(output_dir),
+ "--force",
+ package
+ ],
+ check=True,
+ cwd=repo_root,
+ )
+ except subprocess.CalledProcessError as e:
+ print(f"Warning: Failed to generate docs for {package}, skipping...")
+ print(f"Error: {e}")
+ continue
+
+ # Create index.html
+ index_path = output_dir / "index.html"
+ timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
+ index_path.write_text(INDEX_HTML_TEMPLATE.format(timestamp=timestamp))
+
+ print("\nDocumentation generated successfully!")
+ print(f"Open {output_dir}/index.html in your browser to view the documentation.")
+
+
+def main():
+ """Main entry point."""
+ # Determine paths
+ script_dir = Path(__file__).parent.resolve()
+ repo_root = script_dir.parent.parent
+
+ # Default output directory
+ output_dir = repo_root / "docs" / "python"
+
+ # Allow override via command line argument
+ if len(sys.argv) > 1:
+ output_dir = Path(sys.argv[1])
+
+ generate_docs(output_dir, repo_root)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/clang-coverage-report.sh b/contrib/clang-coverage-report.sh
deleted file mode 100755
index 246163ce55cb..000000000000
--- a/contrib/clang-coverage-report.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash -eu
-#
-# Generates an HTML coverage report from a raw Clang coverage profile. See
-# https://clang.llvm.org/docs/SourceBasedCodeCoverage.html for more details.
-#
-# Example usage to create full_channel.html from full_channel.profraw for the
-# run-full_channel unit test:
-# ./contrib/clang-coverage-report.sh channeld/test/run-full_channel \
-# full_channel.profraw full_channel.html
-
-if [[ "$#" -ne 3 ]]; then
- echo "Usage: $0 BINARY RAW_PROFILE_FILE TARGET_HTML_FILE"
- exit 1
-fi
-
-readonly BINARY="$1"
-readonly RAW_PROFILE_FILE="$2"
-readonly TARGET_HTML_FILE="$3"
-
-MERGED_PROFILE_FILE=$(mktemp)
-readonly MERGED_PROFILE_FILE
-
-llvm-profdata merge -sparse "${RAW_PROFILE_FILE}" -o "${MERGED_PROFILE_FILE}"
-llvm-cov show "${BINARY}" -instr-profile="${MERGED_PROFILE_FILE}" -format=html \
- > "${TARGET_HTML_FILE}"
-
-rm "${MERGED_PROFILE_FILE}"
diff --git a/contrib/pyln-client/pyln/client/version.py b/contrib/pyln-client/pyln/client/version.py
index 01345c82e77a..6009793947cf 100644
--- a/contrib/pyln-client/pyln/client/version.py
+++ b/contrib/pyln-client/pyln/client/version.py
@@ -75,4 +75,4 @@ def __lt__(self, other: Union[NodeVersion, str]) -> bool:
return False
-__all__ = [NodeVersion]
+__all__ = ["NodeVersion"]
diff --git a/contrib/pyln-testing/pyln/testing/utils.py b/contrib/pyln-testing/pyln/testing/utils.py
index 59ebd5259ffc..8607cd9bdf1d 100644
--- a/contrib/pyln-testing/pyln/testing/utils.py
+++ b/contrib/pyln-testing/pyln/testing/utils.py
@@ -197,6 +197,29 @@ class TailableProc(object):
def __init__(self, outputDir, verbose=True):
self.logs = []
self.env = os.environ.copy()
+
+ # Add coverage support: inject LLVM_PROFILE_FILE if CLN_COVERAGE_DIR is set
+ if os.getenv('CLN_COVERAGE_DIR'):
+ coverage_dir = os.getenv('CLN_COVERAGE_DIR')
+
+ # Organize profraw files by test name for per-test coverage analysis
+ test_name = os.getenv('CLN_TEST_NAME')
+ if test_name:
+ test_coverage_dir = os.path.join(coverage_dir, test_name)
+ os.makedirs(test_coverage_dir, exist_ok=True)
+ profraw_path = test_coverage_dir
+ else:
+ os.makedirs(coverage_dir, exist_ok=True)
+ profraw_path = coverage_dir
+
+ # %p=PID, %m=binary signature prevents collisions across parallel processes
+ # Note: We don't use %c (continuous mode) as it causes "__llvm_profile_counter_bias"
+ # errors with our multi-binary setup. Instead, we validate and filter corrupt files
+ # during collection (see contrib/coverage/collect-coverage.sh)
+ self.env['LLVM_PROFILE_FILE'] = os.path.join(
+ profraw_path, '%p-%m.profraw'
+ )
+
self.proc = None
self.outputDir = outputDir
if not os.path.exists(outputDir):
@@ -1635,6 +1658,10 @@ def __init__(self, request, testname, bitcoind, executor, directory,
else:
self.valgrind = VALGRIND
self.testname = testname
+
+ # Set test name in environment for coverage file organization
+ os.environ['CLN_TEST_NAME'] = testname
+
self.next_id = 1
self.nodes = []
self.reserved_ports = []
diff --git a/doc/COVERAGE.md b/doc/COVERAGE.md
new file mode 100644
index 000000000000..391c168a3e7d
--- /dev/null
+++ b/doc/COVERAGE.md
@@ -0,0 +1,298 @@
+# Code Coverage Guide
+
+This guide explains how to measure code coverage for Core Lightning's test suite.
+
+## Overview
+
+Core Lightning uses Clang's source-based coverage instrumentation to measure which lines of code are executed during tests. This is particularly challenging because:
+
+- CLN is a multi-process application (lightningd + 8 daemon executables)
+- Each test spawns multiple nodes, each running multiple daemon processes
+- Tests run in parallel (10+ workers)
+- Test processes run in temporary directories
+
+Our solution uses `LLVM_PROFILE_FILE` environment variable with unique naming patterns to prevent profile file collisions across parallel processes.
+
+## Local Development Workflow
+
+### Prerequisites
+
+- Clang compiler (clang-15 or later)
+- LLVM tools: `llvm-profdata`, `llvm-cov`
+
+Install on Ubuntu/Debian:
+```bash
+sudo apt-get install clang llvm
+```
+
+### Step 1: Build with Coverage Instrumentation
+
+```bash
+./configure --enable-coverage CC=clang
+make clean # Important: clean previous builds
+make
+```
+
+This compiles all binaries with `-fprofile-instr-generate -fcoverage-mapping` flags.
+
+### Step 2: Run Tests with Coverage Collection
+
+Set the coverage directory and run tests:
+
+```bash
+export CLN_COVERAGE_DIR=/tmp/cln-coverage
+mkdir -p "$CLN_COVERAGE_DIR"
+uv run pytest tests/ -n 10
+```
+
+You can run a subset of tests for faster iteration:
+
+```bash
+uv run pytest tests/test_pay.py -n 10
+```
+
+All test processes will write `.profraw` files to `$CLN_COVERAGE_DIR` with unique names like `12345-67890abcdef.profraw` (PID-signature).
+
+### Step 3: Generate Coverage Reports
+
+Merge all profile files and generate HTML report:
+
+```bash
+make coverage-clang
+```
+
+This runs two scripts:
+1. `contrib/coverage/collect-coverage.sh` - Merges all `.profraw` files into `coverage/merged.profdata`
+2. `contrib/coverage/generate-coverage-report.sh` - Generates HTML report from merged profile
+
+### Step 4: View the Report
+
+Open the HTML report in your browser:
+
+```bash
+xdg-open coverage/html/index.html
+```
+
+Or on macOS:
+
+```bash
+open coverage/html/index.html
+```
+
+The report shows:
+- **Per-file coverage**: Which files have been tested
+- **Line-by-line coverage**: Which lines were executed and how many times
+- **Summary statistics**: Overall coverage percentage
+
+You can also view the text summary:
+
+```bash
+cat coverage/summary.txt
+```
+
+### Step 5: Clean Up
+
+```bash
+make coverage-clang-clean
+```
+
+This removes the `coverage/` directory and `$CLN_COVERAGE_DIR`.
+
+## Complete Example
+
+```bash
+# Build
+./configure --enable-coverage CC=clang
+make
+
+# Test
+export CLN_COVERAGE_DIR=/tmp/cln-coverage
+mkdir -p "$CLN_COVERAGE_DIR"
+uv run pytest tests/test_pay.py tests/test_invoice.py -n 10
+
+# Report
+make coverage-clang
+xdg-open coverage/html/index.html
+
+# Clean
+make coverage-clang-clean
+```
+
+## Advanced Usage
+
+### Running Specific Test Files
+
+For faster development iteration, run only the tests you're working on:
+
+```bash
+uv run pytest tests/test_plugin.py -n 5
+```
+
+### Per-Test Coverage
+
+Coverage data is automatically organized by test name, allowing you to see which code each test exercises:
+
+```bash
+export CLN_COVERAGE_DIR=/tmp/cln-coverage
+mkdir -p "$CLN_COVERAGE_DIR"
+uv run pytest tests/test_pay.py tests/test_invoice.py -n 10
+```
+
+This creates a directory structure like:
+```
+/tmp/cln-coverage/
+ ├── test_pay/
+ │ ├── 12345-abc.profraw
+ │ └── 67890-def.profraw
+ └── test_invoice/
+ ├── 11111-ghi.profraw
+ └── 22222-jkl.profraw
+```
+
+Generate per-test coverage reports:
+```bash
+# Generate text summaries
+./contrib/coverage/per-test-coverage.sh
+
+# Generate HTML reports (optional)
+./contrib/coverage/per-test-coverage-html.sh
+```
+
+This creates:
+- `coverage/per-test/.profdata` - Merged profile for each test
+- `coverage/per-test/.txt` - Text summary for each test
+- `coverage/per-test-html//index.html` - HTML report for each test (if generated)
+
+### Merging Multiple Test Runs
+
+You can accumulate coverage across multiple test runs by reusing the same `CLN_COVERAGE_DIR`:
+
+```bash
+export CLN_COVERAGE_DIR=/tmp/cln-coverage
+mkdir -p "$CLN_COVERAGE_DIR"
+
+# Run different test subsets
+uv run pytest tests/test_pay.py -n 10
+uv run pytest tests/test_invoice.py -n 10
+uv run pytest tests/test_plugin.py -n 10
+
+# Generate combined report (merges all tests)
+make coverage-clang
+
+# Or generate per-test reports
+./contrib/coverage/per-test-coverage.sh
+```
+
+### Manual Collection and Reporting
+
+If you want more control:
+
+```bash
+# Collect and merge
+./contrib/coverage/collect-coverage.sh /tmp/cln-coverage coverage/merged.profdata
+
+# Generate report
+./contrib/coverage/generate-coverage-report.sh coverage/merged.profdata coverage/html
+```
+
+## Continuous Integration
+
+Coverage is automatically measured nightly on the master branch via the `coverage-nightly.yaml` GitHub Actions workflow. The workflow:
+
+1. Builds CLN with coverage instrumentation
+2. Runs tests with both sqlite and postgres databases
+3. Merges coverage from all test runs
+4. Uploads results to Codecov.io
+5. Saves HTML reports as artifacts (90-day retention)
+
+You can view:
+- **Codecov dashboard**: [codecov.io/gh/ElementsProject/lightning](https://codecov.io/gh/ElementsProject/lightning)
+- **HTML artifacts**: Download from GitHub Actions workflow runs
+
+## Troubleshooting
+
+### No .profraw files created
+
+**Problem**: `make coverage-clang` reports "No .profraw files found"
+
+**Solutions**:
+1. Verify `CLN_COVERAGE_DIR` is set: `echo $CLN_COVERAGE_DIR`
+2. Verify you built with coverage: `./configure --enable-coverage CC=clang && make`
+3. Check that tests actually ran successfully
+
+### llvm-profdata not found
+
+**Problem**: `llvm-profdata: command not found`
+
+**Solution**: Install LLVM tools:
+```bash
+sudo apt-get install llvm
+# Or on macOS:
+brew install llvm
+```
+
+### Binary not found errors in generate-coverage-report.sh
+
+**Problem**: Script complains about missing binaries
+
+**Solution**: Make sure you've run `make` to build all CLN executables
+
+### Coverage shows 0% for some files
+
+**Causes**:
+1. Those files weren't executed by your tests (expected)
+2. The binary wasn't instrumented (check build flags)
+3. The profile data is incomplete
+
+### Corrupt .profraw files
+
+**Problem**: `llvm-profdata merge` fails with "invalid instrumentation profile data (file header is corrupt)"
+
+**Cause**: When test processes crash or timeout, they may leave incomplete/corrupt `.profraw` files.
+
+**Solution**: The `collect-coverage.sh` script automatically validates and filters out bad files:
+- **Empty files** - Processes that crash immediately
+- **Incomplete files** (< 1KB) - Processes killed before writing enough data
+- **Corrupt files** - Files with invalid headers or structure
+
+You'll see output like:
+```
+Found 1250 profile files
+ Skipping empty file: /tmp/cln-coverage/12345-abc.profraw
+ Skipping incomplete file (512 bytes): /tmp/cln-coverage/67890-def.profraw
+ Skipping corrupt file: /tmp/cln-coverage/11111-ghi.profraw
+Valid files: 1247
+Filtered out: 3 files
+ - Empty: 1
+ - Incomplete (< 1KB): 1
+ - Corrupt/invalid: 1
+✓ Merged profile: coverage/merged.profdata
+```
+
+To manually review and clean up corrupt files:
+```bash
+./contrib/coverage/cleanup-corrupt-profraw.sh
+```
+
+This will show you which files are corrupt and offer to delete them.
+
+**Prevention**: Incomplete/corrupt files are unavoidable when tests crash/timeout. The collection script handles this automatically by filtering them out during merge.
+
+## Understanding Coverage Metrics
+
+- **Lines**: Percentage of source code lines executed
+- **Functions**: Percentage of functions that were called
+- **Regions**: Percentage of code regions (blocks) executed
+- **Hit count**: Number of times each line was executed
+
+Aim for:
+- **>80% line coverage** for core functionality
+- **>60% overall** given the complexity of CLN
+
+Remember: 100% coverage doesn't mean bug-free code, but low coverage means untested code paths.
+
+## References
+
+- [LLVM Source-Based Code Coverage](https://clang.llvm.org/docs/SourceBasedCodeCoverage.html)
+- [llvm-profdata documentation](https://llvm.org/docs/CommandGuide/llvm-profdata.html)
+- [llvm-cov documentation](https://llvm.org/docs/CommandGuide/llvm-cov.html)
diff --git a/doc/contribute-to-core-lightning/contributor-workflow.md b/doc/contribute-to-core-lightning/contributor-workflow.md
index d941f75fe777..45e50212cb09 100644
--- a/doc/contribute-to-core-lightning/contributor-workflow.md
+++ b/doc/contribute-to-core-lightning/contributor-workflow.md
@@ -91,7 +91,7 @@ LLVM_PROFILE_FILE="full_channel.profraw" ./channeld/test/run-full_channel
Finally, generate an HTML report from the profile. We have a script to make this easier:
```shell
-./contrib/clang-coverage-report.sh channeld/test/run-full_channel \
+./contrib/coverage/clang-coverage-report.sh channeld/test/run-full_channel \
full_channel.profraw full_channel.html
firefox full_channel.html
```
diff --git a/pyproject.toml b/pyproject.toml
index f86ef78e7f5c..9838713c7c9f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,14 +16,14 @@ dependencies = [
     "pyln-proto",
     "pyln-grpc-proto",
     "pytest-trackflaky",
-    "pyln-testing",
     "pytest-rerunfailures>=16.0.1",
+    "pyln-testing",
 ]
package-mode = false
[dependency-groups]
dev = [
# Test dependencies and inherited dependencies belong here
- "crc32c>=2.2.post0", # Belongs to lnprototest
+ "crc32c>=2.2.post0", # Belongs to lnprototest
"pytest>=8.0.0",
"pytest-xdist>=3.6.0",
"pytest-test-groups>=1.2.0",
@@ -34,6 +35,7 @@ dev = [
"flask-socketio>=5",
"tqdm",
"pytest-benchmark",
+ "pdoc3>=0.11.6",
]
[project.optional-dependencies]
diff --git a/tests/conftest.py b/tests/conftest.py
deleted file mode 100644
index 029050742e06..000000000000
--- a/tests/conftest.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import pytest
-
-from pyln.testing.utils import EXPERIMENTAL_DUAL_FUND
-
-
-# This function is based upon the example of how to
-# "[make] test result information available in fixtures" at:
-# https://pytest.org/latest/example/simple.html#making-test-result-information-available-in-fixtures
-# and:
-# https://github.com/pytest-dev/pytest/issues/288
-@pytest.hookimpl(tryfirst=True, hookwrapper=True)
-def pytest_runtest_makereport(item, call):
- # execute all other hooks to obtain the report object
- outcome = yield
- rep = outcome.get_result()
-
- # set a report attribute for each phase of a call, which can
- # be "setup", "call", "teardown"
-
- setattr(item, "rep_" + rep.when, rep)
-
-
-def pytest_configure(config):
- config.addinivalue_line("markers",
- "slow_test: slow tests aren't run under Valgrind")
- config.addinivalue_line("markers",
- "openchannel: Limit this test to only run 'v1' or 'v2' openchannel protocol")
-
-
-def pytest_runtest_setup(item):
- open_versions = [mark.args[0] for mark in item.iter_markers(name='openchannel')]
- if open_versions:
- if 'v1' not in open_versions and not EXPERIMENTAL_DUAL_FUND:
- pytest.skip('v2-only test, EXPERIMENTAL_DUAL_FUND=0')
- if 'v2' not in open_versions and EXPERIMENTAL_DUAL_FUND:
- pytest.skip('v1-only test, EXPERIMENTAL_DUAL_FUND=1')
- else: # If there's no openchannel marker, skip if EXP_DF
- if EXPERIMENTAL_DUAL_FUND:
- pytest.skip('v1-only test, EXPERIMENTAL_DUAL_FUND=1')
diff --git a/uv.lock b/uv.lock
index 5adc81a7abaa..cb3018fe44bc 100644
--- a/uv.lock
+++ b/uv.lock
@@ -457,8 +457,9 @@ dev = [
{ name = "crc32c" },
{ name = "flake8" },
{ name = "flask-socketio" },
+ { name = "pdoc3" },
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
{ name = "pytest-benchmark" },
{ name = "pytest-custom-exit-code" },
{ name = "pytest-test-groups" },
@@ -491,6 +492,7 @@ dev = [
{ name = "crc32c", specifier = ">=2.2.post0" },
{ name = "flake8", specifier = ">=7.0" },
{ name = "flask-socketio", specifier = ">=5" },
+ { name = "pdoc3", specifier = ">=0.11.6" },
{ name = "pytest", specifier = ">=8.0.0" },
{ name = "pytest-benchmark" },
{ name = "pytest-custom-exit-code", specifier = "==0.3.0" },
@@ -1061,6 +1063,33 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" },
]
+[[package]]
+name = "markdown"
+version = "3.9"
+source = { registry = "https://pypi.org/simple" }
+resolution-markers = [
+ "python_full_version < '3.10'",
+]
+dependencies = [
+ { name = "importlib-metadata", marker = "python_full_version < '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8d/37/02347f6d6d8279247a5837082ebc26fc0d5aaeaf75aa013fcbb433c777ab/markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a", size = 364585, upload-time = "2025-09-04T20:25:22.885Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/70/ae/44c4a6a4cbb496d93c6257954260fe3a6e91b7bed2240e5dad2a717f5111/markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280", size = 107441, upload-time = "2025-09-04T20:25:21.784Z" },
+]
+
+[[package]]
+name = "markdown"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+resolution-markers = [
+ "python_full_version >= '3.10'",
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" },
+]
+
[[package]]
name = "markupsafe"
version = "3.0.3"
@@ -1197,6 +1226,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
+[[package]]
+name = "pdoc3"
+version = "0.11.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mako" },
+ { name = "markdown", version = "3.9", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
+ { name = "markdown", version = "3.10", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ed/f0/07d8b771b99c16a06741cd7b2639494a15357df819ecf899c33b87db6257/pdoc3-0.11.6.tar.gz", hash = "sha256:1ea5e84b87a754d191fb64bf5e517ca6c50d0d84a614c1efecf6b46d290ae387", size = 177107, upload-time = "2025-03-20T22:53:53.099Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/df/98/629f269c2bd91bdcac147aad5cf51ceb645c0196e23a41ee3c051125190f/pdoc3-0.11.6-py3-none-any.whl", hash = "sha256:8b72723767bd48d899812d2aec8375fc1c3476e179455db0b4575e6dccb44b93", size = 255188, upload-time = "2025-03-20T22:53:51.671Z" },
+]
+
[[package]]
name = "pluggy"
version = "1.6.0"
@@ -1441,7 +1484,7 @@ dev = [
{ name = "pyln-bolt7" },
{ name = "pyln-proto" },
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
[package.metadata]
@@ -1501,7 +1544,7 @@ dependencies = [
[package.dev-dependencies]
dev = [
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
[package.metadata]
@@ -1529,7 +1572,7 @@ dependencies = [
{ name = "psycopg2-binary" },
{ name = "pyln-client" },
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
{ name = "python-bitcoinlib" },
{ name = "requests" },
]
@@ -1601,7 +1644,7 @@ wheels = [
[[package]]
name = "pytest"
-version = "9.0.1"
+version = "9.0.2"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.10'",
@@ -1615,9 +1658,9 @@ dependencies = [
{ name = "pygments", marker = "python_full_version >= '3.10'" },
{ name = "tomli", marker = "python_full_version == '3.10.*'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]
[[package]]
@@ -1627,7 +1670,7 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "py-cpuinfo" },
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/24/34/9f732b76456d64faffbef6232f1f9dbec7a7c4999ff46282fa418bd1af66/pytest_benchmark-5.2.3.tar.gz", hash = "sha256:deb7317998a23c650fd4ff76e1230066a76cb45dcece0aca5607143c619e7779", size = 341340, upload-time = "2025-11-09T18:48:43.215Z" }
wheels = [
@@ -1640,7 +1683,7 @@ version = "0.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/92/9d/e1eb0af5e96a5c34f59b9aa69dfb680764420fe60f2ec28cfbc5339f99f8/pytest-custom_exit_code-0.3.0.tar.gz", hash = "sha256:51ffff0ee2c1ddcc1242e2ddb2a5fd02482717e33a2326ef330e3aa430244635", size = 3633, upload-time = "2019-08-07T09:45:15.781Z" }
wheels = [
@@ -1672,7 +1715,7 @@ resolution-markers = [
]
dependencies = [
{ name = "packaging", marker = "python_full_version >= '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/de/04/71e9520551fc8fe2cf5c1a1842e4e600265b0815f2016b7c27ec85688682/pytest_rerunfailures-16.1.tar.gz", hash = "sha256:c38b266db8a808953ebd71ac25c381cb1981a78ff9340a14bcb9f1b9bff1899e", size = 30889, upload-time = "2025-10-10T07:06:01.238Z" }
wheels = [
@@ -1685,7 +1728,7 @@ version = "1.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9f/5a/c7874fe15e03d86a1109a3274b57a2473edb8a1dda4a4d27f25d848b6ff5/pytest_test_groups-1.2.1.tar.gz", hash = "sha256:67576b295522fc144b3a42fa1801f50ae962389e984b48bab4336686d09032f1", size = 8137, upload-time = "2025-05-08T16:28:19.627Z" }
wheels = [
@@ -1698,7 +1741,7 @@ version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" }
wheels = [
@@ -1711,7 +1754,7 @@ version = "0.1.0"
source = { editable = "contrib/pytest-trackflaky" }
dependencies = [
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
[package.metadata]
@@ -1724,7 +1767,7 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "execnet" },
{ name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
- { name = "pytest", version = "9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+ { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" }
wheels = [
@@ -2207,11 +2250,11 @@ wheels = [
[[package]]
name = "urllib3"
-version = "2.5.0"
+version = "2.6.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1c/43/554c2569b62f49350597348fc3ac70f786e3c32e7f19d266e19817812dd3/urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1", size = 432585, upload-time = "2025-12-05T15:08:47.885Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
+ { url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083, upload-time = "2025-12-05T15:08:45.983Z" },
]
[[package]]