diff --git a/.gitignore b/.gitignore index c3c66e4..2924918 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,6 @@ dist/ .env data/ .vscode/ -.DS_Store \ No newline at end of file +.DS_Store +component-detection +tmp-branch-search-cache/ \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..3ef4986 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "component-detection-dependency-submission-action"] + path = component-detection-dependency-submission-action + url = https://github.com/advanced-security/component-detection-dependency-submission-action.git diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..0cda4a1 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,33 @@ +# Changelog + +## [2025-12-09] – 0.2.0 - Branch scanning and dependency submission + +Added: + +- Branch scanning: + - Fetch SBOM diffs for non‑default branches via Dependency Review API. + - Added `--branch-scan`, `--branch-limit`, and `--diff-base` CLI flags. +- Dependency Submission integration: + - Automatically submits dependency snapshots for branches being scanned, if not already present, using Component Detection. + - Language-aware sparse checkout. + - Use a pre-downloaded binary (`--component-detection-bin`) or an auto-downloaded release. + - Allows forcing submission, even if a snapshot already exists. +- Search and matching: + - Refactored search to de-duplicate logic and include branch diffs (added/updated packages only). + - Malware matching enhanced to enumerate packages from diffs; matches annotated with branch. + - CLI and CSV outputs include branch context; CSV adds a `branch` column. +- CLI and UX improvements: + - Argument validation updated: `--sync-sboms` requires `--sbom-cache`. + - Malware-only mode: allow `--sync-malware` without `--sbom-cache` (requires `--malware-cache`). + - JSON/CLI/CSV interaction clarified and documented. + - Added examples for malware-only sync and branch scanning. 
+- Advisory sync robustness: + - GraphQL advisory sync implements adaptive retries with exponential backoff and `Retry-After` support. + +Fixed: + +- Added `--ghes` flag to ensure proper API URL construction for GitHub Enterprise Server instances. + +## [2025-10-06] - 0.1.0 - Initial public release + +- Initial release, with: SBOM sync; malware sync; malware matching; CLI, file based and interactive PURL searching. SARIF, CSV and JSON outputs supported. diff --git a/README.md b/README.md index 4db1493..52e5bea 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ Supports human-readable, JSON, CSV and SARIF output. SARIF alerts can be uploade - Optional progress bar while fetching SBOMs - Option to suppress secondary rate limit warnings, and full quiet mode to suppress informative messages - Adaptive backoff: each secondary rate limit hit increases the SBOM fetch delay by 10% to reduce future throttling +- Optional branch scanning†: fetch SBOM diffs with Dependency Review for non-default branches and submit missing dependency snapshots if needed with Component Detection + Dependency Submission - Offline caching of SBOMs and security advisories with incremental updates - Matching: - Version-aware matching of SBOM packages against malware advisories @@ -27,9 +28,12 @@ Supports human-readable, JSON, CSV and SARIF output. 
SARIF alerts can be uploade - Output: - Human-readable console output - JSON or CSV output (to stdout or file) with both search and malware matches - - Optional SARIF 2.1.0 output per repository for malware matches with optional Code Scanning upload + - Optional SARIF 2.1.0 output per repository for malware matches + - includes Code Scanning upload† - Works with GitHub.com, GitHub Enterprise Server, GitHub Enterprise Managed Users and GitHub Enterprise Cloud with Data Residency (custom base URL) +† GitHub Advanced Security/GitHub Code Security required for this feature + ## Usage ### Quick Start @@ -55,6 +59,76 @@ Using GitHub Enterprise Server: npm run start -- --sync-sboms --enterprise ent --base-url https://github.internal/api/v3 --sbom-cache sboms --token $GHES_TOKEN ``` +### 🔀 Branch Scanning & Dependency Review + +Enable branch SBOM collection and dependency diffs with `--branch-scan`. + +Flags: + +```bash +--branch-scan # Fetch SBOMs for non-default branches +--branch-limit # Max number of non-default branches per repo (default 10) +--diff-base # Override base branch for diffs (default: repository default) +``` + +Example: scan first 5 feature branches and diff them against `main`: + +```bash +npm run start -- --sync-sboms --org my-org \ + --sbom-cache sboms --branch-scan --branch-limit 5 \ + --diff-base main --token $GITHUB_TOKEN +``` + +Search results will include branch matches: package PURLs annotated with `@branch` inside the match list (e.g. `pkg:npm/react@18.3.0@feature-x`). Dependency Review additions / updates are also searched; only added/updated head-side packages are considered. + +If a branch SBOM or diff retrieval fails, the error is recorded but does not stop collection for other branches or repositories. 
+ +#### Handling Missing Dependency Review Snapshots + +If the Dependency Review API returns a 404 for a branch diff (commonly due to a missing dependency snapshot on either the base or head commit), the toolkit can optionally attempt to generate and submit a snapshot using Component Detection and Dependency Submission. This is vendored-in and forked from the public [Component Detection Dependency Submission Action](https://github.com/advanced-security/component-detection-dependency-submission-action). + +Enable automatic submission + retry with: + +```bash +--submit-on-missing-snapshot +``` + +The tool will attempt to download the latest Component Detection release from GitHub Releases into the current directory and run it, unless you provide a local binary with `--component-detection-bin`. + +If submission fails, the original 404 reason is retained and collection proceeds. + +##### Using a Local Component Detection Binary + +Instead of downloading the latest release automatically, you can point the toolkit at a local `component-detection` executable. This is useful if you already manage the binary or need a custom build. + +Pass the path via `--component-detection-bin` and optionally limit languages to reduce sparse checkout size: + +```bash +npm run start -- \ + --sync-sboms --org my-org --sbom-cache sboms \ + --branch-scan --submit-on-missing-snapshot \ + --submit-languages JavaScript,TypeScript \ + --component-detection-bin /usr/local/bin/component-detection +``` + +On macOS, you may find that system protection prevents running a downloaded binary. 
You can [check out the .NET code](https://github.com/microsoft/component-detection/) and run it via a wrapper script such as: + +```bash +#!/bin/bash + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +cd "$SCRIPT_DIR" || exit 1 + +dotnet run --project "./src/Microsoft.ComponentDetection/Microsoft.ComponentDetection.csproj" "$@" +``` + +Notes: + +- Providing `--component-detection-bin` skips any download logic and uses your binary directly. +- Snapshot submission performs a language-aware sparse checkout of common manifest/lock files (e.g., `package.json`, `requirements.txt`, `pom.xml`). +- After submission, the toolkit waits briefly and retries the dependency review diff once. + ### 🔑 Authentication A GitHub token with appropriate scope is required when performing network operations such as `--sync-sboms`, `--sync-malware` and `--upload-sarif`. @@ -123,6 +197,12 @@ Offline match with already-cached malware advisories (no network calls): npm run start -- --sbom-cache sboms --malware-cache malware-cache --match-malware ``` +Malware-only advisory sync (no SBOM cache required): + +```bash +npm run start -- --sync-malware --malware-cache malware-cache --token $GITHUB_TOKEN +``` + Write malware matches (and optionally search results later) to a JSON file using `--output-file`: ```bash @@ -131,6 +211,16 @@ npm run start -- --sbom-cache sboms --malware-cache malware-cache --match-malwar If you also perform a search in the same invocation (add `--purl` or `--purl-file`), the JSON file will contain both `malwareMatches` and `search` top-level keys. +#### Advisory Rate Limit Handling + +Advisory sync uses GitHub GraphQL with adaptive retry/backoff to handle secondary rate limits and transient errors: + +- Retries on `403` secondary rate limit, `429`, and `5xx` responses. +- Honors `Retry-After` when provided; otherwise uses exponential backoff with jitter. +- Respects `--quiet` to suppress retry log messages. 
+ +If retries are exhausted, the sync aborts gracefully and leaves previously cached advisories intact. + #### Ignoring Matches Provide a YAML ignore file via `--ignore-file` to suppress specific matches (before SARIF generation / JSON output). Structure: @@ -295,31 +385,42 @@ Then type one PURL query per line. Entering a blank line or using Ctrl+C on a bl | Arg | Purpose | |------|---------| -| `--sbom-cache ` | Directory holding per-repo SBOM JSON files (required for offline mode; used as write target when syncing) | -| `--sync-sboms` | Perform API calls to (re)collect SBOMs; without it the CLI runs offline loading cached SBOMs. Requires a GitHub token | -| `--enterprise ` / `--org ` | Scope selection (mutually exclusive when syncing) | -| `--purl ` | Add a PURL/range/wildcard query (repeatable) | -| `--purl-file ` | File with one query per line | -| `--json` | Emit search JSON to stdout (unless overridden by `--output-file`) | -| `--cli` | Also emit human-readable output when producing JSON (requires `--output-file`) | -| `--output-file ` | Write search JSON payload to file; required when using both `--json` and `--cli` | -| `--interactive` | Enter interactive search prompt after initial processing | -| `--sync-malware` | Fetch & cache malware advisories (MALWARE classification). Requires a GitHub token | -| `--match-malware` | Match current SBOM set against cached advisories | -| `--malware-cache ` | Advisory cache directory (required with malware operations) | -| `--malware-cutoff ` | Ignore advisories whose publishedAt AND updatedAt are both before this date/time (e.g. `2025-09-29` or full timestamp) | -| `--ignore-file ` | YAML ignore file (advisories / purls / scoped blocks) to filter malware matches before output | -| `--ignore-unbounded-malware` | Ignore matches whose advisory vulnerable version range covers all versions (e.g. 
`*`, `>=0`, `0.0.0`) | -| `--sarif-dir ` | Write SARIF 2.1.0 files per repository (with malware matches) | -| `--upload-sarif` | Upload generated SARIF to Code Scanning (requires --match-malware & --sarif-dir and a GitHub token) | +| `--token ` | GitHub token; required for `--sync-sboms`, `--sync-malware`, and `--upload-sarif` (or use `GITHUB_TOKEN`) | +| `--enterprise ` | Collect across all orgs in an Enterprise (mutually exclusive with `--org`/`--repo` when syncing) | +| `--org ` | Single organization scope (mutually exclusive with `--enterprise`/`--repo` when syncing) | +| `--repo ` | Single repository scope in the form `owner/name` (mutually exclusive with `--enterprise`/`--org` when syncing) | +| `--base-url ` | GitHub Enterprise Server REST base URL (e.g. `https://ghe.example.com/api/v3`) | | `--concurrency ` | Parallel SBOM fetches (default 5) | -| `--sbom-delay ` | Delay between SBOM fetch (dependency-graph/sbom) requests (default 5000) | -| `--light-delay ` | Delay between lightweight metadata calls (listing repos, commit head checks) (default 500) | -| `--base-url ` | GitHub Enterprise Server REST base URL (ends with /api/v3) | -| `--progress` | Show a dynamic progress bar during SBOM collection | -| `--suppress-secondary-rate-limit-logs` | Hide secondary rate limit warning lines (automatically applied with `--progress`) | -| `--quiet` | Suppress all non-error and non-result output (progress bar, JSON and human readable output still show) | -| `--ca-bundle ` | Path to a PEM file containing one or more additional CA certificates (self‑signed / internal PKI) | +| `--sbom-delay ` | Delay between SBOM fetch requests (default 3000) | +| `--light-delay ` | Delay between lightweight metadata requests (default 100) | +| `--sbom-cache ` | Directory to read/write per‑repo SBOM JSON; required for SBOM syncing and offline use | +| `--sync-sboms` | Perform API calls to collect SBOMs; without it the CLI runs offline using `--sbom-cache` | +| `--progress` | Show a 
progress bar during SBOM collection | +| `--suppress-secondary-rate-limit-logs` | Suppress secondary rate limit warning logs (useful with `--progress`) | +| `--quiet` | Suppress non‑error output (progress bar and machine output still emitted) | +| `--ca-bundle ` | PEM bundle with additional CA certs for REST/GraphQL/SARIF upload | +| `--purl ` | Add a PURL / semver range / wildcard query (repeatable) | +| `--purl-file ` | File with one query per line (supports comments) | +| `--json` | Emit search results as JSON (to stdout unless `--output-file` specified) | +| `--cli` | Also emit human‑readable output when producing JSON/CSV; requires `--output-file` to avoid mixed stdout | +| `--csv` | Emit results (search + malware matches) as CSV (to stdout or `--output-file`) | +| `--output-file ` | Write JSON/CSV output to file; required when using `--cli` with `--json` or `--csv` | +| `--interactive` | Enter interactive PURL search prompt after initial processing | +| `--sync-malware` | Fetch & cache malware advisories (MALWARE); requires a token | +| `--match-malware` | Match SBOM packages against cached malware advisories | +| `--malware-cache ` | Directory to store malware advisory cache (required with malware operations) | +| `--malware-cutoff ` | Exclude advisories whose `publishedAt` and `updatedAt` are both before cutoff | +| `--ignore-file ` | YAML ignore file (advisories / purls / scoped blocks) to filter matches before output | +| `--ignore-unbounded-malware` | Suppress advisories with effectively unbounded vulnerable ranges (e.g. 
`*`, `>=0`) | +| `--sarif-dir ` | Write SARIF 2.1.0 files per repository (for malware matches) | +| `--upload-sarif` | Upload generated SARIF to Code Scanning (requires `--match-malware` and `--sarif-dir`) | +| `--branch-scan` | Fetch SBOM diffs for non‑default branches (limited by `--branch-limit`) | +| `--branch-limit ` | Limit number of non‑default branches scanned per repository (default 10) | +| `--diff-base ` | Override base branch for dependency review diffs (defaults to repository default branch) | +| `--submit-on-missing-snapshot` | On diff 404, run Component Detection to submit a snapshot, then retry | +| `--submit-languages ` | Limit snapshot submission to specific languages (comma‑separated) | +| `--component-detection-bin ` | Path to local `component-detection` executable (skip download) | +| `--debug` | Enable debug logging | ## Build & test @@ -364,7 +465,7 @@ npm run start -- --sbom-cache fixtures/sboms --malware-cache fixtures/malware-ca Standard & secondary rate limits trigger an automatic retry (up to 2 times). -You can tune concurrency and increase the delay to reduce the chance of hitting rate limits. +You can tune concurrency and increase the various delays to reduce the chance of hitting rate limits, if you find that you have hit rate limits. Each time a secondary rate limit is hit, the delay between fetching SBOMs is increased by 10%, to provide a way to adaptively respond to that rate limit. 
diff --git a/fixtures/sboms/advanced-security/test-sbom-repo/sbom.json b/fixtures/sboms/advanced-security/test-sbom-repo/sbom.json index 9b4ef08..b621365 100644 --- a/fixtures/sboms/advanced-security/test-sbom-repo/sbom.json +++ b/fixtures/sboms/advanced-security/test-sbom-repo/sbom.json @@ -69,5 +69,25 @@ } ] } + ], + "branchDiffs": [ + { + "latestCommitDate": "2025-12-01T12:39:01.734Z", + "base": "main", + "head": "test", + "retrievedAt": "2025-12-01T12:39:01.734Z", + "changes": [ + { + "changeType": "added", + "name": "chalk", + "ecosystem": "npm", + "packageURL": "pkg:npm/chalk@5.6.1", + "license": "MIT", + "manifest": "package-lock.json", + "scope": "runtime", + "version": "5.6.1" + } + ] + } ] } diff --git a/package-lock.json b/package-lock.json index de6d4ae..87d6bdb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,6 +8,7 @@ "name": "github-sbom-toolkit", "version": "0.1.0", "dependencies": { + "@github/dependency-submission-toolkit": "^2.0.5", "@octokit/core": "^7.0.6", "@octokit/graphql": "^9.0.1", "@octokit/plugin-paginate-rest": "^14.0.0", @@ -15,7 +16,9 @@ "@octokit/plugin-retry": "^8.0.3", "@octokit/plugin-throttling": "^11.0.3", "chalk": "^5.6.2", + "cross-fetch": "^4.1.0", "inquirer": "^12.11.1", + "octokit": "^5.0.5", "p-limit": "^7.2.0", "packageurl-js": "^2.0.1", "semver": "^7.7.3", @@ -40,6 +43,226 @@ "node": ">=18.0.0" } }, + "node_modules/@actions/core": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", + "license": "MIT", + "dependencies": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "license": 
"MIT", + "dependencies": { + "@actions/io": "^1.0.1" + } + }, + "node_modules/@actions/github": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", + "license": "MIT", + "dependencies": { + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + "@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" + } + }, + "node_modules/@actions/github/node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/core": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz", + "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==", + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" 
+ } + }, + "node_modules/@actions/github/node_modules/@octokit/graphql": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", + "license": "MIT", + "dependencies": { + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": 
"MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/github/node_modules/@octokit/request": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/request-error": { + "version": "5.1.1", + 
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/github/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@actions/github/node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "license": "Apache-2.0" + }, + "node_modules/@actions/github/node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "license": "ISC" + }, + "node_modules/@actions/http-client": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", + "license": "MIT", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", + 
"license": "MIT" + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.10", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", @@ -661,6 +884,64 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/@github/dependency-submission-toolkit": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@github/dependency-submission-toolkit/-/dependency-submission-toolkit-2.0.5.tgz", + "integrity": "sha512-bCgbNa1WZZuexw5B3DVlIrkiMLf4kDtdPIdvAh7SibtvOM+lMCcLZXsEz3ukGY3QYay0+FPuMiiINw0LCRJJ5w==", + "license": "MIT", + "workspaces": [ + "example" + ], + "dependencies": { + "@actions/core": "^1.10.1", + "@actions/github": "^6.0.0", + "@octokit/request-error": "^6.1.1", + "@octokit/webhooks-types": "^7.5.0", + "packageurl-js": "^1.2.1" + } + }, + "node_modules/@github/dependency-submission-toolkit/node_modules/@octokit/openapi-types": { + "version": "25.1.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", + "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", + "license": "MIT" + }, + "node_modules/@github/dependency-submission-toolkit/node_modules/@octokit/request-error": { + "version": "6.1.8", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", + "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^14.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@github/dependency-submission-toolkit/node_modules/@octokit/types": { + 
"version": "14.1.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", + "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^25.1.0" + } + }, + "node_modules/@github/dependency-submission-toolkit/node_modules/packageurl-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/packageurl-js/-/packageurl-js-1.2.1.tgz", + "integrity": "sha512-cZ6/MzuXaoFd16/k0WnwtI298UCaDHe/XlSh85SeOKbGZ1hq0xvNbx3ILyCMyk7uFQxl6scF3Aucj6/EO9NwcA==", + "license": "MIT" + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1085,6 +1366,105 @@ "node": ">= 8" } }, + "node_modules/@octokit/app": { + "version": "16.1.2", + "resolved": "https://registry.npmjs.org/@octokit/app/-/app-16.1.2.tgz", + "integrity": "sha512-8j7sEpUYVj18dxvh0KWj6W/l6uAiVRBl1JBDVRqH1VHKAO/G5eRVl4yEoYACjakWers1DjUkcCHyJNQK47JqyQ==", + "license": "MIT", + "dependencies": { + "@octokit/auth-app": "^8.1.2", + "@octokit/auth-unauthenticated": "^7.0.3", + "@octokit/core": "^7.0.6", + "@octokit/oauth-app": "^8.0.3", + "@octokit/plugin-paginate-rest": "^14.0.0", + "@octokit/types": "^16.0.0", + "@octokit/webhooks": "^14.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/app/node_modules/@octokit/plugin-paginate-rest": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-14.0.0.tgz", + "integrity": "sha512-fNVRE7ufJiAA3XUrha2omTA39M6IXIc6GIZLvlbsm8QOQCYvpq/LkMNGyFlB1d8hTDzsAXa3OKtybdMAYsV/fw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/auth-app": { + "version": "8.1.2", + "resolved": 
"https://registry.npmjs.org/@octokit/auth-app/-/auth-app-8.1.2.tgz", + "integrity": "sha512-db8VO0PqXxfzI6GdjtgEFHY9tzqUql5xMFXYA12juq8TeTgPAuiiP3zid4h50lwlIP457p5+56PnJOgd2GGBuw==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-app": "^9.0.3", + "@octokit/auth-oauth-user": "^6.0.2", + "@octokit/request": "^10.0.6", + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0", + "toad-cache": "^3.7.0", + "universal-github-app-jwt": "^2.2.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/auth-oauth-app": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-9.0.3.tgz", + "integrity": "sha512-+yoFQquaF8OxJSxTb7rnytBIC2ZLbLqA/yb71I4ZXT9+Slw4TziV9j/kyGhUFRRTF2+7WlnIWsePZCWHs+OGjg==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^8.0.3", + "@octokit/auth-oauth-user": "^6.0.2", + "@octokit/request": "^10.0.6", + "@octokit/types": "^16.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/auth-oauth-device": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-8.0.3.tgz", + "integrity": "sha512-zh2W0mKKMh/VWZhSqlaCzY7qFyrgd9oTWmTmHaXnHNeQRCZr/CXy2jCgHo4e4dJVTiuxP5dLa0YM5p5QVhJHbw==", + "license": "MIT", + "dependencies": { + "@octokit/oauth-methods": "^6.0.2", + "@octokit/request": "^10.0.6", + "@octokit/types": "^16.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/auth-oauth-user": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-6.0.2.tgz", + "integrity": "sha512-qLoPPc6E6GJoz3XeDG/pnDhJpTkODTGG4kY0/Py154i/I003O9NazkrwJwRuzgCalhzyIeWQ+6MDvkUmKXjg/A==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^8.0.3", + "@octokit/oauth-methods": 
"^6.0.2", + "@octokit/request": "^10.0.6", + "@octokit/types": "^16.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, "node_modules/@octokit/auth-token": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz", @@ -1094,6 +1474,19 @@ "node": ">= 20" } }, + "node_modules/@octokit/auth-unauthenticated": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-7.0.3.tgz", + "integrity": "sha512-8Jb1mtUdmBHL7lGmop9mU9ArMRUTRhg8vp0T1VtZ4yd9vEm3zcLwmjQkhNEduKawOOORie61xhtYIhTDN+ZQ3g==", + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + } + }, "node_modules/@octokit/core": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.6.tgz", @@ -1139,12 +1532,73 @@ "node": ">= 20" } }, + "node_modules/@octokit/oauth-app": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-8.0.3.tgz", + "integrity": "sha512-jnAjvTsPepyUaMu9e69hYBuozEPgYqP4Z3UnpmvoIzHDpf8EXDGvTY1l1jK0RsZ194oRd+k6Hm13oRU8EoDFwg==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-app": "^9.0.2", + "@octokit/auth-oauth-user": "^6.0.1", + "@octokit/auth-unauthenticated": "^7.0.2", + "@octokit/core": "^7.0.5", + "@octokit/oauth-authorization-url": "^8.0.0", + "@octokit/oauth-methods": "^6.0.1", + "@types/aws-lambda": "^8.10.83", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/oauth-authorization-url": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-8.0.0.tgz", + "integrity": "sha512-7QoLPRh/ssEA/HuHBHdVdSgF8xNLz/Bc5m9fZkArJE5bb6NmVkDm3anKxXPmN1zh6b5WKZPRr3697xKT/yM3qQ==", + "license": "MIT", + "engines": { + "node": ">= 20" + } + }, + 
"node_modules/@octokit/oauth-methods": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-6.0.2.tgz", + "integrity": "sha512-HiNOO3MqLxlt5Da5bZbLV8Zarnphi4y9XehrbaFMkcoJ+FL7sMxH/UlUsCVxpddVu4qvNDrBdaTVE2o4ITK8ng==", + "license": "MIT", + "dependencies": { + "@octokit/oauth-authorization-url": "^8.0.0", + "@octokit/request": "^10.0.6", + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + } + }, "node_modules/@octokit/openapi-types": { "version": "27.0.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-27.0.0.tgz", "integrity": "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA==", "license": "MIT" }, + "node_modules/@octokit/openapi-webhooks-types": { + "version": "12.0.3", + "resolved": "https://registry.npmjs.org/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-12.0.3.tgz", + "integrity": "sha512-90MF5LVHjBedwoHyJsgmaFhEN1uzXyBDRLEBe7jlTYx/fEhPAk3P3DAJsfZwC54m8hAIryosJOL+UuZHB3K3yA==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-graphql": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-6.0.0.tgz", + "integrity": "sha512-crfpnIoFiBtRkvPqOyLOsw12XsveYuY2ieP6uYDosoUegBJpSVxGwut9sxUgFFcll3VTOTqpUf8yGd8x1OmAkQ==", + "license": "MIT", + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, "node_modules/@octokit/plugin-paginate-rest": { "version": "14.0.0", "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-14.0.0.tgz", @@ -1245,6 +1699,41 @@ "@octokit/openapi-types": "^27.0.0" } }, + "node_modules/@octokit/webhooks": { + "version": "14.1.3", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-14.1.3.tgz", + "integrity": 
"sha512-gcK4FNaROM9NjA0mvyfXl0KPusk7a1BeA8ITlYEZVQCXF5gcETTd4yhAU0Kjzd8mXwYHppzJBWgdBVpIR9wUcQ==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-webhooks-types": "12.0.3", + "@octokit/request-error": "^7.0.0", + "@octokit/webhooks-methods": "^6.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-6.0.0.tgz", + "integrity": "sha512-MFlzzoDJVw/GcbfzVC1RLR36QqkTLUf79vLVO3D+xn7r0QgxnFoLZgtrzxiQErAjFUOdH6fas2KeQJ1yr/qaXQ==", + "license": "MIT", + "engines": { + "node": ">= 20" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "7.6.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.6.1.tgz", + "integrity": "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw==", + "license": "MIT" + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.159", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.159.tgz", + "integrity": "sha512-SAP22WSGNN12OQ8PlCzGzRCZ7QDCwI85dQZbmpz7+mAk+L7j+wI7qnvmdKh+o7A5LaOp6QnOZ2NJphAZQTTHQg==", + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -2098,6 +2587,15 @@ "dev": true, "license": "MIT" }, + "node_modules/cross-fetch": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.1.0.tgz", + "integrity": "sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw==", + "license": "MIT", + "dependencies": { + "node-fetch": "^2.7.0" + } + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -2138,6 +2636,12 @@ "dev": true, "license": "MIT" }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "license": "ISC" + }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -2965,6 +3469,87 @@ "dev": true, "license": "MIT" }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/octokit": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/octokit/-/octokit-5.0.5.tgz", + "integrity": "sha512-4+/OFSqOjoyULo7eN7EA97DE0Xydj/PW5aIckxqQIoFjFwqXKuFCvXUJObyJfBF9Khu4RL/jlDRI9FPaMGfPnw==", + "license": "MIT", + "dependencies": { + "@octokit/app": "^16.1.2", + "@octokit/core": "^7.0.6", + "@octokit/oauth-app": "^8.0.3", + "@octokit/plugin-paginate-graphql": "^6.0.0", + "@octokit/plugin-paginate-rest": "^14.0.0", + "@octokit/plugin-rest-endpoint-methods": "^17.0.0", + "@octokit/plugin-retry": "^8.0.3", + "@octokit/plugin-throttling": "^11.0.3", + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0", + "@octokit/webhooks": "^14.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/octokit/node_modules/@octokit/plugin-paginate-rest": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-14.0.0.tgz", + "integrity": "sha512-fNVRE7ufJiAA3XUrha2omTA39M6IXIc6GIZLvlbsm8QOQCYvpq/LkMNGyFlB1d8hTDzsAXa3OKtybdMAYsV/fw==", + "license": "MIT", + "dependencies": { + "@octokit/types": 
"^16.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/octokit/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-17.0.0.tgz", + "integrity": "sha512-B5yCyIlOJFPqUUeiD0cnBJwWJO8lkJs5d8+ze9QDP6SvfiXSz1BF+91+0MeI1d2yxgOhU/O+CvtiZ9jSkHhFAw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -3344,6 +3929,21 @@ "node": ">=8.0" } }, + "node_modules/toad-cache": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz", + "integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, "node_modules/ts-api-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", @@ -3383,6 +3983,15 @@ "fsevents": "~2.3.3" } }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": 
"sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "license": "MIT", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -3410,6 +4019,18 @@ "node": ">=14.17" } }, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, "node_modules/undici-types": { "version": "7.16.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", @@ -3417,6 +4038,12 @@ "devOptional": true, "license": "MIT" }, + "node_modules/universal-github-app-jwt": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-2.2.2.tgz", + "integrity": "sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw==", + "license": "MIT" + }, "node_modules/universal-user-agent": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", @@ -3433,6 +4060,22 @@ "punycode": "^2.1.0" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + 
"dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -3473,6 +4116,12 @@ "node": ">=8" } }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/package.json b/package.json index 3a16355..96d51fc 100644 --- a/package.json +++ b/package.json @@ -12,12 +12,13 @@ "start": "node dist/cli.js", "dev": "tsx src/cli.ts", "lint": "eslint . --ext .ts --max-warnings=0", - "test": "node dist/test-fixture-match.js" + "test": "node dist/test-fixture-match.js && node dist/test-branch-search.js" }, "engines": { "node": ">=18.0.0" }, "dependencies": { + "@github/dependency-submission-toolkit": "^2.0.5", "@octokit/core": "^7.0.6", "@octokit/graphql": "^9.0.1", "@octokit/plugin-paginate-rest": "^14.0.0", @@ -25,7 +26,9 @@ "@octokit/plugin-retry": "^8.0.3", "@octokit/plugin-throttling": "^11.0.3", "chalk": "^5.6.2", + "cross-fetch": "^4.1.0", "inquirer": "^12.11.1", + "octokit": "^5.0.5", "p-limit": "^7.2.0", "packageurl-js": "^2.0.1", "semver": "^7.7.3", @@ -43,4 +46,4 @@ "tsx": "^4.20.6", "typescript": "^5.9.3" } -} \ No newline at end of file +} diff --git a/src/cli.ts b/src/cli.ts index d45450a..831b22c 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -5,7 +5,10 @@ import chalk from "chalk"; import { SbomCollector } from "./sbomCollector.js"; import inquirer from "inquirer"; // still used elsewhere if needed import readline from "readline"; -const { MalwareAdvisorySync } = await import("./malwareAdvisories.js"); +import { CollectionSummary, RepositorySbom } from "./types.js"; +import { MalwareAdvisorySync } from 
"./malwareAdvisories.js"; +import { MalwareMatch } from "./malwareMatcher.js"; +import fs from "fs"; async function main() { const argv = await yargs(hideBin(process.argv)) @@ -13,7 +16,9 @@ async function main() { .option("token", { type: "string", describe: "GitHub token with repo + security_events scope" }) .option("enterprise", { type: "string", describe: "Enterprise slug (mutually exclusive with --org)" }) .option("org", { type: "string", describe: "Single organization login" }) + .option("repo", { type: "string", describe: "Single repository name" }) .option("base-url", { type: "string", describe: "GitHub Enterprise Server base URL, e.g. https://github.mycompany.com/api/v3" }) + .option("ghes", { type: "boolean", default: false, describe: "Indicates that the provided base URL is for GitHub Enterprise Server" }) .option("concurrency", { type: "number", default: 5 }) .option("sbom-delay", { type: "number", default: 3000, describe: "Delay (ms) between SBOM fetch requests" }) .option("light-delay", { type: "number", default: 100, describe: "Delay (ms) between lightweight metadata requests (org/repo listing, commit head checks)" }) @@ -25,7 +30,7 @@ async function main() { .option("quiet", { type: "boolean", default: false, describe: "Suppress all non-error output (does not suppress progress bar or JSON)" }) .option("interactive", { type: "boolean", default: false, describe: "Enter interactive PURL search mode after collection" }) .option("sync-malware", { type: "boolean", default: false, describe: "Sync malware advisories (MALWARE classification) to local cache" }) - .option("malware-cache", { type: "string", default: "malware-cache", describe: "Directory to store malware advisory cache" }) + .option("malware-cache", { type: "string", describe: "Directory to store malware advisory cache" }) .option("malware-since", { type: "string", describe: "Override last sync timestamp (ISO) for malware advisory incremental sync" }) .option("ca-bundle", { type: "string", 
describe: "Path to PEM file with additional CA certificate(s) (self-signed/internal)" }) .option("match-malware", { type: "boolean", default: false, describe: "After sync/load, match SBOM packages against malware advisories" }) @@ -39,13 +44,27 @@ async function main() { .option("csv", { type: "boolean", describe: "Emit results (search + malware matches) as CSV" }) .option("ignore-file", { type: "string", describe: "Path to YAML ignore file (advisories, purls, scoped ignores)" }) .option("ignore-unbounded-malware", { type: "boolean", default: false, describe: "Ignore malware advisories whose vulnerable range covers all versions (e.g. '*', '>=0')" }) + .option("branch-scan", { type: "boolean", default: false, describe: "Fetch SBOM diffs for non-default branches (limited by --branch-limit)" }) + .option("branch-limit", { type: "number", default: undefined, describe: "Limit number of non-default branches to scan per repository" }) + .option("diff-base", { type: "string", describe: "Override base branch for dependency review diffs (defaults to default branch)" }) + .option("submit-on-missing-snapshot", { type: "boolean", default: false, describe: "When dependency review diff returns 404 (missing snapshot), run Component Detection to submit a snapshot, then retry." }) + .option("submit-languages", { type: "array", describe: "Limit snapshot submission to these languages (e.g., JavaScript,TypeScript,Python,Maven)." }) + .option("component-detection-bin", { type: "string", describe: "Path to a local component-detection executable to use for snapshot submission (skips download)." }) + .option("force-submission", { type: "boolean", default: false, describe: "Always run Dependency Submission for scanned branches before fetching diffs." 
}) + .option("snapshot-ingestion-delay", { type: "number", default: 1500, describe: "Delay (ms) after snapshot submission to allow ingestion before dependency review (default: 1500ms)" }) + .option("retry-ingestion-delay", { type: "number", default: 3000, describe: "Delay (ms) after snapshot submission before retrying dependency review on 404 (default: 3000ms)" }) + .option("debug", { type: "boolean", default: false, describe: "Enable debug logging" }) .check(args => { const syncing = !!args.syncSboms; if (syncing) { - if (!args.enterprise && !args.org) throw new Error("Provide --enterprise or --org with --sync-sboms"); + if (!args.enterprise && !args.org && !args.repo) throw new Error("Provide --enterprise, --org or --repo with --sync-sboms"); if (args.enterprise && args.org) throw new Error("Specify only one of --enterprise or --org"); + if (args.repo && (args.enterprise || args.org)) throw new Error("Specify only one of --enterprise, --org, or --repo"); + if (args.repo && !(args.repo as string).includes("/")) throw new Error("--repo must be in the format owner/repo"); + if (syncing && !args.sbomCache) throw new Error("--sync-sboms requires --sbom-cache to write updated SBOMs to disk"); } else { - if (!args.sbomCache) throw new Error("Offline mode requires --sbom-cache (omit --sync-sboms)"); + const malwareOnly = !!args["sync-malware"] && !args.sbomCache && !args.purl && !args["purl-file"] && !args["match-malware"] && !args.uploadSarif && !args.interactive; + if (!malwareOnly && !args.sbomCache) throw new Error("Offline mode requires --sbom-cache unless running --sync-malware by itself"); } // If --cli is specified in combination with JSON or CSV, require an output file to avoid mixed stdout streams. 
if (args.cli && !args.outputFile && (args.json || args.csv)) { @@ -72,6 +91,14 @@ async function main() { .help() .parseAsync(); + const debug = argv.debug as boolean; + + if (debug) { + console.debug(chalk.blue("Debug logging enabled")); + } else { + console.debug = () => { }; + } + const token = argv.token as string | undefined || process.env.GITHUB_TOKEN; // Require a token for any network operation (syncing SBOMs, malware advisories, or SARIF upload) @@ -88,11 +115,18 @@ async function main() { const wantCsv = !!argv.csv; const hasOutputFile = !!argv.outputFile; const wantCli = !!argv.cli && hasOutputFile; // only allow CLI alongside machine output when writing file - const collector = new SbomCollector({ + + let sboms: RepositorySbom[] = []; + let summary: CollectionSummary | undefined; + + const needCollector = !!argv.syncSboms || !!argv.sbomCache || !!argv.purl || !!argv["purl-file"] || !!argv["match-malware"] || !!argv.uploadSarif || !!argv.interactive; + const collector = needCollector ? 
new SbomCollector({ token: token, enterprise: argv.enterprise as string | undefined, org: argv.org as string | undefined, + repo: argv.repo as string | undefined, baseUrl: argv["base-url"] as string | undefined, + ghes: argv.ghes as boolean | undefined, concurrency: argv.concurrency as number, delayMsBetweenRepos: argv["sbom-delay"] as number, lightDelayMs: argv["light-delay"] as number, @@ -102,30 +136,42 @@ async function main() { suppressSecondaryRateLimitLogs: argv.suppressSecondaryRateLimitLogs as boolean, quiet, caBundlePath: argv["ca-bundle"] as string | undefined, - }); + includeBranches: argv["branch-scan"] as boolean, + branchLimit: argv["branch-limit"] as number | undefined, + branchDiffBase: argv["diff-base"] as string | undefined, + submitOnMissingSnapshot: argv["submit-on-missing-snapshot"] as boolean, + forceSubmission: argv["force-submission"] as boolean, + submitLanguages: (argv["submit-languages"] as string[] | undefined) || undefined, + componentDetectionBinPath: argv["component-detection-bin"] as string | undefined, + snapshotIngestionDelayMs: argv["snapshot-ingestion-delay"] as number | undefined, + retryIngestionDelayMs: argv["retry-ingestion-delay"] as number | undefined, + }) : undefined; - if (!quiet) process.stderr.write(chalk.cyan(offline ? "Loading SBOMs from cache..." : "Collecting SBOMs from cache & GitHub...") + "\n"); - const sboms = await collector.collect(); - const summary = collector.getSummary(); - if (!quiet) process.stderr.write(chalk.green(`Done. Success: ${summary.successCount} / ${summary.repositoryCount}. Failed: ${summary.failedCount}. Cached: ${summary.skippedCount}`) + "\n"); + if (collector && (argv.sbomCache || argv.syncSboms)) { + if (!quiet) process.stderr.write(chalk.cyan(offline ? "Loading SBOMs from cache..." : "Collecting SBOMs from cache & GitHub...") + "\n"); + sboms = await collector.collect(); + summary = collector.getSummary(); + if (!quiet) process.stderr.write(chalk.green(`Done. 
Success: ${summary.successCount} / ${summary.repositoryCount}. Failed: ${summary.failedCount}. Cached: ${summary.skippedCount}`) + "\n"); + } const mas = new MalwareAdvisorySync({ token: token!, baseUrl: argv["base-url"] ? (argv["base-url"] as string).replace(/\/v3$/, "/graphql") : undefined, cacheDir: argv["malware-cache"] as string | undefined, since: argv["malware-since"] as string | undefined, - caBundlePath: argv["ca-bundle"] as string | undefined + caBundlePath: argv["ca-bundle"] as string | undefined, + quiet }); - if (argv["sync-malware"]) { - + if (argv.syncMalware) { if (!quiet) process.stderr.write(chalk.cyan("Syncing malware advisories from GitHub Advisory Database...") + "\n"); const { added, updated, total } = await mas.sync(); if (!quiet) process.stderr.write(chalk.green(`Malware advisories sync complete. Added: ${added}, Updated: ${updated}, Total cached: ${total}`) + "\n"); } - let malwareMatches: import("./malwareMatcher.js").MalwareMatch[] | undefined; + let malwareMatches: MalwareMatch[] = []; + if (argv["match-malware"]) { const { matchMalware, buildSarifPerRepo, writeSarifFiles, uploadSarifPerRepo } = await import("./malwareMatcher.js"); malwareMatches = matchMalware(mas.getAdvisories(), sboms, { advisoryDateCutoff: argv["malware-cutoff"] as string | undefined }); @@ -166,7 +212,8 @@ async function main() { const showMalwareCli = (!wantJson && !wantCsv) || wantCli; // show only in pure CLI or combined mode if (showMalwareCli && !quiet) { for (const m of malwareMatches) { - process.stdout.write(`${m.repo} :: ${m.purl} => ${m.advisoryGhsaId} (${m.vulnerableVersionRange ?? "(no range)"}) {advisory: ${m.reason}} ${m.advisoryPermalink}\n`); + const branchInfo = m.branch ? ` [branch: ${m.branch}]` : ""; + process.stdout.write(`${m.repo} :: ${m.purl} => ${m.advisoryGhsaId} (${m.vulnerableVersionRange ?? 
"(no range)"}){advisory: ${m.reason}}${branchInfo} ${m.advisoryPermalink}\n`); } } if (argv.sarifDir) { @@ -184,10 +231,6 @@ async function main() { } } } - // Incremental write now handled inside collector; retain legacy behavior only if user wants to force a re-write - if (!quiet && argv.syncSboms && argv["sbom-cache"] && summary.repositoryCount === summary.skippedCount) { - process.stderr.write(chalk.blue("All repositories reused from cache (no new SBOM writes).") + "\n"); - } const runSearchCli = (purls: string[], results: Map) => { if (!results.size) { @@ -219,16 +262,17 @@ async function main() { } const combinedPurlsRaw = [...(argv.purl as string[] ?? []), ...filePurls]; const combinedPurls = combinedPurlsRaw.map(p => p.startsWith("pkg:") ? p : `pkg:${p}`); + let searchMap: Map | undefined; - if (combinedPurls.length) { + if (combinedPurls.length && collector) { searchMap = collector.searchByPurlsWithReasons(combinedPurls); } + if (wantJson) { const jsonSearch = Array.from((searchMap || new Map()).entries()).map(([repo, entries]) => ({ repo, matches: entries })); if (hasOutputFile) { try { - const fs = await import("fs"); - let existing: { search?: unknown; malwareMatches?: import("./malwareMatcher.js").MalwareMatch[] } = {}; + let existing: { search?: unknown; malwareMatches?: MalwareMatch[] } = {}; if (fs.existsSync(argv.outputFile as string)) { try { existing = JSON.parse(fs.readFileSync(argv.outputFile as string, "utf8")); } catch { existing = {}; } } @@ -256,14 +300,14 @@ async function main() { for (const { purl, reason } of entries) searchRows.push({ repo, purl, reason }); } } - const malwareRows: Array<{ repo: string; purl: string; advisory: string; range: string | null; updatedAt: string }> = []; - if (malwareMatches) { + const malwareRows: Array<{ repo: string; purl: string; advisory: string; range: string | null; updatedAt: string; branch: string | undefined }> = []; + if (malwareMatches.length) { for (const m of malwareMatches) { - 
malwareRows.push({ repo: m.repo, purl: m.purl, advisory: m.advisoryGhsaId, range: m.vulnerableVersionRange, updatedAt: m.advisoryUpdatedAt }); + malwareRows.push({ repo: m.repo, purl: m.purl, advisory: m.advisoryGhsaId, range: m.vulnerableVersionRange, updatedAt: m.advisoryUpdatedAt, branch: m.branch }); } } // CSV columns: type,repo,purl,reason_or_advisory,range,updatedAt - const header = ["type", "repo", "purl", "reason_or_advisory", "range", "updatedAt"]; + const header = ["type", "repo", "purl", "reason_or_advisory", "range", "updatedAt", "branch"]; const sanitize = (val: unknown): string => { if (val === null || val === undefined) return ""; let s = String(val); @@ -291,7 +335,8 @@ async function main() { sanitize(r.purl), sanitize(r.advisory), sanitize(r.range ?? ""), - sanitize(r.updatedAt) + sanitize(r.updatedAt), + sanitize(r.branch ?? "") ].join(",")); } const csvPayload = lines.join("\n") + "\n"; @@ -369,6 +414,11 @@ async function main() { } const list = trimmed.split(/[\s,]+/).filter(Boolean); try { + if (!collector) { + console.error(chalk.red("Interactive search requires SBOMs; provide --sbom-cache or run with --sync-sboms.")); + rl.prompt(); + return; + } const map = collector.searchByPurlsWithReasons(list.map(p => p.startsWith("pkg:") ? p : `pkg:${p}`)); runSearchCli(list, map); } catch (e) { @@ -388,8 +438,12 @@ async function main() { { name: "purl", message: "Enter a PURL (blank to exit)", type: "input" } ]); if (!ans.purl) break; - const map = collector.searchByPurlsWithReasons([ans.purl.startsWith("pkg:") ? ans.purl : `pkg:${ans.purl}`]); - runSearchCli([ans.purl], map); + if (!collector) { + console.error(chalk.red("Interactive search requires SBOMs; provide --sbom-cache or run with --sync-sboms.")); + continue; + } + const map = collector.searchByPurlsWithReasons([ans.purl.startsWith("pkg:") ? 
ans.purl : `pkg:${ans.purl}`]); + runSearchCli([ans.purl], map); } } } diff --git a/src/componentDetection.ts b/src/componentDetection.ts new file mode 100644 index 0000000..669d5fe --- /dev/null +++ b/src/componentDetection.ts @@ -0,0 +1,432 @@ +import { Octokit } from "@octokit/core"; +import { + PackageCache, + Package, + Manifest, +} from '@github/dependency-submission-toolkit' +import fetch from 'cross-fetch' +import fs from 'fs' +import { spawn } from 'child_process'; +import path from 'path'; +import { tmpdir } from 'os'; +import { StringDecoder } from 'node:string_decoder'; + +export default class ComponentDetection { + public componentDetectionPath: string = process.platform === "win32" ? './component-detection.exe' : './component-detection'; + public outputPath: string; + octokit: Octokit; + baseUrl: string; + + constructor(octokit: Octokit, baseUrl: string, executablePath?: string) { + this.octokit = octokit; + this.baseUrl = baseUrl; + if (executablePath) { + this.componentDetectionPath = executablePath; + } + + // Set the output path + this.outputPath = (() => { + const tmpDir = fs.mkdtempSync(path.join(tmpdir(), 'component-detection-')); + return path.join(tmpDir, 'output.json'); + })(); + } + + // This is the default entry point for this class. + // If executablePath is provided, use it directly and skip download. 
+ async scanAndGetManifests(path: string): Promise { + if (!fs.existsSync(this.componentDetectionPath)) { + await this.downloadLatestRelease(); + } + + // make an empty file to write results into + fs.writeFileSync(this.outputPath, '', { flag: 'w' }); + + if (!await this.runComponentDetection(path)) { + return; + } + + return await this.getManifestsFromResults(this.outputPath, path); + } + // Get the latest release from the component-detection repo, download the tarball, and extract it + public async downloadLatestRelease() { + try { + const statResult = fs.statSync(this.componentDetectionPath); + if (statResult && statResult.isFile()) { + console.debug(`Component-detection binary already exists at ${this.componentDetectionPath}, skipping download.`); + return; + } + } catch (error) { + // File does not exist, proceed to download + } + + try { + console.debug(`Downloading latest release for ${process.platform}`); + const downloadURL = await this.getLatestReleaseURL(); + const blob = await (await fetch(new URL(downloadURL))).blob(); + const arrayBuffer = await blob.arrayBuffer(); + const buffer = Buffer.from(arrayBuffer); + + // Write the blob to a file + console.debug(`Writing binary to file ${this.componentDetectionPath}`); + await fs.writeFileSync(this.componentDetectionPath, buffer, { mode: 0o755, flag: 'w' }); + } catch (error: any) { + console.error(error); + } + } + + // Run the component-detection CLI on the path specified + public runComponentDetection(path: string): Promise { + console.debug(`Running component-detection on ${path}`); + + console.debug(`Writing to output file: ${this.outputPath}`); + + return new Promise((resolve, reject) => { + try { + const child = spawn(`${this.componentDetectionPath}`, ['scan', '--SourceDirectory', path, '--ManifestFile', this.outputPath], { stdio: 'pipe' }); + const pid = child.pid; + + child.on('error', (err) => { + console.error(`Component-detection process ${pid} error: ${err instanceof Error ? 
err.message : String(err)}`); + reject(err); + }); + + child.on('exit', (code) => { + console.debug(`Component-detection process ${pid} exited with code ${code}`); + if (code === 0) { + console.debug(`Component-detection completed successfully.`); + resolve(true); + } else { + console.error(`Component-detection failed with exit code ${code}.`); + const decoder = new StringDecoder('utf8'); + const stdout = child.stdout.read(); + const stderr = child.stderr.read(); + if (stdout) { + console.error(decoder.write(stdout)); + } + if (stderr) { + console.error(decoder.write(stderr)); + } + resolve(false); + } + }); + } catch (error: any) { + console.error(error); + reject(error); + } + }); + } + + public async getManifestsFromResults(file: string, path: string): Promise { + console.debug(`Reading results from ${file}`); + const results = await fs.readFileSync(file, 'utf8'); + let json: any; + try { + json = JSON.parse(results); + } catch (err: any) { + throw new Error(`Failed to parse JSON results from component-detection output file "${file}": ${err instanceof Error ? 
err.message : String(err)}`); + } + + let dependencyGraphs: DependencyGraphs = this.normalizeDependencyGraphPaths(json.dependencyGraphs, path); + + return this.processComponentsToManifests(json.componentsFound, dependencyGraphs); + } + + public processComponentsToManifests(componentsFound: any[], dependencyGraphs: DependencyGraphs): Manifest[] { + // Parse the result file and add the packages to the package cache + const packageCache = new PackageCache(); + const packages: Array = []; + + for (const component of componentsFound) { + // Skip components without packageUrl + if (!component.component.packageUrl) { + console.debug(`Skipping component detected without packageUrl: ${JSON.stringify({ + id: component.component.id, + name: component.component.name || 'unnamed', + type: component.component.type || 'unknown' + }, null, 2)}`); + continue; + } + + console.debug(`Processing component: ${component.component.id}`); + console.debug(`Component details: ${JSON.stringify(component.component.packageUrl, null, 2)}`); + + const packageUrl = ComponentDetection.makePackageUrl(component.component.packageUrl); + + // Skip if the packageUrl is empty (indicates an invalid or missing packageUrl) + if (!packageUrl) { + console.debug(`Skipping component with invalid packageUrl: ${component.component.id}`); + continue; + } + + if (!packageCache.hasPackage(packageUrl)) { + const pkg = new ComponentDetectionPackage(packageUrl, component.component.id, + component.isDevelopmentDependency, component.topLevelReferrers, component.locationsFoundAt, component.containerDetailIds, component.containerLayerIds); + packageCache.addPackage(pkg); + packages.push(pkg); + } + } + + // Set the transitive dependencies + console.debug("Sorting out transitive dependencies"); + for (const pkg of packages) { + for (const referrer of pkg.topLevelReferrers) { + // Skip if referrer doesn't have a valid packageUrl + if (!referrer.packageUrl) { + console.debug(`Skipping referrer without packageUrl for 
component: ${pkg.id}`); + continue; + } + + const referrerUrl = ComponentDetection.makePackageUrl(referrer.packageUrl); + referrer.packageUrlString = referrerUrl; + + // Skip if the generated packageUrl is empty + if (!referrerUrl) { + console.debug(`Skipping referrer with invalid packageUrl for component: ${pkg.id}`); + continue; + } + + try { + const referrerPackage = packageCache.lookupPackage(referrerUrl); + if (referrerPackage === pkg) { + console.debug(`Found self-reference for package: ${pkg.id}`); + continue; // Skip self-references + } + if (referrerPackage) { + referrerPackage.dependsOn(pkg); + } + } catch (error) { + console.debug(`Error looking up referrer package: ${error}`); + } + } + } + + // Create manifests + const manifests: Array = []; + + console.debug("Dependency Graphs:"); + console.debug(JSON.stringify(dependencyGraphs, null, 2)); + + // Check the locationsFoundAt for every package and add each as a manifest + this.addPackagesToManifests(packages, manifests, dependencyGraphs); + + return manifests; + } + + private addPackagesToManifests(packages: Array, manifests: Array, dependencyGraphs: DependencyGraphs): void { + packages.forEach((pkg: ComponentDetectionPackage) => { + pkg.locationsFoundAt.forEach((location: any) => { + // Use the normalized path (remove leading slash if present) + let normalizedLocation = location.startsWith('/') ? 
location.substring(1) : location; + // Unescape the path, as upstream ComponentDetection emits locationsFoundAt in URL-encoded form + normalizedLocation = decodeURIComponent(normalizedLocation); + + if (!manifests.find((manifest: Manifest) => manifest.name === normalizedLocation)) { + const manifest = new Manifest(normalizedLocation, normalizedLocation); + manifests.push(manifest); + } + + const depGraphEntry = dependencyGraphs[normalizedLocation]; + if (!depGraphEntry) { + console.warn(`No dependency graph entry found for manifest location: ${normalizedLocation}`); + return; // Skip this location if not found in dependencyGraphs + } + + const directDependencies = depGraphEntry.explicitlyReferencedComponentIds; + if (directDependencies.includes(pkg.id)) { + manifests + .find((manifest: Manifest) => manifest.name === normalizedLocation) + ?.addDirectDependency( + pkg, + ComponentDetection.getDependencyScope(pkg) + ); + } else { + manifests + .find((manifest: Manifest) => manifest.name === normalizedLocation) + ?.addIndirectDependency( + pkg, + ComponentDetection.getDependencyScope(pkg) + ); + } + }); + }); + } + + private static getDependencyScope(pkg: ComponentDetectionPackage) { + return pkg.isDevelopmentDependency ? 'development' : 'runtime' + } + + public static makePackageUrl(packageUrlJson: any): string { + // Handle case when packageUrlJson is null or undefined + if ( + !packageUrlJson || + typeof packageUrlJson.Scheme !== 'string' || + typeof packageUrlJson.Type !== 'string' || + !packageUrlJson.Scheme || + !packageUrlJson.Type + ) { + console.debug(`Warning: Received null or undefined packageUrlJson. 
Unable to create package URL.`); + return ""; // Return a blank string for unknown packages + } + + try { + let packageUrl = `${packageUrlJson.Scheme}:${packageUrlJson.Type}/`; + if (packageUrlJson.Namespace) { + packageUrl += `${packageUrlJson.Namespace.replaceAll("@", "%40")}/`; + } + packageUrl += `${packageUrlJson.Name.replaceAll("@", "%40")}`; + if (packageUrlJson.Version) { + packageUrl += `@${packageUrlJson.Version}`; + } + if (packageUrlJson.Qualifiers && Object.keys(packageUrlJson.Qualifiers).length > 0) { + const qualifierString = Object.entries(packageUrlJson.Qualifiers) + .map(([key, value]) => `${key}=${value}`) + .join("&"); + packageUrl += `?${qualifierString}`; + } + return packageUrl; + } catch (error) { + console.debug(`Error creating package URL from packageUrlJson: ${JSON.stringify(packageUrlJson, null, 2)}`); + console.debug(`Error details: ${error}`); + return ""; // Return a blank string for error cases + } + } + + private async getLatestReleaseURL(): Promise { + let octokit: Octokit = this.octokit; + + if (this.baseUrl !== 'https://api.github.com') { + octokit = new Octokit({ + auth: "", request: { fetch: fetch }, log: { + debug: console.debug, + info: console.info, + warn: console.warn, + error: console.error + }, + }); + } + + const owner = "microsoft"; + const repo = "component-detection"; + console.debug(`Attempting to download latest release from ${owner}/${repo}`); + + try { + const latestRelease = await octokit.request("GET /repos/{owner}/{repo}/releases/latest", { owner, repo }); + + let downloadURL: string = ""; + // TODO: do we need to handle different architectures here? + // can we allow x64 on MacOS? We could allow an input parameter to override? 
+ let assetName: string; + if (process.platform === "win32") { + assetName = "component-detection-win-x64.exe"; + } else if (process.platform === "linux") { + assetName = "component-detection-linux-x64"; + } else if (process.platform === "darwin") { + assetName = "component-detection-osx-arm64"; + } else { + throw new Error(`Unsupported platform: ${process.platform}`); + } + latestRelease.data.assets.forEach((asset: any) => { + if (asset.name === assetName) { + downloadURL = asset.browser_download_url; + } + }); + + return downloadURL; + } catch (error: any) { + console.error(error); + console.debug(error.message); + console.debug(error.stack); + throw new Error("Failed to download latest release"); + } + } + + /** + * Normalizes the keys of a DependencyGraphs object to be relative paths from the resolved filePath input. + * @param dependencyGraphs The DependencyGraphs object to normalize. + * @param filePathInput The filePath input (relative or absolute) from the action configuration. + * @returns A new DependencyGraphs object with relative path keys. 
+ */ + public normalizeDependencyGraphPaths( + dependencyGraphs: DependencyGraphs, + filePathInput: string + ): DependencyGraphs { + // Resolve the base directory from filePathInput (relative to cwd if not absolute) + const baseDir = path.resolve(process.cwd(), filePathInput); + // Use a null-prototype object to avoid prototype pollution + const normalized: DependencyGraphs = Object.create(null); + for (const absPath in dependencyGraphs) { + // Only process own properties + if (!Object.prototype.hasOwnProperty.call(dependencyGraphs, absPath)) continue; + // Make the path relative to the baseDir + let relPath = path.relative(baseDir, absPath).replace(/\\/g, '/'); + // Guard against special keys that could lead to prototype injection + if (relPath === '__proto__' || relPath === 'constructor' || relPath === 'prototype') { + console.warn(`Skipping unsafe manifest key: ${relPath}`); + continue; + } + // Define property safely + Object.defineProperty(normalized, relPath, { + value: dependencyGraphs[absPath], + enumerable: true, + configurable: false, + writable: false, + }); + } + return normalized; + } +} + +/** + * Type for referrer objects in topLevelReferrers array + */ +type TopLevelReferrer = { + packageUrl?: any; + packageUrlString?: string; +}; + +class ComponentDetectionPackage extends Package { + public packageUrlString: string; + + constructor(packageUrl: string, public id: string, public isDevelopmentDependency: boolean, public topLevelReferrers: TopLevelReferrer[], + public locationsFoundAt: string[], public containerDetailIds: string[], public containerLayerIds: string[]) { + super(packageUrl); + this.packageUrlString = packageUrl; + } +} + +/** + * Types for the dependencyGraphs section of output.json + */ +export type DependencyGraph = { + /** + * The dependency graph: keys are component IDs, values are either null (no dependencies) or an array of component IDs (dependencies) + */ + graph: Record; + /** + * Explicitly referenced component IDs + */ + 
explicitlyReferencedComponentIds: string[]; + /** + * Development dependencies + */ + developmentDependencies: string[]; + /** + * Regular dependencies + */ + dependencies: string[]; +}; + +/** + * The top-level dependencyGraphs object: keys are manifest file paths, values are DependencyGraph objects + */ +export type DependencyGraphs = Record; + + + + + + + + + + diff --git a/src/componentSubmission.ts b/src/componentSubmission.ts new file mode 100644 index 0000000..1ceaaaf --- /dev/null +++ b/src/componentSubmission.ts @@ -0,0 +1,255 @@ +import chalk from 'chalk'; +import { execFile } from 'child_process'; +import path from 'path'; +import fs from 'fs'; +import os from 'os'; + +import ComponentDetection from './componentDetection.js'; +import { + Job, + Snapshot, +} from '@github/dependency-submission-toolkit'; +import { Octokit } from 'octokit'; +import { RequestError } from '@octokit/request-error' + +export interface SubmitOpts { + octokit: Octokit; + owner: string; + repo: string; + branch: string; + token?: string; + baseUrl?: string; + caBundlePath?: string; + quiet?: boolean; + languages?: string[]; + componentDetectionBinPath?: string; // optional path to component-detection executable + lightDelayMs?: number; +} + +export async function getLanguageIntersection(octokit: Octokit, owner: string, repo: string, languages: string[] | undefined, quiet: boolean = false, lightDelayMs: number = 0): Promise { + const langResp = await octokit.request('GET /repos/{owner}/{repo}/languages', { owner, repo }); + + await new Promise(r => setTimeout(r, lightDelayMs)); + + const repoLangs = Object.keys(langResp.data || {}); + const wanted = languages; + const intersect = wanted ? 
repoLangs.filter(l => wanted.some(w => w.toLowerCase() === l.toLowerCase())) : repoLangs; + if (!intersect.length) { + if (!quiet) console.error(chalk.yellow(`Skipping submission: none of selected languages present in repo (${repoLangs.join(', ')})`)); + return []; + } + return intersect; +} + +export async function sparseCheckout(owner: string, repo: string, branch: string, destDir: string, intersect: string[], baseUrl?: string, lightDelayMs?: number) { + const cwd = destDir; + const repoUrl = (baseUrl && baseUrl.includes('api/v3')) + ? baseUrl.replace(/\/api\/v3$/, '') + `/${owner}/${repo}.git` + : `https://github.com/${owner}/${repo}.git`; + const patterns = buildSparsePatterns(intersect); + // init repo + await execGit(['init'], { cwd }); + await execGit(['remote', 'add', 'origin', repoUrl], { cwd }); + await execGit(['config', 'core.sparseCheckout', 'true'], { cwd }); + fs.mkdirSync(path.join(cwd, '.git', 'info'), { recursive: true }); + fs.writeFileSync(path.join(cwd, '.git', 'info', 'sparse-checkout'), patterns.join('\n') + '\n', 'utf8'); + await execGit(['fetch', '--depth=1', 'origin', branch], { cwd }); + await execGit(['checkout', 'FETCH_HEAD'], { cwd }); + + await new Promise(r => setTimeout(r, lightDelayMs)); + + const { stdout: shaOut } = await execGit(['rev-parse', 'HEAD'], { cwd: destDir }); + const sha = shaOut.trim(); + console.debug(`Checked out ${owner}/${repo}@${branch} to ${destDir} at commit ${sha}`); + return sha; +} + +export async function submitSnapshotIfPossible(opts: SubmitOpts): Promise { + if (!opts.octokit) { + throw new Error('Octokit instance is required in opts.octokit'); + } + + const tmp = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'cd-submission-')); + + try { + const intersect = await getLanguageIntersection(opts.octokit, opts.owner, opts.repo, opts.languages, opts.quiet, opts.lightDelayMs); + // Create temp dir and sparse checkout only manifest files according to selected languages + if (!intersect.length) { + // No 
matching languages, skip submission + return true; + } + console.debug(chalk.green(`Sparse checkout into ${tmp} for languages: ${intersect.join(', ')}`)); + + const sha = await sparseCheckout(opts.owner, opts.repo, opts.branch, tmp, intersect, opts.baseUrl, opts.lightDelayMs); + + // Run the ComponentDetection module to detect components and submit snapshot + if (!sha) { + if (!opts.quiet) console.error(chalk.red(`Failed to determine SHA for ${opts.owner}/${opts.repo} on branch ${opts.branch}`)); + return false; + } + return await runComponentDetectionAndSubmit(opts.octokit, tmp, opts.owner, opts.repo, sha, opts.branch, opts.componentDetectionBinPath); + + } catch (e) { + if (!opts.quiet) console.error(chalk.red(`Component Detection failed: ${(e as Error).message}`)); + return false; + } finally { + // Clean up temp dir + await fs.promises.rm(tmp, { recursive: true, force: true }); + } +} + +function buildSparsePatterns(langs: string[]): string[] { + const set = new Set(); + const add = (p: string) => set.add(p); + for (const l of langs) { + const ll = l.toLowerCase(); + if (ll === 'javascript' || ll === 'typescript') { + add('**/package.json'); + add('**/package-lock.json'); + add('**/yarn.lock'); + add('**/pnpm-lock.yaml'); + } else if (ll === 'python') { + add('**/requirements.txt'); + add('**/Pipfile.lock'); + add('**/poetry.lock'); + add('**/pyproject.toml'); + } else if (ll === 'go') { + add('**/go.mod'); + add('**/go.sum'); + } else if (ll === 'ruby') { + add('**/Gemfile.lock'); + add('**/gems.locked'); + } else if (ll === 'rust') { + add('**/Cargo.toml'); + add('**/Cargo.lock'); + } else if (ll === 'java') { + // Maven & Gradle + add('**/pom.xml'); + add('**/build.gradle'); + add('**/build.gradle.kts'); + add('**/settings.gradle'); + add('**/settings.gradle.kts'); + add('**/gradle.lockfile'); + } else if (ll === 'c#' || ll === 'csharp') { + add('**/packages.lock.json'); + add('**/*.csproj'); + add('**/*.sln'); + } + } + // Include root lockfiles only if 
JavaScript/TypeScript is among selected languages + if (langs.some(l => ['javascript', 'typescript', 'node', 'js', 'ts'].includes(l.toLowerCase()))) { + add('package.json'); add('package-lock.json'); add('yarn.lock'); add('pnpm-lock.yaml'); + } + return Array.from(set); +} + +async function execGit(args: string[], opts: { cwd: string, quiet?: boolean }): Promise<{ stdout: string; stderr: string }> { + return await new Promise<{ stdout: string; stderr: string }>((resolve, reject) => { + execFile('git', args, { cwd: opts.cwd, encoding: 'utf8', maxBuffer: 10 * 1024 * 1024 }, (error, stdout, stderr) => { + if (error) { + const msg = stderr?.trim() || error.message; + reject(new Error(`git ${args.join(' ')} failed: ${msg}`)); + } else { + resolve({ stdout, stderr: stderr ?? '' }); + } + }); + }); +} + +export async function runComponentDetectionAndSubmit(octokit: Octokit, tmpDir: string, owner: string, repo: string, sha: string, ref: string, componentDetectionBinPath?: string): Promise { + + const componentDetection = new ComponentDetection(octokit, '', componentDetectionBinPath); + + let manifests = await componentDetection.scanAndGetManifests(tmpDir); + + // Get detector configuration inputs + const detectorName = "Component Detection in GitHub SBOM Toolkit: advanced-security/github-sbom-toolkit"; + const detectorVersion = "0.0.1"; + const detectorUrl = "https://github.com/advanced-security/github-sbom-toolkit"; + + // Use provided detector config or defaults + const detector = { + name: detectorName, + version: detectorVersion, + url: detectorUrl, + }; + + const date = new Date().toISOString(); + + const job: Job = { + correlator: 'github-sbom-toolkit', + id: `${owner}-${repo}-${ref}-${date}-${Math.floor(Math.random() * Number.MAX_SAFE_INTEGER).toString()}` + }; + + let snapshot = new Snapshot(detector, undefined, job); + snapshot.ref = `refs/heads/${ref}`; + snapshot.sha = sha; + + console.debug(`Submitting snapshot for ${owner}/${repo} at ${snapshot.ref} 
(${snapshot.sha}) with ${manifests?.length || 0} manifests`); + + manifests?.forEach((manifest) => { + snapshot.addManifest(manifest); + }); + + return await submitSnapshot(octokit, snapshot, { owner, repo }); +} + +/** + * submitSnapshot submits a snapshot to the Dependency Submission API - vendored in and modified from @github/dependency-submission-toolkit, to make it work at the CLI, vs in Actions. + * + * @param {Octokit} octokit - The Octokit instance for GitHub API requests + * @param {Snapshot} snapshot - The dependency snapshot to submit + * @param {Repo} repo - The repository owner and name + * @returns {Promise} true if submission was successful, false otherwise + */ +export async function submitSnapshot( + octokit: Octokit, + snapshot: Snapshot, + repo: { owner: string; repo: string } +): Promise { + console.debug('Submitting snapshot...') + console.debug(snapshot.prettyJSON()) + + try { + const response = await octokit.request( + 'POST /repos/{owner}/{repo}/dependency-graph/snapshots', + { + headers: { + accept: 'application/vnd.github+json' + }, + owner: repo.owner, + repo: repo.repo, + ...snapshot + } + ) + const result = response.data.result + if (result === 'SUCCESS' || result === 'ACCEPTED') { + console.debug( + `Snapshot successfully created at ${response.data.created_at.toString()}` + + ` with id ${response.data.id}` + ) + return true + } else { + console.error( + `Snapshot creation failed with result: "${result}: ${response.data.message}"` + ) + return false + } + } catch (error) { + if (error instanceof RequestError) { + console.error( + `HTTP Status ${error.status} for request ${error.request.method} ${error.request.url}` + ) + if (error.response) { + console.error( + `Response body:\n${JSON.stringify(error.response.data, undefined, 2)}` + ) + } + } + if (error instanceof Error) { + console.error(error.message) + if (error.stack) console.error(error.stack) + } + return false + } +} \ No newline at end of file diff --git a/src/ignore.ts 
b/src/ignore.ts index e64e330..d56ffba 100644 --- a/src/ignore.ts +++ b/src/ignore.ts @@ -67,7 +67,7 @@ function parsePurlIgnore(raw: string): ParsedPurlIgnore | null { export class IgnoreMatcher { private globalAdvisories: Set = new Set(); private globalPurls: ParsedPurlIgnore[] = []; - private scoped: Array<{ scope: string; isRepo: boolean; advisories: Set; purls: ParsedPurlIgnore[] } > = []; + private scoped: Array<{ scope: string; isRepo: boolean; advisories: Set; purls: ParsedPurlIgnore[] }> = []; static load(filePath: string, opts?: IgnoreMatcherOptions): IgnoreMatcher | undefined { const abs = path.isAbsolute(filePath) ? filePath : path.join(opts?.cwd || process.cwd(), filePath); diff --git a/src/malwareAdvisories.ts b/src/malwareAdvisories.ts index 7e449e7..2f64679 100644 --- a/src/malwareAdvisories.ts +++ b/src/malwareAdvisories.ts @@ -32,6 +32,7 @@ export interface MalwareSyncOptions { since?: string; // ISO timestamp to fetch updates since (overrides cache timestamp) pageSize?: number; // default 100 (max allowed depends on API) caBundlePath?: string; // path to PEM bundle for self-signed/internal certs + quiet?: boolean; // suppress informational retry / rate limit logs } const CACHE_FILENAME = "malware-advisories.json"; @@ -180,13 +181,13 @@ export class MalwareAdvisorySync { request: agent ? { agent } : undefined }); - this.cachePath = path.join(this.opts.cacheDir, CACHE_FILENAME); + this.cachePath = this.opts.cacheDir ? 
path.join(this.opts.cacheDir, CACHE_FILENAME) : undefined; this.cache = this.loadCache(); } private loadCache(): MalwareAdvisoryCacheFile { if (!this.cachePath) { - console.warn("No cache path defined; cannot load malware advisory cache."); + console.debug("No cache path defined; cannot load malware advisory cache."); return { schemaVersion: 1, lastSync: new Date(0).toISOString(), advisories: [] }; } try { @@ -215,6 +216,44 @@ export class MalwareAdvisorySync { getAdvisories(): MalwareAdvisoryNode[] { return this.cache.advisories; } getLastSync(): string { return this.cache.lastSync; } + private async runQueryWithRetries(query: string, variables: Record, context: string, maxAttempts = 8): Promise { + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return await this.gql(query as any, variables); + } catch (e) { + // Attempt to classify error + const err = e as any; + const status: number | undefined = err.status || err.response?.status; + const message: string = (err.message || "") + " " + JSON.stringify(err.response?.data || {}); + const retryAfterHeader = err.response?.headers?.['retry-after']; + const isSecondary = status === 403 && /secondary rate limit/i.test(message); + const isRetryableStatus = isSecondary || status === 429 || (status && status >= 500 && status < 600); + const shouldRetry = isRetryableStatus && attempt < maxAttempts; + if (!shouldRetry) { + if (!this.opts.quiet) console.error(`GraphQL ${context} failed (status=${status ?? 
'n/a'}): ${message.trim()}`); + throw e; + } + let waitMs: number; + if (retryAfterHeader && /^\d+$/.test(retryAfterHeader)) { + waitMs = parseInt(retryAfterHeader, 10) * 1000; + } else { + // Exponential backoff starting ~2s, capped at 30s + waitMs = Math.min(30000, Math.round(2000 * Math.pow(1.6, attempt - 1))); + } + // Add small jitter (0-500ms) + waitMs += Math.floor(Math.random() * 500); + if (!this.opts.quiet) { + const kind = isSecondary ? "secondary-rate-limit" : `status-${status}`; + console.warn(`Retrying ${context} after ${kind} (${attempt}/${maxAttempts}) in ${waitMs}ms...`); + } + await new Promise(r => setTimeout(r, waitMs)); + } + } + // Should never reach here due to throw in catch when attempts exhausted + throw new Error(`Exhausted retries for ${context}`); + } + async sync(): Promise<{ added: number; updated: number; total: number }> { const since = this.opts.since || this.cache.lastSync; let after: string | null = null; @@ -229,82 +268,84 @@ export class MalwareAdvisorySync { const pendingVulnPages: Array<{ ghsaId: string; cursor: string | null; acc: { ecosystem: string | null; name: string | null; updatedAt: string | null; vulnerableVersionRange: string | null }[] }> = []; for (; ;) { + let result: { securityAdvisories: SecurityAdvisoryConnection } | undefined; try { - const result = await this.gql<{ securityAdvisories: SecurityAdvisoryConnection }>(MALWARE_ADVISORIES_QUERY, { - first: pageSize, - after, - updatedSince: since - }); - - const conn: SecurityAdvisoryConnection = result.securityAdvisories; - for (const edge of conn.edges) { - const node = edge.node; - const initialVulns = (node.vulnerabilities?.nodes || []).map(v => ({ - ecosystem: v.package ? v.package.ecosystem : null, - name: v.package ? 
v.package.name : null, - updatedAt: v.updatedAt || null, - vulnerableVersionRange: v.vulnerableVersionRange || null - })); - const advisory: MalwareAdvisoryNode = { - ghsaId: node.ghsaId, - permalink: node.permalink, - summary: node.summary, - description: node.description, - updatedAt: node.updatedAt, - publishedAt: node.publishedAt, - withdrawnAt: node.withdrawnAt, - references: node.references || [], - identifiers: node.identifiers || [], - severity: node.severity, - cvss: node.cvss, - vulnerabilities: initialVulns - }; - const existing = existingByGhsa.get(advisory.ghsaId); - if (!existing) { - existingByGhsa.set(advisory.ghsaId, advisory); - added++; - } else if (existing.updatedAt !== advisory.updatedAt || existing.description !== advisory.description) { - existingByGhsa.set(advisory.ghsaId, advisory); - updated++; - } - const vulnPageInfo = node.vulnerabilities?.pageInfo; - if (vulnPageInfo?.hasNextPage) { - pendingVulnPages.push({ ghsaId: advisory.ghsaId, cursor: vulnPageInfo.endCursor, acc: advisory.vulnerabilities }); - } - } - if (!conn.pageInfo.hasNextPage || !conn.pageInfo.endCursor) break; - after = conn.pageInfo.endCursor; + result = await this.runQueryWithRetries<{ securityAdvisories: SecurityAdvisoryConnection }>( + MALWARE_ADVISORIES_QUERY, + { first: pageSize, after, updatedSince: since }, + "malware-advisories-page" + ); } catch (e) { - console.error("Error fetching malware advisories:", e); + // Hard failure (non-retryable or exhausted retries) ends loop + if (!this.opts.quiet) console.error("Aborting advisory sync due to error."); break; } + const conn: SecurityAdvisoryConnection = result.securityAdvisories; + for (const edge of conn.edges) { + const node = edge.node; + const initialVulns = (node.vulnerabilities?.nodes || []).map(v => ({ + ecosystem: v.package ? v.package.ecosystem : null, + name: v.package ? 
v.package.name : null, + updatedAt: v.updatedAt || null, + vulnerableVersionRange: v.vulnerableVersionRange || null + })); + const advisory: MalwareAdvisoryNode = { + ghsaId: node.ghsaId, + permalink: node.permalink, + summary: node.summary, + description: node.description, + updatedAt: node.updatedAt, + publishedAt: node.publishedAt, + withdrawnAt: node.withdrawnAt, + references: node.references || [], + identifiers: node.identifiers || [], + severity: node.severity, + cvss: node.cvss, + vulnerabilities: initialVulns + }; + const existing = existingByGhsa.get(advisory.ghsaId); + if (!existing) { + existingByGhsa.set(advisory.ghsaId, advisory); + added++; + } else if (existing.updatedAt !== advisory.updatedAt || existing.description !== advisory.description) { + existingByGhsa.set(advisory.ghsaId, advisory); + updated++; + } + const vulnPageInfo = node.vulnerabilities?.pageInfo; + if (vulnPageInfo?.hasNextPage) { + pendingVulnPages.push({ ghsaId: advisory.ghsaId, cursor: vulnPageInfo.endCursor, acc: advisory.vulnerabilities }); + } + } + if (!conn.pageInfo.hasNextPage || !conn.pageInfo.endCursor) break; + after = conn.pageInfo.endCursor; } // Fetch remaining vulnerability pages per advisory sequentially for (const pending of pendingVulnPages) { let vAfter = pending.cursor; for (; ;) { + let res: { securityAdvisory: { vulnerabilities: { pageInfo: { hasNextPage: boolean; endCursor: string | null }; nodes: { package?: { ecosystem: string; name: string }; updatedAt?: string; vulnerableVersionRange?: string }[] } } } | undefined; try { - const res = await this.gql<{ securityAdvisory: { vulnerabilities: { pageInfo: { hasNextPage: boolean; endCursor: string | null }; nodes: { package?: { ecosystem: string; name: string }; updatedAt?: string; vulnerableVersionRange?: string }[] } } }>(SINGLE_ADVISORY_VULNS_QUERY, { - ghsaId: pending.ghsaId, - first: 100, - after: vAfter - }); - const vulnConn = res.securityAdvisory.vulnerabilities; - for (const n of vulnConn.nodes) { - 
pending.acc.push({ - ecosystem: n.package ? n.package.ecosystem : null, - name: n.package ? n.package.name : null, - updatedAt: n.updatedAt || null, - vulnerableVersionRange: n.vulnerableVersionRange || null - }); - } - if (!vulnConn.pageInfo.hasNextPage || !vulnConn.pageInfo.endCursor) break; - vAfter = vulnConn.pageInfo.endCursor; + res = await this.runQueryWithRetries<{ securityAdvisory: { vulnerabilities: { pageInfo: { hasNextPage: boolean; endCursor: string | null }; nodes: { package?: { ecosystem: string; name: string }; updatedAt?: string; vulnerableVersionRange?: string }[] } } }>( + SINGLE_ADVISORY_VULNS_QUERY, + { ghsaId: pending.ghsaId, first: 100, after: vAfter }, + `advisory-vulns:${pending.ghsaId}` + ); } catch (e) { - console.error(`Error paginating vulnerabilities for ${pending.ghsaId}:`, e); + if (!this.opts.quiet) console.error(`Aborting vuln pagination for ${pending.ghsaId}`); break; } + const vulnConn = res.securityAdvisory.vulnerabilities; + for (const n of vulnConn.nodes) { + pending.acc.push({ + ecosystem: n.package ? n.package.ecosystem : null, + name: n.package ? 
n.package.name : null, + updatedAt: n.updatedAt || null, + vulnerableVersionRange: n.vulnerableVersionRange || null + }); + } + if (!vulnConn.pageInfo.hasNextPage || !vulnConn.pageInfo.endCursor) break; + vAfter = vulnConn.pageInfo.endCursor; } } diff --git a/src/malwareMatcher.ts b/src/malwareMatcher.ts index cfae6ab..22e9753 100644 --- a/src/malwareMatcher.ts +++ b/src/malwareMatcher.ts @@ -24,6 +24,7 @@ export interface MalwareMatch { packageName: string; ecosystem: string; version: string | null; + branch?: string; // default branch or named head branch from diff advisoryGhsaId: string; advisoryPermalink: string; vulnerableVersionRange: string | null; @@ -105,6 +106,8 @@ export interface MatchMalwareOptions { advisoryDateCutoff?: string; } +const ecosystemsWithNamespace = new Set(['maven', 'nuget', 'composer', 'golang']); + export function matchMalware(advisories: MalwareAdvisoryNode[], sboms: RepositorySbom[], opts?: MatchMalwareOptions): MalwareMatch[] { const matches: MalwareMatch[] = []; @@ -145,23 +148,54 @@ export function matchMalware(advisories: MalwareAdvisoryNode[], sboms: Repositor } } - // Helper to enumerate packages with fallback to raw SPDX packages inside repoSbom.sbom if flattened list empty - const enumeratePackages = (repo: RepositorySbom): EnumeratedPackage[] => { + // Enumerate base packages, falling back to raw SPDX if flattened list empty + const enumerateBasePackages = (repo: RepositorySbom): Array => { const explicit: SbomPackage[] = Array.isArray(repo.packages) ? 
repo.packages : []; - if (explicit.length > 0) return explicit as EnumeratedPackage[]; - const rawMaybe: unknown = repo.sbom?.packages; - if (Array.isArray(rawMaybe)) { - return rawMaybe as EnumeratedPackage[]; + let list: Array = []; + if (explicit.length > 0) { + list = explicit as Array; + } else { + const rawMaybe: unknown = repo.sbom?.packages; + if (Array.isArray(rawMaybe)) { + list = rawMaybe as Array; + } + } + // Annotate with default branch for reporting (if known) + const branchName = repo.defaultBranch || undefined; + return list.map(p => ({ ...p, __branch: branchName })); + }; + + // Enumerate packages implied by branch diffs (added/updated head-side versions) + const enumerateDiffPackages = (repo: RepositorySbom): Array<{ purl: string; name?: string; ecosystem?: string; version?: string; __branch: string }> => { + const out: Array<{ purl: string; name?: string; ecosystem?: string; version?: string; __branch: string }> = []; + if (!(repo.branchDiffs instanceof Map)) return out; + for (const diff of repo.branchDiffs.values()) { + const branchName = diff.head; + for (const change of diff.changes) { + if (change.changeType !== 'added' && change.changeType !== 'updated') continue; + let p = change.packageURL; + if (!p && change.ecosystem && change.name && change.version) { + if (ecosystemsWithNamespace.has(change.ecosystem) && change.namespace) { + p = `pkg:${change.ecosystem}/${encodeURIComponent(change.namespace)}/${encodeURIComponent(change.name)}${change.version ? '@' + change.version : ''}`; + } else { + p = `pkg:${change.ecosystem}/${encodeURIComponent(change.name)}${change.version ? 
'@' + change.version : ''}`; + } + } + if (!p) continue; + out.push({ purl: p, name: change.name, ecosystem: change.ecosystem, version: change.version, __branch: branchName }); + } } - return []; + return out; }; for (const repoSbom of sboms) { - const pkgs = enumeratePackages(repoSbom); - if (!pkgs.length) continue; + const basePkgs = enumerateBasePackages(repoSbom); + const diffPkgs = enumerateDiffPackages(repoSbom); + const combined = [...basePkgs, ...diffPkgs]; + if (!combined.length) continue; - for (const pkg of pkgs) { - const pkgAny = pkg as unknown as PkgLike; + for (const pkg of combined) { + const pkgAny = pkg as unknown as { purl?: string; externalRefs?: Array<{ referenceType?: string; referenceLocator?: string }>; name?: string; version?: string; versionInfo?: string }; const candidatePurls = new Set(); if (pkgAny.purl) candidatePurls.add(pkgAny.purl); if (Array.isArray(pkgAny.externalRefs)) { @@ -169,6 +203,8 @@ export function matchMalware(advisories: MalwareAdvisoryNode[], sboms: Repositor if (ref?.referenceType === "purl" && ref.referenceLocator) candidatePurls.add(ref.referenceLocator); } } + // If this is a diff-derived synthetic package, candidate set may be empty except constructed purl + if (candidatePurls.size === 0 && (pkg as { purl?: string }).purl) candidatePurls.add((pkg as { purl?: string }).purl as string); // If no purls found, skip (can't map ecosystem reliably) if (candidatePurls.size === 0) continue; @@ -200,6 +236,7 @@ export function matchMalware(advisories: MalwareAdvisoryNode[], sboms: Repositor packageName: parsed.name, ecosystem, version, + branch: (pkg as any).__branch, advisoryGhsaId: adv.ghsaId, advisoryPermalink: adv.permalink, vulnerableVersionRange: vuln.vulnerableVersionRange, diff --git a/src/sbomCollector.ts b/src/sbomCollector.ts index 13020a8..0e1a684 100644 --- a/src/sbomCollector.ts +++ b/src/sbomCollector.ts @@ -1,7 +1,8 @@ import { createOctokit } from "./octokit.js"; -import type { RepositorySbom, 
CollectionSummary, SbomPackage, Sbom } from "./types.js"; +import type { RepositorySbom, CollectionSummary, SbomPackage, Sbom, BranchDependencyDiff, DependencyReviewPackageChange } from "./types.js"; import * as semver from "semver"; import { readAll, writeOne } from "./serialization.js"; +import { submitSnapshotIfPossible } from "./componentSubmission.js"; // p-limit lacks bundled types in some versions; declare minimal shape // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore @@ -12,7 +13,9 @@ export interface CollectorOptions { token: string | undefined; // GitHub token with repo + security_events scope enterprise?: string; // Enterprise slug to enumerate orgs org?: string; // Single org alternative - baseUrl?: string; // For GHES + repo?: string; // Single repo alternative + baseUrl?: string; // For GHES, EMU and Data Residency + ghes?: boolean; // Is this a GHES instance? concurrency?: number; // parallel repo SBOM fetches includePrivate?: boolean; delayMsBetweenRepos?: number; @@ -24,6 +27,25 @@ export interface CollectorOptions { suppressSecondaryRateLimitLogs?: boolean; // suppress secondary rate limit warning logs (so they don't break the progress bar) quiet?: boolean; // suppress non-error logging (does not affect progress bar) caBundlePath?: string; // path to PEM CA bundle for self-signed/internal certs + includeBranches?: boolean; // when true, fetch SBOM for non-default branches + branchLimit?: number; // limit number of branches per repo (excluding default) + branchDiffBase?: string; // override base branch for diffs (defaults to default branch) + submitOnMissingSnapshot?: boolean; // run component detection submission when diff 404 + forceSubmission?: boolean; // always submit snapshot for branches prior to diff + submitLanguages?: string[]; // limit submission to these languages + componentDetectionBinPath?: string; // optional path to component-detection executable + snapshotIngestionDelayMs?: number; // delay after 
snapshot submission to allow ingestion before dependency review (default: 1500ms) + retryIngestionDelayMs?: number; // delay after snapshot submission before retrying dependency review on 404 (default: 3000ms) +} + +interface ParsedQuery { + raw: string; + lower: string; + isPrefixWildcard: boolean; + exact?: string; + type?: string; + name?: string; + versionConstraint?: string; } export class SbomCollector { @@ -35,8 +57,18 @@ export class SbomCollector { private decisions: Record = {}; // repo -> reason constructor(options: CollectorOptions) { - if (!options.loadFromDir && !options.enterprise && !options.org) { - throw new Error("Either enterprise/org or loadFromDir must be specified"); + if (!options.loadFromDir && !options.enterprise && !options.org && !options.repo) { + throw new Error("One of enterprise/org/repo or loadFromDir must be specified"); + } + // Validate repo format if provided + if (options.repo) { + if (typeof options.repo !== "string" || !options.repo.includes("/")) { + throw new Error('If specifying "repo", it must be in the format "org/repo".'); + } + const [orgPart, repoPart] = options.repo.split("/"); + if (!orgPart || !repoPart) { + throw new Error('If specifying "repo", it must be in the format "org/repo" with both parts non-empty.'); + } } // Spread user options first then apply defaults via nullish coalescing so that // passing undefined does not erase defaults @@ -44,7 +76,9 @@ export class SbomCollector { this.opts = { token: o.token, enterprise: o.enterprise, + ghes: o.ghes ?? false, org: o.org, + repo: o.repo, baseUrl: o.baseUrl, concurrency: o.concurrency ?? 5, includePrivate: o.includePrivate ?? true, @@ -56,7 +90,16 @@ export class SbomCollector { showProgressBar: o.showProgressBar ?? false, suppressSecondaryRateLimitLogs: o.suppressSecondaryRateLimitLogs ?? false, quiet: o.quiet ?? false, - caBundlePath: o.caBundlePath + caBundlePath: o.caBundlePath, + includeBranches: o.includeBranches ?? 
false, + branchLimit: o.branchLimit, + branchDiffBase: o.branchDiffBase, + submitOnMissingSnapshot: o.submitOnMissingSnapshot ?? false, + forceSubmission: o.forceSubmission ?? false, + submitLanguages: o.submitLanguages ?? undefined, + componentDetectionBinPath: o.componentDetectionBinPath, + snapshotIngestionDelayMs: o.snapshotIngestionDelayMs ?? 1500, + retryIngestionDelayMs: o.retryIngestionDelayMs ?? 3000 } as Required; if (this.opts.token) { @@ -95,8 +138,8 @@ export class SbomCollector { async collect(): Promise { // Offline mode: load from directory if provided if (this.opts.loadFromDir) { - // find just the path for a single org, if given - const loadPath = this.opts.org ? `${this.opts.loadFromDir}/${this.opts.org}` : this.opts.loadFromDir; + // find just the path for a single org or repo, if given + const loadPath = this.opts.org ? `${this.opts.loadFromDir}/${this.opts.org}` : this.opts.repo ? `${this.opts.loadFromDir}/${this.opts.repo}` : this.opts.loadFromDir; if (!this.opts.quiet) process.stderr.write(chalk.blue(`Loading SBOMs from cache at ${loadPath}`) + "\n"); @@ -132,19 +175,27 @@ export class SbomCollector { process.stderr.write(chalk.blue(`Getting list of organizations for enterprise ${this.opts.enterprise}`) + "\n"); } - const orgs = this.opts.org ? [this.opts.org] : await this.listEnterpriseOrgs(this.opts.enterprise!); + const orgs = this.opts.org ? [this.opts.org] : this.opts.enterprise ? 
await this.listEnterpriseOrgs(this.opts.enterprise, this.opts.ghes) : [this.opts.repo.split("/")[0]]; this.summary.orgs = orgs; // Pre-list all repos if showing progress bar so we know the total upfront const orgRepoMap: Record = {}; let totalRepos = 0; - for (const org of orgs) { - if (!this.opts.quiet) process.stderr.write(chalk.blue(`Listing repositories for org ${org}`) + "\n"); - if (this.opts.lightDelayMs) await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); - const repos = await this.listOrgRepos(org); - orgRepoMap[org] = repos; - totalRepos += repos.length; + + if (!this.opts.repo) { + for (const org of orgs) { + if (!this.opts.quiet) process.stderr.write(chalk.blue(`Listing repositories for org ${org}`) + "\n"); + if (this.opts.lightDelayMs) await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + const repos = await this.listOrgRepos(org); + orgRepoMap[org] = repos; + totalRepos += repos.length; + } + } else { + totalRepos = 1; + const [org, repoName] = this.opts.repo.split("/"); + orgRepoMap[org] = [await this.getRepo(org, repoName)]; } + this.summary.repositoryCount = totalRepos; let processed = 0; @@ -222,8 +273,11 @@ export class SbomCollector { this.decisions[fullName] = `Fetching because error comparing pushed_at (${baseline.repoPushedAt} / ${repo.pushed_at})`; } } else { - this.decisions[fullName] = baseline ? `Fetching because missing pushed_at (${baseline.repoPushedAt} / ${repo.pushed_at})` : "Fetching because no baseline"; + this.decisions[fullName] = baseline ? 
`Fetching because of missing pushed_at (${baseline.repoPushedAt} / ${repo.pushed_at})` : "Fetching because no baseline"; } + + let sbom: RepositorySbom | undefined = undefined; + if (!skipped) { const res = await this.fetchSbom(org, repo.name, repo); if (this.opts.delayMsBetweenRepos) { @@ -233,18 +287,95 @@ export class SbomCollector { res.defaultBranchCommitSha = pendingCommitMeta.sha; res.defaultBranchCommitDate = pendingCommitMeta.date; } - newSboms.push(res); - if (res.error) this.summary.failedCount++; else this.summary.successCount++; - // Write freshly fetched SBOM immediately if a cache directory is configured - if (this.opts.loadFromDir && this.opts.syncSboms && this.opts.loadFromDir.length) { - try { writeOne(res, { outDir: this.opts.loadFromDir }); } catch { /* ignore write errors */ } + + sbom = res; + } else { + sbom = baseline; + } + + // Branch scanning (optional) + if (this.opts.includeBranches && sbom && sbom.sbom) { + + console.debug(chalk.blue(`Scanning branches for ${fullName}...`)); + + try { + const branches = await this.listBranches(org, repo.name); + + if (this.opts.lightDelayMs) await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + const nonDefault = branches.filter(b => b.name !== sbom.defaultBranch); + const limited = this.opts.branchLimit && this.opts.branchLimit > 0 ? nonDefault.slice(0, this.opts.branchLimit) : nonDefault; + const branchDiffs: Map = new Map(); + for (const b of limited) { + + // get the commits, compare to the stored diff info. If the latest commit is newer, then fetch diff, otherwise skip + const latestCommit = await this.getLatestCommit(org, repo.name, b.name); + + if (this.opts.lightDelayMs) await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + if (!latestCommit) { + console.error(chalk.red(`Failed to get latest commit for ${fullName} branch ${b.name}.`)); + continue; + } + + const existing = sbom.branchDiffs instanceof Map ? 
sbom.branchDiffs.get(b.name) : undefined; + if (await this.isCommitNewer(latestCommit, existing) || this.opts.forceSubmission) { + console.debug(chalk.green(`Fetching branch diff for ${fullName} branch ${b.name}...`)); + } else { + console.debug(chalk.yellow(`Skipping branch diff for ${fullName} branch ${b.name} (no new commits).`)); + // keep existing diff + if (existing) { + branchDiffs.set(b.name, existing); + } + continue; + } + + const base = this.opts.branchDiffBase || sbom?.defaultBranch; + if (!base) { console.error(chalk.red(`Cannot compute branch diff for ${fullName} branch ${b.name} because base branch is undefined.`)); continue; } + + // Optionally perform dependency submission up front for the branch + if (this.opts.forceSubmission) { + try { + console.debug(chalk.blue(`Force-submission enabled: submitting component snapshot for ${fullName} branch ${b.name}...`)); + if (await submitSnapshotIfPossible({ octokit: this.octokit, owner: org, repo: repo.name, branch: b.name, languages: this.opts.submitLanguages, quiet: this.opts.quiet, componentDetectionBinPath: this.opts.componentDetectionBinPath })) { + // Brief delay to allow GitHub to ingest the submitted snapshot before attempting dependency review. + // This prevents race conditions where the review diff is requested before the snapshot is available. 
+ await new Promise(r => setTimeout(r, this.opts.snapshotIngestionDelayMs)); + } + } catch (subErr) { + console.error(chalk.red(`Force submission failed for ${fullName} branch ${b.name}: ${(subErr as Error).message}`)); + } + } + const diff = await this.fetchDependencyReviewDiff(org, repo.name, base, b.name, 1, latestCommit); + branchDiffs.set(b.name, diff); + } + if (branchDiffs.size) sbom.branchDiffs = branchDiffs; + } catch (e) { + // Non-fatal; annotate decision + this.decisions[fullName] = (this.decisions[fullName] || "") + ` (branch scan error: ${(e as Error).message})`; + console.debug((e as Error).message); } } + + if (!sbom || sbom.error) this.summary.failedCount++; else this.summary.successCount++; + + // Write freshly fetched SBOM immediately if a cache directory is configured + if (sbom && !sbom.error && this.opts.loadFromDir && this.opts.syncSboms && this.opts.loadFromDir.length) { + try { writeOne(sbom, { outDir: this.opts.loadFromDir }); } catch { /* ignore write errors */ } + } + + if (sbom) { + newSboms.push(sbom); + } processed++; renderBar(); })); await Promise.all(tasks); - newSboms = newSboms.filter(s => repoNames.has(s.repo)); + + newSboms = newSboms.filter(s => { + const repoToCheck = s.repo.includes("/") ? 
s.repo.split("/")[1] : s.repo; + return repoNames.has(repoToCheck); + }); this.sboms.push(...newSboms); } if (this.opts.showProgressBar) process.stdout.write("\n"); @@ -253,11 +384,35 @@ export class SbomCollector { return this.sboms; } - private async listEnterpriseOrgs(enterprise: string): Promise { - // GitHub API: GET /enterprises/{enterprise}/orgs (preview might require accept header) - + private async getLatestCommit(org: string, repo: string, branch: string): Promise<{ sha?: string; commitDate?: string } | null> { if (!this.octokit) throw new Error("No Octokit instance"); + try { + const resp = await this.octokit.request("GET /repos/{owner}/{repo}/commits", { owner: org, repo, sha: branch }); + await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + const commitSha = resp.data?.[0]?.sha; + const commitDate = resp.data?.[0]?.commit?.author?.date; + return { sha: commitSha, commitDate }; + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + console.error(`Failed to get latest commit for ${org}/${repo} branch ${branch}: ${msg}`); + return null; + } + } + + private async isCommitNewer(latestCommit: { sha?: string; commitDate?: string }, existingDiff?: BranchDependencyDiff): Promise { + if (!existingDiff || !existingDiff.latestCommitDate) { + return true; + } + if (latestCommit.commitDate && existingDiff.latestCommitDate) { + return new Date(latestCommit.commitDate) > new Date(existingDiff.latestCommitDate); + } + return false; + } + + private async listEnterpriseOrgs(enterprise: string, ghes: boolean): Promise { + if (!this.octokit) throw new Error("No Octokit instance"); interface Org { login: string } try { const orgs: string[] = []; @@ -265,7 +420,7 @@ export class SbomCollector { let page = 1; let done = false; while (!done) { - const resp = await this.octokit.request("GET /enterprises/{enterprise}/orgs", { enterprise, per_page, page }); + const resp = await this.octokit.request(ghes ? 
"GET /orgs" : "GET /enterprises/{enterprise}/orgs", { enterprise, per_page, page }); const items = resp.data as unknown as Org[]; for (const o of items) orgs.push(o.login); if (items.length < per_page) done = true; else page++; @@ -280,7 +435,6 @@ export class SbomCollector { private async listOrgRepos(org: string): Promise<{ name: string; pushed_at?: string; updated_at?: string; default_branch?: string }[]> { if (!this.octokit) throw new Error("No Octokit instance"); - // GET /orgs/{org}/repos interface RepoMeta { name: string; pushed_at?: string; updated_at?: string; default_branch?: string } const repos: RepoMeta[] = []; const per_page = 100; @@ -289,6 +443,9 @@ export class SbomCollector { while (!done) { try { const resp = await this.octokit.request("GET /orgs/{org}/repos", { org, per_page, page, type: this.opts.includePrivate ? "all" : "public" }); + + await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + const items = resp.data as Array<{ name: string; pushed_at?: string; updated_at?: string; default_branch?: string }>; for (const r of items) { repos.push({ name: r.name, pushed_at: r.pushed_at, updated_at: r.updated_at, default_branch: r.default_branch }); @@ -303,12 +460,27 @@ export class SbomCollector { return repos; } + private async getRepo(org: string, repo: string): Promise<{ name: string; pushed_at?: string; updated_at?: string; default_branch?: string }> { + if (!this.octokit) throw new Error("No Octokit instance"); + + try { + const resp = await this.octokit.request("GET /repos/{owner}/{repo}", { owner: org, repo }); + + await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + const data = resp.data as { name: string; pushed_at?: string; updated_at?: string; default_branch?: string }; + return data; + } catch (e) { + const msg = e instanceof Error ? 
e.message : String(e); + throw new Error(`Failed to get repo metadata for ${org}/${repo}: ${msg}`); + } + } + private async fetchSbom(org: string, repo: string, repoMeta?: { pushed_at?: string; updated_at?: string; default_branch?: string }): Promise { if (!this.octokit) throw new Error("No Octokit instance"); const fullName = `${org}/${repo}`; try { - // TODO: Ensure dependency graph is enabled before requesting SBOM const resp = await this.octokit.request("GET /repos/{owner}/{repo}/dependency-graph/sbom", { owner: org, repo, headers: { Accept: "application/vnd.github+json" } }); const sbomWrapper = resp.data as { sbom?: Sbom }; const packages: SbomPackage[] = sbomWrapper?.sbom?.packages ?? []; @@ -340,18 +512,85 @@ export class SbomCollector { } } + private async listBranches(org: string, repo: string): Promise<{ name: string; protected?: boolean; commit?: { sha?: string } }[]> { + if (!this.octokit) throw new Error("No Octokit instance"); + const branches: { name: string; protected?: boolean; commit?: { sha?: string } }[] = []; + const per_page = 100; let page = 1; let done = false; + while (!done) { + try { + const resp = await this.octokit.request("GET /repos/{owner}/{repo}/branches", { owner: org, repo, per_page, page }); + await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + const data = resp.data as Array<{ name: string; protected?: boolean; commit?: { sha?: string } }>; + branches.push(...data); + if (data.length < per_page) done = true; else page++; + } catch (e) { + throw new Error(`Failed listing branches for ${org}/${repo}: ${(e as Error).message}`); + } + } + return branches; + } + + private async fetchDependencyReviewDiff(org: string, repo: string, base: string, head: string, retries: number, latestCommit?: { sha?: string; commitDate?: string }): Promise { + if (!this.octokit) throw new Error("No Octokit instance"); + if (retries < 0) { + return { latestCommitDate: undefined, base, head, retrievedAt: new Date().toISOString(), changes: 
[], error: "Exceeded maximum retries for fetching dependency review diff" }; + } + try { + const basehead = `${base}...${head}`; + const resp = await this.octokit.request("GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}", { owner: org, repo, basehead, headers: { Accept: "application/vnd.github+json" } }); + await new Promise(r => setTimeout(r, this.opts.lightDelayMs)); + + // Response shape includes change_set array (per docs). We normalize to DependencyReviewPackageChange[] + const raw = resp.data; + + const changes: DependencyReviewPackageChange[] = []; + for (const c of raw) { + const obj = c as Record; + const change: DependencyReviewPackageChange = { + changeType: String(obj.change_type || "unknown"), + name: obj.name as string | undefined, + ecosystem: obj.ecosystem as string | undefined, + packageURL: obj.package_url as string | undefined, + license: obj.license as string | undefined, + manifest: obj.manifest as string | undefined, + scope: obj.scope as string | undefined, + version: obj.version as string | undefined + }; + changes.push(change); + } + return { latestCommitDate: latestCommit?.commitDate || new Date().toISOString(), base, head, retrievedAt: new Date().toISOString(), changes }; + } catch (e) { + const status = (e as { status?: number })?.status; + let reason = e instanceof Error ? 
e.message : String(e); + if (status === 404) { + reason = "Dependency review unavailable (missing snapshot, feature disabled, or repo not found)"; + // Optional retry path: submit snapshot then retry once + if (this.opts.submitOnMissingSnapshot) { + console.log(chalk.blue(`Attempting to submit component snapshot for ${org}/${repo} branch ${head} before retrying dependency review diff...`)); + try { + const ok = await submitSnapshotIfPossible({ octokit: this.octokit, owner: org, repo: repo, branch: head, languages: this.opts.submitLanguages, quiet: this.opts.quiet, componentDetectionBinPath: this.opts.componentDetectionBinPath }); + if (ok) { + // Delay after snapshot submission to allow GitHub to ingest and process the snapshot + // before retrying the dependency review API. This helps avoid 404 errors on retry. + console.debug(chalk.blue(`Snapshot submission attempted; waiting ${this.opts.retryIngestionDelayMs / 1000} seconds before retrying dependency review diff for ${org}/${repo} ${base}...${head}...`)); + await new Promise(r => setTimeout(r, this.opts.retryIngestionDelayMs)); + return await this.fetchDependencyReviewDiff(org, repo, base, head, retries - 1, latestCommit); + } + } catch (subErr) { + console.error(chalk.red(`Snapshot submission failed for ${org}/${repo} branch ${head}: ${(subErr as Error).message}`)); + reason += ` (submission attempt failed: ${(subErr as Error).message})`; + } + } + } + return { latestCommitDate: undefined, base, head, retrievedAt: new Date().toISOString(), changes: [], error: reason }; + } + } + // New method including the query that produced each match searchByPurlsWithReasons(purls: string[]): Map { purls = purls.map(q => q.startsWith("pkg:") ? 
q : `pkg:${q}`); - interface ParsedQuery { - raw: string; - lower: string; - isPrefixWildcard: boolean; - exact?: string; - type?: string; - name?: string; - versionConstraint?: string; - } + const looksLikeSemverRange = (v: string) => /[\^~><=]|\|\|/.test(v.trim()); const parseQuery = (raw: string): ParsedQuery | null => { const trimmed = raw.trim(); @@ -379,6 +618,7 @@ export class SbomCollector { const queries: ParsedQuery[] = purls.map(parseQuery).filter((q): q is ParsedQuery => !!q); const results = new Map(); if (!queries.length) return results; + for (const repoSbom of this.sboms) { if (repoSbom.error) continue; interface ExtRef { referenceType: string; referenceLocator: string } @@ -388,36 +628,18 @@ export class SbomCollector { const candidatePurls: string[] = []; if (refs) for (const r of refs) if (r.referenceType === "purl" && r.referenceLocator) candidatePurls.push(r.referenceLocator); if ((pkg as { purl?: string }).purl) candidatePurls.push((pkg as { purl?: string }).purl as string); - const unique = Array.from(new Set(candidatePurls)); - for (const p of unique) { - const pLower = p.toLowerCase(); - for (const q of queries) { - if (q.isPrefixWildcard) { - const prefix = q.lower.slice(0, -1); - if (pLower.startsWith(prefix)) { if (!found.has(p)) found.set(p, q.raw); } - continue; - } - if (q.versionConstraint && q.type && q.name) { - if (!pLower.startsWith("pkg:")) continue; - const body = p.slice(4); - const atIdx = body.indexOf("@"); - const main = atIdx >= 0 ? body.slice(0, atIdx) : body; - const ver = atIdx >= 0 ? 
body.slice(atIdx + 1) : (pkg.version as string | undefined) || undefined; - const slashIdx = main.indexOf("/"); - if (slashIdx < 0) continue; - const pType = main.slice(0, slashIdx).toLowerCase(); - const pName = main.slice(slashIdx + 1); - if (pType === q.type && pName.toLowerCase() === q.name.toLowerCase() && ver) { - try { - const coerced = semver.coerce(ver)?.version || ver; - if (semver.valid(coerced) && semver.satisfies(coerced, q.versionConstraint, { includePrerelease: true })) { - if (!found.has(p)) found.set(p, q.raw); - } - } catch { /* ignore */ } - } - } else if (q.exact) { - if (pLower === q.exact) { if (!found.has(p)) found.set(p, q.raw); } - } + applyQueries(candidatePurls, queries, found, undefined, (pkg.version as string | undefined) || undefined); + } + // Include dependency review diff additions/updates (head packages only) + if (repoSbom.branchDiffs) { + const diffs = repoSbom.branchDiffs.values(); + for (const diff of diffs) { + for (const change of diff.changes) { + if (change.changeType !== "added" && change.changeType !== "updated") continue; + const candidatePurls: string[] = []; + if ((change as { purl?: string }).purl) candidatePurls.push((change as { purl?: string }).purl as string); + if (change.packageURL) candidatePurls.push(change.packageURL); + applyQueries(candidatePurls, queries, found, diff.head, change.version); } } } @@ -426,3 +648,45 @@ export class SbomCollector { return results; } } + +function applyQueries( + candidatePurls: string[], + queries: ParsedQuery[], + found: Map, + branchTag?: string, + fallbackVersion?: string +) { + const unique = Array.from(new Set(candidatePurls)); + for (const p of unique) { + const pLower = p.toLowerCase(); + const outKey = branchTag ? 
`${p}@${branchTag}` : p; + for (const q of queries) { + if (q.isPrefixWildcard) { + const prefix = q.lower.slice(0, -1); + if (pLower.startsWith(prefix)) { if (!found.has(outKey)) found.set(outKey, q.raw); } + continue; + } + if (q.versionConstraint && q.type && q.name) { + if (!pLower.startsWith("pkg:")) continue; + const body = p.slice(4); + const atIdx = body.indexOf("@"); + const main = atIdx >= 0 ? body.slice(0, atIdx) : body; + const ver = atIdx >= 0 ? body.slice(atIdx + 1) : fallbackVersion; + const slashIdx = main.indexOf("/"); + if (slashIdx < 0) continue; + const pType = main.slice(0, slashIdx).toLowerCase(); + const pName = main.slice(slashIdx + 1); + if (pType === q.type && pName.toLowerCase() === q.name.toLowerCase() && ver) { + try { + const coerced = semver.coerce(ver)?.version || ver; + if (semver.valid(coerced) && semver.satisfies(coerced, q.versionConstraint, { includePrerelease: true })) { + if (!found.has(outKey)) found.set(outKey, q.raw); + } + } catch { /* ignore */ } + } + } else if (q.exact) { + if (pLower === q.exact) { if (!found.has(outKey)) found.set(outKey, q.raw); } + } + } + } +} diff --git a/src/serialization.ts b/src/serialization.ts index 385f076..c68b4cf 100644 --- a/src/serialization.ts +++ b/src/serialization.ts @@ -13,7 +13,7 @@ export function writeAll(sboms: RepositorySbom[], { outDir, flatten = false }: S const fileDir = path.join(outDir, repoPath); const filePath = flatten ? path.join(outDir, `${repoPath}.json`) : path.join(fileDir, "sbom.json"); fs.mkdirSync(flatten ? path.dirname(filePath) : fileDir, { recursive: true }); - fs.writeFileSync(filePath, JSON.stringify(s, null, 2), "utf8"); + fs.writeFileSync(filePath, JSON.stringify(prepareForWrite(s), null, 2), "utf8"); } } @@ -22,7 +22,7 @@ export function writeOne(sbom: RepositorySbom, { outDir, flatten = false }: Seri const fileDir = path.join(outDir, repoPath); const filePath = flatten ? 
path.join(outDir, `${repoPath}.json`) : path.join(fileDir, "sbom.json"); fs.mkdirSync(flatten ? path.dirname(filePath) : fileDir, { recursive: true }); - fs.writeFileSync(filePath, JSON.stringify(sbom, null, 2), "utf8"); + fs.writeFileSync(filePath, JSON.stringify(prepareForWrite(sbom), null, 2), "utf8"); } export interface ReadOptions { @@ -48,7 +48,7 @@ export function readAll(dir: string, opts: ReadOptions = {}): RepositorySbom[] { const pushIfValid = (filePath: string) => { try { const raw = fs.readFileSync(filePath, "utf8"); - const obj = JSON.parse(raw); + const obj = reviveAfterRead(JSON.parse(raw)); if (obj && obj.repo && Array.isArray(obj.packages)) { results.push(obj as RepositorySbom); } @@ -91,3 +91,27 @@ export function readAll(dir: string, opts: ReadOptions = {}): RepositorySbom[] { } return results; } + +// Convert Maps to plain serializable structures before JSON.stringify +function prepareForWrite(sbom: RepositorySbom): unknown { + const clone: any = { ...sbom }; + if (clone.branchDiffs instanceof Map) { + // store as array of diff objects for backward compatibility + clone.branchDiffs = Array.from(clone.branchDiffs.values()); + } + return clone; +} + +// Convert array representations back into Maps after JSON.parse +function reviveAfterRead(obj: any): any { + if (obj && obj.branchDiffs && Array.isArray(obj.branchDiffs)) { + const map = new Map(); + for (const diff of obj.branchDiffs) { + if (diff && typeof diff.head === 'string') { + map.set(diff.head, diff); + } + } + obj.branchDiffs = map; + } + return obj; +} diff --git a/src/test-branch-search.ts b/src/test-branch-search.ts new file mode 100644 index 0000000..208e797 --- /dev/null +++ b/src/test-branch-search.ts @@ -0,0 +1,83 @@ +import fs from 'fs'; +import path from 'path'; +import { SbomCollector } from './sbomCollector.js'; +import type { RepositorySbom } from './types.js'; + +// This test harness validates that branch SBOMs and dependency review diffs +// participate in search results. 
It constructs a synthetic repo SBOM object, +// writes it to a temp cache directory, then performs searches. + +async function main() { + const tempRoot = path.join(process.cwd(), 'tmp-branch-search-cache'); + const org = 'example-org'; + const repo = 'demo-repo'; + const repoDir = path.join(tempRoot, org, repo); + fs.rmSync(tempRoot, { recursive: true, force: true }); + fs.mkdirSync(repoDir, { recursive: true }); + + const basePackages = [ + { name: 'chalk', version: '5.6.1', purl: 'pkg:npm/chalk@5.6.1' }, + { name: 'react', version: '18.2.0', purl: 'pkg:npm/react@18.2.0' } + ]; + const diffChanges = [ + { changeType: 'added', name: 'lodash', ecosystem: 'npm', purl: 'pkg:npm/lodash@4.17.21', version: '4.17.21' }, + { changeType: 'updated', name: 'react', ecosystem: 'npm', purl: 'pkg:npm/react@18.3.0', version: '18.3.0' }, + { changeType: 'removed', name: 'chalk', ecosystem: 'npm', purl: 'pkg:npm/chalk@5.6.1', version: '5.6.1' }, + { changeType: 'removed', name: 'react', ecosystem: 'npm', purl: 'pkg:npm/react@18.2.0', version: '18.2.0' } + ]; + + const synthetic = { + repo: `${org}/${repo}`, + org: org, + retrievedAt: new Date().toISOString(), + packages: basePackages, + branchDiffs: [ + { + latestCommitDate: new Date().toISOString(), + base: 'main', + head: 'feature-x', + retrievedAt: new Date().toISOString(), + changes: diffChanges + } + ] + }; + + fs.writeFileSync(path.join(repoDir, 'sbom.json'), JSON.stringify(synthetic, null, 2), 'utf8'); + + const collector = new SbomCollector({ + token: undefined, + org, + loadFromDir: tempRoot, + syncSboms: false, + quiet: true + }); + await collector.collect(); + + const queries = [ + 'pkg:npm/react@>=18.2.0 <19.0.0', // should match base & branch updated version + 'pkg:npm/lodash@4.17.21', // should match added in branch diff + 'pkg:npm/chalk@5.6.1' // base only + ]; + + console.debug(JSON.stringify(collector.getAllSboms())); + + const results = collector.searchByPurlsWithReasons(queries); + + if (!results.size) { + 
console.error('No search results found; expected matches from branch data'); + process.exit(1); + } + const entries = results.get(`${org}/${repo}`); + if (!entries || entries.length < 4) { + console.error(`Unexpected number of matches: ${(entries || []).length}`); + console.error(JSON.stringify(entries, null, 2)); + process.exit(1); + } + + process.stdout.write('Branch search test passed. Matches:\n'); + for (const e of entries) { + process.stdout.write(` ${e.purl} {query: ${e.reason}}\n`); + } +} + +main().catch(e => { console.error(e); process.exit(1); }); diff --git a/src/test-fixture-match.ts b/src/test-fixture-match.ts index d949e9b..f43b58d 100644 --- a/src/test-fixture-match.ts +++ b/src/test-fixture-match.ts @@ -6,17 +6,21 @@ import path from "path"; // Load SBOM fixture const sboms = readAll(path.join(process.cwd(), "fixtures/sboms")); + // Load malware advisory fixture const cachePath = path.join(process.cwd(), "fixtures/malware-cache/malware-advisories.json"); const cache = JSON.parse(fs.readFileSync(cachePath, "utf8")); const advisories: MalwareAdvisoryNode[] = cache.advisories; const matches = matchMalware(advisories, sboms); + process.stdout.write("Matches:\n"); + for (const m of matches) { - process.stdout.write(`${m.repo} => ${m.purl} matched advisory ${m.advisoryGhsaId} range ${m.vulnerableVersionRange}\n`); + process.stdout.write(`${m.repo} => ${m.purl} matched advisory ${m.advisoryGhsaId} range ${m.vulnerableVersionRange}${m.branch ? 
` on branch ${m.branch}` : ""}\n`); } -if (!matches.length) { - console.error("No matches found - expected chalk 5.6.1"); + +if (matches.length !== 2) { + console.error("Did not find 2 matches - expected chalk 5.6.1 on default and test branches"); process.exit(1); } diff --git a/src/types.ts b/src/types.ts index 3d1a62b..8af5472 100644 --- a/src/types.ts +++ b/src/types.ts @@ -77,6 +77,8 @@ export interface RepositorySbom { etag?: string; // ETag from SBOM response (future: conditional requests) defaultBranchCommitSha?: string; // commit SHA of default branch at time of retrieval defaultBranchCommitDate?: string; // ISO date of that commit + // Branch-level diffs (optional when branch scanning enabled) + branchDiffs?: Map; } export interface CollectionSummary { @@ -94,3 +96,36 @@ export interface SearchResultEntry { repository: string; matches: SbomPackage[]; } + +// Branch-specific SBOM capture +export interface BranchSbom { + branch: string; + commitSha?: string; + retrievedAt: string; + sbom?: Sbom; + packages: SbomPackage[]; + error?: string; +} + +// Dependency Review change format (subset; future-proof with index signature) +export interface DependencyReviewPackageChange { + changeType: string; // added | removed | updated + name?: string; // package name + ecosystem?: string; // e.g. npm, maven, pip + namespace?: string; // e.g. groupId for maven + packageURL?: string; // raw package URL + license?: string; + manifest?: string; // manifest path + scope?: string; // e.g. runtime, development + version?: string; // for added/removed + [k: string]: unknown; +} + +export interface BranchDependencyDiff { + latestCommitDate?: string; + base: string; // base branch + head: string; // head branch + retrievedAt: string; + changes: DependencyReviewPackageChange[]; + error?: string; +}