From ea5044dda26f3ef18d46f139c4d9eb97baec5ba8 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 17 Jul 2023 13:28:38 -0700 Subject: [PATCH 01/80] Create script to update repo skeleton #80 Signed-off-by: Jono Yang --- etc/scripts/update_skeleton.py | 104 +++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 etc/scripts/update_skeleton.py diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py new file mode 100644 index 00000000..635898ba --- /dev/null +++ b/etc/scripts/update_skeleton.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/skeleton for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +from pathlib import Path +import os +import subprocess + +import click + + +NEXB_PUBLIC_REPO_NAMES=[ + "aboutcode-toolkit", + "ahocode", + "bitcode", + "clearcode-toolkit", + "commoncode", + "container-inspector", + "debian-inspector", + "deltacode", + "elf-inspector", + "extractcode", + "fetchcode", + "gemfileparser2", + "gh-issue-sandbox", + "go-inspector", + "heritedcode", + "license-expression", + "license_copyright_pipeline", + "nuget-inspector", + "pip-requirements-parser", + "plugincode", + "purldb", + "pygmars", + "python-inspector", + "sanexml", + "saneyaml", + "scancode-analyzer", + "scancode-toolkit-contrib", + "scancode-toolkit-reference-scans", + "thirdparty-toolkit", + "tracecode-toolkit", + "tracecode-toolkit-strace", + "turbo-spdx", + "typecode", + "univers", +] + + +@click.command() +@click.help_option("-h", "--help") +def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): + """ + Update project files of nexB projects that use the skeleton + + This script will: + - Clone the repo + - Add the skeleton repo as a new origin + - Create a new branch named "update-skeleton-files" + - Merge in the new skeleton files into the "update-skeleton-files" branch + + The user will need to save merge commit messages that pop up when running + this script in addition to resolving the merge conflicts on repos that have + them. 
+ """ + + # Create working directory + work_dir_path = Path("/tmp/update_skeleton/") + if not os.path.exists(work_dir_path): + os.makedirs(work_dir_path, exist_ok=True) + + for repo_name in repo_names: + # Move to work directory + os.chdir(work_dir_path) + + # Clone repo + repo_git = f"git@github.com:nexB/{repo_name}.git" + subprocess.run(["git", "clone", repo_git]) + + # Go into cloned repo + os.chdir(work_dir_path / repo_name) + + # Add skeleton as an origin + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + + # Fetch skeleton files + subprocess.run(["git", "fetch", "skeleton"]) + + # Create and checkout new branch + subprocess.run(["git", "checkout", "-b", "update-skeleton-files"]) + + # Merge skeleton files into the repo + subprocess.run(["git", "merge", "skeleton/main", "--allow-unrelated-histories"]) + + +if __name__ == "__main__": + update_skeleton_files() From 5ab9b3a15e0095a8186ca3870cb9f9eade83833d Mon Sep 17 00:00:00 2001 From: Omkar Phansopkar Date: Wed, 18 Oct 2023 15:42:56 +0530 Subject: [PATCH 02/80] Added docs server script, dark mode & copybutton for docs Signed-off-by: Omkar Phansopkar --- .github/workflows/docs-ci.yml | 3 --- docs/Makefile | 8 ++++++++ docs/make.bat | 12 ++++++++++++ docs/scripts/doc8_style_check.sh | 0 docs/source/conf.py | 4 ++++ setup.cfg | 3 +++ 6 files changed, 27 insertions(+), 3 deletions(-) mode change 100644 => 100755 docs/scripts/doc8_style_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 511b7c28..ada779bf 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -20,9 +20,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Give permission to run scripts - run: chmod +x ./docs/scripts/doc8_style_check.sh - - name: Install Dependencies run: pip install -e .[docs] diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf1..788b0396 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,6 +5,7 @@ # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build +SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source BUILDDIR = build @@ -14,6 +15,13 @@ help: .PHONY: help Makefile +# Run the development server using sphinx-autobuild +docs: + @echo + @echo "Starting up the docs server..." + @echo + $(SPHINXAUTOBUILD) --port 8000 --watch ${SOURCEDIR} $(SOURCEDIR) "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) + # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile diff --git a/docs/make.bat b/docs/make.bat index 6247f7e2..4a3c1a48 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -7,11 +7,16 @@ REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) +if "%SPHINXAUTOBUILD%" == "" ( + set SPHINXAUTOBUILD=sphinx-autobuild +) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help +if "%1" == "docs" goto docs + %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. @@ -28,6 +33,13 @@ if errorlevel 9009 ( %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end +:docs +@echo +@echo Starting up the docs server... 
+@echo +%SPHINXAUTOBUILD% --port 8000 --watch %SOURCEDIR% %SOURCEDIR% %BUILDDIR%\html %SPHINXOPTS% %O% +goto end + :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh old mode 100644 new mode 100755 diff --git a/docs/source/conf.py b/docs/source/conf.py index 918d62c1..54e5e665 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -30,6 +30,10 @@ extensions = [ "sphinx.ext.intersphinx", "sphinx_reredirects", + 'sphinx_rtd_theme', + "sphinx_rtd_dark_mode", + "sphinx.ext.extlinks", + "sphinx_copybutton", ] diff --git a/setup.cfg b/setup.cfg index d6c7da7d..bd0e58a7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -62,4 +62,7 @@ docs = sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 doc8>=0.11.2 + sphinx-autobuild + sphinx-rtd-dark-mode>=1.3.0 + sphinx-copybutton From 0a9d983650bf042a5bd2c277711b637979e566f1 Mon Sep 17 00:00:00 2001 From: "John M. Horan" Date: Mon, 20 Nov 2023 16:46:54 -0800 Subject: [PATCH 03/80] Update CSS to widen page and handle mobile #84 Reference: https://github.com/nexB/skeleton/issues/84 Signed-off-by: John M. Horan --- docs/source/_static/theme_overrides.css | 363 +----------------- .../_static/theme_overrides_SUPERSEDED.css | 353 +++++++++++++++++ docs/source/conf.py | 15 +- 3 files changed, 380 insertions(+), 351 deletions(-) create mode 100644 docs/source/_static/theme_overrides_SUPERSEDED.css diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css index 9662d63a..de5ae433 100644 --- a/docs/source/_static/theme_overrides.css +++ b/docs/source/_static/theme_overrides.css @@ -1,353 +1,26 @@ -body { - color: #000000; -} - -p { - margin-bottom: 10px; -} - -.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { - margin-bottom: 10px; -} - -.custom_header_01 { - color: #cc0000; - font-size: 22px; - font-weight: bold; - line-height: 50px; -} - -h1, h2, h3, h4, h5, h6 { - margin-bottom: 20px; - margin-top: 20px; -} - -h5 { - font-size: 18px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -h6 { - font-size: 15px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -/* custom admonitions */ -/* success */ -.custom-admonition-success .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-success.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* important */ -.custom-admonition-important .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #000000; -} -div.custom-admonition-important.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* caution */ -.custom-admonition-caution .admonition-title { - color: #000000; - background: #ffff99; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #e8e8e8; -} -div.custom-admonition-caution.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* note */ -.custom-admonition-note .admonition-title { - color: #ffffff; - background: #006bb3; - border-radius: 5px 5px 0px 0px; -} 
-div.custom-admonition-note.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* todo */ -.custom-admonition-todo .admonition-title { - color: #000000; - background: #cce6ff; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #99ccff; -} -div.custom-admonition-todo.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #99ccff; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* examples */ -.custom-admonition-examples .admonition-title { - color: #000000; - background: #ffe6cc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #d8d8d8; -} -div.custom-admonition-examples.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - +/* this is the container for the pages */ .wy-nav-content { max-width: 100%; - padding-right: 100px; - padding-left: 100px; - background-color: #f2f2f2; -} - -div.rst-content { - background-color: #ffffff; - border: solid 1px #e5e5e5; - padding: 20px 40px 20px 40px; -} - -.rst-content .guilabel { - border: 1px solid #ffff99; - background: #ffff99; - font-size: 100%; - font-weight: normal; - border-radius: 4px; - padding: 2px 0px; - margin: auto 2px; - vertical-align: middle; -} - -.rst-content kbd { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - border: solid 1px #d8d8d8; - background-color: #f5f5f5; - padding: 0px 3px; - border-radius: 3px; -} - -.wy-nav-content-wrap a { - color: #0066cc; - text-decoration: none; -} -.wy-nav-content-wrap a:hover { - color: #0099cc; - text-decoration: underline; -} - -.wy-nav-top a { - color: #ffffff; -} - -/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ -.wy-table-responsive table td { - white-space: normal !important; -} - -.rst-content table.docutils td, -.rst-content table.docutils th { - padding: 5px 10px 5px 10px; -} -.rst-content table.docutils td p, -.rst-content table.docutils th p { - font-size: 14px; - margin-bottom: 0px; -} -.rst-content table.docutils td p cite, -.rst-content table.docutils th p cite { - font-size: 14px; - background-color: transparent; -} - -.colwidths-given th { - border: solid 1px #d8d8d8 !important; -} -.colwidths-given td { - border: solid 1px #d8d8d8 !important; -} - -/*handles single-tick inline code*/ -.wy-body-for-nav cite { - color: #000000; - background-color: transparent; - font-style: normal; - font-family: "Courier New"; - font-size: 13px; - padding: 3px 3px 3px 3px; -} - -.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - font-size: 13px; - overflow: visible; - white-space: pre-wrap; - color: #000000; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] { - background-color: #f8f8f8; - border: solid 1px #e8e8e8; -} - -/* This enables inline code to wrap. 
*/ -code, .rst-content tt, .rst-content code { - white-space: pre-wrap; - padding: 2px 3px 1px; - border-radius: 3px; - font-size: 13px; - background-color: #ffffff; -} - -/* use this added class for code blocks attached to bulleted list items */ -.highlight-top-margin { - margin-top: 20px !important; -} - -/* change color of inline code block */ -span.pre { - color: #e01e5a; -} - -.wy-body-for-nav blockquote { - margin: 1em 0; - padding-left: 1em; - border-left: 4px solid #ddd; - color: #000000; -} - -/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ -.rst-content .section ol p, .rst-content .section ul p { - margin-bottom: 0px; -} - -/* add spacing between bullets for legibility */ -.rst-content .section ol li, .rst-content .section ul li { - margin-bottom: 5px; -} - -.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { - margin-top: 5px; -} - -/* but exclude the toctree bullets */ -.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + padding: 0px 40px 0px 0px; margin-top: 0px; - margin-bottom: 0px; } -/* remove extra space at bottom of multine list-table cell */ -.rst-content .line-block { - margin-left: 0px; - margin-bottom: 0px; - line-height: 24px; +.wy-nav-content-wrap { + border-right: solid 1px; } -/* fix extra vertical spacing in page toctree */ -.rst-content .toctree-wrapper ul li ul, article ul li ul { - margin-top: 0; - margin-bottom: 0; -} - -/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ -.reference.internal.toc-index { - color: #d9d9d9; -} - -.reference.internal.toc-index.current { - background-color: #ffffff; - color: #000000; - font-weight: bold; -} - -.toc-index-div { - border-top: solid 1px #000000; - margin-top: 10px; - padding-top: 5px; -} - -.indextable ul li { - font-size: 14px; - margin-bottom: 5px; -} - -/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ -.indextable.genindextable { - margin-bottom: 20px; -} - -div.genindex-jumpbox { - margin-bottom: 10px; -} - -/* rst image classes */ - -.clear-both { - clear: both; - } - -.float-left { - float: left; - margin-right: 20px; -} - -img { - border: solid 1px #e8e8e8; -} - -/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ -.img-title { - color: #000000; - /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ - line-height: 3.0; - font-style: italic; - font-weight: 600; -} - -.img-title-para { - color: #000000; - margin-top: 20px; - margin-bottom: 0px; - font-style: italic; - font-weight: 500; -} - -.red { - color: red; +div.rst-content { + max-width: 1300px; + border: 0; + padding: 0px 80px 10px 80px; + margin-left: 50px; +} + +@media (max-width: 768px) { + div.rst-content { + max-width: 1300px; + border: 0; + padding: 0px 10px 10px 10px; + margin-left: 0px; + } } diff --git a/docs/source/_static/theme_overrides_SUPERSEDED.css b/docs/source/_static/theme_overrides_SUPERSEDED.css new file mode 100644 index 00000000..9662d63a --- /dev/null +++ b/docs/source/_static/theme_overrides_SUPERSEDED.css @@ -0,0 +1,353 @@ +body { + color: #000000; +} + +p { + margin-bottom: 10px; +} + +.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { + margin-bottom: 10px; +} + +.custom_header_01 { + color: #cc0000; + font-size: 22px; + font-weight: bold; + line-height: 50px; +} + +h1, h2, h3, h4, h5, h6 { + margin-bottom: 20px; + margin-top: 20px; +} + +h5 { + font-size: 18px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +h6 { + font-size: 15px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +/* custom admonitions */ +/* success */ +.custom-admonition-success .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-success.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* important */ +.custom-admonition-important .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #000000; +} +div.custom-admonition-important.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* caution */ +.custom-admonition-caution .admonition-title { + color: #000000; + background: #ffff99; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #e8e8e8; +} +div.custom-admonition-caution.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* note */ +.custom-admonition-note .admonition-title { + color: #ffffff; + background: #006bb3; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-note.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* todo */ +.custom-admonition-todo .admonition-title { + color: #000000; + background: #cce6ff; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #99ccff; +} +div.custom-admonition-todo.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #99ccff; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* examples */ +.custom-admonition-examples .admonition-title { + color: #000000; + background: #ffe6cc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #d8d8d8; +} +div.custom-admonition-examples.admonition { + color: #000000; + background: 
#ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +.wy-nav-content { + max-width: 100%; + padding-right: 100px; + padding-left: 100px; + background-color: #f2f2f2; +} + +div.rst-content { + background-color: #ffffff; + border: solid 1px #e5e5e5; + padding: 20px 40px 20px 40px; +} + +.rst-content .guilabel { + border: 1px solid #ffff99; + background: #ffff99; + font-size: 100%; + font-weight: normal; + border-radius: 4px; + padding: 2px 0px; + margin: auto 2px; + vertical-align: middle; +} + +.rst-content kbd { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + border: solid 1px #d8d8d8; + background-color: #f5f5f5; + padding: 0px 3px; + border-radius: 3px; +} + +.wy-nav-content-wrap a { + color: #0066cc; + text-decoration: none; +} +.wy-nav-content-wrap a:hover { + color: #0099cc; + text-decoration: underline; +} + +.wy-nav-top a { + color: #ffffff; +} + +/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ +.wy-table-responsive table td { + white-space: normal !important; +} + +.rst-content table.docutils td, +.rst-content table.docutils th { + padding: 5px 10px 5px 10px; +} +.rst-content table.docutils td p, +.rst-content table.docutils th p { + font-size: 14px; + margin-bottom: 0px; +} +.rst-content table.docutils td p cite, +.rst-content table.docutils th p cite { + font-size: 14px; + background-color: transparent; +} + +.colwidths-given th { + border: solid 1px #d8d8d8 !important; +} +.colwidths-given td { + border: solid 1px #d8d8d8 !important; +} + +/*handles single-tick inline code*/ +.wy-body-for-nav cite { + color: #000000; + background-color: transparent; + font-style: normal; + font-family: "Courier New"; + font-size: 13px; + padding: 3px 3px 3px 3px; +} + +.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + font-size: 13px; + overflow: visible; + white-space: pre-wrap; + color: #000000; +} + +.rst-content pre.literal-block, .rst-content div[class^='highlight'] { + background-color: #f8f8f8; + border: solid 1px #e8e8e8; +} + +/* This enables inline code to wrap. 
*/ +code, .rst-content tt, .rst-content code { + white-space: pre-wrap; + padding: 2px 3px 1px; + border-radius: 3px; + font-size: 13px; + background-color: #ffffff; +} + +/* use this added class for code blocks attached to bulleted list items */ +.highlight-top-margin { + margin-top: 20px !important; +} + +/* change color of inline code block */ +span.pre { + color: #e01e5a; +} + +.wy-body-for-nav blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid #ddd; + color: #000000; +} + +/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ +.rst-content .section ol p, .rst-content .section ul p { + margin-bottom: 0px; +} + +/* add spacing between bullets for legibility */ +.rst-content .section ol li, .rst-content .section ul li { + margin-bottom: 5px; +} + +.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { + margin-top: 5px; +} + +/* but exclude the toctree bullets */ +.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + margin-top: 0px; + margin-bottom: 0px; +} + +/* remove extra space at bottom of multine list-table cell */ +.rst-content .line-block { + margin-left: 0px; + margin-bottom: 0px; + line-height: 24px; +} + +/* fix extra vertical spacing in page toctree */ +.rst-content .toctree-wrapper ul li ul, article ul li ul { + margin-top: 0; + margin-bottom: 0; +} + +/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ +.reference.internal.toc-index { + color: #d9d9d9; +} + +.reference.internal.toc-index.current { + background-color: #ffffff; + color: #000000; + font-weight: bold; +} + +.toc-index-div { + border-top: solid 1px #000000; + margin-top: 10px; + padding-top: 5px; +} + +.indextable ul li { + font-size: 14px; + margin-bottom: 5px; +} + +/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ +.indextable.genindextable { + margin-bottom: 20px; +} + +div.genindex-jumpbox { + margin-bottom: 10px; +} + +/* rst image classes */ + +.clear-both { + clear: both; + } + +.float-left { + float: left; + margin-right: 20px; +} + +img { + border: solid 1px #e8e8e8; +} + +/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. role:: red' */ +.img-title { + color: #000000; + /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ + line-height: 3.0; + font-style: italic; + font-weight: 600; +} + +.img-title-para { + color: #000000; + margin-top: 20px; + margin-bottom: 0px; + font-style: italic; + font-weight: 500; +} + +.red { + color: red; +} diff --git a/docs/source/conf.py b/docs/source/conf.py index 54e5e665..7771ff09 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -30,7 +30,7 @@ extensions = [ "sphinx.ext.intersphinx", "sphinx_reredirects", - 'sphinx_rtd_theme', + "sphinx_rtd_theme", "sphinx_rtd_dark_mode", "sphinx.ext.extlinks", "sphinx_copybutton", @@ -47,7 +47,10 @@ intersphinx_mapping = { "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), - "scancode-workbench": ("https://scancode-workbench.readthedocs.io/en/develop/", None), + "scancode-workbench": ( + "https://scancode-workbench.readthedocs.io/en/develop/", + None, + ), } @@ -82,7 +85,9 @@ "conf_py_path": "/docs/source/", # path in the checkout to the docs root } -html_css_files = ["_static/theme_overrides.css"] +html_css_files = [ + "theme_overrides.css", +] # If true, "Created using Sphinx" is shown in the HTML footer. 
Default is True. @@ -108,6 +113,4 @@ # -- Options for LaTeX output ------------------------------------------------- -latex_elements = { - 'classoptions': ',openany,oneside' -} \ No newline at end of file +latex_elements = {"classoptions": ",openany,oneside"} From 4e36fc601eaa17bde0d2a4bebfed70d7bde28e7c Mon Sep 17 00:00:00 2001 From: "John M. Horan" Date: Tue, 16 Jan 2024 12:22:54 -0800 Subject: [PATCH 04/80] Delete theme_overrides_SUPERSEDED.css as no longer needed #84 Reference: https://github.com/nexB/skeleton/issues/84 Signed-off-by: John M. Horan --- .../_static/theme_overrides_SUPERSEDED.css | 353 ------------------ 1 file changed, 353 deletions(-) delete mode 100644 docs/source/_static/theme_overrides_SUPERSEDED.css diff --git a/docs/source/_static/theme_overrides_SUPERSEDED.css b/docs/source/_static/theme_overrides_SUPERSEDED.css deleted file mode 100644 index 9662d63a..00000000 --- a/docs/source/_static/theme_overrides_SUPERSEDED.css +++ /dev/null @@ -1,353 +0,0 @@ -body { - color: #000000; -} - -p { - margin-bottom: 10px; -} - -.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { - margin-bottom: 10px; -} - -.custom_header_01 { - color: #cc0000; - font-size: 22px; - font-weight: bold; - line-height: 50px; -} - -h1, h2, h3, h4, h5, h6 { - margin-bottom: 20px; - margin-top: 20px; -} - -h5 { - font-size: 18px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -h6 { - font-size: 15px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -/* custom admonitions */ -/* success */ -.custom-admonition-success .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-success.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* important */ -.custom-admonition-important .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #000000; -} -div.custom-admonition-important.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* caution */ -.custom-admonition-caution .admonition-title { - color: #000000; - background: #ffff99; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #e8e8e8; -} -div.custom-admonition-caution.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* note */ -.custom-admonition-note .admonition-title { - color: #ffffff; - background: #006bb3; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-note.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* todo */ -.custom-admonition-todo .admonition-title { - color: #000000; - background: #cce6ff; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #99ccff; -} -div.custom-admonition-todo.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #99ccff; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* examples */ -.custom-admonition-examples .admonition-title { - color: #000000; - background: #ffe6cc; - border-radius: 5px 5px 0px 0px; 
- border-bottom: solid 1px #d8d8d8; -} -div.custom-admonition-examples.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -.wy-nav-content { - max-width: 100%; - padding-right: 100px; - padding-left: 100px; - background-color: #f2f2f2; -} - -div.rst-content { - background-color: #ffffff; - border: solid 1px #e5e5e5; - padding: 20px 40px 20px 40px; -} - -.rst-content .guilabel { - border: 1px solid #ffff99; - background: #ffff99; - font-size: 100%; - font-weight: normal; - border-radius: 4px; - padding: 2px 0px; - margin: auto 2px; - vertical-align: middle; -} - -.rst-content kbd { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - border: solid 1px #d8d8d8; - background-color: #f5f5f5; - padding: 0px 3px; - border-radius: 3px; -} - -.wy-nav-content-wrap a { - color: #0066cc; - text-decoration: none; -} -.wy-nav-content-wrap a:hover { - color: #0099cc; - text-decoration: underline; -} - -.wy-nav-top a { - color: #ffffff; -} - -/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ -.wy-table-responsive table td { - white-space: normal !important; -} - -.rst-content table.docutils td, -.rst-content table.docutils th { - padding: 5px 10px 5px 10px; -} -.rst-content table.docutils td p, -.rst-content table.docutils th p { - font-size: 14px; - margin-bottom: 0px; -} -.rst-content table.docutils td p cite, -.rst-content table.docutils th p cite { - font-size: 14px; - background-color: transparent; -} - -.colwidths-given th { - border: solid 1px #d8d8d8 !important; -} -.colwidths-given td { - border: solid 1px #d8d8d8 !important; -} - -/*handles single-tick inline code*/ -.wy-body-for-nav cite { - color: #000000; - background-color: transparent; - font-style: normal; - font-family: "Courier New"; - font-size: 13px; - padding: 3px 3px 3px 3px; -} - -.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - font-size: 13px; - overflow: visible; - white-space: pre-wrap; - color: #000000; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] { - background-color: #f8f8f8; - border: solid 1px #e8e8e8; -} - -/* This enables inline code to wrap. 
*/ -code, .rst-content tt, .rst-content code { - white-space: pre-wrap; - padding: 2px 3px 1px; - border-radius: 3px; - font-size: 13px; - background-color: #ffffff; -} - -/* use this added class for code blocks attached to bulleted list items */ -.highlight-top-margin { - margin-top: 20px !important; -} - -/* change color of inline code block */ -span.pre { - color: #e01e5a; -} - -.wy-body-for-nav blockquote { - margin: 1em 0; - padding-left: 1em; - border-left: 4px solid #ddd; - color: #000000; -} - -/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ -.rst-content .section ol p, .rst-content .section ul p { - margin-bottom: 0px; -} - -/* add spacing between bullets for legibility */ -.rst-content .section ol li, .rst-content .section ul li { - margin-bottom: 5px; -} - -.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { - margin-top: 5px; -} - -/* but exclude the toctree bullets */ -.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { - margin-top: 0px; - margin-bottom: 0px; -} - -/* remove extra space at bottom of multine list-table cell */ -.rst-content .line-block { - margin-left: 0px; - margin-bottom: 0px; - line-height: 24px; -} - -/* fix extra vertical spacing in page toctree */ -.rst-content .toctree-wrapper ul li ul, article ul li ul { - margin-top: 0; - margin-bottom: 0; -} - -/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ -.reference.internal.toc-index { - color: #d9d9d9; -} - -.reference.internal.toc-index.current { - background-color: #ffffff; - color: #000000; - font-weight: bold; -} - -.toc-index-div { - border-top: solid 1px #000000; - margin-top: 10px; - padding-top: 5px; -} - -.indextable ul li { - font-size: 14px; - margin-bottom: 5px; -} - -/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ -.indextable.genindextable { - margin-bottom: 20px; -} - -div.genindex-jumpbox { - margin-bottom: 10px; -} - -/* rst image classes */ - -.clear-both { - clear: both; - } - -.float-left { - float: left; - margin-right: 20px; -} - -img { - border: solid 1px #e8e8e8; -} - -/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ -.img-title { - color: #000000; - /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ - line-height: 3.0; - font-style: italic; - font-weight: 600; -} - -.img-title-para { - color: #000000; - margin-top: 20px; - margin-bottom: 0px; - font-style: italic; - font-weight: 500; -} - -.red { - color: red; -} From 7d74b8a3c98761293cd133d543e4d58a525dc7bf Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 18 Jan 2024 17:11:14 +0530 Subject: [PATCH 05/80] Fix top padding for rst content Signed-off-by: Ayan Sinha Mahapatra --- docs/source/_static/theme_overrides.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css index de5ae433..5863ccf5 100644 --- a/docs/source/_static/theme_overrides.css +++ b/docs/source/_static/theme_overrides.css @@ -12,7 +12,7 @@ div.rst-content { max-width: 1300px; border: 0; - padding: 0px 80px 10px 80px; + padding: 10px 80px 10px 80px; margin-left: 50px; } From 008d521aec51e5983f6d6a2adc4efa7fd92159cf Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Mon, 19 Feb 2024 15:21:45 +0530 Subject: [PATCH 06/80] Update CI runners and python version Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 2 +- .github/workflows/pypi-release.yml | 8 ++++---- azure-pipelines.yml | 22 +++++++++++++++------- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index ada779bf..8c2abfe9 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -4,7 +4,7 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: max-parallel: 4 diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 95857301..d2206c87 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,10 +21,10 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -47,7 +47,7 @@ jobs: name: Create GH release needs: - build-pypi-distribs - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Download built archives @@ -67,7 +67,7 @@ jobs: name: Create PyPI release needs: - create-gh-release - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Download built archives diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 764883de..373b78cd 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ jobs: parameters: job_name: ubuntu20_cpython image_name: ubuntu-20.04 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: macos11_cpython image_name: macOS-11 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos12_cpython image_name: macOS-12 - 
python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,15 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos14_cpython + image_name: macOS-14 + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +59,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -59,6 +67,6 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 124da3dcef0d95a6f6aa76ed849f47ada25b83e2 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Mon, 1 Jul 2024 15:11:21 +0530 Subject: [PATCH 07/80] Replace deprecated macos CI runners Replace macos-11 runners with macos-14 runners. Reference: https://github.com/actions/runner-images?tab=readme-ov-file#available-images Reference: https://github.com/nexB/skeleton/issues/89 Signed-off-by: Ayan Sinha Mahapatra --- azure-pipelines.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 373b78cd..c2a3b522 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -25,24 +25,24 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos11_cpython - image_name: macOS-11 + job_name: macos12_cpython + image_name: macOS-12 python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs - template: etc/ci/azure-posix.yml parameters: - job_name: macos12_cpython - image_name: macOS-12 + job_name: macos13_cpython + image_name: macOS-13 python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs - template: etc/ci/azure-posix.yml parameters: - job_name: macos13_cpython - image_name: macOS-13 + job_name: macos14_cpython_arm64 + image_name: macOS-14 python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -50,8 +50,8 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython - image_name: macOS-14 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + image_name: macOS-14-large + python_versions: ['3.8', '3.8', '3.9', '3.10', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs From be4e14d414cf4f7112b529dc71f7abccc9dcf24a Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Mon, 1 Jul 2024 16:00:40 +0530 Subject: [PATCH 08/80] Update minimum required python version to 3.8 Signed-off-by: Ayan Sinha Mahapatra --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index bd0e58a7..a8e20c5d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,7 +38,7 @@ zip_safe = false setup_requires = setuptools_scm[toml] >= 4 -python_requires = >=3.7 +python_requires = >=3.8 install_requires = From 9c57f340d22d8891a5614a93553b20d75e2f3136 Mon Sep 17 00:00:00 2001 From: Chin Yeung Li Date: Tue, 20 Aug 2024 16:46:20 +0800 Subject: [PATCH 09/80] 
Update link references of ownership from nexB to aboutcode-org Signed-off-by: Chin Yeung Li --- Makefile | 4 +- NOTICE | 2 +- configure | 2 +- configure.bat | 2 +- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 2 +- docs/source/skeleton-usage.rst | 2 +- etc/scripts/check_thirdparty.py | 5 +- etc/scripts/fetch_thirdparty.py | 19 ++++-- etc/scripts/gen_requirements.py | 2 +- etc/scripts/gen_requirements_dev.py | 2 +- etc/scripts/utils_dejacode.py | 11 ++-- etc/scripts/utils_requirements.py | 11 ++-- etc/scripts/utils_thirdparty.py | 89 +++++++++++++++++--------- setup.cfg | 2 +- tests/test_skeleton_codestyle.py | 2 +- 16 files changed, 100 insertions(+), 59 deletions(-) diff --git a/Makefile b/Makefile index cc36c355..94451b33 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -35,7 +35,7 @@ check: @echo "-> Run pycodestyle (PEP8) validation" @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . + @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . @echo "-> Run black validation" @${ACTIVATE} black --check --check -l 100 src tests setup.py diff --git a/NOTICE b/NOTICE index 65936b2b..cbdaef79 100644 --- a/NOTICE +++ b/NOTICE @@ -2,7 +2,7 @@ # Copyright (c) nexB Inc. and others. # SPDX-License-Identifier: Apache-2.0 # -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Visit https://aboutcode.org and https://github.com/aboutcode-org/ for support and download. # ScanCode is a trademark of nexB Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/configure b/configure index 926a894e..22d92885 100755 --- a/configure +++ b/configure @@ -3,7 +3,7 @@ # Copyright (c) nexB Inc. and others. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/ for support or download. +# See https://github.com/aboutcode-org/ for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # diff --git a/configure.bat b/configure.bat index 5e95b311..5b9a9d68 100644 --- a/configure.bat +++ b/configure.bat @@ -4,7 +4,7 @@ @rem Copyright (c) nexB Inc. and others. All rights reserved. @rem SPDX-License-Identifier: Apache-2.0 @rem See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -@rem See https://github.com/nexB/ for support or download. +@rem See https://github.com/aboutcode-org/ for support or download. @rem See https://aboutcode.org for more information about nexB OSS projects. 
diff --git a/docs/source/conf.py b/docs/source/conf.py index 7771ff09..8c88fa2c 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -43,7 +43,7 @@ # This points to aboutcode.readthedocs.io # In case of "undefined label" ERRORS check docs on intersphinx to troubleshoot -# Link was created at commit - https://github.com/nexB/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 +# Link was created at commit - https://github.com/aboutcode-org/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 intersphinx_mapping = { "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 13882e10..5640db26 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -12,7 +12,7 @@ To get started, create or identify a working directory on your local machine. Open that directory and execute the following command in a terminal session:: - git clone https://github.com/nexB/skeleton.git + git clone https://github.com/aboutcode-org/skeleton.git That will create an ``/skeleton`` directory in your working directory. Now you can install the dependencies in a virtualenv:: diff --git a/docs/source/skeleton-usage.rst b/docs/source/skeleton-usage.rst index cde23dcd..6cb4cc5f 100644 --- a/docs/source/skeleton-usage.rst +++ b/docs/source/skeleton-usage.rst @@ -118,7 +118,7 @@ corrected. You can check to see if your corrections are valid by running: Once the wheels are collected and the ABOUT files are generated and correct, upload them to thirdparty.aboutcode.org/pypi by placing the wheels and ABOUT files from the thirdparty directory to the pypi directory at -https://github.com/nexB/thirdparty-packages +https://github.com/aboutcode-org/thirdparty-packages Usage after project initialization diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index b052f25b..2daded94 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import click @@ -17,7 +17,8 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, + path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index eedf05c6..3f9ff527 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# @@ -55,7 +55,8 @@ "-d", "--dest", "dest_dir", - type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, + path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -224,7 +225,8 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os( + pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -260,13 +262,14 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append( + environment) if TRACE: print(f" NOT FOUND") if (sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + ): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -289,7 +292,8 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any( + d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -299,7 +303,8 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses( + dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py index 07e26f77..2b65ae80 100644 --- a/etc/scripts/gen_requirements.py +++ b/etc/scripts/gen_requirements.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import argparse diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py index 12cc06d3..5db1c48e 100644 --- a/etc/scripts/gen_requirements_dev.py +++ b/etc/scripts/gen_requirements_dev.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import argparse diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c42e6c93..652252d4 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. 
# See https://aboutcode.org for more information about nexB OSS projects. # import io @@ -33,7 +33,8 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print( + "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,7 +69,8 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print( + f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -149,7 +151,8 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) + for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 0fc25a35..1c502390 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -102,7 +102,8 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join( + f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -113,10 +114,12 @@ def get_installed_reqs(site_packages_dir): as a text. """ if not os.path.exists(site_packages_dir): - raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception( + f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", + "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index addf8e5e..46dc7289 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# import email @@ -245,9 +245,11 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print( + f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list(package.get_supported_wheels(environment=environment)) + supported_wheels = list( + package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,7 +293,8 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print( + f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -300,7 +303,8 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print( + f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -533,7 +537,8 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version(name=self.name, version=self.version) + package = repo.get_package_version( + name=self.name, version=self.version) if not package: if TRACE: print( @@ -772,7 +777,8 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print(f"Failed to fetch NOTICE file: {self.notice_download_url}") + print( + f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -821,9 +827,11 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url(use_cached_index=use_cached_index).links + urls = LinksRepository.from_url( + use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] + extra_lic_names = [l.get("file") + for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] extra_lic_names = [ln for ln in extra_lic_names if ln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] @@ -834,7 +842,8 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename(filename=filename, urls=urls) + lic_url = get_license_link_for_filename( + filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -911,7 +920,8 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [c for c in classifiers if not c.startswith("License")] + other_classifiers = [ + c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -953,7 +963,8 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string(self.package_url) + purl_from_self = packageurl.PackageURL.from_string( + self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1003,7 +1014,8 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception( + f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1397,7 +1409,8 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) + for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1458,7 +1471,8 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith( + EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1476,7 +1490,8 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print(f" Skipping invalid distribution from: {path_or_url}") + print( + f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1525,7 +1540,8 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict(help="Python implementation supported by this environment."), + metadata=dict( + help="Python implementation supported by this environment."), repr=False, ) @@ -1539,7 +1555,8 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict(help="List of platform tags supported by this environment."), + metadata=dict( + help="List of platform 
tags supported by this environment."), repr=False, ) @@ -1623,7 +1640,8 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict(help="A set of already fetched package normalized names."), + metadata=dict( + help="A set of already fetched package normalized names."), ) use_cached_index = attr.ib( @@ -1654,10 +1672,12 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print( + f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print(f"WARNING: package {name} not found in repo: {self.index_url}") + print( + f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1842,7 +1862,8 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content(path_or_url=path_or_url, as_text=as_text) + content = get_file_content( + path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1864,7 +1885,8 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content( + url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1930,7 +1952,8 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException( + f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2021,7 +2044,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2033,7 +2057,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2044,7 +2069,8 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list(other_local_version.get_distributions()) + latest_local_dists = list( + other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2070,7 +2096,8 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - 
local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2111,7 +2138,8 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files( + dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2259,7 +2287,8 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join( + dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: diff --git a/setup.cfg b/setup.cfg index a8e20c5d..ef7d369b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,7 +6,7 @@ license = Apache-2.0 description = skeleton long_description = file:README.rst long_description_content_type = text/x-rst -url = https://github.com/nexB/skeleton +url = https://github.com/aboutcode-org/skeleton author = nexB. Inc. and others author_email = info@aboutcode.org diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 2eb6e558..b4ce8c16 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -3,7 +3,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# From a92905297acf39ecd820bfb133f8670c39b40c97 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 17 Jan 2025 20:07:25 +0100 Subject: [PATCH 10/80] Drop deprecated macos-12 runner --- azure-pipelines.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index c2a3b522..39601e62 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -23,14 +23,6 @@ jobs: test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos12_cpython - image_name: macOS-12 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython From 4af4fce3cc57d001c6c26f77d477cd44cef2ffef Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Sat, 15 Feb 2025 00:09:49 +0530 Subject: [PATCH 11/80] Update CI/Actions runners Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 8 ++++---- .github/workflows/pypi-release.yml | 18 +++++++++--------- azure-pipelines.yml | 22 +++++++++++----------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 8c2abfe9..621de4b2 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -4,19 +4,19 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 strategy: max-parallel: 4 matrix: - python-version: [3.9] + python-version: [3.12] steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index d2206c87..a66c9c80 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,14 +21,14 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.12 - name: Install pypa/build run: python -m pip install build --user @@ -37,7 +37,7 @@ jobs: run: python -m build --sdist --wheel --outdir dist/ - name: Upload built archives - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pypi_archives path: dist/* @@ -47,17 +47,17 @@ jobs: name: Create GH release needs: - build-pypi-distribs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist - name: Create GH release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: draft: true files: dist/* @@ -67,11 +67,11 @@ jobs: name: Create PyPI release needs: - create-gh-release - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 39601e62..a220f2be 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -9,17 +9,17 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu20_cpython - image_name: ubuntu-20.04 - 
python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + job_name: ubuntu22_cpython + image_name: ubuntu-22.04 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu22_cpython - image_name: ubuntu-22.04 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + job_name: ubuntu24_cpython + image_name: ubuntu-24.04 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos14_cpython_arm64 image_name: macOS-14 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14-large - python_versions: ['3.8', '3.8', '3.9', '3.10', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -59,6 +59,6 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 320ec21daa249ceae0c07787f9e52134b3ad06ab Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 12/80] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b33..1738b20a 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde79074..01e60fc6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369b..aaec643f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From d4e29c36c21ab81797604911cdeaea83d80e8088 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 13/80] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc6..cea91bd1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 6c028f7219ae876ea62074ae435e574525e205d6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 14/80] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd1..9e627366 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 233f3edabfbab390029fb9f1842bf43766b04583 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 15/80] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20a..930e8010 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 55545bf7a1a8f119a560c7f548ce5a460f39f37d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 16/80] Improve import sorting Signed-off-by: Philippe 
Ombredanne --- etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded94..62dbb14f 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff527..30d376c4 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c56..a33b8b38 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d4..c71543f6 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0cd..de0ac95d 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc7289..b0295eca 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
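
Note: the isort settings added to pyproject.toml in the next hunk are terse, so here is a minimal sketch (not taken from any patch) of the import layout they enforce, assuming click, requests and packvers are installed as they are for these scripts:

    # One import per line, with "from" imports expanded to one name each
    # (force-single-line = true) and a single blank line kept after the
    # import block (lines-after-imports = 1); groups stay separated.
    import os
    import subprocess

    import click
    import requests
    from packvers.tags import compatible_tags
    from packvers.tags import cpython_tags

    TRACE = False

With force-single-line enabled, a grouped import such as "from packvers.tags import (compatible_tags, cpython_tags, ...)" is rewritten as one from-import per name, which is exactly the change applied to etc/scripts/utils_pip_compatibility_tags.py above.
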
diff --git a/pyproject.toml b/pyproject.toml index 9e627366..ba55770f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,10 +76,15 @@ select = [ "I", # isort "C9", # McCabe complexity ] -ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] [tool.ruff.lint.isort] force-single-line = true +lines-after-imports = 1 +default-section = "first-party" +known-first-party = ["src", "tests", "etc/scripts/**/*.py"] +known-third-party = ["click", "pytest"] + sections = { django = ["django"] } section-order = [ "future", From 0b63e5073b6b1cdc0960abe35060ad0fdb67b665 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:35:16 +0100 Subject: [PATCH 17/80] Apply small code updates Signed-off-by: Philippe Ombredanne --- etc/scripts/utils_requirements.py | 20 ++++++++----- etc/scripts/utils_thirdparty.py | 48 +++++++++++++++---------------- 2 files changed, 37 insertions(+), 31 deletions(-) diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 1c502390..a9ac2235 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False): >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") + >>> expected = ("foo", ""), get_required_name_version("foo>=1.2") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected >>> try: ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ... 
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir): # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -150,9 +154,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index b0295eca..6d5ffdce 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - assert self.filename + if not self.filename: + raise ValueError(f"self.filename has no value but is required: {self.filename!r}") if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): urls = LinksRepository.from_url( use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") - for l in self.extra_data.get("licenses", {})] + extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [ln for ln in extra_lic_names if ln] + extra_lic_names = [eln for eln in extra_lic_names if eln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except: + except Exception: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except: - msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' + except Exception: + msg = f"No text for license {filename} in expression " + f"{self.license_expression!r} from {self}" print(msg) errors.append(msg) @@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [l for l in urls if l.endswith(f"/{filename}")] + path_or_url = [url for url in urls if url.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1140,7 +1140,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. @@ -1301,7 +1300,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except: + except Exception: return False @@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print( - f" Skipping invalid distribution from: {path_or_url}") + print(f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1500,8 +1498,7 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - for wheel in self.wheels: - yield wheel + yield from self.wheels def get_url_for_filename(self, filename): """ @@ -1632,7 +1629,8 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " + "available in this repo" ), ) @@ -1647,7 +1645,8 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. " + "Otherwise, fetch and cache." ), ) @@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 092f545f5b87442ae22884cb4d5381883343a1c2 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 18/80] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14f..1aa4e28a 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c4..c2246837 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", "--dest", 
"dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90dc..cfe68e67 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f6..cd39cda3 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac2235..167bc9f5 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdce..4ea1babb 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From d05665ad44a50b71f66b974ad24c81f7443e8180 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 19/80] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2c..8aad8294 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. |psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28a..bb8347a5 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. 
and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c2246837..76a19a60 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e67..89d06265 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
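
Note: as a short hypothetical usage sketch of the index builder being cleaned up here, build_pypi_index and its default base_url come from etc/scripts/gen_pypi_simple.py as shown in these hunks; running it from that directory against a local "thirdparty" directory of wheels and sdists is only an assumption:

    # Rebuild thirdparty/simple/ as a PyPI "simple" index (plus a links.html
    # usable with pip's --find-links), as the docstring below describes.
    from gen_pypi_simple import build_pypi_index

    build_pypi_index("thirdparty", base_url="https://thirdparty.aboutcode.org/pypi")
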
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url): Return an HTML document as string which is a links index of all packages """ document = [] - header = f""" + header = """ Links for all packages @@ -177,13 +176,13 @@ def simple_index_entry(self, base_url): def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"): """ - Using a ``directory`` directory of wheels and sdists, create the a PyPI - simple directory index at ``directory``/simple/ populated with the proper - PyPI simple index directory structure crafted using symlinks. + Create the a PyPI simple directory index using a ``directory`` directory of wheels and sdists in + the direvctory at ``directory``/simple/ populated with the proper PyPI simple index directory + structure crafted using symlinks. - WARNING: The ``directory``/simple/ directory is removed if it exists. - NOTE: in addition to the a PyPI simple index.html there is also a links.html - index file generated which is suitable to use with pip's --find-links + WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the a + PyPI simple index.html there is also a links.html index file generated which is suitable to use + with pip's --find-links """ directory = Path(directory) diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py index 2b65ae80..1b879442 100644 --- a/etc/scripts/gen_requirements.py +++ b/etc/scripts/gen_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -34,7 +33,8 @@ def gen_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages", + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-r", diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py index 5db1c48e..85482056 100644 --- a/etc/scripts/gen_requirements_dev.py +++ b/etc/scripts/gen_requirements_dev.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -36,7 +35,8 @@ def gen_dev_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages', + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index a33b8b38..de4b7066 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py @@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected): assert actual == expected -class Testcompatibility_tags(object): +class Testcompatibility_tags: def mock_get_config_var(self, **kwd): """ Patch sysconfig.get_config_var for arbitrary keys. 
@@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda3..b6bff518 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95d..dd954bca 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. 
:param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f5..b9b2c0e7 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1babb..aafc1d69 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 63bcbf507e8a25f22853d56605c107e47c3673cc Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 20/80] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196f..8a93c94d 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770f..a872ab3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c16..8cd85c94 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. 
+ + # TODO: update with switch to pyproject.toml setup_cfg = configparser.ConfigParser() setup_cfg.read("setup.cfg") if setup_cfg["metadata"]["name"] != "skeleton": return - args = "venv/bin/black --check -l 100 setup.py etc tests" + commands = [ + ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "format", "--check"], + ] + command = None try: - subprocess.check_output(args.split()) + for command in commands: + subprocess.check_output(command) # noqa: S603 except subprocess.CalledProcessError as e: print("===========================================================") print(e.output) print("===========================================================") raise Exception( - "Black style check failed; please format the code using:\n" - " python -m black -l 100 setup.py etc tests", + f"Code style and linting command check failed: {' '.join(command)!r}.\n" + "You can check and format the code using:\n" + " make valid\n", + "OR:\n ruff format\n", + " ruff check --fix\n", e.output, ) from e From 9d1393a85303bf8cf92c9a25aa5cc50bdfd080d1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:08:25 +0100 Subject: [PATCH 21/80] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 + etc/scripts/test_utils_pip_compatibility_tags.py | 1 + tests/test_skeleton_codestyle.py | 1 + 3 files changed, 3 insertions(+) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index bb8347a5..65ae595e 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -11,6 +11,7 @@ import utils_thirdparty + @click.command() @click.option( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index de4b7066..0e9c360a 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -33,6 +33,7 @@ import utils_pip_compatibility_tags + @pytest.mark.parametrize( "version_info, expected", [ diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 8cd85c94..7135ac0d 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -11,6 +11,7 @@ import subprocess import unittest + class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): # This test shouldn't run in proliferated repositories. 
From f10b783b6b6fe33032a7862352ed532294efdf14 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 22/80] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3a..0f8bd587 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 1d6c8f3bb8755aa7c9d2804240c01b0161417328 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 23/80] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc8..16e20464 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index 6cbd8395..3d6cb4e1 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,6 @@ A Simple Python Project Skeleton ================================ + This repo attempts to standardize the structure of the Python-based project's repositories using modern Python packaging and configuration techniques. Using this `blog post`_ as inspiration, this repository serves as the base for @@ -47,16 +48,19 @@ Release Notes - 2022-03-04: - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. 
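The patches above move the skeleton's code style tooling to ruff, configured through the [tool.ruff] tables in pyproject.toml and driven either by the Makefile targets (make valid, make check) or by the reworked codestyle test. As a minimal illustrative sketch only — not part of any patch in this series — the same two checks can be scripted directly; this assumes ruff is installed in the active virtualenv (for example via ./configure --dev, since ruff is listed in the testing extras) and that the script is run from the repository root:

    import subprocess

    # Illustrative helper mirroring the ruff checks configured in the patches above.
    # Assumes the `ruff` executable is on PATH (e.g. installed in the project venv).
    def run_codestyle_checks():
        for command in (["ruff", "check", "."], ["ruff", "format", "--check", "."]):
            # check=True raises CalledProcessError when a check fails,
            # which is how the reworked codestyle test surfaces style violations.
            subprocess.run(command, check=True)

    if __name__ == "__main__":
        run_codestyle_checks()

Running such a script before pushing should reproduce locally what the CI codestyle test enforces.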
From 0213c1ea9a15ab94a854b8d7af27a1a036e393f4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 24/80] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e8010..debc404e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From c112f2a9c20d58e986424f5f32bd259814fc8e3f Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 25/80] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd587..51761ff8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 944b6c5371bea2ce0763fd26888de6436116d185 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:34:26 +0100 Subject: [PATCH 26/80] Add support for new OS versions Signed-off-by: Philippe Ombredanne --- README.rst | 50 +++++++++++++++++++++++++++++++++++++++++---- azure-pipelines.yml | 36 ++++++++++++++++++++++++++++++-- 2 files changed, 80 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 6cbd8395..f848b4b3 100644 --- a/README.rst +++ b/README.rst @@ -1,9 +1,11 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's -repositories using modern Python packaging and configuration techniques. -Using this `blog post`_ as inspiration, this repository serves as the base for -all new Python projects and is mergeable in existing repositories as well. + +This repo attempts to standardize the structure of the Python-based project's repositories using +modern Python packaging and configuration techniques that can then be applied to many repos. + +Using this `blog post`_ as inspiration, this repository serves as the base for all new Python +projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -13,6 +15,7 @@ Usage A brand new project ------------------- + .. code-block:: bash git init my-new-repo @@ -26,6 +29,7 @@ From here, you can make the appropriate changes to the files for your specific p Update an existing project --------------------------- + .. code-block:: bash cd my-existing-project @@ -41,17 +45,54 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. Release Notes ============= +- 2025-03-29: + + - Add support for beta macOS-15 + - Add support for beta windows-2025 + +- 2025-02-14: + + - Drop support for Python 3.8, add support in CI for Python 3.13, use Python 3.12 as default + version. 
+ +- 2025-01-17: + + - Drop support for macOS-12, add support for macOS-14 + - Add support in CI for ubuntu-24.04 + - Add support in CI for Python 3.12 + +- 2024-08-20: + + - Update references of ownership from nexB to aboutcode-org + +- 2024-07-01: + + - Drop support for Python 3.8 + - Drop support for macOS-11, add support for macOS-14 + +- 2024-02-19: + + - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 + +- 2023-10-18: + + - Add dark mode support in documentation + - 2023-07-18: + - Add macOS-13 job in azure-pipelines.yml - 2022-03-04: + - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` @@ -59,4 +100,5 @@ Release Notes as well as collecting dependencies as wheels and generating ABOUT files for them. - 2021-05-11: + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a220f2be..80ae45b1 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,11 +26,27 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython + image_name: macOS-13-xlarge + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos14_cpython + image_name: macOS-14-large + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython_arm64 @@ -41,8 +57,16 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos14_cpython - image_name: macOS-14-large + job_name: macos15_cpython + image_name: macOS-15 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos15_cpython_arm64 + image_name: macOS-15-large python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -62,3 +86,11 @@ jobs: python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs + + - template: etc/ci/azure-win.yml + parameters: + job_name: win2025_cpython + image_name: windows-2025 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv\Scripts\pytest -n 2 -vvs From 136af3912336616fbd2431a96230961517a2c356 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 12:45:32 +0200 Subject: [PATCH 27/80] Update scripts aboutcode references Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/etc/scripts/update_skeleton.py 
b/etc/scripts/update_skeleton.py index 635898ba..5705fc43 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. AboutCode, and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -16,7 +15,7 @@ import click -NEXB_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES=[ "aboutcode-toolkit", "ahocode", "bitcode", @@ -56,9 +55,9 @@ @click.command() @click.help_option("-h", "--help") -def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): +def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): """ - Update project files of nexB projects that use the skeleton + Update project files of AboutCode projects that use the skeleton This script will: - Clone the repo @@ -81,14 +80,14 @@ def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): os.chdir(work_dir_path) # Clone repo - repo_git = f"git@github.com:nexB/{repo_name}.git" + repo_git = f"git@github.com:aboutcode-org/{repo_name}.git" subprocess.run(["git", "clone", repo_git]) # Go into cloned repo os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From da8eff0383611df60311b8bac599657450eaeb52 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 28/80] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff8..7d807ebf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." ] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 4f9e936d452acc3822df8d3f932cbd7071b31d72 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 29/80] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807ebf..5e16b564 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." 
] From a2809fb28c60b54aec0c367285acacdea1cb03a8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 30/80] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d92885..83fd2035 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d68..18b37038 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643f..ad8e0d8f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From 43b96c28baaa1621d24b6f5791c6d915d2edc5f3 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 31/80] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404e..d21a2f95 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b564..bfb1d353 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From b7194c80c9425087f1d05e430bd9d6a14fb9c3a0 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 32/80] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 +++++------- Makefile | 10 
+++++++--- docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b2..10ba5faa 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f95..413399e5 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 docs/ *.rst + @${ACTIVATE} doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 94163239..00000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c5426863..00000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad8294..056ca6ea 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db26..041b3583 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. 
What is checked: @@ -169,11 +169,11 @@ What is checked: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +ScanCode toolkit documentation uses `Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ for more information. For example:: @@ -230,7 +230,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. However, this convention is used in Python’s Style Guide for documenting which you may follow: diff --git a/pyproject.toml b/pyproject.toml index bfb1d353..c9e67720 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,7 +122,5 @@ max-complexity = 10 [tool.doc8] - ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] max-line-length=100 -verbose=0 From a5bcdbdd71d1542a0e9ec9b190a2e3d573c53744 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:49:01 +0200 Subject: [PATCH 33/80] Add twine check to release publication Signed-off-by: Philippe Ombredanne --- .github/workflows/pypi-release.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index a66c9c80..cf0579a7 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -30,12 +30,15 @@ jobs: with: python-version: 3.12 - - name: Install pypa/build - run: python -m pip install build --user + - name: Install pypa/build and twine + run: python -m pip install --user build twine - name: Build a binary wheel and a source tarball run: python -m build --sdist --wheel --outdir dist/ + - name: Validate wheel and sdis for Pypi + run: python -m twine check dist/* + - name: Upload built archives uses: actions/upload-artifact@v4 with: From a6c25fb2a2fa35311d26621b9db400ca52bd376e Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 19:16:31 +0200 Subject: [PATCH 34/80] Refine doc contribution docs Signed-off-by: Philippe Ombredanne --- docs/source/contribute/contrib_doc.rst | 119 ++++++++----------------- 1 file changed, 38 insertions(+), 81 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 041b3583..dee9296d 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -8,109 +8,59 @@ Contributing to the Documentation Setup Local Build ----------------- -To get started, create or identify a working directory on your local machine. +To get started, check out and configure the repository for development:: -Open that directory and execute the following command in a terminal session:: + git clone https://github.com/aboutcode-org/.git - git clone https://github.com/aboutcode-org/skeleton.git + cd your-repo + ./configure --dev -That will create an ``/skeleton`` directory in your working directory. -Now you can install the dependencies in a virtualenv:: - - cd skeleton - ./configure --docs +(Or use "make dev") .. note:: - In case of windows, run ``configure --docs`` instead of this. - -Now, this will install the following prerequisites: - -- Sphinx -- sphinx_rtd_theme (the format theme used by ReadTheDocs) -- docs8 (style linter) + In case of windows, run ``configure --dev``. 
-These requirements are already present in setup.cfg and `./configure --docs` installs them. +This will install and configure all requirements foer development including for docs development. -Now you can build the HTML documents locally:: +Now you can build the HTML documentation locally:: source venv/bin/activate - cd docs - make html - -Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the -documentation .html files:: - - open build/html/index.html - -.. note:: - - In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t - get a file descriptor referring to the console”, try: - - :: - - see build/html/index.html + make docs -You now have a local build of the AboutCode documents. +This will build a local instance of the ``docs/_build`` directory:: -.. _contrib_doc_share_improvements: + open docs/_build/index.html -Share Document Improvements ---------------------------- - -Ensure that you have the latest files:: - - git pull - git status -Before commiting changes run Continious Integration Scripts locally to run tests. Refer -:ref:`doc_ci` for instructions on the same. +To validate the documentation style and content, use:: -Follow standard git procedures to upload your new and modified files. The following commands are -examples:: - - git status - git add source/index.rst - git add source/how-to-scan.rst - git status - git commit -m "New how-to document that explains how to scan" - git status - git push - git status - -The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your -Pull Request is Merged. + source venv/bin/activate + make doc8 + make docs-check -Refer the `Pro Git Book `_ available online for Git tutorials -covering more complex topics on Branching, Merging, Rebasing etc. .. _doc_ci: Continuous Integration ---------------------- -The documentations are checked on every new commit through Travis-CI, so that common errors are -avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects -of the documentation : +The documentations are checked on every new commit, so that common errors are avoided and +documentation standards are enforced. We checks for these aspects of the documentation: 1. Successful Builds (By using ``sphinx-build``) -2. No Broken Links (By Using ``link-check``) -3. Linting Errors (By Using ``Doc8``) +2. No Broken Links (By Using ``linkcheck``) +3. Linting Errors (By Using ``doc8``) -So run these scripts at your local system before creating a Pull Request:: +You myst run these scripts locally before creating a pull request:: - cd docs - ./scripts/sphinx_build_link_check.sh - ./scripts/doc8_style_check.sh + make doc8 + make check-docs -If you don't have permission to run the scripts, run:: - - chmod u+x ./scripts/doc8_style_check.sh .. _doc_style_docs8: -Style Checks Using ``Doc8`` +Style Checks Using ``doc8`` --------------------------- How To Run Style Tests @@ -118,8 +68,7 @@ How To Run Style Tests In the project root, run the following commands:: - $ cd docs - $ ./scripts/doc8_style_check.sh + make doc8 A sample output is:: @@ -143,11 +92,13 @@ A sample output is:: Now fix the errors and run again till there isn't any style error in the documentation. + What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. 
+Doc8 is a sub-project of the same Organization. Refer this +`README `_ for more details. What is checked: @@ -164,16 +115,19 @@ What is checked: - no carriage returns (use UNIX newlines) - D004 - no newline at end of file - D005 + .. _doc_interspinx: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +AboutCode documentation uses +`Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From 68daae1e7e475a89568e353f64f29af13754ce9e Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 35/80] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b33..1738b20a 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde79074..01e60fc6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369b..aaec643f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From 6a8c9ae144a1985b59fb69a0b2c55e32831714b8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 36/80] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc6..cea91bd1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 2fd31d54afa47418c764de0f1a30d67c7059ed7b Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 37/80] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd1..9e627366 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From eb5fc82ab1cab8a4742a2b9028d1436956960e81 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 38/80] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20a..930e8010 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 529d51621c9e2af8e1ec2503044b8752c71c3ba7 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 39/80] Improve import sorting Signed-off-by: Philippe 
Ombredanne --- etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded94..62dbb14f 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff527..30d376c4 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c56..a33b8b38 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d4..c71543f6 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0cd..de0ac95d 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc7289..b0295eca 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
diff --git a/pyproject.toml b/pyproject.toml index 9e627366..ba55770f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,10 +76,15 @@ select = [ "I", # isort "C9", # McCabe complexity ] -ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] [tool.ruff.lint.isort] force-single-line = true +lines-after-imports = 1 +default-section = "first-party" +known-first-party = ["src", "tests", "etc/scripts/**/*.py"] +known-third-party = ["click", "pytest"] + sections = { django = ["django"] } section-order = [ "future", From aae1a2847c0e493b0e8bea542da30dbdfb2be68e Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:35:16 +0100 Subject: [PATCH 40/80] Apply small code updates Signed-off-by: Philippe Ombredanne --- etc/scripts/utils_requirements.py | 20 ++++++++----- etc/scripts/utils_thirdparty.py | 48 +++++++++++++++---------------- 2 files changed, 37 insertions(+), 31 deletions(-) diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 1c502390..a9ac2235 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False): >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") + >>> expected = ("foo", ""), get_required_name_version("foo>=1.2") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected >>> try: ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ... 
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir): # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -150,9 +154,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index b0295eca..6d5ffdce 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - assert self.filename + if not self.filename: + raise ValueError(f"self.filename has no value but is required: {self.filename!r}") if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): urls = LinksRepository.from_url( use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") - for l in self.extra_data.get("licenses", {})] + extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [ln for ln in extra_lic_names if ln] + extra_lic_names = [eln for eln in extra_lic_names if eln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except: + except Exception: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except: - msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' + except Exception: + msg = f"No text for license {filename} in expression " + f"{self.license_expression!r} from {self}" print(msg) errors.append(msg) @@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [l for l in urls if l.endswith(f"/{filename}")] + path_or_url = [url for url in urls if url.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1140,7 +1140,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. @@ -1301,7 +1300,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except: + except Exception: return False @@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print( - f" Skipping invalid distribution from: {path_or_url}") + print(f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1500,8 +1498,7 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - for wheel in self.wheels: - yield wheel + yield from self.wheels def get_url_for_filename(self, filename): """ @@ -1632,7 +1629,8 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " + "available in this repo" ), ) @@ -1647,7 +1645,8 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. " + "Otherwise, fetch and cache." ), ) @@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 037f9fc1b03736eeac9e0eefac3e35acc916d193 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 41/80] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14f..1aa4e28a 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c4..c2246837 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", "--dest", 
"dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90dc..cfe68e67 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f6..cd39cda3 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac2235..167bc9f5 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdce..4ea1babb 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From 1189dda52570ed018f52eacd38c4990e8be8ff1a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 42/80] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2c..8aad8294 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. |psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28a..bb8347a5 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. 
and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c2246837..76a19a60 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e67..89d06265 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url):
     Return an HTML document as string which is a links index of all packages
     """
     document = []
-    header = f"""
+    header = """
     
     
     Links for all packages
     
@@ -177,13 +176,13 @@ def simple_index_entry(self, base_url):
 
 def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"):
     """
-    Using a ``directory`` directory of wheels and sdists, create the a PyPI
-    simple directory index at ``directory``/simple/ populated with the proper
-    PyPI simple index directory structure crafted using symlinks.
+    Create a PyPI simple directory index using a ``directory`` directory of wheels and sdists in
+    the directory at ``directory``/simple/ populated with the proper PyPI simple index directory
+    structure crafted using symlinks.
 
-    WARNING: The ``directory``/simple/ directory is removed if it exists.
-    NOTE: in addition to the a PyPI simple index.html there is also a links.html
-    index file generated which is suitable to use with pip's --find-links
+    WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the
+    PyPI simple index.html there is also a links.html index file generated which is suitable to use
+    with pip's --find-links
     """
     directory = Path(directory)
diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py
index 2b65ae80..1b879442 100644
--- a/etc/scripts/gen_requirements.py
+++ b/etc/scripts/gen_requirements.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -34,7 +33,8 @@ def gen_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages",
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-r",
diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py
index 5db1c48e..85482056 100644
--- a/etc/scripts/gen_requirements_dev.py
+++ b/etc/scripts/gen_requirements_dev.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -36,7 +35,8 @@ def gen_dev_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages',
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index a33b8b38..de4b7066 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -1,4 +1,5 @@
-"""Generate and work with PEP 425 Compatibility Tags.
+"""
+Generate and work with PEP 425 Compatibility Tags.
 
 copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py
 download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py
@@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected):
     assert actual == expected
 
 
-class Testcompatibility_tags(object):
+class Testcompatibility_tags:
     def mock_get_config_var(self, **kwd):
         """
         Patch sysconfig.get_config_var for arbitrary keys. 
@@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda3..b6bff518 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95d..dd954bca 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. 
:param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f5..b9b2c0e7 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1babb..aafc1d69 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 84257fbe200e5780cb13536a5c8eb56a88539e6a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 43/80] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196f..8a93c94d 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770f..a872ab3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c16..8cd85c94 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. 
+ + # TODO: update with switch to pyproject.toml setup_cfg = configparser.ConfigParser() setup_cfg.read("setup.cfg") if setup_cfg["metadata"]["name"] != "skeleton": return - args = "venv/bin/black --check -l 100 setup.py etc tests" + commands = [ + ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "format", "--check"], + ] + command = None try: - subprocess.check_output(args.split()) + for command in commands: + subprocess.check_output(command) # noqa: S603 except subprocess.CalledProcessError as e: print("===========================================================") print(e.output) print("===========================================================") raise Exception( - "Black style check failed; please format the code using:\n" - " python -m black -l 100 setup.py etc tests", + f"Code style and linting command check failed: {' '.join(command)!r}.\n" + "You can check and format the code using:\n" + " make valid\n", + "OR:\n ruff format\n", + " ruff check --fix\n", e.output, ) from e From 00684f733a0873bd837af85471814907ba93f456 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:08:25 +0100 Subject: [PATCH 44/80] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 + etc/scripts/test_utils_pip_compatibility_tags.py | 1 + tests/test_skeleton_codestyle.py | 1 + 3 files changed, 3 insertions(+) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index bb8347a5..65ae595e 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -11,6 +11,7 @@ import utils_thirdparty + @click.command() @click.option( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index de4b7066..0e9c360a 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -33,6 +33,7 @@ import utils_pip_compatibility_tags + @pytest.mark.parametrize( "version_info, expected", [ diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 8cd85c94..7135ac0d 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -11,6 +11,7 @@ import subprocess import unittest + class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): # This test shouldn't run in proliferated repositories. 
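
The style gate that patches 43 and 44 settle on can also be exercised outside the test suite. A
minimal standalone sketch (not part of any commit in this series, and assuming ruff is installed in
the project virtualenv at ``venv/bin/ruff``, as the test expects)::

    # Run the same two ruff commands as tests/test_skeleton_codestyle.py
    # and surface their output when either check fails.
    import subprocess

    commands = [
        ["venv/bin/ruff", "check"],
        ["venv/bin/ruff", "format", "--check"],
    ]

    for command in commands:
        try:
            subprocess.check_output(command)  # noqa: S603
        except subprocess.CalledProcessError as error:
            print(error.output)
            raise
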
From 7c4278df4e8acf04888a188d115b4c687060f1e5 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 45/80] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3a..0f8bd587 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 47cb840db13d3e7328dba8d8e62197cda82e48ec Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 46/80] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 29 ++++++++++++++--------------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc8..16e20464 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index f848b4b3..01d02108 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,10 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's repositories using -modern Python packaging and configuration techniques that can then be applied to many repos. - -Using this `blog post`_ as inspiration, this repository serves as the base for all new Python -projects and is mergeable in existing repositories as well. +This repo attempts to standardize the structure of the Python-based project's +repositories using modern Python packaging and configuration techniques. +Using this `blog post`_ as inspiration, this repository serves as the base for +all new Python projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -69,7 +68,7 @@ Release Notes - Drop support for Python 3.8 - Drop support for macOS-11, add support for macOS-14 - + - 2024-02-19: - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 @@ -86,19 +85,19 @@ Release Notes - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. 
See etc/scripts/README.rst for details - 2021-09-03: - - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` - + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. From 7b29b5914a443069a7ff967eb4ef096034333248 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 47/80] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e8010..debc404e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From 86c7ca45d3132e5f6873658ab1743b6e27cfeb58 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 48/80] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd587..51761ff8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 71583c5ecdfe5dd352b7f2bb9d26deaad971e151 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 49/80] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff8..7d807ebf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." 
] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 0f1a40382bdcadf82512395787faab50927256f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 50/80] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807ebf..5e16b564 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." ] From 6bea6577864bf432438dabaed9e46a721aae2961 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 51/80] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d92885..83fd2035 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d68..18b37038 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . 
--constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643f..ad8e0d8f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From c615589b54bfac74b6f17b2233b2afe60bf1d0f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 52/80] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404e..d21a2f95 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b564..bfb1d353 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From 04e0a89a3bbf5359f27b6e3ef9a5026638e63de8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 53/80] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 +++++------- Makefile | 10 +++++++--- docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b2..10ba5faa 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f95..413399e5 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 docs/ *.rst + 
@${ACTIVATE} doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 94163239..00000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c5426863..00000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad8294..056ca6ea 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db26..041b3583 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -169,11 +169,11 @@ What is checked: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +ScanCode toolkit documentation uses `Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ for more information. For example:: @@ -230,7 +230,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. 
However, this convention is used in Python’s Style Guide for documenting which you may follow:
 
diff --git a/pyproject.toml b/pyproject.toml
index bfb1d353..c9e67720 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,7 +122,5 @@ max-complexity = 10
 
 
 [tool.doc8]
-
 ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"]
 max-line-length=100
-verbose=0
 

From 8897cc63eb9ef9b06a1fdc77ebfe21289c69961b Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne 
Date: Sun, 30 Mar 2025 18:49:01 +0200
Subject: [PATCH 54/80] Add twine check to release publication

Signed-off-by: Philippe Ombredanne 
---
 .github/workflows/pypi-release.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index a66c9c80..cf0579a7 100644
--- a/.github/workflows/pypi-release.yml
+++ b/.github/workflows/pypi-release.yml
@@ -30,12 +30,15 @@ jobs:
         with:
           python-version: 3.12
 
-      - name: Install pypa/build
-        run: python -m pip install build --user
+      - name: Install pypa/build and twine
+        run: python -m pip install --user build twine
 
       - name: Build a binary wheel and a source tarball
         run: python -m build --sdist --wheel --outdir dist/
 
+      - name: Validate wheel and sdists for PyPI
+        run: python -m twine check dist/*
+
       - name: Upload built archives
         uses: actions/upload-artifact@v4
         with:

From 3d42985990860188c5fbf9f64f3fd4d14c590a65 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne 
Date: Sun, 30 Mar 2025 19:16:31 +0200
Subject: [PATCH 55/80] Refine doc contribution docs

Signed-off-by: Philippe Ombredanne 
---
 docs/source/contribute/contrib_doc.rst | 119 ++++++++-----------------
 1 file changed, 38 insertions(+), 81 deletions(-)

diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst
index 041b3583..dee9296d 100644
--- a/docs/source/contribute/contrib_doc.rst
+++ b/docs/source/contribute/contrib_doc.rst
@@ -8,109 +8,59 @@ Contributing to the Documentation
 Setup Local Build
 -----------------
 
-To get started, create or identify a working directory on your local machine.
+To get started, check out and configure the repository for development::
 
-Open that directory and execute the following command in a terminal session::
+    git clone https://github.com/aboutcode-org/.git
 
-    git clone https://github.com/aboutcode-org/skeleton.git
+    cd your-repo
+    ./configure --dev
 
-That will create an ``/skeleton`` directory in your working directory.
-Now you can install the dependencies in a virtualenv::
-
-    cd skeleton
-    ./configure --docs
+(Or use "make dev")
 
 .. note::
 
-    In case of windows, run ``configure --docs`` instead of this.
-
-Now, this will install the following prerequisites:
-
-- Sphinx
-- sphinx_rtd_theme (the format theme used by ReadTheDocs)
-- docs8 (style linter)
+    In case of windows, run ``configure --dev``.
 
-These requirements are already present in setup.cfg and `./configure --docs` installs them.
+This will install and configure all requirements for development including for docs development.
 
-Now you can build the HTML documents locally::
+Now you can build the HTML documentation locally::
 
     source venv/bin/activate
-    cd docs
-    make html
-
-Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the
-documentation .html files::
-
-    open build/html/index.html
-
-.. 
note::
-
-    In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t
-    get a file descriptor referring to the console”, try:
-
-    ::
-
-        see build/html/index.html
+    make docs
 
-You now have a local build of the AboutCode documents.
+This will build a local instance of the ``docs/_build`` directory::
 
-.. _contrib_doc_share_improvements:
+    open docs/_build/index.html
 
-Share Document Improvements
----------------------------
-
-Ensure that you have the latest files::
-
-    git pull
-    git status
 
-Before commiting changes run Continious Integration Scripts locally to run tests. Refer
-:ref:`doc_ci` for instructions on the same.
+To validate the documentation style and content, use::
 
-Follow standard git procedures to upload your new and modified files. The following commands are
-examples::
-
-    git status
-    git add source/index.rst
-    git add source/how-to-scan.rst
-    git status
-    git commit -m "New how-to document that explains how to scan"
-    git status
-    git push
-    git status
+    source venv/bin/activate
+    make doc8
+    make docs-check
 
-The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your
-Pull Request is Merged.
 
-Refer the `Pro Git Book `_ available online for Git tutorials
-covering more complex topics on Branching, Merging, Rebasing etc.
 
 .. _doc_ci:
 
 Continuous Integration
 ----------------------
 
-The documentations are checked on every new commit through Travis-CI, so that common errors are
-avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects
-of the documentation :
+The documentations are checked on every new commit, so that common errors are avoided and
+documentation standards are enforced. We check for these aspects of the documentation:
 
 1. Successful Builds (By using ``sphinx-build``)
-2. No Broken Links (By Using ``link-check``)
-3. Linting Errors (By Using ``Doc8``)
+2. No Broken Links (By Using ``linkcheck``)
+3. Linting Errors (By Using ``doc8``)
 
-So run these scripts at your local system before creating a Pull Request::
+You must run these scripts locally before creating a pull request::
 
-    cd docs
-    ./scripts/sphinx_build_link_check.sh
-    ./scripts/doc8_style_check.sh
+    make doc8
+    make docs-check
 
-If you don't have permission to run the scripts, run::
-
-    chmod u+x ./scripts/doc8_style_check.sh
 
 .. _doc_style_docs8:
 
-Style Checks Using ``Doc8``
+Style Checks Using ``doc8``
 ---------------------------
 
 How To Run Style Tests
@@ -118,8 +68,7 @@ How To Run Style Tests
 
 In the project root, run the following commands::
 
-    $ cd docs
-    $ ./scripts/doc8_style_check.sh
+    make doc8
 
 A sample output is::
 
@@ -143,11 +92,13 @@ A sample output is::
 
 Now fix the errors and run again till there isn't any style error in the documentation.
 
+
 What is Checked?
 ^^^^^^^^^^^^^^^^
 
 PyCQA is an Organization for code quality tools (and plugins) for the Python programming language.
-Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details.
+Doc8 is a sub-project of the same Organization. Refer this
+`README `_ for more details.
 
 What is checked:
 
@@ -164,16 +115,19 @@ What is checked:
     - no carriage returns (use UNIX newlines) - D004
     - no newline at end of file - D005
 
+
 .. _doc_interspinx:
 
 Interspinx
 ----------
 
-ScanCode toolkit documentation uses `Intersphinx `_
+AboutCode documentation uses
+`Intersphinx `_
 to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects.
 
 To link sections in the same documentation, standart reST labels are used. 
Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From f428366859a143add93160bd0b1ae685be89fcbb Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:35:36 -0700 Subject: [PATCH 56/80] Update codestyle command * Remove trailing whitespace Signed-off-by: Jono Yang --- docs/source/contribute/contrib_doc.rst | 4 ++-- tests/test_skeleton_codestyle.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296d..2a719a52 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 7135ac0d..6060c085 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -23,7 +23,7 @@ def test_skeleton_codestyle(self): return commands = [ - ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "check"], ["venv/bin/ruff", "format", "--check"], ] command = None From f0d0e21d5e6f98645b02ff9a8fee6ee3def1be75 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:44:30 -0700 Subject: [PATCH 57/80] Update README.rst Signed-off-by: Jono Yang --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 01d02108..11a4dfb0 100644 --- a/README.rst +++ b/README.rst @@ -44,6 +44,10 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. 
Release Notes ============= +- 2025-03-31: + + - Use ruff as the main code formatting tool, add ruff rules to pyproject.toml + - 2025-03-29: - Add support for beta macOS-15 From f3a8aa6cee5f645a668750ab7e6bf0cdc774e041 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 14:31:37 -0700 Subject: [PATCH 58/80] Update BUILDDIR envvar in docs/Makefile Signed-off-by: Jono Yang --- docs/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Makefile b/docs/Makefile index 788b0396..94f686b2 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -7,7 +7,7 @@ SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source -BUILDDIR = build +BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: From 5b0f4d6b4079719caa9ed97efb2ba776bc2bbac1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:42:52 +0200 Subject: [PATCH 59/80] Fix doc line length Signed-off-by: Philippe Ombredanne --- docs/source/contribute/contrib_doc.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296d..2a719a52 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. 
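The line-length fixes above are driven by the ``[tool.doc8]`` setting ``max-line-length=100`` shown earlier in this series, the same rule doc8 reports as D001 ("lines should not be longer than 100 characters"). Purely as an illustration — a minimal stdlib sketch approximating what that check looks for, not the doc8 implementation itself, and assuming the reStructuredText sources live under ``docs/source``::

    #!/usr/bin/env python
    # Illustrative sketch only: approximates doc8's D001 "line too long" check.
    # Not the doc8 implementation. Assumes .rst sources live under docs/source.
    from pathlib import Path

    MAX_LINE_LENGTH = 100  # mirrors max-line-length=100 in [tool.doc8]

    def find_long_lines(docs_dir="docs/source", max_len=MAX_LINE_LENGTH):
        """Yield (path, line_number, length) for .rst lines longer than max_len."""
        for rst_file in Path(docs_dir).rglob("*.rst"):
            lines = rst_file.read_text(encoding="utf-8").splitlines()
            for number, line in enumerate(lines, start=1):
                if len(line) > max_len:
                    yield rst_file, number, len(line)

    if __name__ == "__main__":
        for path, number, length in find_long_lines():
            print(f"{path}:{number}: line is {length} characters long (max {MAX_LINE_LENGTH})")

Running ``make doc8`` remains the authoritative check; the sketch only shows what the D001 rule flags.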
From e776fef5ad595378752d39425109a4cbd2cb5175 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:49:40 +0200 Subject: [PATCH 60/80] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py index 5705fc43..374c06f2 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -15,7 +15,7 @@ import click -ABOUTCODE_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES = [ "aboutcode-toolkit", "ahocode", "bitcode", @@ -87,7 +87,9 @@ def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) + subprocess.run( + ["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"] + ) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From 2a43f4cdc8105473b279eea873db00addaa14551 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:59:05 +0200 Subject: [PATCH 61/80] Correct supported runner on Azure See for details: https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml macOS ARM images do not seem to be supported there Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 80ae45b1..fb03c090 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,14 +26,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython - image_name: macOS-13-xlarge - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -42,14 +34,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython - image_name: macOS-14-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos14_cpython_arm64 image_name: macOS-14 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -63,14 +47,6 @@ jobs: test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos15_cpython_arm64 - image_name: macOS-15-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2019_cpython From 4a15550b7bcea5ec949a5049fe1a501d3bb888ff Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 19:34:14 +0200 Subject: [PATCH 62/80] Add code checks to CI Remove running "make check" as a test Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 8 ++++++ tests/test_skeleton_codestyle.py | 44 -------------------------------- 2 files changed, 8 insertions(+), 44 deletions(-) delete mode 100644 tests/test_skeleton_codestyle.py diff --git a/azure-pipelines.yml b/azure-pipelines.yml index fb03c090..ad18b28a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -7,6 +7,14 @@ jobs: + - template: etc/ci/azure-posix.yml + parameters: + job_name: run_code_checks + 
image_name: ubuntu-24.04 + python_versions: ['3.12'] + test_suites: + all: make check + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu22_cpython diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py deleted file mode 100644 index 6060c085..00000000 --- a/tests/test_skeleton_codestyle.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# ScanCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/aboutcode-org/skeleton for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - -import configparser -import subprocess -import unittest - - -class BaseTests(unittest.TestCase): - def test_skeleton_codestyle(self): - # This test shouldn't run in proliferated repositories. - - # TODO: update with switch to pyproject.toml - setup_cfg = configparser.ConfigParser() - setup_cfg.read("setup.cfg") - if setup_cfg["metadata"]["name"] != "skeleton": - return - - commands = [ - ["venv/bin/ruff", "check"], - ["venv/bin/ruff", "format", "--check"], - ] - command = None - try: - for command in commands: - subprocess.check_output(command) # noqa: S603 - except subprocess.CalledProcessError as e: - print("===========================================================") - print(e.output) - print("===========================================================") - raise Exception( - f"Code style and linting command check failed: {' '.join(command)!r}.\n" - "You can check and format the code using:\n" - " make valid\n", - "OR:\n ruff format\n", - " ruff check --fix\n", - e.output, - ) from e From b2d7512735ca257088d2cac4b55590fc5d7b20b4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 20:15:18 +0200 Subject: [PATCH 63/80] Revert support for Python 3.13 This is not yet supported everywhere Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ad18b28a..7a2d4d9b 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -59,7 +59,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', 
'3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -67,7 +67,7 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +75,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 2e3464b79811bcf505d93692da2418e2444150ed Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:52:32 +0200 Subject: [PATCH 64/80] Ignore local .env file Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8a93c94d..4818bb3a 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ /.ruff_cache/ +.env \ No newline at end of file From d4af79f0da82ab0d16dcf60b363a6a5290cd9403 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:54:29 +0200 Subject: [PATCH 65/80] Add correct extras for documentation Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 8ab23688..7e399c8a 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - docs + - testing From 49bfd37c7273f2118d35585c67e53f0cf7642f43 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:58:37 +0200 Subject: [PATCH 66/80] Improve MANIFEST Signed-off-by: Philippe Ombredanne --- MANIFEST.in | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index ef3721e8..0f197075 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,6 @@ graft src +graft docs +graft etc include *.LICENSE include NOTICE @@ -6,10 +8,18 @@ include *.ABOUT include *.toml include *.yml include *.rst +include *.png include setup.* include configure* include requirements* -include .git* +include .dockerignore +include .gitignore +include .readthedocs.yml +include manage.py +include Dockerfile* +include Makefile +include MANIFEST.in -global-exclude *.py[co] __pycache__ *.*~ +include .VERSION +global-exclude *.py[co] __pycache__ *.*~ From 5bc987a16cb3ae0f6de101a9c9e277df431f4317 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:12:58 +0200 Subject: [PATCH 67/80] Improve cleaning on POSIX Signed-off-by: Philippe Ombredanne --- configure | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/configure b/configure index 83fd2035..3dd9a0ab 100755 --- a/configure +++ b/configure @@ -35,7 +35,7 @@ DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constrai VIRTUALENV_DIR=venv # Cleanable files and directories to delete with the --clean option -CLEANABLE="build dist venv .cache .eggs" +CLEANABLE="build dist venv .cache .eggs *.egg-info docs/_build/ pip-selfcheck.json" # extra arguments passed to pip PIP_EXTRA_ARGS=" " @@ -167,6 +167,7 @@ clean() { for cln in $CLEANABLE; do rm -rf "${CFG_ROOT_DIR:?}/${cln:?}"; done + find . 
-type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete set +e exit } From 887779a9bd36650ffc6751f0069b492e80dd2f08 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:19:44 +0200 Subject: [PATCH 68/80] Rename dev extra to "dev" Instead of testing ... and update references accordingly Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- Makefile | 7 ++++++- configure | 2 +- configure.bat | 2 +- setup.cfg | 2 +- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7e399c8a..683f3a82 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - testing + - dev diff --git a/Makefile b/Makefile index 413399e5..3041547b 100644 --- a/Makefile +++ b/Makefile @@ -13,8 +13,13 @@ PYTHON_EXE?=python3 VENV=venv ACTIVATE?=. ${VENV}/bin/activate; + +conf: + @echo "-> Install dependencies" + ./configure + dev: - @echo "-> Configure the development envt." + @echo "-> Configure and install development dependencies" ./configure --dev doc8: diff --git a/configure b/configure index 3dd9a0ab..5ef0e063 100755 --- a/configure +++ b/configure @@ -29,7 +29,7 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" -DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +DEV_REQUIREMENTS="--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" # where we create a virtualenv VIRTUALENV_DIR=venv diff --git a/configure.bat b/configure.bat index 18b37038..3e9881fb 100644 --- a/configure.bat +++ b/configure.bat @@ -27,7 +27,7 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" -set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +set "DEV_REQUIREMENTS=--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" diff --git a/setup.cfg b/setup.cfg index ad8e0d8f..99ba2607 100644 --- a/setup.cfg +++ b/setup.cfg @@ -48,7 +48,7 @@ where = src [options.extras_require] -testing = +dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 From 209231f0d27de0b0cfcc51eeb0fbaf9393d3df1c Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:50:44 +0200 Subject: [PATCH 69/80] Add more excludes from tests Signed-off-by: Philippe Ombredanne --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c9e67720..bcca1a8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ norecursedirs = [ "dist", "build", "_build", - "dist", "etc", "local", "ci", @@ -34,7 +33,9 @@ norecursedirs = [ "thirdparty", "tmp", "venv", + ".venv", "tests/data", + "*/tests/test_data", ".eggs", "src/*/data", "tests/*/data" From 47bce2da33db6b3ce3bb16831ddca89a65494e23 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:54:23 +0200 Subject: [PATCH 70/80] Do not lint django migrations Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bcca1a8a..d79574ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ include = [ "docs/**/*.py", "*.py", "." 
+ ] # ignore test data and testfiles: they should never be linted nor formatted exclude = [ @@ -78,9 +79,10 @@ exclude = [ # vulnerablecode, fetchcode "**/tests/*/test_data/**/*", "**/tests/test_data/**/*", +# django migrations + "**/migrations/**/*" ] - [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ From 5025cfb59f0555bf4b40cd75e75ce41188e19e11 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:58:07 +0200 Subject: [PATCH 71/80] Add README.rst to list of "license files" Signed-off-by: Philippe Ombredanne --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index 99ba2607..e5b56dad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,6 +28,7 @@ license_files = AUTHORS.rst CHANGELOG.rst CODE_OF_CONDUCT.rst + README.rst [options] package_dir = From 548a72eac69e4400e4b01f22941d38fe1cb4648d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 18:59:08 +0200 Subject: [PATCH 72/80] Use Python 3.9 as lowest suupported version Signed-off-by: Philippe Ombredanne --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index e5b56dad..a9c5dbc6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,6 +31,8 @@ license_files = README.rst [options] +python_requires = >=3.9 + package_dir = =src packages = find: @@ -39,7 +41,6 @@ zip_safe = false setup_requires = setuptools_scm[toml] >= 4 -python_requires = >=3.8 install_requires = From 3d256b4ac7976b46c23424e86bb62a38f0e4a095 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 19:01:22 +0200 Subject: [PATCH 73/80] Drop pycodestyle Not used anymore Signed-off-by: Philippe Ombredanne --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index a9c5dbc6..6d0b6488 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,6 @@ dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 - pycodestyle >= 2.8.0 twine ruff Sphinx>=5.0.2 From 645052974bf7e6f45c1e55a24a2acaa0cee24523 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 19:26:17 +0200 Subject: [PATCH 74/80] Bump pytest minimal version Signed-off-by: Philippe Ombredanne --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 6d0b6488..69f850ca 100644 --- a/setup.cfg +++ b/setup.cfg @@ -51,7 +51,7 @@ where = src [options.extras_require] dev = - pytest >= 6, != 7.0.0 + pytest >= 7.0.1 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 twine From af87cfab2d06fb034a90412a87e0d4e660e214ee Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 00:40:47 +0530 Subject: [PATCH 75/80] Update CI runners and scripts Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 2 +- .github/workflows/pypi-release.yml | 4 +- azure-pipelines.yml | 24 ++++-------- configure | 2 +- configure.bat | 4 +- etc/ci/azure-container-deb.yml | 2 +- etc/ci/azure-container-rpm.yml | 2 +- etc/scripts/utils_thirdparty.py | 61 +++++++++++++++--------------- 8 files changed, 46 insertions(+), 55 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 10ba5faa..8d8aa551 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -9,7 +9,7 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: [3.12] + python-version: [3.13] steps: - name: Checkout code diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index cf0579a7..7f813614 100644 --- 
a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -31,10 +31,10 @@ jobs: python-version: 3.12 - name: Install pypa/build and twine - run: python -m pip install --user build twine + run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --wheel --outdir dist/ + run: python -m build --sdist --outdir dist/ - name: Validate wheel and sdis for Pypi run: python -m twine check dist/* diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7a2d4d9b..4d347b70 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ jobs: parameters: job_name: run_code_checks image_name: ubuntu-24.04 - python_versions: ['3.12'] + python_versions: ['3.13'] test_suites: all: make check @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,23 +51,15 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml - parameters: - job_name: win2019_cpython - image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', '3.12'] - test_suites: - all: venv\Scripts\pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +67,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs diff --git a/configure b/configure index 5ef0e063..6d317d4c 100755 --- a/configure +++ b/configure @@ -110,7 +110,7 @@ create_virtualenv() { fi $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ - --wheel embed --pip embed --setuptools embed \ + --pip embed --setuptools embed \ --seeder pip \ --never-download \ --no-periodic-update \ diff --git a/configure.bat b/configure.bat index 3e9881fb..15ab7015 100644 --- a/configure.bat +++ b/configure.bat @@ -110,7 +110,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( if exist "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ( %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ @@ -126,7 +126,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( ) ) 
%PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ diff --git a/etc/ci/azure-container-deb.yml b/etc/ci/azure-container-deb.yml index 85b611d3..d80e8dfb 100644 --- a/etc/ci/azure-container-deb.yml +++ b/etc/ci/azure-container-deb.yml @@ -21,7 +21,7 @@ jobs: - job: ${{ parameters.job_name }} pool: - vmImage: 'ubuntu-16.04' + vmImage: 'ubuntu-22.04' container: image: ${{ parameters.container }} diff --git a/etc/ci/azure-container-rpm.yml b/etc/ci/azure-container-rpm.yml index 1e6657d0..a64138c9 100644 --- a/etc/ci/azure-container-rpm.yml +++ b/etc/ci/azure-container-rpm.yml @@ -1,6 +1,6 @@ parameters: job_name: '' - image_name: 'ubuntu-16.04' + image_name: 'ubuntu-22.04' container: '' python_path: '' python_version: '' diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index aafc1d69..6f812f09 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -24,13 +25,14 @@ import packageurl import requests import saneyaml -import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version +import utils_pip_compatibility_tags + """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. @@ -91,8 +93,7 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as - {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: @@ -114,13 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "37", "38", "39", "310" +PYTHON_VERSIONS = "39", "310", "311", "312", "313" PYTHON_DOT_VERSIONS_BY_VER = { - "37": "3.7", - "38": "3.8", "39": "3.9", "310": "3.10", + "311": "3.11", + "312": "3.12", + "313": "3.13", } @@ -132,10 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "37": ["cp37", "cp37m", "abi3"], - "38": ["cp38", "cp38m", "abi3"], "39": ["cp39", "cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], + "311": ["cp311", "cp311m", "abi3"], + "312": ["cp312", "cp312m", "abi3"], + "313": ["cp313", "cp313m", "abi3"], } PLATFORMS_BY_OS = { @@ -553,8 +556,7 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - if not self.filename: - raise ValueError(f"self.filename has no value but is required: {self.filename!r}") + assert self.filename if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -822,9 +824,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): """ urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] + extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [eln for eln in extra_lic_names if eln] + extra_lic_names = [ln for ln in extra_lic_names if ln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -844,7 +846,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except Exception: + except: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -857,9 +859,8 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except Exception: - msg = f"No text for license {filename} in expression " - f"{self.license_expression!r} from {self}" + except: + msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' print(msg) errors.append(msg) @@ -999,7 +1000,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [url for url in urls if url.endswith(f"/{filename}")] + path_or_url = [l for l in urls if l.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1288,7 +1289,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except Exception: + except: return False @@ -1484,7 +1485,8 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - yield from self.wheels + for wheel in self.wheels: + yield wheel def get_url_for_filename(self, filename): """ @@ -1613,8 +1615,7 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " - "available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" ), ) @@ -1628,8 +1629,7 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. " - "Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." ), ) @@ -1638,8 +1638,7 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - if not name: - raise ValueError(f"name is required: {name!r}") + assert name normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1694,7 +1693,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [link.partition("#sha256=") for link in links] + links = [l.partition("#sha256=") for l in links] links = [url for url, _, _sha256 in links] return links @@ -1915,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2134,7 +2133,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: stdouts = [] @@ -2199,7 +2198,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print("Downloading wheels using command:", " ".join(cli_args)) + print(f"Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2232,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2283,5 +2282,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(lic).lower() for lic in declared_licenses] + lics = [python_safe_name(l).lower() for l in declared_licenses] return " AND ".join(lics).lower() From 72c7d266275a472073d2829a25d730ada9436ab3 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 18:16:30 +0530 Subject: [PATCH 76/80] Add missing wheel builds on release CI Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/pypi-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 7f813614..d41fbf22 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -34,7 +34,7 @@ jobs: run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --outdir dist/ + run: python -m build --wheel --sdist --outdir dist/ - name: Validate wheel and sdis for Pypi run: python -m twine check dist/* From 8c15812ae0191dfe82268f03ae26170032f3bf29 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Wed, 16 Jul 2025 16:16:12 -0700 Subject: [PATCH 77/80] Update code style Signed-off-by: Jono Yang --- pyproject.toml | 4 ++-- src/pygmars/parse.py | 3 +-- src/pygmars/tree.py | 2 +- tests/test_parse_doctest.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git 
a/pyproject.toml b/pyproject.toml index d79574ef..71477368 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ include = [ "." ] -# ignore test data and testfiles: they should never be linted nor formatted +# ignore test data and testfiles: they should never be linted nor formatted exclude = [ # main style "**/tests/data/**/*", @@ -95,7 +95,7 @@ select = [ "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D404", "D415", "I001"] [tool.ruff.lint.isort] diff --git a/src/pygmars/parse.py b/src/pygmars/parse.py index f931d3d4..f33dbc4f 100644 --- a/src/pygmars/parse.py +++ b/src/pygmars/parse.py @@ -7,7 +7,7 @@ # See https://aboutcode.org for more information about nexB OSS projects. # -""" +r""" This module defines ``Parser`` which is a regular expression-based parser to parse list of Tokens in a parse tree where each node has a label. @@ -269,7 +269,6 @@ def to_tree(self, label="GROUP", pieces_splitter=re.compile(r"[{}]").split): # ['ads', '', '', '', ''] # not match, match, not match, match, match for piece in pieces_splitter(self._parse_string): - # Find the list of tokens contained in this piece. length = piece.count("<") subsequence = tree[index : index + length] diff --git a/src/pygmars/tree.py b/src/pygmars/tree.py index 36ee2352..1ccbc25b 100644 --- a/src/pygmars/tree.py +++ b/src/pygmars/tree.py @@ -178,7 +178,7 @@ def __setitem__(self, index, value): return list.__setitem__(self, index, value) elif isinstance(index, (list, tuple)): if len(index) == 0: - raise IndexError("The tree position () may not be " "assigned to.") + raise IndexError("The tree position () may not be assigned to.") elif len(index) == 1: self[index[0]] = value else: diff --git a/tests/test_parse_doctest.py b/tests/test_parse_doctest.py index 818eb914..03cca09d 100644 --- a/tests/test_parse_doctest.py +++ b/tests/test_parse_doctest.py @@ -10,7 +10,7 @@ # # Natural Language Toolkit (NLTK) # URL: -""" +r""" ========== Parsing ========== From 15624786bc8081c1b5c9a0e6447ee7b970ae7801 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Wed, 16 Jul 2025 16:25:58 -0700 Subject: [PATCH 78/80] Remove docs extra_require and join with dev Signed-off-by: Jono Yang --- setup.cfg | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index a9133957..e9d4a784 100644 --- a/setup.cfg +++ b/setup.cfg @@ -56,8 +56,6 @@ dev = aboutcode-toolkit >= 7.0.2 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 From 4aba2e7655f3bb7abcbc986bed9522595f87fccd Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Wed, 16 Jul 2025 16:31:08 -0700 Subject: [PATCH 79/80] Update test expectations Signed-off-by: Jono Yang --- tests/test_parse_doctest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_parse_doctest.py b/tests/test_parse_doctest.py index 03cca09d..c99eb995 100644 --- a/tests/test_parse_doctest.py +++ b/tests/test_parse_doctest.py @@ -29,7 +29,7 @@ >>> tag_pattern = "
?*" >>> regexp_pattern = label_pattern_to_regex(tag_pattern) >>> regexp_pattern - '(?:<(?:DT)>)?(?:<(?:JJ)>)*(?:<(?:NN[^\\\\{\\\\}<>]*)>)' + '(?:<(?:DT)>)?(?:<(?:JJ)>)*(?:<(?:NN[^\\{\\}<>]*)>)' Create a Rule and parse something: From 3be2da9dc73d1ed7da3e9d9921e18adc733de511 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Wed, 16 Jul 2025 16:37:40 -0700 Subject: [PATCH 80/80] Update doc link Signed-off-by: Jono Yang --- docs/source/contribute/contrib_doc.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 2a719a52..50c592cc 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -245,12 +245,11 @@ Style Conventions for the Documentaion 6. Spelling - You should check for spelling errors before you push changes. `Aspell `_ + You should check for spelling errors before you push changes. ``Aspell`` is a GNU project Command Line tool you can use for this purpose. Download and install Aspell, then execute ``aspell check `` for all the files changed. Be careful about not changing commands or other stuff as Aspell gives prompts for a lot of them. Also delete the - temporary ``.bak`` files generated. Refer the `manual `_ for more - information on how to use. + temporary ``.bak`` files generated. 7. Notes and Warning Snippets
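The Aspell routine in item 6 above is described as a manual step: run ``aspell check <file>`` on each changed file, then delete the temporary ``.bak`` files it generates. Purely as a convenience sketch — not part of the skeleton, and assuming ``git`` and ``aspell`` are available on the PATH — the same routine can be wrapped in a small script::

    #!/usr/bin/env python
    # Convenience sketch only; not part of the skeleton. Assumes `git` and `aspell`
    # are installed and that the documentation sources are tracked .rst files.
    import subprocess
    from pathlib import Path

    def changed_rst_files():
        """List the .rst files modified in the working tree (git diff --name-only)."""
        output = subprocess.check_output(["git", "diff", "--name-only", "HEAD"], text=True)
        return [Path(name) for name in output.splitlines() if name.endswith(".rst")]

    def spell_check(paths):
        """Run the interactive `aspell check <file>` session for each changed file."""
        for path in paths:
            subprocess.run(["aspell", "check", str(path)])

    def remove_backups(paths):
        """Delete the temporary .bak files that aspell leaves behind."""
        for path in paths:
            backup = Path(str(path) + ".bak")
            if backup.exists():
                backup.unlink()

    if __name__ == "__main__":
        paths = changed_rst_files()
        spell_check(paths)
        remove_backups(paths)

The documented workflow is unchanged: ``aspell check`` per changed file, then clean up the ``.bak`` files; the script is only a helper around those two steps.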