diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml new file mode 100644 index 00000000..511b7c28 --- /dev/null +++ b/.github/workflows/docs-ci.yml @@ -0,0 +1,37 @@ +name: CI Documentation + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-20.04 + + strategy: + max-parallel: 4 + matrix: + python-version: [3.9] + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Give permission to run scripts + run: chmod +x ./docs/scripts/doc8_style_check.sh + + - name: Install Dependencies + run: pip install -e .[docs] + + - name: Check Sphinx Documentation build minimally + working-directory: ./docs + run: sphinx-build -E -W source build + + - name: Check for documentation style errors + working-directory: ./docs + run: ./scripts/doc8_style_check.sh + + diff --git a/.gitignore b/.gitignore index 31f9176f..b7916761 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.py[cod] # virtualenv and other misc bits +/src/*.egg-info *.egg-info /dist /build diff --git a/.readthedocs.yml b/.readthedocs.yml index 1b71cd9e..8ab23688 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,6 +5,17 @@ # Required version: 2 +# Build in latest ubuntu/python +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build PDF & ePub +formats: + - epub + - pdf + # Where the Sphinx conf.py file is located sphinx: configuration: docs/source/conf.py diff --git a/CODE_OF_CONDUCT.rst b/CODE_OF_CONDUCT.rst new file mode 100644 index 00000000..590ba198 --- /dev/null +++ b/CODE_OF_CONDUCT.rst @@ -0,0 +1,86 @@ +Contributor Covenant Code of Conduct +==================================== + +Our Pledge +---------- + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our +project and our community a harassment-free experience for everyone, +regardless of age, body size, disability, ethnicity, gender identity and +expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +Our Standards +------------- + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual + attention or advances +- Trolling, insulting/derogatory comments, and personal or political + attacks +- Public or private harassment +- Publishing others’ private information, such as a physical or + electronic address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +Our Responsibilities +-------------------- + +Project maintainers are responsible for clarifying the standards of +acceptable behavior and are expected to take appropriate and fair +corrective action in response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, +or reject comments, commits, code, wiki edits, issues, and other +contributions that are not aligned to this Code of Conduct, or to ban +temporarily or permanently any contributor for other behaviors that they +deem inappropriate, threatening, offensive, or harmful. + +Scope +----- + +This Code of Conduct applies both within project spaces and in public +spaces when an individual is representing the project or its community. +Examples of representing a project or community include using an +official project e-mail address, posting via an official social media +account, or acting as an appointed representative at an online or +offline event. Representation of a project may be further defined and +clarified by project maintainers. + +Enforcement +----------- + +Instances of abusive, harassing, or otherwise unacceptable behavior may +be reported by contacting the project team at pombredanne@gmail.com +or on the Gitter chat channel at https://gitter.im/aboutcode-org/discuss . +All complaints will be reviewed and investigated and will result in a +response that is deemed necessary and appropriate to the circumstances. +The project team is obligated to maintain confidentiality with regard to +the reporter of an incident. Further details of specific enforcement +policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in +good faith may face temporary or permanent repercussions as determined +by other members of the project’s leadership. + +Attribution +----------- + +This Code of Conduct is adapted from the `Contributor Covenant`_ , +version 1.4, available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +.. _Contributor Covenant: https://www.contributor-covenant.org diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..cc36c355 --- /dev/null +++ b/Makefile @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/skeleton for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +# Python version can be specified with `$ PYTHON_EXE=python3.x make conf` +PYTHON_EXE?=python3 +VENV=venv +ACTIVATE?=. ${VENV}/bin/activate; + +dev: + @echo "-> Configure the development envt." + ./configure --dev + +isort: + @echo "-> Apply isort changes to ensure proper imports ordering" + ${VENV}/bin/isort --sl -l 100 src tests setup.py + +black: + @echo "-> Apply black code formatter" + ${VENV}/bin/black -l 100 src tests setup.py + +doc8: + @echo "-> Run doc8 validation" + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + +valid: isort black + +check: + @echo "-> Run pycodestyle (PEP8) validation" + @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . + @echo "-> Run isort imports ordering validation" + @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
+ @echo "-> Run black validation" + @${ACTIVATE} black --check --check -l 100 src tests setup.py + +clean: + @echo "-> Clean the Python env" + ./configure --clean + +test: + @echo "-> Run the test suite" + ${VENV}/bin/pytest -vvs + +docs: + rm -rf docs/_build/ + @${ACTIVATE} sphinx-build docs/ docs/_build/ + +.PHONY: conf dev check valid black isort clean test docs diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 6610396b..80aee3fe 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -9,8 +9,8 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu18_cpython - image_name: ubuntu-18.04 + job_name: ubuntu20_cpython + image_name: ubuntu-20.04 python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] test_suites: all: | @@ -19,8 +19,8 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: ubuntu20_cpython - image_name: ubuntu-20.04 + job_name: ubuntu22_cpython + image_name: ubuntu-22.04 python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] test_suites: all: | @@ -29,8 +29,8 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos1015_cpython - image_name: macos-10.15 + job_name: macos11_cpython + image_name: macOS-11 python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] test_suites: all: | @@ -39,8 +39,18 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos11_cpython - image_name: macos-11 + job_name: macos12_cpython + image_name: macOS-12 + python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] + test_suites: + all: | + source venv/bin/activate + pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos13_cpython + image_name: macOS-13 python_versions: ['3.7', '3.8', '3.9', '3.10', '3.11'] test_suites: all: | diff --git a/configure b/configure index 8c5f4abc..926a894e 100755 --- a/configure +++ b/configure @@ -30,12 +30,13 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv # Cleanable files and directories to delete with the --clean option -CLEANABLE="build venv" +CLEANABLE="build dist venv .cache .eggs" # extra arguments passed to pip PIP_EXTRA_ARGS=" " @@ -51,13 +52,20 @@ CFG_ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CFG_BIN_DIR=$CFG_ROOT_DIR/$VIRTUALENV_DIR/bin +################################ +# Install with or without and index. 
With "--no-index" this is using only local wheels +# This is an offline mode with no index and no network operations +# NO_INDEX="--no-index " +NO_INDEX="" + + ################################ # Thirdparty package locations and index handling -# Find packages from the local thirdparty directory or from thirdparty.aboutcode.org -if [ -f "$CFG_ROOT_DIR/thirdparty" ]; then - PIP_EXTRA_ARGS="--find-links $CFG_ROOT_DIR/thirdparty" +# Find packages from the local thirdparty directory if present +THIRDPARDIR=$CFG_ROOT_DIR/thirdparty +if [[ "$(echo $THIRDPARDIR/*.whl)x" != "$THIRDPARDIR/*.whlx" ]]; then + PIP_EXTRA_ARGS="$NO_INDEX --find-links $THIRDPARDIR" fi -PIP_EXTRA_ARGS="$PIP_EXTRA_ARGS --find-links https://thirdparty.aboutcode.org/pypi/simple/links.html" ################################ @@ -177,10 +185,12 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; + docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done + PIP_EXTRA_ARGS="$PIP_EXTRA_ARGS" find_python diff --git a/configure.bat b/configure.bat index e38b5fb3..5e95b311 100644 --- a/configure.bat +++ b/configure.bat @@ -28,12 +28,13 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @rem # Cleanable files and directories to delete with the --clean option -set "CLEANABLE=build venv" +set "CLEANABLE=build dist venv .cache .eggs" @rem # extra arguments passed to pip set "PIP_EXTRA_ARGS= " @@ -51,11 +52,10 @@ set "CFG_BIN_DIR=%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\Scripts" @rem ################################ @rem # Thirdparty package locations and index handling -@rem # Find packages from the local thirdparty directory or from thirdparty.aboutcode.org +@rem # Find packages from the local thirdparty directory if exist "%CFG_ROOT_DIR%\thirdparty" ( set PIP_EXTRA_ARGS=--find-links "%CFG_ROOT_DIR%\thirdparty" ) -set "PIP_EXTRA_ARGS=%PIP_EXTRA_ARGS% --find-links https://thirdparty.aboutcode.org/pypi/simple/links.html" @rem ################################ @@ -68,7 +68,6 @@ if not defined CFG_QUIET ( @rem ################################ @rem # Main command line entry point set "CFG_REQUIREMENTS=%REQUIREMENTS%" -set "NO_INDEX=--no-index" :again if not "%1" == "" ( @@ -77,6 +76,9 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) + if "%1" EQU "--docs" ( + set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" + ) shift goto again ) diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 00000000..9662d63a --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,353 @@ +body { + color: #000000; +} + +p { + margin-bottom: 10px; +} + +.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { + margin-bottom: 10px; +} + +.custom_header_01 { + color: #cc0000; + font-size: 22px; + font-weight: bold; + line-height: 50px; +} + +h1, h2, h3, h4, h5, h6 { + margin-bottom: 20px; + margin-top: 20px; +} + +h5 { + font-size: 18px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +h6 { + font-size: 15px; + color: #000000; + font-style: italic; + margin-bottom: 10px; +} + +/* custom admonitions 
*/ +/* success */ +.custom-admonition-success .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-success.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* important */ +.custom-admonition-important .admonition-title { + color: #000000; + background: #ccffcc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #000000; +} +div.custom-admonition-important.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* caution */ +.custom-admonition-caution .admonition-title { + color: #000000; + background: #ffff99; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #e8e8e8; +} +div.custom-admonition-caution.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* note */ +.custom-admonition-note .admonition-title { + color: #ffffff; + background: #006bb3; + border-radius: 5px 5px 0px 0px; +} +div.custom-admonition-note.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* todo */ +.custom-admonition-todo .admonition-title { + color: #000000; + background: #cce6ff; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #99ccff; +} +div.custom-admonition-todo.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #99ccff; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +/* examples */ +.custom-admonition-examples .admonition-title { + color: #000000; + background: #ffe6cc; + border-radius: 5px 5px 0px 0px; + border-bottom: solid 1px #d8d8d8; +} +div.custom-admonition-examples.admonition { + color: #000000; + background: #ffffff; + border: solid 1px #cccccc; + border-radius: 5px; + box-shadow: 1px 1px 5px 3px #d8d8d8; + margin: 20px 0px 30px 0px; +} + +.wy-nav-content { + max-width: 100%; + padding-right: 100px; + padding-left: 100px; + background-color: #f2f2f2; +} + +div.rst-content { + background-color: #ffffff; + border: solid 1px #e5e5e5; + padding: 20px 40px 20px 40px; +} + +.rst-content .guilabel { + border: 1px solid #ffff99; + background: #ffff99; + font-size: 100%; + font-weight: normal; + border-radius: 4px; + padding: 2px 0px; + margin: auto 2px; + vertical-align: middle; +} + +.rst-content kbd { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + border: solid 1px #d8d8d8; + background-color: #f5f5f5; + padding: 0px 3px; + border-radius: 3px; +} + +.wy-nav-content-wrap a { + color: #0066cc; + text-decoration: none; +} +.wy-nav-content-wrap a:hover { + color: #0099cc; + text-decoration: underline; +} + +.wy-nav-top a { + color: #ffffff; +} + +/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ +.wy-table-responsive table td { + white-space: normal !important; +} + +.rst-content table.docutils td, +.rst-content table.docutils th { + padding: 5px 10px 5px 10px; 
+} +.rst-content table.docutils td p, +.rst-content table.docutils th p { + font-size: 14px; + margin-bottom: 0px; +} +.rst-content table.docutils td p cite, +.rst-content table.docutils th p cite { + font-size: 14px; + background-color: transparent; +} + +.colwidths-given th { + border: solid 1px #d8d8d8 !important; +} +.colwidths-given td { + border: solid 1px #d8d8d8 !important; +} + +/*handles single-tick inline code*/ +.wy-body-for-nav cite { + color: #000000; + background-color: transparent; + font-style: normal; + font-family: "Courier New"; + font-size: 13px; + padding: 3px 3px 3px 3px; +} + +.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { + font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; + font-size: 13px; + overflow: visible; + white-space: pre-wrap; + color: #000000; +} + +.rst-content pre.literal-block, .rst-content div[class^='highlight'] { + background-color: #f8f8f8; + border: solid 1px #e8e8e8; +} + +/* This enables inline code to wrap. */ +code, .rst-content tt, .rst-content code { + white-space: pre-wrap; + padding: 2px 3px 1px; + border-radius: 3px; + font-size: 13px; + background-color: #ffffff; +} + +/* use this added class for code blocks attached to bulleted list items */ +.highlight-top-margin { + margin-top: 20px !important; +} + +/* change color of inline code block */ +span.pre { + color: #e01e5a; +} + +.wy-body-for-nav blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid #ddd; + color: #000000; +} + +/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ +.rst-content .section ol p, .rst-content .section ul p { + margin-bottom: 0px; +} + +/* add spacing between bullets for legibility */ +.rst-content .section ol li, .rst-content .section ul li { + margin-bottom: 5px; +} + +.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { + margin-top: 5px; +} + +/* but exclude the toctree bullets */ +.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + margin-top: 0px; + margin-bottom: 0px; +} + +/* remove extra space at bottom of multine list-table cell */ +.rst-content .line-block { + margin-left: 0px; + margin-bottom: 0px; + line-height: 24px; +} + +/* fix extra vertical spacing in page toctree */ +.rst-content .toctree-wrapper ul li ul, article ul li ul { + margin-top: 0; + margin-bottom: 0; +} + +/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ +.reference.internal.toc-index { + color: #d9d9d9; +} + +.reference.internal.toc-index.current { + background-color: #ffffff; + color: #000000; + font-weight: bold; +} + +.toc-index-div { + border-top: solid 1px #000000; + margin-top: 10px; + padding-top: 5px; +} + +.indextable ul li { + font-size: 14px; + margin-bottom: 5px; +} + +/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ +.indextable.genindextable { + margin-bottom: 20px; +} + +div.genindex-jumpbox { + margin-bottom: 10px; +} + +/* rst image classes */ + +.clear-both { + clear: both; + } + +.float-left { + float: left; + margin-right: 20px; +} + +img { + border: solid 1px #e8e8e8; +} + +/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ +.img-title { + color: #000000; + /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ + line-height: 3.0; + font-style: italic; + font-weight: 600; +} + +.img-title-para { + color: #000000; + margin-top: 20px; + margin-bottom: 0px; + font-style: italic; + font-weight: 500; +} + +.red { + color: red; +} diff --git a/docs/source/conf.py b/docs/source/conf.py index 74b8649c..918d62c1 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -28,16 +28,22 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ -'sphinx.ext.intersphinx', + "sphinx.ext.intersphinx", + "sphinx_reredirects", ] + +# Redirects for olds pages +# See https://documatt.gitlab.io/sphinx-reredirects/usage.html +redirects = {} + # This points to aboutcode.readthedocs.io # In case of "undefined label" ERRORS check docs on intersphinx to troubleshoot # Link was created at commit - https://github.com/nexB/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 intersphinx_mapping = { - 'aboutcode': ('https://aboutcode.readthedocs.io/en/latest/', None), - 'scancode-workbench': ('https://scancode-workbench.readthedocs.io/en/develop/', None), + "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), + "scancode-workbench": ("https://scancode-workbench.readthedocs.io/en/develop/", None), } @@ -62,15 +68,42 @@ # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] -master_doc = 'index' +master_doc = "index" html_context = { - "css_files": [ - "_static/theme_overrides.css", # override wide tables in RTD theme - ], "display_github": True, "github_user": "nexB", "github_repo": "nexb-skeleton", "github_version": "develop", # branch "conf_py_path": "/docs/source/", # path in the checkout to the docs root } + +html_css_files = ["_static/theme_overrides.css"] + + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +html_show_sphinx = True + +# Define CSS and HTML abbreviations used in .rst files. These are examples. +# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +rst_prolog = """ +.. |psf| replace:: Python Software Foundation + +.. # define a hard line break for HTML +.. |br| raw:: html + +
+   <br />
+
+.. role:: red
+
+.. role:: img-title
+
+.. role:: img-title-para
+
+"""
+
+# -- Options for LaTeX output -------------------------------------------------
+
+latex_elements = {
+    'classoptions': ',openany,oneside'
+}
\ No newline at end of file
diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst
new file mode 100644
index 00000000..13882e10
--- /dev/null
+++ b/docs/source/contribute/contrib_doc.rst
@@ -0,0 +1,314 @@
+.. _contrib_doc_dev:
+
+Contributing to the Documentation
+=================================
+
+.. _contrib_doc_setup_local:
+
+Setup Local Build
+-----------------
+
+To get started, create or identify a working directory on your local machine.
+
+Open that directory and execute the following command in a terminal session::
+
+    git clone https://github.com/nexB/skeleton.git
+
+That will create a ``skeleton`` directory in your working directory.
+Now you can install the dependencies in a virtualenv::
+
+    cd skeleton
+    ./configure --docs
+
+.. note::
+
+    On Windows, run ``configure --docs`` (without the leading ``./``) instead.
+
+This will install the following prerequisites:
+
+- Sphinx
+- sphinx_rtd_theme (the theme used by ReadTheDocs)
+- doc8 (style linter)
+
+These requirements are already present in setup.cfg and ``./configure --docs`` installs them.
+
+Now you can build the HTML documents locally::
+
+    source venv/bin/activate
+    cd docs
+    make html
+
+Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the
+documentation .html files::
+
+    open build/html/index.html
+
+.. note::
+
+    If this command does not work, for example on Ubuntu 18.04 where you may get a message like
+    “Couldn’t get a file descriptor referring to the console”, try:
+
+    ::
+
+        see build/html/index.html
+
+You now have a local build of the AboutCode documents.
+
+.. _contrib_doc_share_improvements:
+
+Share Document Improvements
+---------------------------
+
+Ensure that you have the latest files::
+
+    git pull
+    git status
+
+Before committing changes, run the Continuous Integration scripts locally to run the tests. Refer
+to :ref:`doc_ci` for instructions.
+
+Follow standard git procedures to upload your new and modified files. The following commands are
+examples::
+
+    git status
+    git add source/index.rst
+    git add source/how-to-scan.rst
+    git status
+    git commit -m "New how-to document that explains how to scan"
+    git status
+    git push
+    git status
+
+The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your
+Pull Request is merged.
+
+Refer to the `Pro Git Book <https://git-scm.com/book/>`_ available online for Git tutorials
+covering more complex topics such as branching, merging and rebasing.
+
+.. _doc_ci:
+
+Continuous Integration
+----------------------
+
+The documentation is checked on every new commit through CI, so that common errors are
+avoided and documentation standards are enforced. The CI presently checks these three aspects
+of the documentation:
+
+1. Successful builds (by using ``sphinx-build``)
+2. No broken links (by using ``link-check``)
+3. No linting errors (by using ``doc8``)
+
+So run these scripts on your local system before creating a Pull Request::
+
+    cd docs
+    ./scripts/sphinx_build_link_check.sh
+    ./scripts/doc8_style_check.sh
+
+If you don't have permission to run the scripts, make them executable first::
+
+    chmod u+x ./scripts/doc8_style_check.sh
+
+.. _doc_style_docs8:
+
+Style Checks Using ``Doc8``
+---------------------------
+
+How To Run Style Tests
+^^^^^^^^^^^^^^^^^^^^^^
+
+In the project root, run the following commands::
+
+    $ cd docs
+    $ ./scripts/doc8_style_check.sh
+
+A sample output is::
+
+    Scanning...
+    Validating...
+    docs/source/misc/licence_policy_plugin.rst:37: D002 Trailing whitespace
+    docs/source/misc/faq.rst:45: D003 Tabulation used for indentation
+    docs/source/misc/faq.rst:9: D001 Line too long
+    docs/source/misc/support.rst:6: D005 No newline at end of file
+    ========
+    Total files scanned = 34
+    Total files ignored = 0
+    Total accumulated errors = 326
+    Detailed error counts:
+    - CheckCarriageReturn = 0
+    - CheckIndentationNoTab = 75
+    - CheckMaxLineLength = 190
+    - CheckNewlineEndOfFile = 13
+    - CheckTrailingWhitespace = 47
+    - CheckValidity = 1
+
+Now fix the errors and run again until there are no style errors left in the documentation.
+
+What is Checked?
+^^^^^^^^^^^^^^^^
+
+PyCQA is an organization for code quality tools (and plugins) for the Python programming language.
+Doc8 is a sub-project of that organization. Refer to its `README <https://github.com/PyCQA/doc8>`_
+for more details.
+
+What is checked:
+
+    - invalid rst format - D000
+    - lines should not be longer than 100 characters - D001
+
+      - RST exception: line with no whitespace except in the beginning
+      - RST exception: lines with http or https URLs
+      - RST exception: literal blocks
+      - RST exception: rst target directives
+
+    - no trailing whitespace - D002
+    - no tabulation for indentation - D003
+    - no carriage returns (use UNIX newlines) - D004
+    - no newline at end of file - D005
+
+.. _doc_interspinx:
+
+Intersphinx
+-----------
+
+ScanCode Toolkit documentation uses `Intersphinx <https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html>`_
+to link to other Sphinx documentation, to maintain links to other AboutCode projects.
+
+To link sections in the same documentation, standard reST labels are used. Refer to
+`Cross-Referencing `_ for more information.
+
+For example::
+
+    .. _my-reference-label:
+
+    Section to cross-reference
+    --------------------------
+
+    This is the text of the section.
+
+    It refers to the section itself, see :ref:`my-reference-label`.
+
+Now, using Intersphinx, you can create these labels in one Sphinx documentation set and then
+reference them from another Sphinx documentation set, hosted in a different location.
+
+You just have to add the following in the ``conf.py`` file of the Sphinx documentation where you
+want to add the links::
+
+    extensions = [
+    'sphinx.ext.intersphinx'
+    ]
+
+    intersphinx_mapping = {'aboutcode': ('https://aboutcode.readthedocs.io/en/latest/', None)}
+
+To show all Intersphinx links and their targets in an Intersphinx mapping file, run::
+
+    python -msphinx.ext.intersphinx https://aboutcode.readthedocs.io/en/latest/objects.inv
+
+.. WARNING::
+
+    ``python -msphinx.ext.intersphinx https://aboutcode.readthedocs.io/objects.inv`` will give an
+    error, because the inventory file lives under the versioned ``en/latest/`` path.
+
+This enables you to create links to the ``aboutcode`` documentation in your own documentation,
+where you modified the configuration file. Links can be added like this::
+
+    For more details refer :ref:`aboutcode:doc_style_guide`.
+
+You can also omit the ``aboutcode`` prefix assigned to all links from aboutcode.readthedocs.io,
+as long as you don't have a label with the same name in your own Sphinx documentation. Example::
+
+    For more details refer :ref:`doc_style_guide`.
+
+If you have a label in your documentation which is also present in the documentation linked by
+Intersphinx, and you link to that label, it will create a link to the local label.
+
+For more information, refer to this tutorial:
+`Using Intersphinx `_.
+
+.. _doc_style_conv:
+
+Style Conventions for the Documentation
+---------------------------------------
+
+1. Headings
+
+   (`Refer `_)
+   Normally, there are no heading levels assigned to certain characters as the structure is
+   determined from the succession of headings. However, this convention is used in Python’s Style
+   Guide for documentation, which you may follow:
+
+   # with overline, for parts
+
+   * with overline, for chapters
+
+   =, for sections
+
+   -, for subsections
+
+   ^, for sub-subsections
+
+   ", for paragraphs
+
+2. Heading Underlines
+
+   Do not use underlines that are longer or shorter than the title headline itself. As in:
+
+   ::
+
+      Correct :
+
+      Extra Style Checks
+      ------------------
+
+      Incorrect :
+
+      Extra Style Checks
+      ------------------------
+
+.. note::
+
+    Underlines shorter than the title text generate errors on ``sphinx-build``.
+
+
+3. Internal Links
+
+   Using ``:ref:`` is advised over standard reStructuredText links to sections (like
+   ```Section title`_``) because it works across files, keeps working when section headings
+   change, raises warnings if incorrect, and works for all builders that support
+   cross-references. However, external links are created by using the standard
+   ```Section title`_`` method.
+
+4. Eliminate Redundancy
+
+   If a section/file has to be repeated somewhere else, do not write the exact same section/file
+   twice. Use ``.. include:: ../README.rst`` instead. Here, ``../`` refers to the documentation
+   root, so the file location can be adjusted accordingly. This enables us to link documents from
+   other upstream folders.
+
+5. Using ``:ref:`` only when necessary
+
+   Use ``:ref:`` to create internal links only when needed, i.e. when it is referenced somewhere.
+   Do not create references for all the sections and then only reference some of them, because
+   this creates unnecessary references. This also generates errors in ``restructuredtext-lint``.
+
+6. Spelling
+
+   You should check for spelling errors before you push changes. `Aspell <http://aspell.net/>`_
+   is a GNU project command line tool you can use for this purpose. Download and install Aspell,
+   then execute ``aspell check <filename>`` for all the files changed. Be careful not to change
+   commands or other code, as Aspell gives prompts for a lot of them. Also delete the temporary
+   ``.bak`` files generated. Refer to the `manual `_ for more information on how to use it.
+
+7. Notes and Warning Snippets
+
+   All ``Note`` and ``Warning`` sections are to be kept in ``rst_snippets/note_snippets/`` and
+   ``rst_snippets/warning_snippets/`` and then included, to eliminate redundancy, as these are
+   frequently used in multiple files.
+
+Converting from Markdown
+------------------------
+
+If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_
+does it pretty well. You will still have to clean up and check for errors, as the output contains
+a few bugs, but this is definitely better than converting everything by yourself.
+
+This will be helpful in converting GitHub wikis (Markdown files) to reStructuredText files for
+Sphinx/ReadTheDocs hosting.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 67fcf213..eb63717b 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -5,7 +5,8 @@ Welcome to nexb-skeleton's documentation!
    :maxdepth: 2
    :caption: Contents:

-   skeleton/index
+   skeleton-usage
+   contribute/contrib_doc

 Indices and tables
 ==================
diff --git a/docs/skeleton-usage.rst b/docs/source/skeleton-usage.rst
similarity index 98%
rename from docs/skeleton-usage.rst
rename to docs/source/skeleton-usage.rst
index ad9b9ffe..cde23dcd 100644
--- a/docs/skeleton-usage.rst
+++ b/docs/source/skeleton-usage.rst
@@ -73,11 +73,13 @@ To generate requirements.txt:

     python etc/scripts/gen_requirements.py -s venv/lib/python<version>/site-packages/

-Replace \<version\> with the version number of the Python being used, for example: ``venv/lib/python3.6/site-packages/``
+Replace \<version\> with the version number of the Python being used, for example:
+``venv/lib/python3.6/site-packages/``

 To generate requirements-dev.txt after requirements.txt has been generated:

 .. code-block:: bash
+
     ./configure --dev
     python etc/scripts/gen_requirements_dev.py -s venv/lib/python<version>/site-packages/
diff --git a/docs/source/skeleton/index.rst b/docs/source/skeleton/index.rst
deleted file mode 100644
index 7dfc6cb4..00000000
--- a/docs/source/skeleton/index.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-# Docs Structure Guide
-# Rst docs - https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html
-#
-# 1. Place docs in folders under source for different sections
-# 2. Link them by adding individual index files in each section
-#    to the main index, and then files for each section to their
-#    respective index files.
-# 3. Use `.. include` statements to include other .rst files
-#    or part of them, or use hyperlinks to a section of the docs,
-#    to get rid of repetition.
-#    https://docutils.sourceforge.io/docs/ref/rst/directives.html#including-an-external-document-fragment
-#
-# Note: Replace these guide/placeholder docs
-
-.. include:: ../../../README.rst
diff --git a/etc/scripts/README.rst b/etc/scripts/README.rst
index edf82e44..5e54a2cc 100755
--- a/etc/scripts/README.rst
+++ b/etc/scripts/README.rst
@@ -21,7 +21,7 @@ Pre-requisites
 virtualenv or in the main configured development virtualenv.
 These requirements need to be installed::

-    pip install --requirement etc/release/requirements.txt
+    pip install --requirement etc/scripts/requirements.txt

 TODO: we need to pin the versions of these tools

@@ -34,7 +34,7 @@ Scripts
 ~~~~~~~

 **gen_requirements.py**: create/update requirements files from currently
-  installed requirements. 
+  installed requirements.

 **gen_requirements_dev.py** does the same but can subtract the main requirements
 to get extra requirements used in only development.

The sequence of commands to run is:

* Start with these commands::

     ./configure --clean
     ./configure
-    python etc/release/gen_requirements.py --site-packages-dir
+    python etc/scripts/gen_requirements.py --site-packages-dir

* You can optionally install or update extra main requirements after the
  ./configure step such that these are included in the generated main
  requirements.
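A concrete first run of this sequence, assuming a virtualenv created with Python 3.9 (the
site-packages path varies with the Python version used to create the venv), would look like::

    ./configure --clean
    ./configure
    python etc/scripts/gen_requirements.py --site-packages-dir venv/lib/python3.9/site-packages/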
@@ -59,7 +59,7 @@ The sequence of commands to run are: ./configure --clean ./configure --dev - python etc/release/gen_requirements_dev.py --site-packages-dir + python etc/scripts/gen_requirements_dev.py --site-packages-dir * You can optionally install or update extra dev requirements after the ./configure step such that these are included in the generated dev diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 0f04b349..b052f25b 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,7 +16,7 @@ @click.command() @click.option( "-d", - "--dest_dir", + "--dest", type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", @@ -35,7 +35,7 @@ ) @click.help_option("-h", "--help") def check_thirdparty_dir( - dest_dir, + dest, wheels, sdists, ): @@ -45,7 +45,7 @@ def check_thirdparty_dir( # check for problems print(f"==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( - dest_dir=dest_dir, + dest_dir=dest, report_missing_sources=sdists, report_missing_wheels=wheels, ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 22147b20..eedf05c6 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -12,13 +12,15 @@ import itertools import os import sys +from collections import defaultdict import click import utils_thirdparty import utils_requirements -TRACE = True +TRACE = False +TRACE_DEEP = False @click.command() @@ -99,11 +101,49 @@ "index_urls", type=str, metavar="INDEX", - default=utils_thirdparty.PYPI_INDEXES, + default=utils_thirdparty.PYPI_INDEX_URLS, show_default=True, multiple=True, help="PyPI index URL(s) to use for wheels and sources, in order of preferences.", ) +@click.option( + "--use-cached-index", + is_flag=True, + help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", +) +@click.option( + "--sdist-only", + "sdist_only", + type=str, + metavar="SDIST", + default=tuple(), + show_default=False, + multiple=True, + help="Package name(s) that come only in sdist format (no wheels). " + "The command will not fail and exit if no wheel exists for these names", +) +@click.option( + "--wheel-only", + "wheel_only", + type=str, + metavar="WHEEL", + default=tuple(), + show_default=False, + multiple=True, + help="Package name(s) that come only in wheel format (no sdist). " + "The command will not fail and exit if no sdist exists for these names", +) +@click.option( + "--no-dist", + "no_dist", + type=str, + metavar="DIST", + default=tuple(), + show_default=False, + multiple=True, + help="Package name(s) that do not come either in wheel or sdist format. " + "The command will not fail and exit if no distribution exists for these names", +) @click.help_option("-h", "--help") def fetch_thirdparty( requirements_files, @@ -115,9 +155,13 @@ def fetch_thirdparty( wheels, sdists, index_urls, + use_cached_index, + sdist_only, + wheel_only, + no_dist, ): """ - Download to --dest-dir THIRDPARTY_DIR the PyPI wheels, source distributions, + Download to --dest THIRDPARTY_DIR the PyPI wheels, source distributions, and their ABOUT metadata, license and notices files. Download the PyPI packages listed in the combination of: @@ -125,16 +169,23 @@ def fetch_thirdparty( - the pip name==version --specifier SPECIFIER(s) - any pre-existing wheels or sdsists found in --dest-dir THIRDPARTY_DIR. 
- Download wheels with the --wheels option for the ``--python-version`` PYVER(s) - and ``--operating_system`` OS(s) combinations defaulting to all supported combinations. + Download wheels with the --wheels option for the ``--python-version`` + PYVER(s) and ``--operating_system`` OS(s) combinations defaulting to all + supported combinations. Download sdists tarballs with the --sdists option. - Generate or Download .ABOUT, .LICENSE and .NOTICE files for all the wheels and sources fetched. + Generate or Download .ABOUT, .LICENSE and .NOTICE files for all the wheels + and sources fetched. - Download wheels and sdists the provided PyPI simple --index-url INDEX(s) URLs. + Download from the provided PyPI simple --index-url INDEX(s) URLs. """ + if not (wheels or sdists): + print("Error: one or both of --wheels and --sdists is required.") + sys.exit(1) + print(f"COLLECTING REQUIRED NAMES & VERSIONS FROM {dest_dir}") + existing_packages_by_nv = { (package.name, package.version): package for package in utils_thirdparty.get_local_packages(directory=dest_dir) @@ -150,147 +201,105 @@ def fetch_thirdparty( required_name_versions.update(nvs) for specifier in specifiers: - nv = utils_requirements.get_name_version( + nv = utils_requirements.get_required_name_version( requirement=specifier, with_unpinned=latest_version, ) required_name_versions.add(nv) + if latest_version: + names = set(name for name, _version in sorted(required_name_versions)) + required_name_versions = {(n, None) for n in names} + if not required_name_versions: print("Error: no requirements requested.") sys.exit(1) - if not os.listdir(dest_dir) and not (wheels or sdists): - print("Error: one or both of --wheels and --sdists is required.") - sys.exit(1) - - if latest_version: - latest_name_versions = set() - names = set(name for name, _version in sorted(required_name_versions)) - for name in sorted(names): - latests = utils_thirdparty.PypiPackage.sorted( - utils_thirdparty.get_package_versions( - name=name, version=None, index_urls=index_urls - ) - ) - if not latests: - print(f"No distribution found for: {name}") - continue - latest = latests[-1] - latest_name_versions.add((latest.name, latest.version)) - required_name_versions = latest_name_versions - - if TRACE: - print("required_name_versions:", required_name_versions) + if TRACE_DEEP: + print("required_name_versions:") + for n, v in required_name_versions: + print(f" {n} @ {v}") + # create the environments matrix we need for wheels + environments = None if wheels: - # create the environments matrix we need for wheels evts = itertools.product(python_versions, operating_systems) environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] - wheels_not_found = {} - sdists_not_found = {} - # iterate over requirements, one at a time + # Collect PyPI repos + repos = [] + for index_url in index_urls: + index_url = index_url.strip("/") + existing = utils_thirdparty.DEFAULT_PYPI_REPOS_BY_URL.get(index_url) + if existing: + existing.use_cached_index = use_cached_index + repos.append(existing) + else: + repo = utils_thirdparty.PypiSimpleRepository( + index_url=index_url, + use_cached_index=use_cached_index, + ) + repos.append(repo) + + wheels_or_sdist_not_found = defaultdict(list) + for name, version in sorted(required_name_versions): nv = name, version - existing_package = existing_packages_by_nv.get(nv) + print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if existing_package: - existing_wheels = list( - 
existing_package.get_supported_wheels(environment=environment) - ) - else: - existing_wheels = None - - if existing_wheels: - if TRACE: - print( - f"====> Wheels already available: {name}=={version} on: {environment}: {existing_package.wheels!r}" - ) - if all(w.is_pure() for w in existing_wheels): - break - else: - continue if TRACE: - print(f"Fetching wheel for: {name}=={version} on: {environment}") - - try: - ( - fetched_wheel_filenames, - existing_wheel_filenames, - ) = utils_thirdparty.download_wheel( - name=name, - version=version, - environment=environment, - dest_dir=dest_dir, - index_urls=index_urls, - ) - if TRACE: - if existing_wheel_filenames: - print( - f" ====> Wheels already available: {name}=={version} on: {environment}" - ) - for whl in existing_wheel_filenames: - print(f" {whl}") - if fetched_wheel_filenames: - print(f" ====> Wheels fetched: {name}=={version} on: {environment}") - for whl in fetched_wheel_filenames: - print(f" {whl}") - - fwfns = fetched_wheel_filenames + existing_wheel_filenames - - if all(utils_thirdparty.Wheel.from_filename(f).is_pure() for f in fwfns): - break - - except utils_thirdparty.DistributionNotFound as e: - wheels_not_found[f"{name}=={version}"] = str(e) - - if sdists: - if existing_package and existing_package.sdist: - if TRACE: - print( - f" ====> Sdist already available: {name}=={version}: {existing_package.sdist!r}" - ) - continue + print(f" ==> Fetching wheel for envt: {environment}") - if TRACE: - print(f" Fetching sdist for: {name}=={version}") - - try: - fetched = utils_thirdparty.download_sdist( + fetched = utils_thirdparty.download_wheel( name=name, version=version, + environment=environment, dest_dir=dest_dir, - index_urls=index_urls, + repos=repos, ) + if not fetched: + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) + if TRACE: + print(f" NOT FOUND") + if (sdists or + (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) + ): + if TRACE: + print(f" ==> Fetching sdist: {name}=={version}") + + fetched = utils_thirdparty.download_sdist( + name=name, + version=version, + dest_dir=dest_dir, + repos=repos, + ) + if not fetched: + wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - if not fetched: - print( - f" ====> Sdist already available: {name}=={version} on: {environment}" - ) - else: - print( - f" ====> Sdist fetched: {fetched} for {name}=={version} on: {environment}" - ) - - except utils_thirdparty.DistributionNotFound as e: - sdists_not_found[f"{name}=={version}"] = str(e) - - if wheels and wheels_not_found: - print(f"==> MISSING WHEELS") - for wh in wheels_not_found: - print(f" {wh}") - - if sdists and sdists_not_found: - print(f"==> MISSING SDISTS") - for sd in sdists_not_found: - print(f" {sd}") + print(f" NOT FOUND") + + mia = [] + for nv, dists in wheels_or_sdist_not_found.items(): + name, _, version = nv.partition("==") + if name in no_dist: + continue + sdist_missing = sdists and "sdist" in dists and not name in wheel_only + if sdist_missing: + mia.append(f"SDist missing: {nv} {dists}") + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + if wheels_missing: + mia.append(f"Wheels missing: {nv} {dists}") + + if mia: + for m in mia: + print(m) + raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) 
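+    # Pass use_cached_index through so that ABOUT and LICENSE fetching honors
+    # the same on-disk PyPI index caching option as the downloads above.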
utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 8de2b960..214d90dc 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -25,26 +25,26 @@ class InvalidDistributionFilename(Exception): def get_package_name_from_filename(filename): """ - Return the package name extracted from a package ``filename``. - Optionally ``normalize`` the name according to distribution name rules. + Return the normalized package name extracted from a package ``filename``. + Normalization is done according to distribution name rules. Raise an ``InvalidDistributionFilename`` if the ``filename`` is invalid:: >>> get_package_name_from_filename("foo-1.2.3_rc1.tar.gz") 'foo' - >>> get_package_name_from_filename("foo-bar-1.2-py27-none-any.whl") + >>> get_package_name_from_filename("foo_bar-1.2-py27-none-any.whl") 'foo-bar' >>> get_package_name_from_filename("Cython-0.17.2-cp26-none-linux_x86_64.whl") 'cython' >>> get_package_name_from_filename("python_ldap-2.4.19-cp27-none-macosx_10_10_x86_64.whl") 'python-ldap' - >>> get_package_name_from_filename("foo.whl") - Traceback (most recent call last): - ... - InvalidDistributionFilename: ... - >>> get_package_name_from_filename("foo.png") - Traceback (most recent call last): - ... - InvalidFilePackageName: ... + >>> try: + ... get_package_name_from_filename("foo.whl") + ... except InvalidDistributionFilename: + ... pass + >>> try: + ... get_package_name_from_filename("foo.png") + ... except InvalidDistributionFilename: + ... pass """ if not filename or not filename.endswith(dist_exts): raise InvalidDistributionFilename(filename) @@ -118,7 +118,7 @@ def build_per_package_index(pkg_name, packages, base_url): """ document.append(header) - for package in packages: + for package in sorted(packages, key=lambda p: p.archive_file): document.append(package.simple_index_entry(base_url)) footer = """ @@ -141,8 +141,8 @@ def build_links_package_index(packages_by_package_name, base_url): """ document.append(header) - for _name, packages in packages_by_package_name.items(): - for package in packages: + for _name, packages in sorted(packages_by_package_name.items(), key=lambda i: i[0]): + for package in sorted(packages, key=lambda p: p.archive_file): document.append(package.simple_index_entry(base_url)) footer = """ diff --git a/etc/scripts/requirements.txt b/etc/scripts/requirements.txt index 6591e49c..7c514da9 100644 --- a/etc/scripts/requirements.txt +++ b/etc/scripts/requirements.txt @@ -1,12 +1,12 @@ aboutcode_toolkit -github-release-retry2 attrs commoncode click requests saneyaml -romp pip setuptools twine -wheel \ No newline at end of file +wheel +build +packvers diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 31adab47..f6e91a91 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -15,7 +15,7 @@ import requests import saneyaml -from packaging import version as packaging_version +from packvers import version as packaging_version """ Utility to create and retrieve package and ABOUT file data from DejaCode. 
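The doctest rewrite in ``gen_pypi_simple.py`` above doubles as a small behavior spec for
distribution filename normalization. A standalone sketch of the same checks, assuming
``etc/scripts`` is on ``sys.path`` so the module can be imported directly::

    import sys

    sys.path.insert(0, "etc/scripts")  # assumption: running from the repository root

    from gen_pypi_simple import (
        InvalidDistributionFilename,
        get_package_name_from_filename,
    )

    # names are normalized to lowercase with dashes, per distribution name rules
    assert get_package_name_from_filename("foo-1.2.3_rc1.tar.gz") == "foo"
    assert get_package_name_from_filename("foo_bar-1.2-py27-none-any.whl") == "foo-bar"
    assert get_package_name_from_filename("Cython-0.17.2-cp26-none-linux_x86_64.whl") == "cython"

    # filenames without a known distribution extension are rejected, not guessed
    try:
        get_package_name_from_filename("foo.png")
    except InvalidDistributionFilename as e:
        print(f"rejected: {e}")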
diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index 5d5eb34c..af42a0cd 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,7 +27,7 @@ import re -from packaging.tags import ( +from packvers.tags import ( compatible_tags, cpython_tags, generic_tags, diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index fbc456db..0fc25a35 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -8,6 +8,8 @@ # See https://github.com/nexB/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # + +import os import re import subprocess @@ -41,23 +43,23 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue - yield get_name_version(requirement=req_line, with_unpinned=with_unpinned) + yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) -def get_name_version(requirement, with_unpinned=False): +def get_required_name_version(requirement, with_unpinned=False): """ Return a (name, version) tuple given a`requirement` specifier string. Requirement version must be pinned. If ``with_unpinned`` is True, unpinned requirements are accepted and only the name portion is returned. For example: - >>> assert get_name_version("foo==1.2.3") == ("foo", "1.2.3") - >>> assert get_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") - >>> assert get_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") - >>> assert get_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_name_version("foo>=1.2") + >>> assert get_required_name_version("foo==1.2.3") == ("foo", "1.2.3") + >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") + >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") + >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") >>> try: - ... assert not get_name_version("foo", with_unpinned=False) + ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ... assert "Requirement version must be pinned" in str(e) """ @@ -110,6 +112,8 @@ def get_installed_reqs(site_packages_dir): Return the installed pip requirements as text found in `site_packages_dir` as a text. """ + if not os.path.exists(site_packages_dir): + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 829cf8c0..addf8e5e 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -8,7 +8,6 @@ # See https://github.com/nexB/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# -from collections import defaultdict import email import itertools import os @@ -18,6 +17,8 @@ import tempfile import time import urllib +from collections import defaultdict +from urllib.parse import quote_plus import attr import license_expression @@ -27,12 +28,10 @@ from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name -from packaging import tags as packaging_tags -from packaging import version as packaging_version -from urllib.parse import quote_plus +from packvers import tags as packaging_tags +from packvers import version as packaging_version import utils_pip_compatibility_tags -from utils_requirements import load_requirements """ Utilities to manage Python thirparty libraries source, binaries and metadata in @@ -111,15 +110,14 @@ """ -TRACE = True +TRACE = False TRACE_DEEP = False TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "36", "37", "38", "39", "310" +PYTHON_VERSIONS = "37", "38", "39", "310" PYTHON_DOT_VERSIONS_BY_VER = { - "36": "3.6", "37": "3.7", "38": "3.8", "39": "3.9", @@ -135,7 +133,6 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "36": ["cp36", "cp36m", "abi3"], "37": ["cp37", "cp37m", "abi3"], "38": ["cp38", "cp38m", "abi3"], "39": ["cp39", "cp39m", "abi3"], @@ -168,6 +165,16 @@ def get_python_dot_version(version): "macosx_10_15_x86_64", "macosx_11_0_x86_64", "macosx_11_intel", + "macosx_11_0_x86_64", + "macosx_11_intel", + "macosx_10_9_universal2", + "macosx_10_10_universal2", + "macosx_10_11_universal2", + "macosx_10_12_universal2", + "macosx_10_13_universal2", + "macosx_10_14_universal2", + "macosx_10_15_universal2", + "macosx_11_0_universal2", # 'macosx_11_0_arm64', ], "windows": [ @@ -178,18 +185,19 @@ def get_python_dot_version(version): THIRDPARTY_DIR = "thirdparty" CACHE_THIRDPARTY_DIR = ".cache/thirdparty" -ABOUT_BASE_URL = "https://thirdparty.aboutcode.org/pypi" +################################################################################ +ABOUT_BASE_URL = "https://thirdparty.aboutcode.org/pypi" ABOUT_PYPI_SIMPLE_URL = f"{ABOUT_BASE_URL}/simple" ABOUT_LINKS_URL = f"{ABOUT_PYPI_SIMPLE_URL}/links.html" - PYPI_SIMPLE_URL = "https://pypi.org/simple" -PYPI_INDEXES = (PYPI_SIMPLE_URL, ABOUT_PYPI_SIMPLE_URL) +PYPI_INDEX_URLS = (PYPI_SIMPLE_URL, ABOUT_PYPI_SIMPLE_URL) + +################################################################################ EXTENSIONS_APP = (".pyz",) EXTENSIONS_SDIST = ( ".tar.gz", - ".tar.bz2", ".zip", ".tar.xz", ) @@ -216,119 +224,90 @@ class DistributionNotFound(Exception): pass -def download_wheel( - name, - version, - environment, - dest_dir=THIRDPARTY_DIR, - index_urls=PYPI_INDEXES, -): +def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tuple()): """ Download the wheels binary distribution(s) of package ``name`` and - ``version`` matching the ``environment`` Environment constraints from the - PyPI simple repository ``index_urls`` list of URLs into the ``dest_dir`` - directory. + ``version`` matching the ``environment`` Environment constraints into the + ``dest_dir`` directory. Return a list of fetched_wheel_filenames, possibly + empty. - Raise a DistributionNotFound if no wheel is not found. Otherwise, return a - tuple of lists of (fetched_wheel_filenames, existing_wheel_filenames) + Use the first PyPI simple repository from a list of ``repos`` that contains this wheel. 
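+
+    For example, a minimal sketch with illustrative values, relying on the
+    default repos when none are provided::
+
+        env = Environment.from_pyver_and_os("39", "linux")
+        fetched = download_wheel(name="click", version="8.1.3", environment=env)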
""" if TRACE_DEEP: - print(f" download_wheel: {name}=={version}: {environment}") + print(f" download_wheel: {name}=={version} for envt: {environment}") - fetched_wheel_filenames = [] - existing_wheel_filenames = [] - try: - for pypi_package in get_package_versions( - name=name, - version=version, - index_urls=index_urls, - ): - if not pypi_package.wheels: - continue - - supported_wheels = list(pypi_package.get_supported_wheels(environment=environment)) - if not supported_wheels: - continue + if not repos: + repos = DEFAULT_PYPI_REPOS - for wheel in supported_wheels: - if os.path.exists(os.path.join(dest_dir, wheel.filename)): - # do not refetch - existing_wheel_filenames.append(wheel.filename) - continue + fetched_wheel_filenames = [] - if TRACE: - print(f" Fetching wheel from index: {wheel.download_url}") - fetched_wheel_filename = wheel.download(dest_dir=dest_dir) - fetched_wheel_filenames.add(fetched_wheel_filename) + for repo in repos: + package = repo.get_package_version(name=name, version=version) + if not package: + if TRACE_DEEP: + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") + continue + supported_wheels = list(package.get_supported_wheels(environment=environment)) + if not supported_wheels: + if TRACE_DEEP: + print( + f" download_wheel: No supported wheel for {name}=={version}: {environment} " + ) + continue - except Exception as e: - raise DistributionNotFound(f"Failed to fetch wheel: {name}=={version}: {e}") from e + for wheel in supported_wheels: + if TRACE_DEEP: + print( + f" download_wheel: Getting wheel from index (or cache): {wheel.download_url}" + ) + fetched_wheel_filename = wheel.download(dest_dir=dest_dir) + fetched_wheel_filenames.append(fetched_wheel_filename) - if not fetched_wheel_filenames and not existing_wheel_filenames: - raise DistributionNotFound(f"Failed to fetch wheel: {name}=={version}: No wheel found") + if fetched_wheel_filenames: + # do not futher fetch from other repos if we find in first, typically PyPI + break - return fetched_wheel_filenames, existing_wheel_filenames + return fetched_wheel_filenames -def download_sdist( - name, - version, - dest_dir=THIRDPARTY_DIR, - index_urls=PYPI_INDEXES, -): +def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): """ Download the sdist source distribution of package ``name`` and ``version`` - from the PyPI simple repository ``index_urls`` list of URLs into the - ``dest_dir`` directory. + into the ``dest_dir`` directory. Return a fetched filename or None. - Raise a DistributionNotFound if this was not found. Return the filename if - downloaded and False if not downloaded because it already exists. + Use the first PyPI simple repository from a list of ``repos`` that contains + this sdist. 
""" - if TRACE_DEEP: - print(f"download_sdist: {name}=={version}: ") + if TRACE: + print(f" download_sdist: {name}=={version}") - try: - for pypi_package in get_package_versions( - name=name, - version=version, - index_urls=index_urls, - ): - if not pypi_package.sdist: - continue + if not repos: + repos = DEFAULT_PYPI_REPOS - if os.path.exists(os.path.join(dest_dir, pypi_package.sdist.filename)): - # do not refetch - return False - if TRACE: - print(f" Fetching sources from index: {pypi_package.sdist.download_url}") - fetched = pypi_package.sdist.download(dest_dir=dest_dir) - if fetched: - return pypi_package.sdist.filename + fetched_sdist_filename = None - except Exception as e: - raise DistributionNotFound(f"Failed to fetch sdist: {name}=={version}: {e}") from e + for repo in repos: + package = repo.get_package_version(name=name, version=version) + + if not package: + if TRACE_DEEP: + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") + continue + sdist = package.sdist + if not sdist: + if TRACE_DEEP: + print(f" download_sdist: No sdist for {name}=={version}") + continue - raise DistributionNotFound(f"Failed to fetch sdist: {name}=={version}: No sources found") + if TRACE_DEEP: + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) + if fetched_sdist_filename: + # do not futher fetch from other repos if we find in first, typically PyPI + break -def get_package_versions( - name, - version=None, - index_urls=PYPI_INDEXES, -): - """ - Yield PypiPackages with ``name`` and ``version`` from the PyPI simple - repository ``index_urls`` list of URLs. - If ``version`` is not provided, return the latest available versions. - """ - for index_url in index_urls: - try: - repo = get_pypi_repo(index_url) - package = repo.get_package(name, version) - if package: - yield package - except RemoteNotFetchedException as e: - print(f"Failed to fetch PyPI package {name} @ {version} info from {index_url}: {e}") + return fetched_sdist_filename ################################################################################ @@ -362,17 +341,6 @@ def normalize_name(name): """ return name and re.sub(r"[-_.]+", "-", name).lower() or name - @staticmethod - def standardize_name(name): - """ - Return a standardized package name, e.g. lowercased and using - not _ - """ - return name and re.sub(r"[-_]+", "-", name).lower() or name - - @property - def name_ver(self): - return f"{self.name}-{self.version}" - def sortable_name_version(self): """ Return a tuple of values to sort by name, then version. @@ -388,7 +356,7 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another dist of mapping + # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", "copyright", @@ -406,6 +374,13 @@ class Distribution(NameVer): metadata=dict(help="File name."), ) + path_or_url = attr.ib( + repr=False, + type=str, + default="", + metadata=dict(help="Path or URL"), + ) + sha256 = attr.ib( repr=False, type=str, @@ -530,36 +505,50 @@ def package_url(self): """ Return a Package URL string of self. 
""" - return str(packageurl.PackageURL(**self.purl_identifiers())) + return str( + packageurl.PackageURL( + type=self.type, + namespace=self.namespace, + name=self.name, + version=self.version, + subpath=self.subpath, + qualifiers=self.qualifiers, + ) + ) @property def download_url(self): return self.get_best_download_url() - def get_best_download_url( - self, - index_urls=tuple([PYPI_SIMPLE_URL, ABOUT_PYPI_SIMPLE_URL]), - ): + def get_best_download_url(self, repos=tuple()): """ - Return the best download URL for this distribution where best means that - PyPI is better and our selfhosted repo URLs are second. - If none is found, return a synthetic remote URL. + Return the best download URL for this distribution where best means this + is the first URL found for this distribution found in the list of + ``repos``. + + If none is found, return a synthetic PyPI remote URL. """ - for index_url in index_urls: - pypi_package = get_pypi_package( - name=self.normalized_name, - version=self.version, - index_url=index_url, - ) - if pypi_package: - if isinstance(pypi_package, tuple): - raise Exception("############", repr(pypi_package)) - try: - pypi_url = pypi_package.get_url_for_filename(self.filename) - except Exception as e: - raise Exception(repr(pypi_package)) from e - if pypi_url: - return pypi_url + + if not repos: + repos = DEFAULT_PYPI_REPOS + + for repo in repos: + package = repo.get_package_version(name=self.name, version=self.version) + if not package: + if TRACE: + print( + f" get_best_download_url: {self.name}=={self.version} " + f"not found in {repo.index_url}" + ) + continue + pypi_url = package.get_url_for_filename(self.filename) + if pypi_url: + return pypi_url + else: + if TRACE: + print( + f" get_best_download_url: {self.filename} not found in {repo.index_url}" + ) def download(self, dest_dir=THIRDPARTY_DIR): """ @@ -567,16 +556,17 @@ def download(self, dest_dir=THIRDPARTY_DIR): Return the fetched filename. """ assert self.filename - if TRACE: + if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", self.filename, ) - fetch_and_save_path_or_url( - filename=self.filename, - dest_dir=dest_dir, + # FIXME: + fetch_and_save( path_or_url=self.path_or_url, + dest_dir=dest_dir, + filename=self.filename, as_text=False, ) return self.filename @@ -601,7 +591,7 @@ def notice_download_url(self): def from_path_or_url(cls, path_or_url): """ Return a distribution built from the data found in the filename of a - `path_or_url` string. Raise an exception if this is not a valid + ``path_or_url`` string. Raise an exception if this is not a valid filename. """ filename = os.path.basename(path_or_url.strip("/")) @@ -632,47 +622,6 @@ def from_filename(cls, filename): clazz = cls.get_dist_class(filename) return clazz.from_filename(filename) - def purl_identifiers(self, skinny=False): - """ - Return a mapping of non-empty identifier name/values for the purl - fields. If skinny is True, only inlucde type, namespace and name. - """ - identifiers = dict( - type=self.type, - namespace=self.namespace, - name=self.name, - ) - - if not skinny: - identifiers.update( - version=self.version, - subpath=self.subpath, - qualifiers=self.qualifiers, - ) - - return {k: v for k, v in sorted(identifiers.items()) if v} - - def identifiers(self, purl_as_fields=True): - """ - Return a mapping of non-empty identifier name/values. - Return each purl fields separately if purl_as_fields is True. - Otherwise return a package_url string for the purl. 
- """ - if purl_as_fields: - identifiers = self.purl_identifiers() - else: - identifiers = dict(package_url=self.package_url) - - identifiers.update( - download_url=self.download_url, - filename=self.filename, - md5=self.md5, - sha1=self.sha1, - package_url=self.package_url, - ) - - return {k: v for k, v in sorted(identifiers.items()) if v} - def has_key_metadata(self): """ Return True if this distribution has key metadata required for basic attribution. @@ -802,7 +751,7 @@ def load_remote_about_data(self): NOTICE file if any. Return True if the data was updated. """ try: - about_text = fetch_content_from_path_or_url_through_cache( + about_text = CACHE.get( path_or_url=self.about_download_url, as_text=True, ) @@ -816,7 +765,7 @@ def load_remote_about_data(self): notice_file = about_data.pop("notice_file", None) if notice_file: try: - notice_text = fetch_content_from_path_or_url_through_cache( + notice_text = CACHE.get( path_or_url=self.notice_download_url, as_text=True, ) @@ -867,12 +816,12 @@ def get_license_keys(self): return ["unknown"] return keys - def fetch_license_files(self, dest_dir=THIRDPARTY_DIR): + def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): """ Fetch license files if missing in `dest_dir`. Return True if license files were fetched. """ - urls = LinksRepository.from_url().links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -887,10 +836,10 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR): # try remotely first lic_url = get_license_link_for_filename(filename=filename, urls=urls) - fetch_and_save_path_or_url( - filename=filename, - dest_dir=dest_dir, + fetch_and_save( path_or_url=lic_url, + dest_dir=dest_dir, + filename=filename, as_text=True, ) if TRACE: @@ -900,10 +849,10 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR): try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" - fetch_and_save_path_or_url( - filename=filename, - dest_dir=dest_dir, + fetch_and_save( path_or_url=lic_url, + dest_dir=dest_dir, + filename=filename, as_text=True, ) if TRACE: @@ -961,7 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): declared_license = [raw_data["License"]] + [ c for c in classifiers if c.startswith("License") ] - license_expression = compute_normalized_license_expression(declared_license) + license_expression = get_license_expression(declared_license) other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] @@ -1062,6 +1011,84 @@ class InvalidDistributionFilename(Exception): pass +def get_sdist_name_ver_ext(filename): + """ + Return a (name, version, extension) if filename is a valid sdist name. Some legacy + binary builds have weird names. Return False otherwise. 
+ + In particular they do not use PEP440 compliant versions and/or mix tags, os + and arch names in tarball names and versions: + + >>> assert get_sdist_name_ver_ext("intbitset-1.3.tar.gz") + >>> assert not get_sdist_name_ver_ext("intbitset-1.3.linux-x86_64.tar.gz") + >>> assert get_sdist_name_ver_ext("intbitset-1.4a.tar.gz") + >>> assert get_sdist_name_ver_ext("intbitset-1.4a.zip") + >>> assert not get_sdist_name_ver_ext("intbitset-2.0.linux-x86_64.tar.gz") + >>> assert get_sdist_name_ver_ext("intbitset-2.0.tar.gz") + >>> assert not get_sdist_name_ver_ext("intbitset-2.1-1.src.rpm") + >>> assert not get_sdist_name_ver_ext("intbitset-2.1-1.x86_64.rpm") + >>> assert not get_sdist_name_ver_ext("intbitset-2.1.linux-x86_64.tar.gz") + >>> assert not get_sdist_name_ver_ext("cffi-1.2.0-1.tar.gz") + >>> assert not get_sdist_name_ver_ext("html5lib-1.0-reupload.tar.gz") + >>> assert not get_sdist_name_ver_ext("selenium-2.0-dev-9429.tar.gz") + >>> assert not get_sdist_name_ver_ext("testfixtures-1.8.0dev-r4464.tar.gz") + """ + name_ver = None + extension = None + + for ext in EXTENSIONS_SDIST: + if filename.endswith(ext): + name_ver, extension, _ = filename.rpartition(ext) + break + + if not extension or not name_ver: + return False + + name, _, version = name_ver.rpartition("-") + + if not name or not version: + return False + + # weird version + if any( + w in version + for w in ( + "x86_64", + "i386", + ) + ): + return False + + # all char versions + if version.isalpha(): + return False + + # non-pep 440 version + if "-" in version: + return False + + # single version + if version.isdigit() and len(version) == 1: + return False + + # r1 version + if len(version) == 2 and version[0] == "r" and version[1].isdigit(): + return False + + # dotless version (but calver is OK) + if "." not in version and len(version) < 3: + return False + + # version with dashes selenium-2.0-dev-9429.tar.gz + if name.endswith(("dev",)) and "." not in version: + return False + # version pre or post, old legacy + if version.startswith(("beta", "rc", "pre", "post", "final")): + return False + + return name, version, extension + + @attr.attributes class Sdist(Distribution): @@ -1078,21 +1105,11 @@ def from_filename(cls, filename): Return a Sdist object built from a filename. Raise an exception if this is not a valid sdist filename """ - name_ver = None - extension = None - - for ext in EXTENSIONS_SDIST: - if filename.endswith(ext): - name_ver, extension, _ = filename.rpartition(ext) - break - - if not extension or not name_ver: + name_ver_ext = get_sdist_name_ver_ext(filename) + if not name_ver_ext: raise InvalidDistributionFilename(filename) - name, _, version = name_ver.rpartition("-") - - if not name or not version: - raise InvalidDistributionFilename(filename) + name, version, extension = name_ver_ext return cls( type="pypi", @@ -1280,8 +1297,8 @@ def is_pure_wheel(filename): @attr.attributes class PypiPackage(NameVer): """ - A Python package with its "distributions", e.g. wheels and source - distribution , ABOUT files and licenses or notices. + A Python package contains one or more wheels and one source distribution + from a repository. 
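+
+    For example, a sketch reusing the w1, w2 and sd Wheel and Sdist objects
+    from the package_from_dists doctest below::
+
+        package = PypiPackage.package_from_dists(dists=[w1, w2, sd])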
""" sdist = attr.ib( @@ -1298,16 +1315,6 @@ class PypiPackage(NameVer): metadata=dict(help="List of Wheel for this package"), ) - @property - def specifier(self): - """ - A requirement specifier for this package - """ - if self.version: - return f"{self.name}=={self.version}" - else: - return self.name - def get_supported_wheels(self, environment, verbose=TRACE_ULTRA_DEEP): """ Yield all the Wheel of this package supported and compatible with the @@ -1328,10 +1335,10 @@ def package_from_dists(cls, dists): For example: >>> w1 = Wheel(name='bitarray', version='0.8.1', build='', - ... python_versions=['cp36'], abis=['cp36m'], + ... python_versions=['cp38'], abis=['cp38m'], ... platforms=['linux_x86_64']) >>> w2 = Wheel(name='bitarray', version='0.8.1', build='', - ... python_versions=['cp36'], abis=['cp36m'], + ... python_versions=['cp38'], abis=['cp38m'], ... platforms=['macosx_10_9_x86_64', 'macosx_10_10_x86_64']) >>> sd = Sdist(name='bitarray', version='0.8.1') >>> package = PypiPackage.package_from_dists(dists=[w1, w2, sd]) @@ -1389,17 +1396,20 @@ def packages_from_dir(cls, directory): Yield PypiPackages built from files found in at directory path. """ base = os.path.abspath(directory) + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] + if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) - return cls.packages_from_many_paths_or_urls(paths) + return PypiPackage.packages_from_many_paths_or_urls(paths) @classmethod def packages_from_many_paths_or_urls(cls, paths_or_urls): """ Yield PypiPackages built from a list of paths or URLs. + These are sorted by name and then by version from oldest to newest. """ - dists = cls.dists_from_paths_or_urls(paths_or_urls) + dists = PypiPackage.dists_from_paths_or_urls(paths_or_urls) if TRACE_ULTRA_DEEP: print("packages_from_many_paths_or_urls: dists:", dists) @@ -1414,54 +1424,11 @@ def packages_from_many_paths_or_urls(cls, paths_or_urls): print("packages_from_many_paths_or_urls", package) yield package - @classmethod - def get_versions(cls, name, packages): - """ - Return a subset list of package versions from a list of `packages` that - match PypiPackage `name`. - The list is sorted by version from oldest to most recent. - """ - norm_name = NameVer.normalize_name(name) - versions = [p for p in packages if p.normalized_name == norm_name] - return cls.sorted(versions) - - @classmethod - def get_latest_version(cls, name, packages): - """ - Return the latest version of PypiPackage `name` from a list of `packages`. - """ - versions = cls.get_versions(name, packages) - if not versions: - return - return versions[-1] - - @classmethod - def get_name_version(cls, name, version, packages): - """ - Return the PypiPackage with `name` and `version` from a list of `packages` - or None if it is not found. - If `version` is None, return the latest version found. - """ - if TRACE_ULTRA_DEEP: - print("get_name_version:", name, version, packages) - if not version: - return cls.get_latest_version(name, packages) - - nvs = [p for p in cls.get_versions(name, packages) if p.version == version] - - if not nvs: - return name, version - - if len(nvs) == 1: - return nvs[0] - - raise Exception(f"More than one PypiPackage with {name}=={version}") - @classmethod def dists_from_paths_or_urls(cls, paths_or_urls): """ Return a list of Distribution given a list of - `paths_or_urls` to wheels or source distributions. + ``paths_or_urls`` to wheels or source distributions. 
         Each Distribution receives two extra attributes:
             - the path_or_url it was created from
@@ -1473,29 +1440,24 @@ def dists_from_paths_or_urls(cls, paths_or_urls):
         ...     bitarray-0.8.1-cp36-cp36m-macosx_10_9_x86_64.macosx_10_10_x86_64.whl
         ...     bitarray-0.8.1-cp36-cp36m-win_amd64.whl
         ...     https://example.com/bar/bitarray-0.8.1.tar.gz
-        ...     bitarray-0.8.1.tar.gz.ABOUT bit.LICENSE'''.split()
-        >>> result = list(PypiPackage.dists_from_paths_or_urls(paths_or_urls))
+        ...     bitarray-0.8.1.tar.gz.ABOUT
+        ...     bit.LICENSE'''.split()
+        >>> results = list(PypiPackage.dists_from_paths_or_urls(paths_or_urls))
         >>> for r in results:
-        ...     r.filename = ''
-        ...     r.path_or_url = ''
-        >>> expected = [
-        ...     Wheel(name='bitarray', version='0.8.1', build='',
-        ...         python_versions=['cp36'], abis=['cp36m'],
-        ...         platforms=['linux_x86_64']),
-        ...     Wheel(name='bitarray', version='0.8.1', build='',
-        ...         python_versions=['cp36'], abis=['cp36m'],
-        ...         platforms=['macosx_10_9_x86_64', 'macosx_10_10_x86_64']),
-        ...     Wheel(name='bitarray', version='0.8.1', build='',
-        ...         python_versions=['cp36'], abis=['cp36m'],
-        ...         platforms=['win_amd64']),
-        ...     Sdist(name='bitarray', version='0.8.1'),
-        ...     Sdist(name='bitarray', version='0.8.1')
-        ... ]
-        >>> assert expected == result
+        ...     print(r.__class__.__name__, r.name, r.version)
+        ...     if isinstance(r, Wheel):
+        ...        print("  ", ", ".join(r.python_versions), ", ".join(r.platforms))
+        Wheel bitarray 0.8.1
+           cp36 linux_x86_64
+        Wheel bitarray 0.8.1
+           cp36 macosx_10_9_x86_64, macosx_10_10_x86_64
+        Wheel bitarray 0.8.1
+           cp36 win_amd64
+        Sdist bitarray 0.8.1
         """
         dists = []
-        if TRACE_DEEP:
-            print("  ###paths_or_urls:", paths_or_urls)
+        if TRACE_ULTRA_DEEP:
+            print("  ###paths_or_urls:", paths_or_urls)
         installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)]
         for path_or_url in installable:
             try:
@@ -1503,7 +1465,14 @@ def dists_from_paths_or_urls(cls, paths_or_urls):
                 dists.append(dist)
                 if TRACE_DEEP:
                     print(
-                        " ===> dists_from_paths_or_urls:", dist, "with URL:", dist.download_url
+                        " ===> dists_from_paths_or_urls:",
+                        dist,
+                        "\n     ",
+                        "with URL:",
+                        dist.download_url,
+                        "\n     ",
+                        "from URL:",
+                        path_or_url,
                     )
             except InvalidDistributionFilename:
                 if TRACE_DEEP:
@@ -1639,98 +1608,108 @@ class PypiSimpleRepository:
         metadata=dict(help="Base PyPI simple URL for this index."),
     )
 
-    packages_by_normalized_name = attr.ib(
+    # we keep a nested mapping of PypiPackage that has this shape:
+    # {name: {version: PypiPackage, version: PypiPackage, etc}}
+    # the inner versions mapping is sorted by version from oldest to newest
+
+    packages = attr.ib(
         type=dict,
-        default=attr.Factory(lambda: defaultdict(list)),
-        metadata=dict(help="Mapping of {package name: [package objects]} available in this repo"),
+        default=attr.Factory(lambda: defaultdict(dict)),
+        metadata=dict(
+            help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc}} available in this repo"
+        ),
     )
 
-    packages_by_normalized_name_version = attr.ib(
-        type=dict,
-        default=attr.Factory(dict),
-        metadata=dict(help="Mapping of {(name, version): package object} available in this repo"),
+    fetched_package_normalized_names = attr.ib(
+        type=set,
+        default=attr.Factory(set),
+        metadata=dict(help="A set of already fetched package normalized names."),
    )
 
-    def get_versions(self, name):
+    use_cached_index = attr.ib(
+        type=bool,
+        default=False,
+        metadata=dict(
+            help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache."
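+            # Note: cached simple index pages can go stale; the default of
+            # False re-fetches each page and refreshes the on-disk cache
+            # (fetch_links below passes force=not self.use_cached_index).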
+        ),
+    )
+
+    def _get_package_versions_map(self, name):
         """
-        Return a list of all available PypiPackage version for this package name.
-        The list may be empty.
+        Return a mapping of all available PypiPackage versions for this package
+        name. The mapping may be empty. It is ordered by version from oldest to
+        newest.
         """
-        name = name and NameVer.normalize_name(name)
-        self._populate_links_and_packages(name)
-        return self.packages_by_normalized_name.get(name, [])
+        assert name
+        normalized_name = NameVer.normalize_name(name)
+        versions = self.packages[normalized_name]
+        if not versions and normalized_name not in self.fetched_package_normalized_names:
+            self.fetched_package_normalized_names.add(normalized_name)
+            try:
+                links = self.fetch_links(normalized_name=normalized_name)
+                # note that this is sorted so the mapping is also sorted
+                versions = {
+                    package.version: package
+                    for package in PypiPackage.packages_from_many_paths_or_urls(paths_or_urls=links)
+                }
+                self.packages[normalized_name] = versions
+            except RemoteNotFetchedException as e:
+                if TRACE:
+                    print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}")
 
-    def get_latest_version(self, name):
+        if not versions and TRACE:
+            print(f"WARNING: package {name} not found in repo: {self.index_url}")
+
+        return versions
+
+    def get_package_versions(self, name):
         """
-        Return the latest PypiPackage version for this package name or None.
+        Return a mapping of all available PypiPackage versions as {version:
+        package} for this package name. The mapping may be empty but not None.
+        It is sorted by version from oldest to newest.
         """
-        versions = self.get_versions(name)
-        return PypiPackage.get_latest_version(name, versions)
+        return dict(self._get_package_versions_map(name))
 
-    def get_package(self, name, version):
+    def get_package_version(self, name, version=None):
         """
         Return the PypiPackage with name and version or None.
+        Return the latest PypiPackage version if version is None.
         """
-        versions = self.get_versions(name)
-        if TRACE_DEEP:
-            print("PypiPackage.get_package:versions:", versions)
-        return PypiPackage.get_name_version(name, version, versions)
+        if not version:
+            versions = list(self._get_package_versions_map(name).values())
+            # return the latest version
+            return versions and versions[-1]
+        else:
+            return self._get_package_versions_map(name).get(version)
 
-    def _fetch_links(self, name, _LINKS={}):
+    def fetch_links(self, normalized_name):
         """
         Return a list of download link URLs found in a PyPI simple index for
         package name using the `index_url` of this repository.
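+
+        For example, with the default PyPI index_url, the links for a
+        hypothetical "attrs" package would be fetched from
+        https://pypi.org/simple/attrs (per the package_url built below).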
""" - name = name and NameVer.normalize_name(name) - index_url = self.index_url - - name = name and NameVer.normalize_name(name) - index_url = index_url.strip("/") - index_url = f"{index_url}/{name}" - - if TRACE_DEEP: - print( - f" Finding links for {name!r} from PyPI index: {index_url} : cached?:", - index_url in _LINKS, - ) - - if index_url not in _LINKS: - text = fetch_content_from_path_or_url_through_cache(path_or_url=index_url, as_text=True) - links = collect_urls(text) - # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] - links = [url for url, _, _sha256 in links] - _LINKS[index_url] = [l for l in links if l.endswith(EXTENSIONS)] - - links = _LINKS[index_url] - if TRACE_DEEP: - print(f" Found links {links!r}") + package_url = f"{self.index_url}/{normalized_name}" + text = CACHE.get( + path_or_url=package_url, + as_text=True, + force=not self.use_cached_index, + ) + links = collect_urls(text) + # TODO: keep sha256 + links = [l.partition("#sha256=") for l in links] + links = [url for url, _, _sha256 in links] return links - def _populate_links_and_packages(self, name): - name = name and NameVer.normalize_name(name) - - if TRACE_DEEP: - print("PypiPackage._populate_links_and_packages:name:", name) - - links = self._fetch_links(name) - packages = list(PypiPackage.packages_from_many_paths_or_urls(paths_or_urls=links)) - if TRACE_DEEP: - print("PypiPackage._populate_links_and_packages:packages:", packages) - - self.packages_by_normalized_name[name] = packages - - for p in packages: - name = name and NameVer.normalize_name(p.name) - self.packages_by_normalized_name_version[(name, p.version)] = p +PYPI_PUBLIC_REPO = PypiSimpleRepository(index_url=PYPI_SIMPLE_URL) +PYPI_SELFHOSTED_REPO = PypiSimpleRepository(index_url=ABOUT_PYPI_SIMPLE_URL) +DEFAULT_PYPI_REPOS = PYPI_PUBLIC_REPO, PYPI_SELFHOSTED_REPO +DEFAULT_PYPI_REPOS_BY_URL = {r.index_url: r for r in DEFAULT_PYPI_REPOS} @attr.attributes class LinksRepository: """ - Represents a simple links repository such an HTTP directory listing or a - page with links. + Represents a simple links repository such an HTTP directory listing or an + HTML page with links. """ url = attr.ib( @@ -1745,14 +1724,25 @@ class LinksRepository: metadata=dict(help="List of links available in this repo"), ) + use_cached_index = attr.ib( + type=bool, + default=False, + metadata=dict( + help="If True, use any existing on-disk cached index files. Otherwise, fetch and cache." 
+        ),
+    )
+
     def __attrs_post_init__(self):
         if not self.links:
             self.links = self.find_links()
 
-    def find_links(self):
+    def find_links(self, _CACHE=[]):
         """
         Return a list of link URLs found in the HTML page at `self.url`
         """
+        if _CACHE:
+            return _CACHE
+
         links_url = self.url
         if TRACE_DEEP:
             print(f"Finding links from: {links_url}")
@@ -1764,9 +1754,10 @@ def find_links(self):
         if TRACE_DEEP:
             print(f"Base URL {base_url}")
 
-        text = fetch_content_from_path_or_url_through_cache(
+        text = CACHE.get(
             path_or_url=links_url,
             as_text=True,
+            force=not self.use_cached_index,
         )
 
         links = []
@@ -1795,12 +1786,13 @@ def find_links(self):
 
         if TRACE:
             print(f"Found {len(links)} links at {links_url}")
+        _CACHE.extend(links)
         return links
 
     @classmethod
-    def from_url(cls, url=ABOUT_BASE_URL, _LINKS_REPO={}):
+    def from_url(cls, url=ABOUT_BASE_URL, _LINKS_REPO={}, use_cached_index=False):
         if url not in _LINKS_REPO:
-            _LINKS_REPO[url] = cls(url=url)
+            _LINKS_REPO[url] = cls(url=url, use_cached_index=use_cached_index)
         return _LINKS_REPO[url]
 
 
@@ -1818,26 +1810,6 @@ def get_local_packages(directory=THIRDPARTY_DIR):
     return list(PypiPackage.packages_from_dir(directory=directory))
 
 
-def get_pypi_repo(index_url, _PYPI_REPO={}):
-    if index_url not in _PYPI_REPO:
-        _PYPI_REPO[index_url] = PypiSimpleRepository(index_url=index_url)
-    return _PYPI_REPO[index_url]
-
-
-def get_pypi_package(name, version, index_url, verbose=TRACE_DEEP):
-    """
-    Return a PypiPackage or None.
-    """
-    try:
-        package = get_pypi_repo(index_url).get_package(name, version)
-        if verbose:
-            print(f"    get_pypi_package: {name} @ {version} info from {index_url}: {package}")
-        return package
-
-    except RemoteNotFetchedException as e:
-        print(f"Failed to fetch PyPI package {name} @ {version} info from {index_url}: {e}")
-
-
 ################################################################################
 #
 # Basic file and URL-based operations using a persistent file-based Cache
@@ -1857,34 +1829,40 @@ class Cache:
     def __attrs_post_init__(self):
         os.makedirs(self.directory, exist_ok=True)
 
-    def clear(self):
-        shutil.rmtree(self.directory)
-
-    def get(self, path_or_url, as_text=True):
+    def get(self, path_or_url, as_text=True, force=False):
         """
-        Get a file from a `path_or_url` through the cache.
-        `path_or_url` can be a path or a URL to a file.
+        Return the content fetched from a ``path_or_url`` through the cache.
+        Raise an Exception on errors. Treat the content as text if as_text is
+        True, otherwise treat it as binary. `path_or_url` can be a path or a
+        URL to a file.
        """
         cache_key = quote_plus(path_or_url.strip("/"))
         cached = os.path.join(self.directory, cache_key)
 
-        if not os.path.exists(cached):
+        if force or not os.path.exists(cached):
+            if TRACE_DEEP:
+                print(f"  FILE CACHE MISS: {path_or_url}")
             content = get_file_content(path_or_url=path_or_url, as_text=as_text)
             wmode = "w" if as_text else "wb"
             with open(cached, wmode) as fo:
                 fo.write(content)
             return content
         else:
+            if TRACE_DEEP:
+                print(f"  FILE CACHE HIT: {path_or_url}")
             return get_local_file_content(path=cached, as_text=as_text)
 
 
+CACHE = Cache()
+
+
 def get_file_content(path_or_url, as_text=True):
     """
     Fetch and return the content at `path_or_url` from either a local path or
     a remote URL. Return the content as bytes is `as_text` is False.
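+
+    For example (an illustrative call)::
+
+        text = get_file_content(path_or_url="https://pypi.org/simple/attrs", as_text=True)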
""" if path_or_url.startswith("https://"): - if TRACE: + if TRACE_DEEP: print(f"Fetching: {path_or_url}") _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content @@ -1936,7 +1914,7 @@ def get_remote_file_content( # using a GET with stream=True ensure we get the the final header from # several redirects and that we can ignore content there. A HEAD request may # not get us this last header - print(f" DOWNLOADING {url}") + print(f" DOWNLOADING: {url}") with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: status = response.status_code if status != requests.codes.ok: # NOQA @@ -1960,35 +1938,19 @@ def get_remote_file_content( return response.headers, response.text if as_text else response.content -def fetch_content_from_path_or_url_through_cache( +def fetch_and_save( path_or_url, - as_text=True, - cache=Cache(), -): - """ - Return the content from fetching at path or URL. Raise an Exception on - errors. Treats the content as text if as_text is True otherwise as treat as - binary. Use the provided file cache. This is the main entry for using the - cache. - - Note: the `cache` argument is a global, though it does not really matter - since it does not hold any state which is only kept on disk. - """ - return cache.get(path_or_url=path_or_url, as_text=as_text) - - -def fetch_and_save_path_or_url( - filename, dest_dir, - path_or_url, + filename, as_text=True, ): """ - Return the content from fetching the `filename` file name at URL or path - and save to `dest_dir`. Raise an Exception on errors. Treats the content as - text if as_text is True otherwise as treat as binary. + Fetch content at ``path_or_url`` URL or path and save this to + ``dest_dir/filername``. Return the fetched content. Raise an Exception on + errors. Treats the content as text if as_text is True otherwise as treat as + binary. """ - content = fetch_content_from_path_or_url_through_cache( + content = CACHE.get( path_or_url=path_or_url, as_text=as_text, ) @@ -2000,45 +1962,9 @@ def fetch_and_save_path_or_url( ################################################################################ -# Requirements processing -################################################################################ - - -def get_required_remote_packages( - requirements_file="requirements.txt", - index_url=PYPI_SIMPLE_URL, -): - """ - Yield tuple of (name, version, PypiPackage) for packages listed in the - `requirements_file` requirements file and found in the PyPI index - ``index_url`` URL. - """ - required_name_versions = load_requirements(requirements_file=requirements_file) - return get_required_packages(required_name_versions=required_name_versions, index_url=index_url) - - -def get_required_packages( - required_name_versions, - index_url=PYPI_SIMPLE_URL, -): - """ - Yield tuple of (name, version) or a PypiPackage for package name/version - listed in the ``required_name_versions`` list and found in the PyPI index - ``index_url`` URL. 
- """ - if TRACE: - print("get_required_packages", index_url) - - repo = get_pypi_repo(index_url=index_url) - - for name, version in required_name_versions: - if TRACE: - print(" get_required_packages: name:", name, "version:", version) - yield repo.get_package(name, version) - - -################################################################################ +# # Functions to update or fetch ABOUT and license files +# ################################################################################ @@ -2059,7 +1985,7 @@ def clean_about_files( local_dist.save_about_and_notice_files(dest_dir) -def fetch_abouts_and_licenses(dest_dir=THIRDPARTY_DIR): +def fetch_abouts_and_licenses(dest_dir=THIRDPARTY_DIR, use_cached_index=False): """ Given a thirdparty dir, add missing ABOUT. LICENSE and NOTICE files using best efforts: @@ -2069,6 +1995,8 @@ def fetch_abouts_and_licenses(dest_dir=THIRDPARTY_DIR): - derive from existing distribution with same name and latest version that would have such ABOUT file - extract ABOUT file data from distributions PKGINFO or METADATA files + + Use available existing on-disk cached index if use_cached_index is True. """ def get_other_dists(_package, _dist): @@ -2078,7 +2006,6 @@ def get_other_dists(_package, _dist): """ return [d for d in _package.get_distributions() if d != _dist] - selfhosted_repo = get_pypi_repo(index_url=ABOUT_PYPI_SIMPLE_URL) local_packages = get_local_packages(directory=dest_dir) packages_by_name = defaultdict(list) for local_package in local_packages: @@ -2094,7 +2021,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2106,7 +2033,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2115,7 +2042,6 @@ def get_other_dists(_package, _dist): for p in packages_by_name[local_package.name] if p.version != local_package.version ] - other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: latest_local_dists = list(other_local_version.get_distributions()) @@ -2133,7 +2059,9 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index + ) continue # lets try to fetch remotely @@ -2142,14 +2070,16 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version + # 
and that is in our self hosted repo + lpv = local_package.version + lpn = local_package.name + other_remote_packages = [ - p - for p in selfhosted_repo.get_versions(local_package.name) - if p.version != local_package.version + p for v, p in PYPI_SELFHOSTED_REPO.get_package_versions(lpn).items() if v != lpv ] latest_version = other_remote_packages and other_remote_packages[-1] @@ -2169,7 +2099,9 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files(dest_dir=dest_dir) + local_dist.fetch_license_files( + dest_dir=dest_dir, use_cached_index=use_cached_index + ) continue # try to get data from pkginfo (no license though) @@ -2179,7 +2111,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files(dest_dir) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2292,65 +2224,16 @@ def download_wheels_with_pip( return sorted(downloaded), error -def build_wheels_locally_if_pure_python( - requirements_specifier, - with_deps=False, - verbose=False, - dest_dir=THIRDPARTY_DIR, -): - """ - Given pip `requirements_specifier` string (such as package names or as - name==version), build the corresponding binary wheel(s) locally. - - If all these are "pure" Python wheels that run on all Python 3 versions and - operating systems, copy them back in `dest_dir` if they do not exists there - - Return a tuple of (True if all wheels are "pure", list of built wheel file names) - """ - deps = [] if with_deps else ["--no-deps"] - verbose = ["--verbose"] if verbose else [] - - wheel_dir = tempfile.mkdtemp(prefix="scancode-release-wheels-local-") - cli_args = ( - [ - "pip", - "wheel", - "--wheel-dir", - wheel_dir, - ] - + deps - + verbose - + [requirements_specifier] - ) - - print(f"Building local wheels for: {requirements_specifier}") - print(f"Using command:", " ".join(cli_args)) - call(cli_args) - - built = os.listdir(wheel_dir) - if not built: - return [] - - all_pure = all(is_pure_wheel(bwfn) for bwfn in built) - - if not all_pure: - print(f" Some wheels are not pure") - - print(f" Copying local wheels") - pure_built = [] - for bwfn in built: - owfn = os.path.join(dest_dir, bwfn) - if not os.path.exists(owfn): - nwfn = os.path.join(wheel_dir, bwfn) - fileutils.copyfile(nwfn, owfn) - pure_built.append(bwfn) - print(f" Built local wheel: {bwfn}") - return all_pure, pure_built +################################################################################ +# +# Functions to check for problems +# +################################################################################ def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2389,16 +2272,16 @@ def find_problems( check_about(dest_dir=dest_dir) -def compute_normalized_license_expression(declared_licenses): +def get_license_expression(declared_licenses): """ Return a normalized license expression or None. 
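+
+    For example, a declared license list such as ["MIT"] would typically
+    normalize to the "mit" license expression when ScanCode is installed (an
+    illustrative value; the fallback below cleans and joins the declared
+    licenses when ScanCode is not installed).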
""" if not declared_licenses: return try: - from packagedcode import pypi + from packagedcode.licensing import get_only_expression_from_extracted_license - return pypi.compute_normalized_license(declared_licenses) + return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses lics = [python_safe_name(l).lower() for l in declared_licenses] diff --git a/setup.cfg b/setup.cfg index 1b726abd..8bb877e6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -44,6 +44,7 @@ license_files = AUTHORS.rst CHANGELOG.rst README.rst + CODE_OF_CONDUCT.rst [options] package_dir = @@ -71,12 +72,16 @@ where = src testing = pytest >= 6, != 7.0.0 pytest-xdist >= 2 - aboutcode-toolkit >= 6.0.0 + aboutcode-toolkit >= 7.0.2 + pycodestyle >= 2.8.0 + twine black commoncode isort>=5.10.1 docs = - Sphinx >= 3.3.1 - sphinx-rtd-theme >= 0.5.0 - doc8 >= 0.8.1 + Sphinx>=5.0.2 + sphinx-rtd-theme>=1.0.0 + sphinx-reredirects >= 0.1.2 + doc8>=0.11.2 +