From 4fddc1234408dc032dbd0eae848f9086b04456b3 Mon Sep 17 00:00:00 2001
From: Max Horn
Date: Tue, 30 Jan 2024 23:22:05 +0100
Subject: [PATCH 1/5] New script generating release notes (#5613)

* dev/releases/utils.py: move GitHub specific code to new file so that
  one can use the utils code without having the "github" python package
  installed.
* Rename release_notes.readme.md -> README.release_notes.md
* use `from utils import error, notice`
* New script for generating release notes
* Tweak dev/releases/create_stable_branch.py
---
 dev/releases/.gitignore                       |   3 +
 ...otes.readme.md => README.release_notes.md} |   0
 dev/releases/create_stable_branch.py          | 132 +++++---
 dev/releases/generate_release_notes.py        | 313 ------------------
 dev/releases/make_github_release.py           |  29 +-
 dev/releases/release_notes.py                 | 244 ++++++++++++++
 dev/releases/update_website.py                |  51 +--
 dev/releases/utils.py                         |  65 ----
 dev/releases/utils_github.py                  |  81 +++++
 9 files changed, 450 insertions(+), 468 deletions(-)
 create mode 100644 dev/releases/.gitignore
 rename dev/releases/{release_notes.readme.md => README.release_notes.md} (100%)
 delete mode 100755 dev/releases/generate_release_notes.py
 create mode 100755 dev/releases/release_notes.py
 create mode 100644 dev/releases/utils_github.py

diff --git a/dev/releases/.gitignore b/dev/releases/.gitignore
new file mode 100644
index 0000000000..65313761be
--- /dev/null
+++ b/dev/releases/.gitignore
@@ -0,0 +1,3 @@
+tmp/
+releasenotes_*.md
+unsorted_PRs_*.md
diff --git a/dev/releases/release_notes.readme.md b/dev/releases/README.release_notes.md
similarity index 100%
rename from dev/releases/release_notes.readme.md
rename to dev/releases/README.release_notes.md
diff --git a/dev/releases/create_stable_branch.py b/dev/releases/create_stable_branch.py
index 78b3b5c7bd..5bede7b51a 100755
--- a/dev/releases/create_stable_branch.py
+++ b/dev/releases/create_stable_branch.py
@@ -14,81 +14,107 @@
 # TODO: implement parts of the steps described in
 #
-from utils import *
+from utils import error, notice, patchfile
+import utils
 import subprocess
+import sys

 # Insist on Python >= 3.6 for f-strings and other goodies
-if sys.version_info < (3,6):
+if sys.version_info < (3, 6):
     error("Python 3.6 or newer is required")

-notice("Checking prerequisites")
-verify_command_available("git")
-verify_git_repo()
-verify_git_clean()
-# TODO: verify that we are on `master`, and that we are up-to-date (`git pull`)

+def usage(name: str) -> None:
+    print(f"Usage: `{name} MINOR` creates the branch `stable-4.MINOR`")
+    sys.exit(1)

-# TODO: verify that `./configure && make` were already run

-gap_minor_version = 12 # TODO: this should be an argument or so?
-gapversion = f"4.{gap_minor_version}"
-nextgapversion = f"4.{gap_minor_version+1}"
-stable_branch = "stable-" + gapversion # TODO: how to specify this? probably have the version as argument?
+def main(gap_minor_version_str: str) -> None:
+    gap_minor_version = int(gap_minor_version_str)
+    gapversion = f"4.{gap_minor_version}"
+    nextgapversion = f"4.{gap_minor_version+1}"
+    stable_branch = "stable-" + gapversion

-# TODO: error out if the branch already exists
+    notice("Checking prerequisites")
+    utils.verify_command_available("git")
+    utils.verify_git_repo()
+    utils.verify_git_clean()

-# TODO: Create a pair of labels for GitHub issues called backport-to-X.Y and backport-to-X.Y-DONE.
+    notice("Switching to master branch")
+    subprocess.run(["git", "switch", "master"], check=True)

-# TODO: Create a GitHub milestone for GAP X.Y.0 release.
+ notice("Ensure branch is up-to-date") + subprocess.run(["git", "pull", "--ff-only"], check=True) -notice(f"Creating branch {stable_branch}") -subprocess.run(["git", "branch", stable_branch], check=True) + # create the new branch now, before we add a commit to master + notice(f"Creating branch {stable_branch}") + subprocess.run(["git", "branch", stable_branch], check=True) - -# list of files which (potentially) are updated -files = [ + # list of files which (potentially) are updated + files = [ "CITATION", "configure.ac", "doc/versiondata", + ] + + notice(f"Updating version to {nextgapversion} on master branch") + for f in files: + notice(" patching " + f) + patchfile(f, gapversion + "dev", nextgapversion + "dev") + + notice("Commit master branch updates") + subprocess.run( + ["git", "commit", "-m", f"Start work on GAP {nextgapversion}", *files], + check=True, + ) + + notice(f"Tag master with v{nextgapversion}dev") + subprocess.run( + [ + "git", + "tag", + "-m", + f"Start work on GAP {nextgapversion}", + f"v{nextgapversion}dev", + ], + check=True, + ) + + notice(f"Switching to {stable_branch} branch") + subprocess.run(["git", "switch", stable_branch], check=True) + + notice("Patching files") + patchfile("Makefile.rules", "PKG_BRANCH = master", "PKG_BRANCH = " + stable_branch) + # adjust the CI and code coverage badges in README.md + patchfile("README.md", "master", stable_branch) + + notice(f"Create start commit for {stable_branch} branch") + files = [ "Makefile.rules", "README.md", - ] - -notice("Updating configure.ac on master branch") -patchfile("configure.ac", r"m4_define\(\[gap_version\],[^\n]+", r"m4_define([gap_version], ["+nextgapversion+"dev])") - -notice("Regenerate some files") -run_with_log(["make", "CITATION", "doc/versiondata"], "make") - -notice("Commit master branch updates") -subprocess.run(["git", "commit", "-m", f"Start work on GAP {nextgapversion}", *files], check=True) - -notice(f"Tag master with v{nextgapversion}dev") -subprocess.run(["git", "tag", "-m", f"Start work on GAP {nextgapversion}", f"v{nextgapversion}dev"], check=True) - -# TODO: push tags/commits? actually, we disabled direct pushes to -# master, so perhaps we should have created the above commit on a pull -# request, and create the tag only after it is merged?!? but then the -# sha changes ... 
so perhaps better is that an admin temporarily -# disables the branch protection rule so they can push -subprocess.run(["git", "push"], check=True) -subprocess.run(["git", "push", "--tags"], check=True) + ] + subprocess.run( + ["git", "commit", "-m", f"Create {stable_branch} branch", *files], check=True + ) + # push to the server + input( + f"Please 'git push master {stable_branch} v{nextgapversion}dev' now (you may have to temporarily change branch protection rules), then press ENTER" + ) -notice(f"Updating {stable_branch} branch") -subprocess.run(["git", "switch", stable_branch], check=True) + input( + f"Please create GitHub labels backport-to-{gapversion} and backport-to-{gapversion}-DONE, then press ENTER" + ) -notice("Patching files") -patchfile("Makefile.rules", r"PKG_BRANCH = master", r"PKG_BRANCH = "+stable_branch) -patchfile("README.md", r"master", r""+stable_branch) + input( + f"Please create a GitHub milestone for GAP {nextgapversion}.0 , then press ENTER" + ) -notice("Regenerate some files") -run_with_log(["make", "CITATION", "doc/versiondata"], "make") -notice(f"Create start commit for {stable_branch} branch") -subprocess.run(["git", "commit", "-m", f"Create {stable_branch} branch", *files], check=True) +if __name__ == "__main__": + # the argument is the new version + if len(sys.argv) != 2: + usage(sys.argv[0]) -# push to the server -# -#subprocess.run(["git", "push", "--set-upstream", "origin", stable_branch], check=True) + main(sys.argv[1]) diff --git a/dev/releases/generate_release_notes.py b/dev/releases/generate_release_notes.py deleted file mode 100755 index 92e6ae236a..0000000000 --- a/dev/releases/generate_release_notes.py +++ /dev/null @@ -1,313 +0,0 @@ -#!/usr/bin/env python3 -############################################################################# -## -## This file is part of GAP, a system for computational discrete algebra. -## -## Copyright of GAP belongs to its developers, whose names are too numerous -## to list here. Please refer to the COPYRIGHT file for details. -## -## SPDX-License-Identifier: GPL-2.0-or-later -## - -# Usage: -# ./generate_release_notes.py minor -# or -# ./generate_release_notes.py major -# -# to specify the type of the release. -# -# Output and description: -# This script is used to automatically generate the release notes based on the labels of -# pull requests that have been merged into the master branch since the starting date -# specified in the `history_start_date` variable below. -# -# For each such pull request (PR), this script extracts from GitHub its title, number and -# labels, using the GitHub API via the PyGithub package (https://github.com/PyGithub/PyGithub). -# To help to track the progress, it will output the number of the currently processed PR. -# For API requests using Basic Authentication or OAuth, you can make up to 5,000 requests -# per hour (https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting). -# As of March 2021 this script consumes about 3400 API calls and runs for about 25 minutes. 
-# This is why, to reduce the number of API calls and minimise the need to retrieve the data,
-# PR details will be stored in the file `prscache.json`, which will then be used to
-# categorise PR following the priority list and discussion from #4257, and output three
-# files:
-# - "releasenotes_*.md" : list of PR by categories for adding to release notes
-# - "unsorted_PRs_*.md" : list of PR that could not be categorised
-# - "releasenotes_*.json" : data for `BrowseReleaseNotes` function by Thomas Breuer (see #4257).
-# where "*" is "minor" or "major" depending on the type of the release.
-#
-# If this script detects the file `prscache.json` it will use it, otherwise it will retrieve
-# new data from GitHub. Thus, if new PR were merged, or there were updates of titles and labels
-# of merged PRs, you need to delete `prscache.json` to enforce updating local data (TODO: make
-# this turned on/off via a command line option in the next version).
-#
-# To find out when a branch was created, use e.g.
-# git show --summary `git merge-base stable-4.11 master`
-#
-
-import sys
-import json
-import os.path
-from github import Github
-from datetime import datetime
-import utils
-
-
-#############################################################################
-#
-# Configuration parameters
-#
-# the earliest date we need to track for the next minor/major releases
-history_start_date = "2019-09-09"
-
-# the date of the last minor release (later, we may need to have more precise timestamp
-# - maybe extracted from the corresponding release tag)
-minor_branch_start_date = "2021-03-03" # next day after the minor release (starts at midnight)
-# question: what if it was merged into master before 4.11.1, but backported after?
-# Hopefully, before publishing 4.11.1 we have backported everything that had to be
-# backported, so this was not the case.
-
-# this version number needed to form labels like "backport-to-4.11-DONE"
-minor_branch_version = "4.11"
-
-# not yet - will make sense after branching the `stable-4.12` branch:
-# major_branch_start_date = "2019-09-09"
-# major_branch_version = "4.12"
-# note that we will have to collate together PRs which are not backported to stable-4.11
-# between `history_start_date` and `major_branch_start_date`, and PRs backported to
-# stable-4.12 after `major_branch_start_date`
-#
-#############################################################################
-
-def usage():
-    print("Usage: `./release-notes.py minor` or `./release-notes.py major`")
-    sys.exit(1)
-
-
-def get_prs(repo,startdate):
-    """Retrieves data for PRs matching selection criteria and puts them in a dictionary,
-    which is then saved in a json file, and also returned for immediate use."""
-    # The output `prs` is a dictionary with keys being PR numbers, and values being
-    # dictionaries with keys "title", "closed_at" and "labels", for example:
-    #
-    # "3355": {
-    #     "title": "Allow packages to use ISO 8601 dates in their PackageInfo.g",
-    #     "closed_at": "2021-02-20T15:44:48",
-    #     "labels": [
-    #         "gapdays2019-spring",
-    #         "gapsingular2019",
-    #         "kind: enhancement",
-    #         "release notes: to be added"
-    #     ]
-    # },
-
-    prs = {}
-    all_pulls = repo.get_pulls(state="closed", sort="created", direction="desc", base="master")
-    # We need to run this over the whole list of PRs. Sorting by creation date descending
-    # is not really helping - could be that some very old PRs are being merged.
-    for pr in all_pulls:
-        print(pr.number, end=" ")
-        # flush stdout immediately, to see progress indicator
-        sys.stdout.flush()
-        if pr.merged:
-            if pr.closed_at > datetime.fromisoformat(startdate):
-                # getting labels will cost further API calls - if the startdate is
-                # too far in the past, that may exceed the API capacity
-                labs = [lab.name for lab in list(pr.get_labels())]
-                prs[pr.number] = { "title" : pr.title,
-                                   "closed_at" : pr.closed_at.isoformat(),
-                                   "labels" : labs }
-#        if len(prs)>5: # for quick testing (maybe later have an optional argument)
-#            break
-    print("\n")
-    with open("prscache.json", "w", encoding="utf-8") as f:
-        json.dump(prs, f, ensure_ascii=False, indent=4)
-    return prs
-
-
-def filter_prs(prs,rel_type):
-    newprs = {}
-
-    if rel_type == "minor":
-
-        # For minor release, list PRs backported to the stable-4.X branch since the previous minor release.
-        for k,v in sorted(prs.items()):
-            if "backport-to-" + minor_branch_version + "-DONE" in v["labels"]:
-                if datetime.fromisoformat(v["closed_at"]) > datetime.fromisoformat(minor_branch_start_date):
-                    newprs[k] = v
-        return newprs
-
-    elif rel_type == "major":
-
-        # For major release, list PRs not backported to the stable-4.X branch.
-        # After branching stable-4.12 this will have to be changed to stop checking
-        # for "backport-to-4.11-DONE" at the date of the branching, and check for
-        # "backport-to-4.12-DONE" after that date
-        for k,v in sorted(prs.items()):
-            if not "backport-to-" + minor_branch_version + "-DONE" in v["labels"]:
-                newprs[k] = v
-        return newprs
-
-    else:
-
-        usage()
-
-
-def pr_to_md(k, title):
-    """Returns markdown string for the PR entry"""
-    return f"- [#{k}](https://github.com/gap-system/gap/pull/{k}) {title}\n"
-
-
-def changes_overview(prs,startdate,rel_type):
-    """Writes files with information for release notes."""
-
-    # Opening files with "w" resets them
-    relnotes_file = open("releasenotes_" + rel_type + ".md", "w")
-    unsorted_file = open("unsorted_PRs_" + rel_type + ".md", "w")
-    relnotes_json = open("releasenotes_" + rel_type + ".json", "w")
-    jsondict = prs.copy()
-
-    # the following is a list of pairs [LABEL, DESCRIPTION]; the first entry is the name of a GitHub label
-    # (be careful to match them precisely), the second is a headline for a section the release notes; any PR with
-    # the given label is put into the corresponding section; each PR is put into only one section, the first one
-    # one from this list it fits in.
-    # See also .
-    prioritylist = [
-        ["release notes: highlight", "Highlights"],
-        ["topic: libgap", "Changes to the `libgap` interface"],
-        ["topic: julia", "Changes to the **Julia** integration"],
-        ["topic: gac", "Changes to the GAP compiler"],
-        ["topic: documentation", "Changes in the documentation"],
-        ["topic: performance", "Performance improvements"],
-        ["topic: HPC-GAP", "Changes to HPC-GAP"],
-        ["kind: new feature", "New features"],
-        ["kind: enhancement", "Improved and extended functionality"],
-        ["kind: removal or deprecation", "Removed or obsolete functionality"],
-        ["kind: bug: wrong result", "Fixed bugs that could lead to incorrect results"],
-        ["kind: bug: crash", "Fixed bugs that could lead to crashes"],
-        ["kind: bug: unexpected error", "Fixed bugs that could lead to break loops"],
-        ["kind: bug", "Other fixed bugs"],
-    ]
-
-    # Could also introduce some consistency checks here for wrong combinations of labels
-
-    # Drop PRs not needed for release notes
-    removelist = []
-    for k in prs:
-        if "release notes: not needed" in prs[k]["labels"]:
-            removelist.append(k)
-    for item in removelist:
-        del prs[item]
-        del jsondict[item]
-
-    # Report PRs that have to be updated before inclusion into release notes.
-    unsorted_file.write("### " + "release notes: to be added" + "\n\n")
-    unsorted_file.write("If there are any PRs listed below, check their title and labels.\n")
-    unsorted_file.write("When done, change their label to \"release notes: use title\".\n\n")
-    removelist = []
-    for k in prs:
-        if "release notes: to be added" in prs[k]["labels"]:
-            unsorted_file.write(pr_to_md(k, prs[k]["title"]))
-            removelist.append(k)
-    for item in removelist:
-        del prs[item]
-    unsorted_file.write("\n")
-
-    # Report PRs that have neither "to be added" nor "added" or "use title" label
-    unsorted_file.write("### Uncategorized PR" + "\n\n")
-    unsorted_file.write("If there are any PRs listed below, either apply the same steps\n")
-    unsorted_file.write("as above, or change their label to \"release notes: not needed\".\n\n")
-    removelist = []
-    for k in prs:
-        # we need to use both old "release notes: added" label and
-        # the newly introduced in "release notes: use title" label
-        # since both label may appear in GAP 4.12.0 changes overview
-        if not ("release notes: added" in prs[k]["labels"] or "release notes: use title" in prs[k]["labels"]):
-            unsorted_file.write(pr_to_md(k, prs[k]["title"]))
-            removelist.append(k)
-    for item in removelist:
-        del prs[item]
-    unsorted_file.close()
-
-    # All remaining PRs are to be included in the release notes
-
-    relnotes_file.write("## Release Notes \n\n")
-
-    for priorityobject in prioritylist:
-        matches = [k for k in prs if priorityobject[0] in prs[k]["labels"]]
-        if len(matches) == 0:
-            continue
-        relnotes_file.write("### " + priorityobject[1] + "\n\n")
-        for k in matches:
-            relnotes_file.write(pr_to_md(k, prs[k]["title"]))
-            del prs[k]
-        relnotes_file.write("\n")
-
-    # The remaining PRs have no "kind" or "topic" label from the priority list
-    # (may have other "kind" or "topic" label outside the priority list).
-    # Check their list in the release notes, and adjust labels if appropriate.
-    if len(prs) > 0:
-        relnotes_file.write("### Other changes\n\n")
-        for k in prs:
-            relnotes_file.write(pr_to_md(k, prs[k]["title"]))
-        relnotes_file.write("\n")
-    relnotes_file.close()
-
-    relnotes_json.write("[")
-    jsonlist = []
-    for k in jsondict:
-        temp = []
-        temp.append(str(jsondict[k]["title"]))
-        temp.append(str(k))
-        temp.append(jsondict[k]["labels"])
-        jsonlist.append(temp)
-    for item in jsonlist:
-        relnotes_json.write("%s\n" % item)
-    relnotes_json.write("]")
-    relnotes_json.close
-
-
-def main(rel_type):
-
-    utils.initialize_github()
-    g = utils.GITHUB_INSTANCE
-    repo = utils.CURRENT_REPO
-
-    # There is a GitHub API capacity of 5000 per hour i.e. that a maximum of 5000 requests can be made to GitHub per hour.
-    # Therefore, the following line indicates how many requests are currently still available
-    print("Current GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() )
-
-    # If this limit is exceeded, an exception will be raised:
-    # github.GithubException.RateLimitExceededException: 403
-    # {"message": "API rate limit exceeded for user ID XXX.", "documentation_url":
-    # "https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"}
-
-
-    # TODO: we cache PRs data in a local file. For now, if it exists, it will be used,
-    # otherwise it will be recreated. Later, there may be an option to use the cache or
-    # to enforce retrieving updated PR details from Github. I think default is to update
-    # from GitHub (to get newly merged PRs, updates of labels, PR titles etc., while the
-    # cache could be used for testing and polishing the code to generate output )
-
-    # TODO: add some data to the cache, e.g. when the cache is saved.
-    # Produce warning if old.
-
-    if os.path.isfile("prscache.json"):
-        print("Using cached data from prscache.json ...")
-        with open("prscache.json", "r") as read_file:
-            prs = json.load(read_file)
-    else:
-        print("Retrieving data using GitHub API ...")
-        prs = get_prs(repo,history_start_date)
-
-    prs = filter_prs(prs,rel_type)
-    changes_overview(prs,history_start_date,rel_type)
-    print("Remaining GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() )
-
-
-if __name__ == "__main__":
-    # the argument is "minor" or "major" to specify release kind
-    if len(sys.argv) != 2 or not sys.argv[1] in ["minor","major"]:
-        usage()
-
-    main(sys.argv[1])
diff --git a/dev/releases/make_github_release.py b/dev/releases/make_github_release.py
index ccf39a7301..d299d9b181 100755
--- a/dev/releases/make_github_release.py
+++ b/dev/releases/make_github_release.py
@@ -13,13 +13,16 @@
 ## utils.py.
 ##
 ## If we do import * from utils, then initialize_github can't overwrite the
-## global GITHUB_INSTANCE and CURRENT_REPO variables.
+## global CURRENT_REPO variables.
 ##
 import utils
+import utils_github
 import sys

+from utils import error, notice
+
 if len(sys.argv) != 3:
-    utils.error("usage: "+sys.argv[0]+" <tag_name> <path_to_release>")
+    error("usage: "+sys.argv[0]+" <tag_name> <path_to_release>")

 TAG_NAME = sys.argv[1]
 PATH_TO_RELEASE = sys.argv[2]

@@ -27,11 +30,11 @@
 utils.verify_git_clean()
 utils.verify_is_possible_gap_release_tag(TAG_NAME)
-utils.initialize_github()
+utils_github.initialize_github()

 # Error if the tag TAG_NAME hasn't been pushed to CURRENT_REPO yet.
-if not any(tag.name == TAG_NAME for tag in utils.CURRENT_REPO.get_tags()):
-    utils.error(f"Repository {utils.CURRENT_REPO_NAME} has no tag '{TAG_NAME}'")
+if not any(tag.name == TAG_NAME for tag in utils_github.CURRENT_REPO.get_tags()):
+    error(f"Repository {utils_github.CURRENT_REPO_NAME} has no tag '{TAG_NAME}'")

 # make sure that TAG_NAME
 # - exists
@@ -40,14 +43,14 @@
 utils.check_git_tag_for_release(TAG_NAME)

 # Error if this release has been already created on GitHub
-if any(r.tag_name == TAG_NAME for r in utils.CURRENT_REPO.get_releases()):
-    utils.error(f"Github release with tag '{TAG_NAME}' already exists!")
+if any(r.tag_name == TAG_NAME for r in utils_github.CURRENT_REPO.get_releases()):
+    error(f"Github release with tag '{TAG_NAME}' already exists!")

 # Create release
 RELEASE_NOTE = f"For an overview of changes in GAP {VERSION} see the " \
     + f"[CHANGES.md](https://github.com/gap-system/gap/blob/{TAG_NAME}/CHANGES.md) file."
-utils.notice(f"Creating release {TAG_NAME}")
-RELEASE = utils.CURRENT_REPO.create_git_release(TAG_NAME, TAG_NAME,
+notice(f"Creating release {TAG_NAME}")
+RELEASE = utils_github.CURRENT_REPO.create_git_release(TAG_NAME, TAG_NAME,
                                                 RELEASE_NOTE,
                                                 prerelease=True)

@@ -56,16 +59,16 @@
 with open(manifest_filename, 'r') as manifest_file:
     manifest = manifest_file.read().splitlines()

-    utils.notice(f"Contents of {manifest_filename}:")
+    notice(f"Contents of {manifest_filename}:")
     for filename in manifest:
         print(filename)

 # Now check that TAG_NAME and the created archives belong together
 main_archive_name = "gap-" + VERSION + ".tar.gz"
 if not main_archive_name in manifest:
-    utils.error(f"Expected to find {main_archive_name} in MANIFEST, but did not!")
+    error(f"Expected to find {main_archive_name} in MANIFEST, but did not!")

 # Upload all assets to release
-utils.notice("Uploading release assets")
+notice("Uploading release assets")
 for filename in manifest:
-    utils.upload_asset_with_checksum(RELEASE, filename)
+    utils_github.upload_asset_with_checksum(RELEASE, filename)
diff --git a/dev/releases/release_notes.py b/dev/releases/release_notes.py
new file mode 100755
index 0000000000..7676dad814
--- /dev/null
+++ b/dev/releases/release_notes.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python3
+#############################################################################
+##
+##  This file is part of GAP, a system for computational discrete algebra.
+##
+##  Copyright of GAP belongs to its developers, whose names are too numerous
+##  to list here. Please refer to the COPYRIGHT file for details.
+##
+##  SPDX-License-Identifier: GPL-2.0-or-later
+##
+
+# Usage:
+#   ./release_notes.py VERSION
+#
+# For example
+#   ./release_notes.py 4.13.1
+#
+# This assumes that the tags named v4.13.1, 4.13dev (?) and v4.13.0 (???) already exist.
+#
+# A version ending in .0 is considered MAJOR, any other MINOR.
+# Don't use this with versions like 4.13.0-beta1.

+import json
+import subprocess
+import sys
+
+from utils import error, notice
+
+import typing
+from tempfile import TemporaryDirectory
+from typing import Any, Dict, List, Optional
+
+
+# the following is a list of pairs [LABEL, DESCRIPTION]; the first entry is the name of a GitHub label
+# (be careful to match them precisely), the second is a headline for a section the release notes; any PR with
+# the given label is put into the corresponding section; each PR is put into only one section, the first
+# one from this list it fits in.
+# See also .
+prioritylist = [
+    ["release notes: highlight", "Highlights"],
+    ["topic: packages", "Changes related to handling of packages"],
+    ["topic: gac", "Changes to the GAP compiler"],
+    ["topic: documentation", "Changes in the documentation"],
+    ["topic: performance", "Performance improvements"],
+    ["topic: build system", "Build system"],
+    ["topic: julia", "Changes to the **Julia** integration"],
+    ["topic: libgap", "Changes to the `libgap` interface"],
+    ["topic: HPC-GAP", "Changes to HPC-GAP"],
+    ["kind: new feature", "New features"],
+    ["kind: enhancement", "Improved and extended functionality"],
+    ["kind: removal or deprecation", "Removed or obsolete functionality"],
+    ["kind: bug: wrong result", "Fixed bugs that could lead to incorrect results"],
+    ["kind: bug: crash", "Fixed bugs that could lead to crashes"],
+    [
+        "kind: bug: unexpected error",
+        "Fixed bugs that could lead to unexpected errors",
+    ],
+    ["kind: bug", "Other fixed bugs"],
+]
+
+
+def usage(name: str) -> None:
+    print(f"Usage: `{name} NEWVERSION`")
+    sys.exit(1)
+
+
+def get_tag_date(tag: str) -> str:
+    # TODO: validate the tag exists
+    res = subprocess.run(
+        ["git", "for-each-ref", "--format=%(creatordate:short)", "refs/tags/" + tag],
+        check=True,
+        capture_output=True,
+        text=True,
+    )
+    if res.returncode != 0:
+        error("error trying to determine tag date")
+    return res.stdout.strip()
+
+
+def get_pr_list(date: str, extra: str) -> str:
+    query = f'merged:>={date} -label:"release notes: not needed" -label:"release notes: added" base:master {extra}'
+    print("query = ", query)
+    res = subprocess.run(
+        [
+            "gh",
+            "pr",
+            "list",
+            "--search",
+            query,
+            "--json",
+            "number,title,closedAt,labels,mergedAt",
+            "--limit",
+            "200",
+        ],
+        check=True,
+        capture_output=True,
+        text=True,
+    )
+    if res.returncode != 0:
+        error("error trying to retrieve the PR list")
+    return json.loads(res.stdout.strip())
+
+
+def pr_to_md(pr):
+    """Returns markdown string for the PR entry"""
+    k = pr["number"]
+    title = pr["title"]
+    return f"- [#{k}](https://github.com/gap-system/gap/pull/{k}) {title}\n"
+
+
+def has_label(pr, label):
+    return any(x["name"] == label for x in pr["labels"])
+
+
+def changes_overview(prs, startdate, new_version):
+    """Writes files with information for release notes."""
+
+    # Could also introduce some consistency checks here for wrong combinations of labels
+
+    relnotes_file = open("releasenotes_" + new_version + ".md", "w")
+    prs_with_use_title = [pr for pr in prs if has_label(pr, "release notes: use title")]
+
+    # Write out all PRs with 'use title'
+    relnotes_file.write(
+        f"""
+## GAP {new_version} (TODO insert date here, )
+
+The following gives an overview of the changes compared to the previous
+release. This list is not complete, many more internal or minor changes
+were made, but we tried to keep it to changes which we think might
+affect some users directly.
+
+"""
+    )
+
+    for priorityobject in prioritylist:
+        matches = [pr for pr in prs_with_use_title if has_label(pr, priorityobject[0])]
+        print("PRs with label '" + priorityobject[0] + "': ", len(matches))
+        if len(matches) == 0:
+            continue
+        relnotes_file.write("### " + priorityobject[1] + "\n\n")
+        for pr in matches:
+            relnotes_file.write(pr_to_md(pr))
+            prs_with_use_title.remove(pr)
+        relnotes_file.write("\n")
+
+    # The remaining PRs have no "kind" or "topic" label from the priority list
+    # (may have other "kind" or "topic" label outside the priority list).
+    # Check their list in the release notes, and adjust labels if appropriate.
+    if len(prs_with_use_title) > 0:
+        relnotes_file.write("### Other changes\n\n")
+        for pr in prs_with_use_title:
+            relnotes_file.write(pr_to_md(pr))
+        relnotes_file.write("\n")
+    relnotes_file.close()
+
+    unsorted_file = open("unsorted_PRs_" + new_version + ".md", "w")
+
+    # Report PRs that have to be updated before inclusion into release notes.
+    unsorted_file.write("### " + "release notes: to be added" + "\n\n")
+    unsorted_file.write(
+        "If there are any PRs listed below, check their title and labels.\n"
+    )
+    unsorted_file.write(
+        'When done, change their label to "release notes: use title".\n\n'
+    )
+    removelist = []
+    for pr in prs:
+        if has_label(pr, "release notes: to be added"):
+            unsorted_file.write(pr_to_md(pr))
+
+    prs = [pr for pr in prs if not has_label(pr, "release notes: to be added")]
+
+    unsorted_file.write("\n")
+
+    # Report PRs that have neither "to be added" nor "added" or "use title" label
+    unsorted_file.write("### Uncategorized PR" + "\n\n")
+    unsorted_file.write(
+        "If there are any PRs listed below, either apply the same steps\n"
+    )
+    unsorted_file.write(
+        'as above, or change their label to "release notes: not needed".\n\n'
+    )
+    removelist = []
+    for pr in prs:
+        # we need to use both old "release notes: added" label and
+        # the newly introduced in "release notes: use title" label
+        # since both label may appear in GAP 4.12.0 changes overview
+        if not (
+            has_label(pr, "release notes: added")
+            or has_label(pr, "release notes: use title")
+        ):
+            unsorted_file.write(pr_to_md(pr))
+    unsorted_file.close()
+
+
+def main(new_version: str) -> None:
+    major, minor, patchlevel = new_version.split(".")
+    if major != "4":
+        error("unexpected GAP version, not starting with '4.'")
+    if patchlevel == "0":
+        # "major" GAP release which changes just the minor version
+        previous_minor = int(minor) - 1
+        basetag = f"v{major}.{minor}dev"
+        # *exclude* PRs backported to previous stable-4.X branch
+        extra = f'-label:"backport-to-{major}.{previous_minor}-DONE"'
+    else:
+        # "minor" GAP release which changes just the patchlevel
+        previous_patchlevel = int(patchlevel) - 1
+        basetag = f"v{major}.{minor}.{previous_patchlevel}"
+        # *include* PRs backported to current stable-4.X branch
+        extra = f'label:"backport-to-{major}.{minor}-DONE"'
+
+    print("Base tag is", basetag)
+
+    startdate = get_tag_date(basetag)
+    print("Base tag was created ", startdate)
+
+    print("Downloading filtered PR list")
+    prs = get_pr_list(startdate, extra)
+    # print(json.dumps(prs, sort_keys=True, indent=4))
+
+    # further filtering
+    # prs = filter_prs(prs, rel_type)
+
+    changes_overview(prs, startdate, new_version)
+
+    print("======= END =======")
+
+
+# TODO: allow specifying tags instead and download the json files from there
+# TODO: "guess" the tag of the previous/old version so it can be completely omitted
+# TODO: if the new package list is omitted, download the one from the PackageDistro?
+# TODO: integrate this script into generate_release_notes.py
+
+
+if __name__ == "__main__":
+    # the argument is the new version
+    # TODO ... and for now also the old version???
+    if len(sys.argv) != 2:
+        usage(sys.argv[0])
+
+    main(sys.argv[1])
+    # TODO: integrate generate_package_release_notes.py
diff --git a/dev/releases/update_website.py b/dev/releases/update_website.py
index 7159b1fdc2..8079360960 100755
--- a/dev/releases/update_website.py
+++ b/dev/releases/update_website.py
@@ -26,9 +26,12 @@
 import tarfile
 import tempfile
 import utils
+import utils_github
+
+from utils import error, notice

 if sys.version_info < (3,6):
-    utils.error("Python 3.6 or newer is required")
+    error("Python 3.6 or newer is required")

 parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                  description=
@@ -64,19 +67,19 @@ def download_asset_by_name(asset_name, writedir):
     try:
         url = [ x for x in assets if x.name == asset_name ][0].browser_download_url
     except:
-        utils.error(f"Cannot find {asset_name} in the GitHub release with tag {release.tag_name}")
+        error(f"Cannot find {asset_name} in the GitHub release with tag {release.tag_name}")
     with utils.working_directory(writedir):
-        utils.notice(f"Downloading {url} to {writedir} . . .")
+        notice(f"Downloading {url} to {writedir} . . .")
         utils.download_with_sha256(url, asset_name)

 def extract_tarball(tarball):
-    utils.notice(f"Extracting {tarball} . . .")
+    notice(f"Extracting {tarball} . . .")
     with tarfile.open(tarball) as tar:
         try:
             tar.extractall()
         except:
-            utils.error(f"Failed to extract {tarball}!")
+            error(f"Failed to extract {tarball}!")

 def get_date_from_configure_ac(gaproot):
     with open(f"{gaproot}/configure.ac", "r") as configure_ac:
@@ -85,7 +88,7 @@ def get_date_from_configure_ac(gaproot):
         release_date = re.search("\[gap_releaseday\], \[(\d{4}-\d{2}-\d{2})\]", filedata).group(1)
         release_date = datetime.datetime.strptime(release_date, "%Y-%m-%d")
     except:
-        utils.error("Cannot find the release date in configure.ac!")
+        error("Cannot find the release date in configure.ac!")
     return release_date.strftime("%d %B %Y")

 # This function deals with package-infos.json.gz and help-links.json.gz.
@@ -101,11 +104,11 @@ def download_and_extract_json_gz_asset(asset_name, dest):

 ################################################################################
 # Get all releases from 4.11.0 onwards, that are not a draft or prerelease
-utils.CURRENT_REPO_NAME = f"{args.gap_fork}/gap"
-utils.initialize_github(args.token)
-utils.notice(f"Will use temporary directory: {tmpdir}")
+utils_github.CURRENT_REPO_NAME = f"{args.gap_fork}/gap"
+utils_github.initialize_github(args.token)
+notice(f"Will use temporary directory: {tmpdir}")

-releases = [ x for x in utils.CURRENT_REPO.get_releases() if
+releases = [ x for x in utils_github.CURRENT_REPO.get_releases() if
              not x.draft and
              not x.prerelease and
              utils.is_possible_gap_release_tag(x.tag_name) and
              (int(x.tag_name[1:].split('.')[0]) == 4 and int(x.tag_name[1:].split('.')[1]) >= 11)) ]

 if releases:
-    utils.notice(f"Found {len(releases)} published GAP releases >= v4.11.0")
+    notice(f"Found {len(releases)} published GAP releases >= v4.11.0")
 else:
-    utils.notice("Found no published GAP releases >= v4.11.0")
+    notice("Found no published GAP releases >= v4.11.0")
     sys.exit(0)

 # Sort by version number, biggest to smallest
@@ -128,20 +131,20 @@
 for release in releases:
     version = release.tag_name[1:]
     version_safe = version.replace(".", "-") # Safe for the Jekyll website
-    utils.notice(f"\nProcessing GAP {version}...")
+    notice(f"\nProcessing GAP {version}...")

     # Work out the relevance of this release
     known_release = os.path.isfile(f"_Releases/{version}.html")
     newest_release = releases.index(release) == 0
     if known_release:
-        utils.notice(f"I have seen this release before")
+        notice(f"I have seen this release before")
     elif newest_release:
-        utils.notice(f"This is a new release to me, and it has the biggest version number")
+        notice(f"This is a new release to me, and it has the biggest version number")
     else:
-        utils.notice(f"This is a new release to me, but I know about releases with bigger version numbers")
+        notice(f"This is a new release to me, but I know about releases with bigger version numbers")

     # For all releases, record the assets (in case they were deleted/updated/added)
-    utils.notice(f"Collecting GitHub release asset data in _data/assets/{version_safe}.json")
+    notice(f"Collecting GitHub release asset data in _data/assets/{version_safe}.json")
     assets = release.get_assets()
     asset_data = []
     for asset in assets:
@@ -152,7 +155,7 @@
         try:
             request.raise_for_status()
             sha256 = request.text.strip()
         except:
-            utils.error(f"Failed to download {asset.browser_download_url}.sha256")
+            error(f"Failed to download {asset.browser_download_url}.sha256")
         filtered_asset = {
             "bytes": asset.size,
             "name": asset.name,
@@ -175,20 +178,20 @@
     with utils.working_directory(tmpdir):
         extract_tarball(tarball)
     date = get_date_from_configure_ac(f"{tmpdir}/gap-{version}")
-    utils.notice(f"Using release date {date} for GAP {version}")
+    notice(f"Using release date {date} for GAP {version}")

-    utils.notice(f"Writing the file _Releases/{version}.html")
+    notice(f"Writing the file _Releases/{version}.html")
     with open(f"{pwd}/_Releases/{version}.html", "w") as file:
         file.write(f"---\nversion: {version}\ndate: '{date}'\n---\n")

-    utils.notice(f"Writing the file _data/package-infos/{version_safe}.json")
+    notice(f"Writing the file _data/package-infos/{version_safe}.json")
     download_and_extract_json_gz_asset("package-infos.json.gz", f"{pwd}/_data/package-infos/{version_safe}.json")

     # For a new-to-me release with biggest version number, also set this is the
     # 'default'/'main' version on the website (i.e. the most prominent release).
     # Therefore update _data/release.json, _data/help.json, and _Packages/.
     if not known_release and newest_release:
-        utils.notice("Rewriting the _data/release.json file")
+        notice("Rewriting the _data/release.json file")
         release_data = {
             "version": version,
             "version-safe": version_safe,
@@ -197,10 +200,10 @@
         with open(f"{pwd}/_data/release.json", "wb") as file:
             file.write(json.dumps(release_data, indent=2).encode("utf-8"))

-        utils.notice("Overwriting _data/help.json with the contents of help-links.json.gz")
+        notice("Overwriting _data/help.json with the contents of help-links.json.gz")
         download_and_extract_json_gz_asset("help-links.json.gz", f"{pwd}/_data/help.json")

-        utils.notice("Repopulating _Packages/ with one HTML file for each package in packages-info.json")
+        notice("Repopulating _Packages/ with one HTML file for each package in packages-info.json")
         shutil.rmtree("_Packages")
         os.mkdir("_Packages")
         with open(f"{pwd}/_data/package-infos/{version_safe}.json", "rb") as file:
diff --git a/dev/releases/utils.py b/dev/releases/utils.py
index 0bbb33dffd..df3949a232 100644
--- a/dev/releases/utils.py
+++ b/dev/releases/utils.py
@@ -14,13 +14,6 @@
 import shutil
 import subprocess
 import sys
-import github
-
-CURRENT_REPO_NAME = os.environ.get("GITHUB_REPOSITORY", "gap-system/gap")
-
-# Initialized by initialize_github
-GITHUB_INSTANCE = None
-CURRENT_REPO = None

 # print notices in green
 def notice(msg):
@@ -173,61 +166,3 @@ def check_git_tag_for_release(tag):
     if tag_commit != head:
         error(f"The tag {tag} does not point to the current commit {head} but"
               + f" instead points to {tag_commit}")
-
-# sets the global variables GITHUB_INSTANCE and CURRENT_REPO
-# If no token is provided, this uses the value of the environment variable
-# GITHUB_TOKEN.
-def initialize_github(token=None):
-    global GITHUB_INSTANCE, CURRENT_REPO
-    if GITHUB_INSTANCE != None or CURRENT_REPO != None:
-        error("Global variables GITHUB_INSTANCE and CURRENT_REPO"
-              + " are already initialized.")
-    if token == None and "GITHUB_TOKEN" in os.environ:
-        token = os.environ["GITHUB_TOKEN"]
-    if token == None:
-        temp = subprocess.run(["git", "config", "--get", "github.token"], text=True, capture_output=True)
-        if temp.returncode == 0:
-            token = temp.stdout.strip()
-    if token == None and os.path.isfile(os.path.expanduser('~') + '/.github_shell_token'):
-        with open(os.path.expanduser('~') + '/.github_shell_token', 'r') as token_file:
-            token = token_file.read().strip()
-    if token == None:
-        error("Error: no access token found or provided")
-    g = github.Github(token)
-    GITHUB_INSTANCE = g
-    notice(f"Accessing repository {CURRENT_REPO_NAME}")
-    try:
-        CURRENT_REPO = GITHUB_INSTANCE.get_repo(CURRENT_REPO_NAME)
-    except github.GithubException:
-        error("Error: the access token may be incorrect")
-
-# Given the <filename> of a file that does not end with .sha256, create or get
-# the corresponding sha256 checksum file <filename>.sha256, (comparing checksums
-# just to be safe, in the latter case). Then upload the files <filename> and
-# <filename>.sha256 as assets to the GitHub <release>.
-# Files already ending in ".sha256" are ignored.
-def upload_asset_with_checksum(release, filename):
-    if not os.path.isfile(filename):
-        error(f"{filename} not found")
-
-    if filename.endswith(".sha256"):
-        notice(f"Skipping provided checksum file {filename}")
-        return
-
-    notice(f"Processing {filename}")
-
-    checksum_filename = filename + ".sha256"
-    if os.path.isfile(checksum_filename):
-        notice("Comparing actual checksum with pre-existing checksum file")
-        verify_via_checksumfile(filename)
-    else:
-        notice("Writing new checksum file")
-        with open(checksum_filename, "w") as checksumfile:
-            checksumfile.write(sha256file(filename))
-
-    for file in [filename, checksum_filename]:
-        try:
-            notice(f"Uploading {file}")
-            release.upload_asset(file)
-        except github.GithubException:
-            error("Error: The upload failed")
diff --git a/dev/releases/utils_github.py b/dev/releases/utils_github.py
new file mode 100644
index 0000000000..89e2b70e86
--- /dev/null
+++ b/dev/releases/utils_github.py
@@ -0,0 +1,81 @@
+#############################################################################
+##
+##  This file is part of GAP, a system for computational discrete algebra.
+##
+##  Copyright of GAP belongs to its developers, whose names are too numerous
+##  to list here. Please refer to the COPYRIGHT file for details.
+##
+##  SPDX-License-Identifier: GPL-2.0-or-later
+##
+import contextlib
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import sys
+import github
+
+CURRENT_REPO_NAME = os.environ.get("GITHUB_REPOSITORY", "gap-system/gap")
+
+# Initialized by initialize_github
+GITHUB_INSTANCE = None
+CURRENT_REPO = None
+
+# sets the global variables GITHUB_INSTANCE and CURRENT_REPO
+# If no token is provided, this uses the value of the environment variable
+# GITHUB_TOKEN.
+def initialize_github(token=None):
+    global GITHUB_INSTANCE, CURRENT_REPO
+    if GITHUB_INSTANCE != None or CURRENT_REPO != None:
+        error("Global variables GITHUB_INSTANCE and CURRENT_REPO"
+              + " are already initialized.")
+    if token == None and "GITHUB_TOKEN" in os.environ:
+        token = os.environ["GITHUB_TOKEN"]
+    if token == None:
+        temp = subprocess.run(["git", "config", "--get", "github.token"], text=True, capture_output=True)
+        if temp.returncode == 0:
+            token = temp.stdout.strip()
+    if token == None and os.path.isfile(os.path.expanduser('~') + '/.github_shell_token'):
+        with open(os.path.expanduser('~') + '/.github_shell_token', 'r') as token_file:
+            token = token_file.read().strip()
+    if token == None:
+        error("Error: no access token found or provided")
+    g = github.Github(token)
+    GITHUB_INSTANCE = g
+    notice(f"Accessing repository {CURRENT_REPO_NAME}")
+    try:
+        CURRENT_REPO = GITHUB_INSTANCE.get_repo(CURRENT_REPO_NAME)
+    except github.GithubException:
+        error("Error: the access token may be incorrect")
+
+# Given the <filename> of a file that does not end with .sha256, create or get
+# the corresponding sha256 checksum file <filename>.sha256, (comparing checksums
+# just to be safe, in the latter case). Then upload the files <filename> and
+# <filename>.sha256 as assets to the GitHub <release>.
+# Files already ending in ".sha256" are ignored.
+def upload_asset_with_checksum(release, filename):
+    if not os.path.isfile(filename):
+        error(f"{filename} not found")
+
+    if filename.endswith(".sha256"):
+        notice(f"Skipping provided checksum file {filename}")
+        return
+
+    notice(f"Processing {filename}")
+
+    checksum_filename = filename + ".sha256"
+    if os.path.isfile(checksum_filename):
+        notice("Comparing actual checksum with pre-existing checksum file")
+        verify_via_checksumfile(filename)
+    else:
+        notice("Writing new checksum file")
+        with open(checksum_filename, "w") as checksumfile:
+            checksumfile.write(sha256file(filename))
+
+    for file in [filename, checksum_filename]:
+        try:
+            notice(f"Uploading {file}")
+            release.upload_asset(file)
+        except github.GithubException:
+            error("Error: The upload failed")

From 6873315c8f2733a838ab2940eeff3e7fd5e4ea4f Mon Sep 17 00:00:00 2001
From: Max Horn
Date: Tue, 30 Jan 2024 23:22:25 +0100
Subject: [PATCH 2/5] buildsys: detect 'check-manuals' exit code (#5614)

The 'tee' and subshell combo ate the exit code.

Also get rid of the 'dev/log' subdirectory.
---
 Makefile.rules | 5 +----
 lib/algebra.gd | 2 +-
 lib/ctbl.gd    | 2 +-
 lib/matrix.gd  | 2 +-
 4 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/Makefile.rules b/Makefile.rules
index f31d300e89..426329596f 100644
--- a/Makefile.rules
+++ b/Makefile.rules
@@ -560,7 +560,6 @@ distclean: clean
	rm -f config.log config.status GNUmakefile
	rm -f doc/make_doc
	rm -f doc/*/*.aux doc/*/*.bbl doc/*/*.blg doc/*/*.brf doc/*/*.idx doc/*/*.ilg doc/*/*.ind doc/*/*.log doc/*/*.out doc/*/*.pnr doc/*/*.tex doc/*/*.toc
-	rm -rf dev/log
	rm -rf tags
	rm -rf TAGS

@@ -971,9 +970,7 @@ clean-doc:

 # Manual consistency check
 check-manuals: all
-	$(MKDIR_P) dev/log
-	( cd doc/ref ; echo 'Read("testconsistency.g");' | $(TESTGAP) | \
-	tee `date -u +../../dev/log/check_manuals_%Y-%m-%d-%H-%M` )
+	cd doc/ref && $(TESTGAP) testconsistency.g

 .PHONY: doc clean-doc manuals check-manuals html

diff --git a/lib/algebra.gd b/lib/algebra.gd
index 0a2106306e..981900b57d 100644
--- a/lib/algebra.gd
+++ b/lib/algebra.gd
@@ -1664,7 +1664,7 @@ DeclareGlobalFunction( "AlgebraByStructureConstants" );
 ## that describes the unique multiplicative identity element of the returned
 ## algebra w. r. t. the defining basis of this algebra,
 ## and that the returned algebra is an algebra-with-one
-## (see ).
+## (see ).
 ##

 ## A:= GF(2)^[2,2];;
diff --git a/lib/ctbl.gd b/lib/ctbl.gd
index 4b940a6bc0..da094d4fc7 100644
--- a/lib/ctbl.gd
+++ b/lib/ctbl.gd
@@ -2790,7 +2790,7 @@ DeclareGlobalFunction( "IsClassFusionOfNormalSubgroup" );
 ##

 ## For each 2-modular Brauer characters where these conditions are
 ## not sufficient to determine the indicator, an unknown value
-## (see ) is returned.
+## (see ) is returned.
 ##
 ## <#/GAPDoc>
diff --git a/lib/matrix.gd b/lib/matrix.gd
index 21f68c7272..5717ed1f9e 100644
--- a/lib/matrix.gd
+++ b/lib/matrix.gd
@@ -1670,7 +1670,7 @@ DeclareGlobalFunction( "PermutationMat" );
 ## If only vector is given then it is used to compute a default for
 ## R.
 ##

-## If the value
+## If the value
 ## of the result implies then the result is
 ## fully mutable.
 ##

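[Note on the release-notes tooling above: the search strings that the new dev/releases/release_notes.py from PATCH 1/5 hands to `gh pr list` can be previewed without any GitHub access. The following minimal Python sketch rebuilds them outside the script; the query format and label names are taken from the patch itself, while the dates and version numbers below are made-up placeholders, not real tag dates.]

    # Minimal sketch, assuming the label conventions from release_notes.py
    # (PATCH 1/5). The dates are hypothetical placeholders.

    def build_query(startdate: str, extra: str) -> str:
        # mirrors the query construction in get_pr_list()
        return (
            f'merged:>={startdate} -label:"release notes: not needed" '
            f'-label:"release notes: added" base:master {extra}'
        )

    # minor release (patchlevel != 0), e.g. 4.13.1: *include* PRs that were
    # backported to the current stable branch
    print(build_query("2024-02-01", 'label:"backport-to-4.13-DONE"'))

    # major release (patchlevel == 0), e.g. 4.14.0: *exclude* PRs already
    # backported to the previous stable branch
    print(build_query("2023-11-11", '-label:"backport-to-4.13-DONE"'))

[Running the sketch prints the exact strings the script would pass via `gh pr list --search`, which is useful for checking the label filters before touching the API.]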
From be87d49af82d7964077bfb9abd40ac5c4c8a7d0f Mon Sep 17 00:00:00 2001
From: Max Horn
Date: Sun, 28 Jan 2024 22:36:01 +0100
Subject: [PATCH 3/5] dev/releases/utils.py: move GitHub specific code to new
 file

... so that one can use the utils code without having the "github" python
package installed.
---
 dev/releases/generate_release_notes.py | 313 +++++++++++++++++++++++++
 1 file changed, 313 insertions(+)
 create mode 100755 dev/releases/generate_release_notes.py

diff --git a/dev/releases/generate_release_notes.py b/dev/releases/generate_release_notes.py
new file mode 100755
index 0000000000..62c8535285
--- /dev/null
+++ b/dev/releases/generate_release_notes.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python3
+#############################################################################
+##
+##  This file is part of GAP, a system for computational discrete algebra.
+##
+##  Copyright of GAP belongs to its developers, whose names are too numerous
+##  to list here. Please refer to the COPYRIGHT file for details.
+##
+##  SPDX-License-Identifier: GPL-2.0-or-later
+##
+
+# Usage:
+# ./generate_release_notes.py minor
+# or
+# ./generate_release_notes.py major
+#
+# to specify the type of the release.
+#
+# Output and description:
+# This script is used to automatically generate the release notes based on the labels of
+# pull requests that have been merged into the master branch since the starting date
+# specified in the `history_start_date` variable below.
+#
+# For each such pull request (PR), this script extracts from GitHub its title, number and
+# labels, using the GitHub API via the PyGithub package (https://github.com/PyGithub/PyGithub).
+# To help to track the progress, it will output the number of the currently processed PR.
+# For API requests using Basic Authentication or OAuth, you can make up to 5,000 requests
+# per hour (https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting).
+# As of March 2021 this script consumes about 3400 API calls and runs for about 25 minutes.
+# This is why, to reduce the number of API calls and minimise the need to retrieve the data,
+# PR details will be stored in the file `prscache.json`, which will then be used to
+# categorise PR following the priority list and discussion from #4257, and output three
+# files:
+# - "releasenotes_*.md" : list of PR by categories for adding to release notes
+# - "unsorted_PRs_*.md" : list of PR that could not be categorised
+# - "releasenotes_*.json" : data for `BrowseReleaseNotes` function by Thomas Breuer (see #4257).
+# where "*" is "minor" or "major" depending on the type of the release.
+#
+# If this script detects the file `prscache.json` it will use it, otherwise it will retrieve
+# new data from GitHub. Thus, if new PR were merged, or there were updates of titles and labels
+# of merged PRs, you need to delete `prscache.json` to enforce updating local data (TODO: make
+# this turned on/off via a command line option in the next version).
+#
+# To find out when a branch was created, use e.g.
+# git show --summary `git merge-base stable-4.11 master`
+#
+
+import sys
+import json
+import os.path
+from github import Github
+from datetime import datetime
+import utils
+
+
+#############################################################################
+#
+# Configuration parameters
+#
+# the earliest date we need to track for the next minor/major releases
+history_start_date = "2019-09-09"
+
+# the date of the last minor release (later, we may need to have more precise timestamp
+# - maybe extracted from the corresponding release tag)
+minor_branch_start_date = "2021-03-03" # next day after the minor release (starts at midnight)
+# question: what if it was merged into master before 4.11.1, but backported after?
+# Hopefully, before publishing 4.11.1 we have backported everything that had to be
+# backported, so this was not the case.
+
+# this version number needed to form labels like "backport-to-4.11-DONE"
+minor_branch_version = "4.11"
+
+# not yet - will make sense after branching the `stable-4.12` branch:
+# major_branch_start_date = "2019-09-09"
+# major_branch_version = "4.12"
+# note that we will have to collate together PRs which are not backported to stable-4.11
+# between `history_start_date` and `major_branch_start_date`, and PRs backported to
+# stable-4.12 after `major_branch_start_date`
+#
+#############################################################################
+
+def usage():
+    print("Usage: `./release-notes.py minor` or `./release-notes.py major`")
+    sys.exit(1)
+
+
+def get_prs(repo,startdate):
+    """Retrieves data for PRs matching selection criteria and puts them in a dictionary,
+    which is then saved in a json file, and also returned for immediate use."""
+    # The output `prs` is a dictionary with keys being PR numbers, and values being
+    # dictionaries with keys "title", "closed_at" and "labels", for example:
+    #
+    # "3355": {
+    #     "title": "Allow packages to use ISO 8601 dates in their PackageInfo.g",
+    #     "closed_at": "2021-02-20T15:44:48",
+    #     "labels": [
+    #         "gapdays2019-spring",
+    #         "gapsingular2019",
+    #         "kind: enhancement",
+    #         "release notes: to be added"
+    #     ]
+    # },
+
+    prs = {}
+    all_pulls = repo.get_pulls(state="closed", sort="created", direction="desc", base="master")
+    # We need to run this over the whole list of PRs. Sorting by creation date descending
+    # is not really helping - could be that some very old PRs are being merged.
+    for pr in all_pulls:
+        print(pr.number, end=" ")
+        # flush stdout immediately, to see progress indicator
+        sys.stdout.flush()
+        if pr.merged:
+            if pr.closed_at > datetime.fromisoformat(startdate):
+                # getting labels will cost further API calls - if the startdate is
+                # too far in the past, that may exceed the API capacity
+                labs = [lab.name for lab in list(pr.get_labels())]
+                prs[pr.number] = { "title" : pr.title,
+                                   "closed_at" : pr.closed_at.isoformat(),
+                                   "labels" : labs }
+#        if len(prs)>5: # for quick testing (maybe later have an optional argument)
+#            break
+    print("\n")
+    with open("prscache.json", "w", encoding="utf-8") as f:
+        json.dump(prs, f, ensure_ascii=False, indent=4)
+    return prs
+
+
+def filter_prs(prs,rel_type):
+    newprs = {}
+
+    if rel_type == "minor":
+
+        # For minor release, list PRs backported to the stable-4.X branch since the previous minor release.
+        for k,v in sorted(prs.items()):
+            if "backport-to-" + minor_branch_version + "-DONE" in v["labels"]:
+                if datetime.fromisoformat(v["closed_at"]) > datetime.fromisoformat(minor_branch_start_date):
+                    newprs[k] = v
+        return newprs
+
+    elif rel_type == "major":
+
+        # For major release, list PRs not backported to the stable-4.X branch.
+        # After branching stable-4.12 this will have to be changed to stop checking
+        # for "backport-to-4.11-DONE" at the date of the branching, and check for
+        # "backport-to-4.12-DONE" after that date
+        for k,v in sorted(prs.items()):
+            if not "backport-to-" + minor_branch_version + "-DONE" in v["labels"]:
+                newprs[k] = v
+        return newprs
+
+    else:
+
+        usage()
+
+
+def pr_to_md(k, title):
+    """Returns markdown string for the PR entry"""
+    return f"- [#{k}](https://github.com/gap-system/gap/pull/{k}) {title}\n"
+
+
+def changes_overview(prs,startdate,rel_type):
+    """Writes files with information for release notes."""
+
+    # Opening files with "w" resets them
+    relnotes_file = open("releasenotes_" + rel_type + ".md", "w")
+    unsorted_file = open("unsorted_PRs_" + rel_type + ".md", "w")
+    relnotes_json = open("releasenotes_" + rel_type + ".json", "w")
+    jsondict = prs.copy()
+
+    # the following is a list of pairs [LABEL, DESCRIPTION]; the first entry is the name of a GitHub label
+    # (be careful to match them precisely), the second is a headline for a section the release notes; any PR with
+    # the given label is put into the corresponding section; each PR is put into only one section, the first one
+    # one from this list it fits in.
+    # See also .
+    prioritylist = [
+        ["release notes: highlight", "Highlights"],
+        ["topic: libgap", "Changes to the `libgap` interface"],
+        ["topic: julia", "Changes to the **Julia** integration"],
+        ["topic: gac", "Changes to the GAP compiler"],
+        ["topic: documentation", "Changes in the documentation"],
+        ["topic: performance", "Performance improvements"],
+        ["topic: HPC-GAP", "Changes to HPC-GAP"],
+        ["kind: new feature", "New features"],
+        ["kind: enhancement", "Improved and extended functionality"],
+        ["kind: removal or deprecation", "Removed or obsolete functionality"],
+        ["kind: bug: wrong result", "Fixed bugs that could lead to incorrect results"],
+        ["kind: bug: crash", "Fixed bugs that could lead to crashes"],
+        ["kind: bug: unexpected error", "Fixed bugs that could lead to break loops"],
+        ["kind: bug", "Other fixed bugs"],
+    ]
+
+    # Could also introduce some consistency checks here for wrong combinations of labels
+
+    # Drop PRs not needed for release notes
+    removelist = []
+    for k in prs:
+        if "release notes: not needed" in prs[k]["labels"]:
+            removelist.append(k)
+    for item in removelist:
+        del prs[item]
+        del jsondict[item]
+
+    # Report PRs that have to be updated before inclusion into release notes.
+ unsorted_file.write("### " + "release notes: to be added" + "\n\n") + unsorted_file.write("If there are any PRs listed below, check their title and labels.\n") + unsorted_file.write("When done, change their label to \"release notes: use title\".\n\n") + removelist = [] + for k in prs: + if "release notes: to be added" in prs[k]["labels"]: + unsorted_file.write(pr_to_md(k, prs[k]["title"])) + removelist.append(k) + for item in removelist: + del prs[item] + unsorted_file.write("\n") + + # Report PRs that have neither "to be added" nor "added" or "use title" label + unsorted_file.write("### Uncategorized PR" + "\n\n") + unsorted_file.write("If there are any PRs listed below, either apply the same steps\n") + unsorted_file.write("as above, or change their label to \"release notes: not needed\".\n\n") + removelist = [] + for k in prs: + # we need to use both old "release notes: added" label and + # the newly introduced in "release notes: use title" label + # since both label may appear in GAP 4.12.0 changes overview + if not ("release notes: added" in prs[k]["labels"] or "release notes: use title" in prs[k]["labels"]): + unsorted_file.write(pr_to_md(k, prs[k]["title"])) + removelist.append(k) + for item in removelist: + del prs[item] + unsorted_file.close() + + # All remaining PRs are to be included in the release notes + + relnotes_file.write("## Release Notes \n\n") + + for priorityobject in prioritylist: + matches = [k for k in prs if priorityobject[0] in prs[k]["labels"]] + if len(matches) == 0: + continue + relnotes_file.write("### " + priorityobject[1] + "\n\n") + for k in matches: + relnotes_file.write(pr_to_md(k, prs[k]["title"])) + del prs[k] + relnotes_file.write("\n") + + # The remaining PRs have no "kind" or "topic" label from the priority list + # (may have other "kind" or "topic" label outside the priority list). + # Check their list in the release notes, and adjust labels if appropriate. + if len(prs) > 0: + relnotes_file.write("### Other changes\n\n") + for k in prs: + relnotes_file.write(pr_to_md(k, prs[k]["title"])) + relnotes_file.write("\n") + relnotes_file.close() + + relnotes_json.write("[") + jsonlist = [] + for k in jsondict: + temp = [] + temp.append(str(jsondict[k]["title"])) + temp.append(str(k)) + temp.append(jsondict[k]["labels"]) + jsonlist.append(temp) + for item in jsonlist: + relnotes_json.write("%s\n" % item) + relnotes_json.write("]") + relnotes_json.close + + +def main(rel_type): + + utils_github.initialize_github() + g = utils_github.GITHUB_INSTANCE + repo = utils_github.CURRENT_REPO + + # There is a GitHub API capacity of 5000 per hour i.e. that a maximum of 5000 requests can be made to GitHub per hour. + # Therefore, the following line indicates how many requests are currently still available + print("Current GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() ) + + # If this limit is exceeded, an exception will be raised: + # github.GithubException.RateLimitExceededException: 403 + # {"message": "API rate limit exceeded for user ID XXX.", "documentation_url": + # "https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"} + + + # TODO: we cache PRs data in a local file. For now, if it exists, it will be used, + # otherwise it will be recreated. Later, there may be an option to use the cache or + # to enforce retrieving updated PR details from Github. 
+
+    if os.path.isfile("prscache.json"):
+        print("Using cached data from prscache.json ...")
+        with open("prscache.json", "r") as read_file:
+            prs = json.load(read_file)
+    else:
+        print("Retrieving data using GitHub API ...")
+        prs = get_prs(repo,history_start_date)
+
+    prs = filter_prs(prs,rel_type)
+    changes_overview(prs,history_start_date,rel_type)
+    print("Remaining GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() )
+
+
+if __name__ == "__main__":
+    # the argument is "minor" or "major" to specify the release kind
+    if len(sys.argv) != 2 or not sys.argv[1] in ["minor","major"]:
+        usage()
+
+    main(sys.argv[1])

From 2d6dd7ccf25dbd114052ca597ea5f153efd14a0c Mon Sep 17 00:00:00 2001
From: Max Horn 
Date: Sun, 28 Jan 2024 22:32:51 +0100
Subject: [PATCH 4/5] New script for generating release notes

---
 dev/releases/generate_release_notes.py | 313 ------------------------
 1 file changed, 313 deletions(-)
 delete mode 100755 dev/releases/generate_release_notes.py

diff --git a/dev/releases/generate_release_notes.py b/dev/releases/generate_release_notes.py
deleted file mode 100755
index 62c8535285..0000000000
--- a/dev/releases/generate_release_notes.py
+++ /dev/null
@@ -1,313 +0,0 @@
-#!/usr/bin/env python3
-#############################################################################
-##
-##  This file is part of GAP, a system for computational discrete algebra.
-##
-##  Copyright of GAP belongs to its developers, whose names are too numerous
-##  to list here. Please refer to the COPYRIGHT file for details.
-##
-##  SPDX-License-Identifier: GPL-2.0-or-later
-##
-
-# Usage:
-# ./generate_release_notes.py minor
-# or
-# ./generate_release_notes.py major
-#
-# to specify the type of the release.
-#
-# Output and description:
-# This script is used to automatically generate the release notes based on the labels of
-# pull requests that have been merged into the master branch since the starting date
-# specified in the `history_start_date` variable below.
-#
-# For each such pull request (PR), this script extracts from GitHub its title, number and
-# labels, using the GitHub API via the PyGithub package (https://github.com/PyGithub/PyGithub).
-# To help to track the progress, it will output the number of the currently processed PR.
-# For API requests using Basic Authentication or OAuth, you can make up to 5,000 requests
-# per hour (https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting).
-# As of March 2021 this script consumes about 3400 API calls and runs for about 25 minutes.
-# This is why, to reduce the number of API calls and minimise the need to retrieve the data,
-# PR details will be stored in the file `prscache.json`, which will then be used to
-# categorise PR following the priority list and discussion from #4257, and output three
-# files:
-# - "releasenotes_*.md" : list of PR by categories for adding to release notes
-# - "unsorted_PRs_*.md" : list of PR that could not be categorised
-# - "releasenotes_*.json" : data for `BrowseReleaseNotes` function by Thomas Breuer (see #4257).
-# where "*" is "minor" or "major" depending on the type of the release.
-# -# If this script detects the file `prscache.json` it will use it, otherwise it will retrieve -# new data from GitHub. Thus, if new PR were merged, or there were updates of titles and labels -# of merged PRs, you need to delete `prscache.json` to enforce updating local data (TODO: make -# this turned on/off via a command line option in the next version). -# -# To find out when a branch was created, use e.g. -# git show --summary `git merge-base stable-4.11 master` -# - -import sys -import json -import os.path -from github import Github -from datetime import datetime -import utils - - -############################################################################# -# -# Configuration parameters -# -# the earliest date we need to track for the next minor/major releases -history_start_date = "2019-09-09" - -# the date of the last minor release (later, we may need to have more precise timestamp -# - maybe extracted from the corresponding release tag) -minor_branch_start_date = "2021-03-03" # next day after the minor release (starts at midnight) -# question: what if it was merged into master before 4.11.1, but backported after? -# Hopefully, before publishing 4.11.1 we have backported everything that had to be -# backported, so this was not the case. - -# this version number needed to form labels like "backport-to-4.11-DONE" -minor_branch_version = "4.11" - -# not yet - will make sense after branching the `stable-4.12` branch: -# major_branch_start_date = "2019-09-09" -# major_branch_version = "4.12" -# note that we will have to collate together PRs which are not backported to stable-4.11 -# between `history_start_date` and `major_branch_start_date`, and PRs backported to -# stable-4.12 after `major_branch_start_date` -# -############################################################################# - -def usage(): - print("Usage: `./release-notes.py minor` or `./release-notes.py major`") - sys.exit(1) - - -def get_prs(repo,startdate): - """Retrieves data for PRs matching selection criteria and puts them in a dictionary, - which is then saved in a json file, and also returned for immediate use.""" - # The output `prs` is a dictionary with keys being PR numbers, and values being - # dictionaries with keys "title", "closed_at" and "labels", for example: - # - # "3355": { - # "title": "Allow packages to use ISO 8601 dates in their PackageInfo.g", - # "closed_at": "2021-02-20T15:44:48", - # "labels": [ - # "gapdays2019-spring", - # "gapsingular2019", - # "kind: enhancement", - # "release notes: to be added" - # ] - # }, - - prs = {} - all_pulls = repo.get_pulls(state="closed", sort="created", direction="desc", base="master") - # We need to run this over the whole list of PRs. Sorting by creation date descending - # is not really helping - could be that some very old PRs are being merged. 
- for pr in all_pulls: - print(pr.number, end=" ") - # flush stdout immediately, to see progress indicator - sys.stdout.flush() - if pr.merged: - if pr.closed_at > datetime.fromisoformat(startdate): - # getting labels will cost further API calls - if the startdate is - # too far in the past, that may exceed the API capacity - labs = [lab.name for lab in list(pr.get_labels())] - prs[pr.number] = { "title" : pr.title, - "closed_at" : pr.closed_at.isoformat(), - "labels" : labs } -# if len(prs)>5: # for quick testing (maybe later have an optional argument) -# break - print("\n") - with open("prscache.json", "w", encoding="utf-8") as f: - json.dump(prs, f, ensure_ascii=False, indent=4) - return prs - - -def filter_prs(prs,rel_type): - newprs = {} - - if rel_type == "minor": - - # For minor release, list PRs backported to the stable-4.X branch since the previous minor release. - for k,v in sorted(prs.items()): - if "backport-to-" + minor_branch_version + "-DONE" in v["labels"]: - if datetime.fromisoformat(v["closed_at"]) > datetime.fromisoformat(minor_branch_start_date): - newprs[k] = v - return newprs - - elif rel_type == "major": - - # For major release, list PRs not backported to the stable-4.X branch. - # After branching stable-4.12 this will have to be changed to stop checking - # for "backport-to-4.11-DONE" at the date of the branching, and check for - # "backport-to-4.12-DONE" after that date - for k,v in sorted(prs.items()): - if not "backport-to-" + minor_branch_version + "-DONE" in v["labels"]: - newprs[k] = v - return newprs - - else: - - usage() - - -def pr_to_md(k, title): - """Returns markdown string for the PR entry""" - return f"- [#{k}](https://github.com/gap-system/gap/pull/{k}) {title}\n" - - -def changes_overview(prs,startdate,rel_type): - """Writes files with information for release notes.""" - - # Opening files with "w" resets them - relnotes_file = open("releasenotes_" + rel_type + ".md", "w") - unsorted_file = open("unsorted_PRs_" + rel_type + ".md", "w") - relnotes_json = open("releasenotes_" + rel_type + ".json", "w") - jsondict = prs.copy() - - # the following is a list of pairs [LABEL, DESCRIPTION]; the first entry is the name of a GitHub label - # (be careful to match them precisely), the second is a headline for a section the release notes; any PR with - # the given label is put into the corresponding section; each PR is put into only one section, the first one - # one from this list it fits in. - # See also . 
- prioritylist = [ - ["release notes: highlight", "Highlights"], - ["topic: libgap", "Changes to the `libgap` interface"], - ["topic: julia", "Changes to the **Julia** integration"], - ["topic: gac", "Changes to the GAP compiler"], - ["topic: documentation", "Changes in the documentation"], - ["topic: performance", "Performance improvements"], - ["topic: HPC-GAP", "Changes to HPC-GAP"], - ["kind: new feature", "New features"], - ["kind: enhancement", "Improved and extended functionality"], - ["kind: removal or deprecation", "Removed or obsolete functionality"], - ["kind: bug: wrong result", "Fixed bugs that could lead to incorrect results"], - ["kind: bug: crash", "Fixed bugs that could lead to crashes"], - ["kind: bug: unexpected error", "Fixed bugs that could lead to break loops"], - ["kind: bug", "Other fixed bugs"], - ] - - # Could also introduce some consistency checks here for wrong combinations of labels - - # Drop PRs not needed for release notes - removelist = [] - for k in prs: - if "release notes: not needed" in prs[k]["labels"]: - removelist.append(k) - for item in removelist: - del prs[item] - del jsondict[item] - - # Report PRs that have to be updated before inclusion into release notes. - unsorted_file.write("### " + "release notes: to be added" + "\n\n") - unsorted_file.write("If there are any PRs listed below, check their title and labels.\n") - unsorted_file.write("When done, change their label to \"release notes: use title\".\n\n") - removelist = [] - for k in prs: - if "release notes: to be added" in prs[k]["labels"]: - unsorted_file.write(pr_to_md(k, prs[k]["title"])) - removelist.append(k) - for item in removelist: - del prs[item] - unsorted_file.write("\n") - - # Report PRs that have neither "to be added" nor "added" or "use title" label - unsorted_file.write("### Uncategorized PR" + "\n\n") - unsorted_file.write("If there are any PRs listed below, either apply the same steps\n") - unsorted_file.write("as above, or change their label to \"release notes: not needed\".\n\n") - removelist = [] - for k in prs: - # we need to use both old "release notes: added" label and - # the newly introduced in "release notes: use title" label - # since both label may appear in GAP 4.12.0 changes overview - if not ("release notes: added" in prs[k]["labels"] or "release notes: use title" in prs[k]["labels"]): - unsorted_file.write(pr_to_md(k, prs[k]["title"])) - removelist.append(k) - for item in removelist: - del prs[item] - unsorted_file.close() - - # All remaining PRs are to be included in the release notes - - relnotes_file.write("## Release Notes \n\n") - - for priorityobject in prioritylist: - matches = [k for k in prs if priorityobject[0] in prs[k]["labels"]] - if len(matches) == 0: - continue - relnotes_file.write("### " + priorityobject[1] + "\n\n") - for k in matches: - relnotes_file.write(pr_to_md(k, prs[k]["title"])) - del prs[k] - relnotes_file.write("\n") - - # The remaining PRs have no "kind" or "topic" label from the priority list - # (may have other "kind" or "topic" label outside the priority list). - # Check their list in the release notes, and adjust labels if appropriate. 
- if len(prs) > 0: - relnotes_file.write("### Other changes\n\n") - for k in prs: - relnotes_file.write(pr_to_md(k, prs[k]["title"])) - relnotes_file.write("\n") - relnotes_file.close() - - relnotes_json.write("[") - jsonlist = [] - for k in jsondict: - temp = [] - temp.append(str(jsondict[k]["title"])) - temp.append(str(k)) - temp.append(jsondict[k]["labels"]) - jsonlist.append(temp) - for item in jsonlist: - relnotes_json.write("%s\n" % item) - relnotes_json.write("]") - relnotes_json.close - - -def main(rel_type): - - utils_github.initialize_github() - g = utils_github.GITHUB_INSTANCE - repo = utils_github.CURRENT_REPO - - # There is a GitHub API capacity of 5000 per hour i.e. that a maximum of 5000 requests can be made to GitHub per hour. - # Therefore, the following line indicates how many requests are currently still available - print("Current GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() ) - - # If this limit is exceeded, an exception will be raised: - # github.GithubException.RateLimitExceededException: 403 - # {"message": "API rate limit exceeded for user ID XXX.", "documentation_url": - # "https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting"} - - - # TODO: we cache PRs data in a local file. For now, if it exists, it will be used, - # otherwise it will be recreated. Later, there may be an option to use the cache or - # to enforce retrieving updated PR details from Github. I think default is to update - # from GitHub (to get newly merged PRs, updates of labels, PR titles etc., while the - # cache could be used for testing and polishing the code to generate output ) - - # TODO: add some data to the cache, e.g. when the cache is saved. - # Produce warning if old. - - if os.path.isfile("prscache.json"): - print("Using cached data from prscache.json ...") - with open("prscache.json", "r") as read_file: - prs = json.load(read_file) - else: - print("Retrieving data using GitHub API ...") - prs = get_prs(repo,history_start_date) - - prs = filter_prs(prs,rel_type) - changes_overview(prs,history_start_date,rel_type) - print("Remaining GitHub API capacity", g.rate_limiting, "at", datetime.now().isoformat() ) - - -if __name__ == "__main__": - # the argument is "minor" or "major" to specify release kind - if len(sys.argv) != 2 or not sys.argv[1] in ["minor","major"]: - usage() - - main(sys.argv[1]) From 96e0ad44b3bf761b374206d8a4a02bd2613e4fff Mon Sep 17 00:00:00 2001 From: "James D. 
Mitchell" Date: Tue, 30 Jan 2024 13:53:25 +0000 Subject: [PATCH 5/5] Cleanup python scripts in dev/release --- dev/releases/create_stable_branch.py | 5 +- .../generate_package_release_notes.py | 29 +++- dev/releases/make_archives.py | 137 ++++++++++----- dev/releases/make_github_release.py | 19 ++- dev/releases/release_notes.py | 159 +++++++++--------- dev/releases/update_website.py | 106 +++++++----- dev/releases/utils.py | 88 +++++++--- dev/releases/utils_github.py | 40 +++-- 8 files changed, 360 insertions(+), 223 deletions(-) diff --git a/dev/releases/create_stable_branch.py b/dev/releases/create_stable_branch.py index 5bede7b51a..fde215c440 100755 --- a/dev/releases/create_stable_branch.py +++ b/dev/releases/create_stable_branch.py @@ -14,11 +14,12 @@ # TODO: implement parts of the steps described in # +import subprocess +import sys + from utils import error, notice, patchfile import utils -import subprocess -import sys # Insist on Python >= 3.6 for f-strings and other goodies if sys.version_info < (3, 6): diff --git a/dev/releases/generate_package_release_notes.py b/dev/releases/generate_package_release_notes.py index 2be8c22373..261c82f001 100755 --- a/dev/releases/generate_package_release_notes.py +++ b/dev/releases/generate_package_release_notes.py @@ -22,14 +22,17 @@ import json import gzip -from utils import * +from utils import download_with_sha256 + def usage(): - print("Usage: `./generate_package_release_notes.py OLD_GAP_VERSION NEW_GAP_VERSION`") + print( + "Usage: `./generate_package_release_notes.py OLD_GAP_VERSION NEW_GAP_VERSION`" + ) sys.exit(1) -def main(old_gap_version, new_gap_version): +def main(old_gap_version, new_gap_version): # create tmp directory tmpdir = os.getcwd() + "/tmp" notice(f"Files will be put in {tmpdir}") @@ -40,9 +43,15 @@ def main(old_gap_version, new_gap_version): # download package metadata old_json_file = f"{tmpdir}/package-infos-{old_gap_version}.json.gz" - download_with_sha256(f"https://github.com/gap-system/PackageDistro/releases/download/v{old_gap_version}/package-infos.json.gz", old_json_file) + download_with_sha256( + f"https://github.com/gap-system/PackageDistro/releases/download/v{old_gap_version}/package-infos.json.gz", + old_json_file, + ) new_json_file = f"{tmpdir}/package-infos-{new_gap_version}.json.gz" - download_with_sha256(f"https://github.com/gap-system/PackageDistro/releases/download/v{new_gap_version}/package-infos.json.gz", new_json_file) + download_with_sha256( + f"https://github.com/gap-system/PackageDistro/releases/download/v{new_gap_version}/package-infos.json.gz", + new_json_file, + ) # parse package metadata with gzip.open(old_json_file, "r") as f: @@ -67,7 +76,11 @@ def main(old_gap_version, new_gap_version): home = pkg["PackageWWWHome"] desc = pkg["Subtitle"] vers = pkg["Version"] - authors = [x["FirstNames"]+" "+x["LastName"] for x in pkg["Persons"] if x["IsAuthor"]] + authors = [ + x["FirstNames"] + " " + x["LastName"] + for x in pkg["Persons"] + if x["IsAuthor"] + ] authors = ", ".join(authors) print(f"- [**{name}**]({home}) {vers}: {desc}, by {authors}") print() @@ -90,7 +103,8 @@ def main(old_gap_version, new_gap_version): updated = new_json.keys() & old_json.keys() updated = [p for p in updated if old_json[p]["Version"] != new_json[p]["Version"]] if len(updated) > 0: - print(f""" + print( + f""" #### Updated packages redistributed with GAP The GAP {new_gap_version} distribution contains {len(new_json)} packages, of which {len(updated)} have been @@ -105,6 +119,7 @@ def main(old_gap_version, new_gap_version): 
             newversion = new["Version"]
             print(f"- [**{name}**]({home}): {oldversion} -> {newversion}")
 
+
 if __name__ == "__main__":
     if len(sys.argv) != 3:
         usage()
diff --git a/dev/releases/make_archives.py b/dev/releases/make_archives.py
index f602c46a04..fa37de81de 100755
--- a/dev/releases/make_archives.py
+++ b/dev/releases/make_archives.py
@@ -9,16 +9,16 @@
 ## SPDX-License-Identifier: GPL-2.0-or-later
 ##
 ## This script creates the archives that form a GAP release.
-##
+##
 ## The version of the GAP release is taken from the Makefile variable
 ## GAP_BUILD_VERSION.
-##
-from utils import *
+##
 
 import glob
 import grp
 import gzip
 import json
+import os
 import pwd
 import re
 import shutil
@@ -26,8 +26,22 @@
 import sys
 import tarfile
 
+from utils import (
+    download_with_sha256,
+    error,
+    get_makefile_var,
+    notice,
+    patchfile,
+    run_with_log,
+    safe_git_fetch_tags,
+    verify_command_available,
+    verify_git_clean,
+    verify_git_repo,
+    working_directory,
+)
+
 # Insist on Python >= 3.6 for f-strings and other goodies
-if sys.version_info < (3,6):
+if sys.version_info < (3, 6):
     error("Python 3.6 or newer is required")
 
 notice("Checking prerequisites")
@@ -54,13 +68,14 @@
     gapversion = get_makefile_var("GAP_BUILD_VERSION")
 except:
     error("make sure GAP has been compiled via './configure && make'")
+
 notice(f"Detected GAP version {gapversion}")
 
-if re.fullmatch( r"[1-9]+\.[0-9]+\.[0-9]+", gapversion) != None:
-    notice(f"--- THIS LOOKS LIKE A RELEASE ---")
+if re.fullmatch(r"[1-9]+\.[0-9]+\.[0-9]+", gapversion) is not None:
+    notice("--- THIS LOOKS LIKE A RELEASE ---")
     pkg_tag = f"v{gapversion}"
 else:
-    notice(f"--- THIS LOOKS LIKE A NIGHTLY BUILD ---")
+    notice("--- THIS LOOKS LIKE A NIGHTLY BUILD ---")
     pkg_tag = "latest"
 
 
@@ -68,19 +83,22 @@
 # TODO: is that really what we want? Or should it be the date this
 # script was run? 
Or for releases, perhaps use the TaggerDate of the # release tag (but then we need to find and process that tag) -commit_date = subprocess.run(["git", "show", "-s", "--format=%as"], - check=True, capture_output=True, text=True) +commit_date = subprocess.run( + ["git", "show", "-s", "--format=%as"], check=True, capture_output=True, text=True +) commit_date = commit_date.stdout.strip() commit_year = commit_date[0:4] # derive tarball names basename = f"gap-{gapversion}" -all_packages = f"packages-v{gapversion}" # only the pkg dir +all_packages = f"packages-v{gapversion}" # only the pkg dir all_packages_tarball = f"{all_packages}.tar.gz" -req_packages = f"packages-required-v{gapversion}" # a subset of the above +req_packages = f"packages-required-v{gapversion}" # a subset of the above req_packages_tarball = f"{req_packages}.tar.gz" -PKG_BOOTSTRAP_URL = f"https://github.com/gap-system/PackageDistro/releases/download/{pkg_tag}/" +PKG_BOOTSTRAP_URL = ( + f"https://github.com/gap-system/PackageDistro/releases/download/{pkg_tag}/" +) PKG_MINIMAL = "packages-required.tar.gz" PKG_FULL = "packages.tar.gz" @@ -88,36 +106,53 @@ notice("Exporting repository content via `git archive`") rawbasename = "gap-raw" rawgap_tarfile = f"{tmpdir}/{rawbasename}.tar" -subprocess.run(["git", "archive", - f"--prefix={basename}/", - f"--output={rawgap_tarfile}", - "HEAD"], check=True) +subprocess.run( + ["git", "archive", f"--prefix={basename}/", f"--output={rawgap_tarfile}", "HEAD"], + check=True, +) notice("Extracting exported content") -shutil.rmtree(basename, ignore_errors=True) # remove any leftovers +shutil.rmtree(basename, ignore_errors=True) # remove any leftovers with tarfile.open(rawgap_tarfile) as tar: tar.extractall(path=tmpdir) os.remove(rawgap_tarfile) notice("Processing exported content") -manifest_list = [] # collect names of assets to be uploaded to GitHub release +manifest_list = [] # collect names of assets to be uploaded to GitHub release # download package distribution -notice("Downloading package distribution") # ... outside of the directory we just created -download_with_sha256(PKG_BOOTSTRAP_URL+"package-infos.json.gz", tmpdir+"/"+"package-infos.json.gz") +# outside of the directory we just created +notice("Downloading package distribution") +download_with_sha256( + PKG_BOOTSTRAP_URL + "package-infos.json.gz", tmpdir + "/" + "package-infos.json.gz" +) manifest_list.append("package-infos.json.gz") -download_with_sha256(PKG_BOOTSTRAP_URL+PKG_MINIMAL, tmpdir+"/"+req_packages_tarball) +download_with_sha256( + PKG_BOOTSTRAP_URL + PKG_MINIMAL, tmpdir + "/" + req_packages_tarball +) manifest_list.append(req_packages_tarball) -download_with_sha256(PKG_BOOTSTRAP_URL+PKG_FULL, tmpdir+"/"+all_packages_tarball) +download_with_sha256(PKG_BOOTSTRAP_URL + PKG_FULL, tmpdir + "/" + all_packages_tarball) manifest_list.append(all_packages_tarball) with working_directory(tmpdir + "/" + basename): # This sets the version, release day and year of the release we are # creating. 
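     # For example (illustrative values only), the first patchfile call below
     # turns a line like
     #     m4_define([gap_version], [4.13dev])
     # into
     #     m4_define([gap_version], [4.13.0])
     # via re.sub on the file contents.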
notice("Patching configure.ac") - patchfile("configure.ac", r"m4_define\(\[gap_version\],[^\n]+", r"m4_define([gap_version], ["+gapversion+"])") - patchfile("configure.ac", r"m4_define\(\[gap_releaseday\],[^\n]+", r"m4_define([gap_releaseday], ["+commit_date+"])") - patchfile("configure.ac", r"m4_define\(\[gap_releaseyear\],[^\n]+", r"m4_define([gap_releaseyear], ["+commit_year+"])") + patchfile( + "configure.ac", + r"m4_define\(\[gap_version\],[^\n]+", + r"m4_define([gap_version], [" + gapversion + "])", + ) + patchfile( + "configure.ac", + r"m4_define\(\[gap_releaseday\],[^\n]+", + r"m4_define([gap_releaseday], [" + commit_date + "])", + ) + patchfile( + "configure.ac", + r"m4_define\(\[gap_releaseyear\],[^\n]+", + r"m4_define([gap_releaseyear], [" + commit_year + "])", + ) # Building GAP notice("Running autogen.sh") @@ -153,7 +188,7 @@ shutil.rmtree("hpcgap-build") notice("Extracting package tarballs") - with tarfile.open(tmpdir+"/"+all_packages_tarball) as tar: + with tarfile.open(tmpdir + "/" + all_packages_tarball) as tar: tar.extractall(path="pkg") # for some reason pkg sometimes ends up with permission 0700 so # we make sure to fix that here @@ -161,8 +196,8 @@ # ensure all files are at readable by everyone subprocess.run(["chmod", "-R", "a+r", "."], check=True) - with tarfile.open(tmpdir+"/"+req_packages_tarball) as tar: - tar.extractall(path=tmpdir+"/"+req_packages) + with tarfile.open(tmpdir + "/" + req_packages_tarball) as tar: + tar.extractall(path=tmpdir + "/" + req_packages) notice("Building GAP's manuals") run_with_log(["make", "doc"], "gapdoc", "building the manuals") @@ -170,19 +205,28 @@ # Now we create the help-links.json file. We build # the json package, create the files, then clean up the package again. notice("Compiling json package") - path_to_json_package = glob.glob(f'{tmpdir}/{basename}/pkg/json*')[0] + path_to_json_package = glob.glob(f"{tmpdir}/{basename}/pkg/json*")[0] with working_directory(path_to_json_package): subprocess.run(["./configure"], check=True) subprocess.run(["make"], check=True) - notice(f"Constructing help-links JSON file") + notice("Constructing help-links JSON file") json_output = subprocess.run( - ["./gap", "-r", "--quiet", "--quitonbreak", f"dev/releases/HelpLinks-to-JSON.g"], - check=True, capture_output=True, text=True) + [ + "./gap", + "-r", + "--quiet", + "--quitonbreak", + "dev/releases/HelpLinks-to-JSON.g", + ], + check=True, + capture_output=True, + text=True, + ) formatted_json = json.dumps(json.loads(json_output.stdout), indent=2) with working_directory(tmpdir): - with gzip.open("help-links.json.gz", 'wb') as file: - file.write(formatted_json.encode('utf-8')) + with gzip.open("help-links.json.gz", "wb") as file: + file.write(formatted_json.encode("utf-8")) manifest_list.append("help-links.json.gz") notice("Cleaning up the json package") @@ -193,11 +237,11 @@ notice("Removing unwanted version-controlled files") badfiles = [ - ".codecov.yml", - ".ctags", - ".gitattributes", - ".gitignore", - ".mailmap", + ".codecov.yml", + ".ctags", + ".gitattributes", + ".gitignore", + ".mailmap", ] shutil.rmtree("benchmark") @@ -226,29 +270,30 @@ def make_and_record_archive(name, compression, root_dir, base_dir): else: error(f"unknown compression type {compression} (not gztar or zip)") - filename = f"{name}{ext}" - notice(f"Creating {filename}") + fname = name + ext + notice(f"Creating {fname}") owner = pwd.getpwuid(0).pw_name group = grp.getgrgid(0).gr_name - shutil.make_archive(name, compression, root_dir, base_dir, owner = owner, group = group) 
-    manifest_list.append(filename)
+    shutil.make_archive(name, compression, root_dir, base_dir, owner=owner, group=group)
+    manifest_list.append(fname)
+
 
 # Create the remaining archives
 notice("Creating remaining GAP and package archives")
 with working_directory(tmpdir):
     make_and_record_archive(basename, "gztar", ".", basename)
-    make_and_record_archive(basename, "zip", ".", basename)
+    make_and_record_archive(basename, "zip", ".", basename)
     make_and_record_archive(all_packages, "zip", basename, "pkg")
     make_and_record_archive(req_packages, "zip", ".", req_packages)
 
     notice("Removing packages to facilitate creating the GAP core archives")
     shutil.rmtree(basename + "/pkg")
     make_and_record_archive(basename + "-core", "gztar", ".", basename)
-    make_and_record_archive(basename + "-core", "zip", ".", basename)
+    make_and_record_archive(basename + "-core", "zip", ".", basename)
 
 # If you create additional archives, make sure to add them to manifest_list!
 manifest_filename = "MANIFEST"
 notice(f"Creating the manifest, with name {manifest_filename}")
-with open(manifest_filename, 'w') as manifest:
+with open(manifest_filename, "w", encoding="utf-8") as manifest:
     for filename in manifest_list:
         manifest.write(f"{filename}\n")
diff --git a/dev/releases/make_github_release.py b/dev/releases/make_github_release.py
index d299d9b181..4cebd85b17 100755
--- a/dev/releases/make_github_release.py
+++ b/dev/releases/make_github_release.py
@@ -15,14 +15,15 @@
 ## If we do import * from utils, then initialize_github can't overwrite the
 ## global CURRENT_REPO variables.
 ##
+import sys
+
 import utils
 import utils_github
-import sys
 
 from utils import error, notice
 
 if len(sys.argv) != 3:
-    error("usage: "+sys.argv[0]+" <tag_name> <path_to_release>")
+    error("usage: " + sys.argv[0] + " <tag_name> <path_to_release>")
 
 TAG_NAME = sys.argv[1]
 PATH_TO_RELEASE = sys.argv[2]
@@ -47,16 +48,18 @@
     error(f"Github release with tag '{TAG_NAME}' already exists!")
 
 # Create release
-RELEASE_NOTE = f"For an overview of changes in GAP {VERSION} see the " \
+RELEASE_NOTE = (
+    f"For an overview of changes in GAP {VERSION} see the "
     f"[CHANGES.md](https://github.com/gap-system/gap/blob/{TAG_NAME}/CHANGES.md) file." 
+)
 
 notice(f"Creating release {TAG_NAME}")
-RELEASE = utils_github.CURRENT_REPO.create_git_release(TAG_NAME, TAG_NAME,
-                                                       RELEASE_NOTE,
-                                                       prerelease=True)
+RELEASE = utils_github.CURRENT_REPO.create_git_release(
+    TAG_NAME, TAG_NAME, RELEASE_NOTE, prerelease=True
+)
 
 with utils.working_directory(PATH_TO_RELEASE):
     manifest_filename = "MANIFEST"
-    with open(manifest_filename, 'r') as manifest_file:
+    with open(manifest_filename, "r", encoding="utf-8") as manifest_file:
         manifest = manifest_file.read().splitlines()
 
     notice(f"Contents of {manifest_filename}:")
@@ -65,7 +68,7 @@
 
     # Now check that TAG_NAME and the created archives belong together
     main_archive_name = "gap-" + VERSION + ".tar.gz"
-    if not main_archive_name in manifest:
+    if main_archive_name not in manifest:
         error(f"Expected to find {main_archive_name} in MANIFEST, but did not!")
 
     # Upload all assets to release
diff --git a/dev/releases/release_notes.py b/dev/releases/release_notes.py
index 7676dad814..bc569f063b 100755
--- a/dev/releases/release_notes.py
+++ b/dev/releases/release_notes.py
@@ -24,12 +24,7 @@
 import subprocess
 import sys
 
-from utils import error, notice
-
-import typing
-from tempfile import TemporaryDirectory
-from typing import Any, Dict, List, Optional
-
+from utils import error
 
 # the following is a list of pairs [LABEL, DESCRIPTION]; the first entry is the name of a GitHub label
 # (be careful to match them precisely), the second is a headline for a section of the release notes; any PR with
@@ -116,82 +111,86 @@ def changes_overview(prs, startdate, new_version):
     """Writes files with information for release notes."""
 
     # Could also introduce some consistency checks here for wrong combinations of labels
-
-    relnotes_file = open("releasenotes_" + new_version + ".md", "w")
-    prs_with_use_title = [pr for pr in prs if has_label(pr, "release notes: use title")]
-
-    # Write out all PRs with 'use title'
-    relnotes_file.write(
-        f"""
+    with open(
+        "releasenotes_" + new_version + ".md", "w", encoding="utf-8"
+    ) as relnotes_file:
+        prs_with_use_title = [
+            pr for pr in prs if has_label(pr, "release notes: use title")
+        ]
+
+        # Write out all PRs with 'use title'
+        relnotes_file.write(
+            f"""
 ## GAP {new_version} (TODO insert date here, )
 
-The following gives an overview of the changes compared to the previous
-release. This list is not complete, many more internal or minor changes
-were made, but we tried to keep it to changes which we think might
-affect some users directly.
-
-"""
-    )
-
-    for priorityobject in prioritylist:
-        matches = [pr for pr in prs_with_use_title if has_label(pr, priorityobject[0])]
-        print("PRs with label '" + priorityobject[0] + "': ", len(matches))
-        if len(matches) == 0:
-            continue
-        relnotes_file.write("### " + priorityobject[1] + "\n\n")
-        for pr in matches:
-            relnotes_file.write(pr_to_md(pr))
-            prs_with_use_title.remove(pr)
-        relnotes_file.write("\n")
-
-    # The remaining PRs have no "kind" or "topic" label from the priority list
-    # (may have other "kind" or "topic" label outside the priority list).
-    # Check their list in the release notes, and adjust labels if appropriate.
-    if len(prs_with_use_title) > 0:
-        relnotes_file.write("### Other changes\n\n")
-        for pr in prs_with_use_title:
-            relnotes_file.write(pr_to_md(pr))
-        relnotes_file.write("\n")
-    relnotes_file.close()
-
-    unsorted_file = open("unsorted_PRs_" + new_version + ".md", "w")
-
-    # Report PRs that have to be updated before inclusion into release notes. 
-    unsorted_file.write("### " + "release notes: to be added" + "\n\n")
-    unsorted_file.write(
-        "If there are any PRs listed below, check their title and labels.\n"
-    )
-    unsorted_file.write(
-        'When done, change their label to "release notes: use title".\n\n'
-    )
-    removelist = []
-    for pr in prs:
-        if has_label(pr, "release notes: to be added"):
-            unsorted_file.write(pr_to_md(pr))
-
-    prs = [pr for pr in prs if not has_label(pr, "release notes: to be added")]
-
-    unsorted_file.write("\n")
-
-    # Report PRs that have neither "to be added" nor "added" or "use title" label
-    unsorted_file.write("### Uncategorized PR" + "\n\n")
-    unsorted_file.write(
-        "If there are any PRs listed below, either apply the same steps\n"
-    )
-    unsorted_file.write(
-        'as above, or change their label to "release notes: not needed".\n\n'
-    )
-    removelist = []
-    for pr in prs:
-        # we need to use both old "release notes: added" label and
-        # the newly introduced in "release notes: use title" label
-        # since both label may appear in GAP 4.12.0 changes overview
-        if not (
-            has_label(pr, "release notes: added")
-            or has_label(pr, "release notes: use title")
-        ):
-            unsorted_file.write(pr_to_md(pr))
-    unsorted_file.close()
+The following gives an overview of the changes compared to the previous
+release. This list is not complete, many more internal or minor changes
+were made, but we tried to keep it to changes which we think might
+affect some users directly.
+
+"""
+        )
+
+        for priorityobject in prioritylist:
+            matches = [
+                pr for pr in prs_with_use_title if has_label(pr, priorityobject[0])
+            ]
+            print("PRs with label '" + priorityobject[0] + "': ", len(matches))
+            if len(matches) == 0:
+                continue
+            relnotes_file.write("### " + priorityobject[1] + "\n\n")
+            for pr in matches:
+                relnotes_file.write(pr_to_md(pr))
+                prs_with_use_title.remove(pr)
+            relnotes_file.write("\n")
+
+        # The remaining PRs have no "kind" or "topic" label from the priority list
+        # (may have other "kind" or "topic" label outside the priority list).
+        # Check their list in the release notes, and adjust labels if appropriate.
+        if len(prs_with_use_title) > 0:
+            relnotes_file.write("### Other changes\n\n")
+            for pr in prs_with_use_title:
+                relnotes_file.write(pr_to_md(pr))
+            relnotes_file.write("\n")
+
+    with open(
+        "unsorted_PRs_" + new_version + ".md", "w", encoding="utf-8"
+    ) as unsorted_file:
+        # Report PRs that have to be updated before inclusion into release notes. 
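+        # (PRs listed in this file are meant to be relabelled on GitHub and the
+        # script re-run, so that they move into the generated release notes.)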
+        unsorted_file.write("### " + "release notes: to be added" + "\n\n")
+        unsorted_file.write(
+            "If there are any PRs listed below, check their title and labels.\n"
+        )
+        unsorted_file.write(
+            'When done, change their label to "release notes: use title".\n\n'
+        )
+        for pr in prs:
+            if has_label(pr, "release notes: to be added"):
+                unsorted_file.write(pr_to_md(pr))
+
+        prs = [pr for pr in prs if not has_label(pr, "release notes: to be added")]
+
+        unsorted_file.write("\n")
+
+        # Report PRs that have neither a "to be added" nor an "added" or "use title" label
+        unsorted_file.write("### Uncategorized PR" + "\n\n")
+        unsorted_file.write(
+            "If there are any PRs listed below, either apply the same steps\n"
+        )
+        unsorted_file.write(
+            'as above, or change their label to "release notes: not needed".\n\n'
+        )
+        for pr in prs:
+            # We need to check both the old "release notes: added" label and
+            # the newly introduced "release notes: use title" label,
+            # since both labels may appear in the GAP 4.12.0 changes overview
+            if not (
+                has_label(pr, "release notes: added")
+                or has_label(pr, "release notes: use title")
+            ):
+                unsorted_file.write(pr_to_md(pr))
 
 
 def main(new_version: str) -> None:
diff --git a/dev/releases/update_website.py b/dev/releases/update_website.py
index 8079360960..9a3fb4dcb2 100755
--- a/dev/releases/update_website.py
+++ b/dev/releases/update_website.py
@@ -20,37 +20,42 @@
 import json
 import os
 import re
-import requests
 import shutil
 import sys
 import tarfile
 import tempfile
 
+import requests
+
 import utils
 import utils_github
 from utils import error, notice
 
-if sys.version_info < (3,6):
+if sys.version_info < (3, 6):
     error("Python 3.6 or newer is required")
 
-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
-description=
-"""Update the GAP website from the GAP releases data on GitHub.
+parser = argparse.ArgumentParser(
+    formatter_class=argparse.RawDescriptionHelpFormatter,
+    description="""Update the GAP website from the GAP releases data on GitHub.
 
 Run this script in the root of a clone of the GapWWW repository, \
checked out at the version from which you want to update \
(most likely the master branch of github.com/gap-system/GapWWW). \
The script modifies the working directory according to the information \
on GitHub.""",
-epilog=
-"""Notes:
+    epilog="""Notes:
 * To learn how to create a GitHub access token, please consult \
https://help.github.com/articles/creating-an-access-token-for-command-line-use
-""")
+""",
+)
 
 group = parser.add_argument_group("Repository details and access")
 group.add_argument("--token", type=str, help="GitHub access token")
-group.add_argument("--gap-fork", type=str, default="gap-system",
-                   help="GitHub GAP fork to search for releases (for testing; default: gap-system)")
+group.add_argument(
+    "--gap-fork",
+    type=str,
+    default="gap-system",
+    help="GitHub GAP fork to search for releases (for testing; default: gap-system)",
+)
 args = parser.parse_args()
 
 utils.verify_command_available("git")
@@ -65,32 +70,39 @@
 # (global variable <release>, with assets <assets>) to <writedir>. 
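 # A typical call (hypothetical values): download_asset_by_name("gap-4.13.0-core.tar.gz", tmpdir)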
 def download_asset_by_name(asset_name, writedir):
     try:
-        url = [ x for x in assets if x.name == asset_name ][0].browser_download_url
+        url = [x for x in assets if x.name == asset_name][0].browser_download_url
     except:
-        error(f"Cannot find {asset_name} in the GitHub release with tag {release.tag_name}")
+        error(
+            f"Cannot find {asset_name} in the GitHub release with tag {release.tag_name}"
+        )
     with utils.working_directory(writedir):
         notice(f"Downloading {url} to {writedir} . . .")
         utils.download_with_sha256(url, asset_name)
 
+
 def extract_tarball(tarball):
     notice(f"Extracting {tarball} . . .")
     with tarfile.open(tarball) as tar:
         try:
             tar.extractall()
         except:
             error(f"Failed to extract {tarball}!")
 
+
 def get_date_from_configure_ac(gaproot):
-    with open(f"{gaproot}/configure.ac", "r") as configure_ac:
+    with open(f"{gaproot}/configure.ac", "r", encoding="utf-8") as configure_ac:
         filedata = configure_ac.read()
-    try: # Expect date in YYYY-MM-DD format
-        release_date = re.search("\[gap_releaseday\], \[(\d{4}-\d{2}-\d{2})\]", filedata).group(1)
-        release_date = datetime.datetime.strptime(release_date, "%Y-%m-%d")
-    except:
+    # Expect date in YYYY-MM-DD format
+    release_date = re.search(
+        r"\[gap_releaseday\], \[(\d{4}-\d{2}-\d{2})\]", filedata
+    )
+    if release_date:
+        release_date = datetime.datetime.strptime(release_date.group(1), "%Y-%m-%d")
+    else:
        error("Cannot find the release date in configure.ac!")
     return release_date.strftime("%d %B %Y")
 
+
 # This function deals with package-infos.json.gz and help-links.json.gz.
 # The function downloads the release asset called <asset_name> to the tmpdir.
 # The asset is assumed to be gzipped. It is extracted to the filepath <dest>.
 def download_and_extract_json_gz_asset(asset_name, dest):
     download_asset_by_name(asset_name, tmpdir)
     with utils.working_directory(tmpdir):
         with gzip.open(asset_name, "rt", encoding="utf-8") as file_in:
-            with open(dest, "w") as file_out:
+            with open(dest, "w", encoding="utf-8") as file_out:
                 shutil.copyfileobj(file_in, file_out)
 
 
@@ -108,13 +120,20 @@ def download_and_extract_json_gz_asset(asset_name, dest):
 utils_github.initialize_github(args.token)
 
 notice(f"Will use temporary directory: {tmpdir}")
 
-releases = [ x for x in utils_github.CURRENT_REPO.get_releases() if
-             not x.draft and
-             not x.prerelease and
-             utils.is_possible_gap_release_tag(x.tag_name) and
-             (int(x.tag_name[1:].split('.')[0]) > 4 or
-              (int(x.tag_name[1:].split('.')[0]) == 4 and
-               int(x.tag_name[1:].split('.')[1]) >= 11)) ]
+releases = [
+    x
+    for x in utils_github.CURRENT_REPO.get_releases()
+    if not x.draft
+    and not x.prerelease
+    and utils.is_possible_gap_release_tag(x.tag_name)
+    and (
+        int(x.tag_name[1:].split(".")[0]) > 4
+        or (
+            int(x.tag_name[1:].split(".")[0]) == 4
+            and int(x.tag_name[1:].split(".")[1]) >= 11
+        )
+    )
+]
 if releases:
     notice(f"Found {len(releases)} published GAP releases >= v4.11.0")
 else:
@@ -122,7 +141,7 @@ def download_and_extract_json_gz_asset(asset_name, dest):
     sys.exit(0)
 
 # Sort by version number, biggest to smallest
-releases.sort(key=lambda s: list(map(int, s.tag_name[1:].split('.'))))
+releases.sort(key=lambda s: list(map(int, s.tag_name[1:].split("."))))
 releases.reverse()
 
 
@@ -137,11 +156,13 @@ def download_and_extract_json_gz_asset(asset_name, dest):
     known_release = os.path.isfile(f"_Releases/{version}.html")
     newest_release = releases.index(release) == 0
     if known_release:
-        notice(f"I have seen this release before")
+        notice("I have seen this release before")
     elif newest_release:
-        notice(f"This is a new release to me, and it has the biggest version number")
+        notice("This is a new release to me, and it has the biggest version number")
     else:
-        notice(f"This is a new release to me, but I know about releases with bigger version numbers")
+        notice(
+            "This is a new release to me, but I know about releases with bigger version numbers"
+        )
 
     # For all releases, record the assets (in case they were deleted/updated/added)
     notice(f"Collecting GitHub release asset data in _data/assets/{version_safe}.json")
@@ -163,7 +184,7 @@ def download_and_extract_json_gz_asset(asset_name, dest):
             "url": asset.browser_download_url,
         }
         asset_data.append(filtered_asset)
-    asset_data.sort(key=lambda s: list(map(str, s['name'])))
+    asset_data.sort(key=lambda s: list(map(str, s["name"])))
 
     with open(f"{pwd}/_data/assets/{version_safe}.json", "wb") as file:
         file.write(json.dumps(asset_data, indent=2).encode("utf-8"))
@@ -176,16 +197,19 @@ def download_and_extract_json_gz_asset(asset_name, dest):
     tarball = f"gap-{version}-core.tar.gz"
     download_asset_by_name(tarball, tmpdir)
     with utils.working_directory(tmpdir):
         extract_tarball(tarball)
     date = get_date_from_configure_ac(f"{tmpdir}/gap-{version}")
     notice(f"Using release date {date} for GAP {version}")
 
     notice(f"Writing the file _Releases/{version}.html")
-    with open(f"{pwd}/_Releases/{version}.html", "w") as file:
+    with open(f"{pwd}/_Releases/{version}.html", "w", encoding="utf-8") as file:
         file.write(f"---\nversion: {version}\ndate: '{date}'\n---\n")
 
     notice(f"Writing the file _data/package-infos/{version_safe}.json")
-    download_and_extract_json_gz_asset("package-infos.json.gz", f"{pwd}/_data/package-infos/{version_safe}.json")
+    download_and_extract_json_gz_asset(
+        "package-infos.json.gz", f"{pwd}/_data/package-infos/{version_safe}.json"
+    )
 
     # For a new-to-me release with biggest version number, also set this as the
     # 'default'/'main' version on the website (i.e. the most prominent release). 
@@ -201,13 +225,19 @@ def download_and_extract_json_gz_asset(asset_name, dest):
         file.write(json.dumps(release_data, indent=2).encode("utf-8"))
 
     notice("Overwriting _data/help.json with the contents of help-links.json.gz")
-    download_and_extract_json_gz_asset("help-links.json.gz", f"{pwd}/_data/help.json")
+    download_and_extract_json_gz_asset(
+        "help-links.json.gz", f"{pwd}/_data/help.json"
+    )
 
-    notice("Repopulating _Packages/ with one HTML file for each package in package-infos.json")
+    notice(
+        "Repopulating _Packages/ with one HTML file for each package in package-infos.json"
+    )
     shutil.rmtree("_Packages")
     os.mkdir("_Packages")
     with open(f"{pwd}/_data/package-infos/{version_safe}.json", "rb") as file:
         data = json.loads(file.read())
         for pkg in data:
-            with open(f"{pwd}/_Packages/{pkg}.html", "w+") as pkg_file:
+            with open(
+                f"{pwd}/_Packages/{pkg}.html", "w+", encoding="utf-8"
+            ) as pkg_file:
                 pkg_file.write(f"---\ntitle: {data[pkg]['PackageName']}\n---\n")
diff --git a/dev/releases/utils.py b/dev/releases/utils.py
index df3949a232..152ab292ef 100644
--- a/dev/releases/utils.py
+++ b/dev/releases/utils.py
@@ -15,42 +15,54 @@
 import subprocess
 import sys
 
+
 # print notices in green
 def notice(msg):
     print("\033[32m" + msg + "\033[0m")
 
+
 # print warnings in yellow
 def warning(msg):
     print("\033[33m" + msg + "\033[0m")
 
+
 # print error in red and exit
 def error(msg):
     print("\033[31m" + msg + "\033[0m")
     sys.exit(1)
 
+
 def verify_command_available(cmd):
-    if shutil.which(cmd) == None:
+    if shutil.which(cmd) is None:
         error(f"the '{cmd}' command was not found, please install it")
     # TODO: do the analog of this in ReleaseTools bash script:
     # command -v curl >/dev/null 2>&1 ||
     #     error "the 'curl' command was not found, please install it"
 
+
 def verify_git_repo():
-    res = subprocess.run(["git", "--git-dir=.git", "rev-parse"], stderr = subprocess.DEVNULL)
+    res = subprocess.run(
+        ["git", "--git-dir=.git", "rev-parse"], stderr=subprocess.DEVNULL, check=False
+    )
     if res.returncode != 0:
         error("current directory is not a git root directory")
 
+
 # check for uncommitted changes
 def is_git_clean():
-    res = subprocess.run(["git", "update-index", "--refresh"])
+    res = subprocess.run(["git", "update-index", "--refresh"], check=False)
     if res.returncode == 0:
-        res = subprocess.run(["git", "diff-index", "--quiet", "HEAD", "--"])
+        res = subprocess.run(
+            ["git", "diff-index", "--quiet", "HEAD", "--"], check=False
+        )
     return res.returncode == 0
 
+
 def verify_git_clean():
     if not is_git_clean():
         error("uncommitted changes detected")
 
+
 # from https://code.activestate.com/recipes/576620-changedirectory-context-manager/
 @contextlib.contextmanager
 def working_directory(path):
@@ -63,14 +75,16 @@ def working_directory(path):
     yield
     os.chdir(prev_cwd)
 
+
 # helper for extracting values of variables set in the GAP Makefiles.rules
 def get_makefile_var(var):
     res = subprocess.run(["make", f"print-{var}"], check=True, capture_output=True)
     kv = res.stdout.decode("ascii").strip().split("=")
     assert len(kv) == 2
     assert kv[0] == var
     return kv[1]
 
+
 # compute the sha256 checksum of a file
 def sha256file(path):
     h = hashlib.sha256()
@@ -80,37 +94,44 @@ def sha256file(path):
             h.update(data)
     return h.hexdigest()
 
+
 # read a file into memory, apply some transformations, and write it back
 def patchfile(path, pattern, repl):
     # Read in the file
-    with open(path, 'r') as file :
+    with open(path, "r", encoding="utf-8") as file:
         filedata = file.read()
 
     # Replace the target string
     filedata = re.sub(pattern, repl, filedata)
 
     # Write the file out again
-    with open(path, 'w') as file:
+    with open(path, "w", encoding="utf-8") as file:
         file.write(filedata)
 
+
 # download file at the given URL to path `dst`
 def download(url, dst):
     notice(f"Downloading {url} to {dst}")
-    res = subprocess.run(["curl", "-L", "-C", "-", "-o", dst, url])
+    res = subprocess.run(["curl", "-L", "-C", "-", "-o", dst, url], check=False)
     if res.returncode != 0:
-        error('failed downloading ' + url)
+        error("failed downloading " + url)
 
+
 def file_matches_checksumfile(filename):
-    with open(filename + ".sha256", "r") as f:
+    with open(filename + ".sha256", "r", encoding="utf-8") as f:
         expected_checksum = f.read().strip()
     return expected_checksum == sha256file(filename)
 
+
 def verify_via_checksumfile(filename):
     actual_checksum = sha256file(filename)
-    with open(filename + ".sha256", "r") as f:
+    with open(filename + ".sha256", "r", encoding="utf-8") as f:
         expected_checksum = f.read().strip()
     if expected_checksum != actual_checksum:
-        error(f"checksum for '{filename}' expected to be {expected_checksum} but got {actual_checksum}")
+        error(
+            f"checksum for '{filename}' expected to be {expected_checksum} but got {actual_checksum}"
+        )
 
+
 # Download file at the given URL to path `dst`, unless we detect that a file
 # already exists at `dst` with the expected checksum.
@@ -123,46 +144,59 @@ def download_with_sha256(url, dst):
     download(url, dst)
     verify_via_checksumfile(dst)
 
+
 # Run whatever command and create an appropriate log file
-def run_with_log(args, name, msg = None):
-    if msg == None:
+def run_with_log(args, name, msg=None):
+    if msg is None:
         msg = name
-    with open("../"+name+".log", "w") as fp:
+    with open("../" + name + ".log", "w", encoding="utf-8") as fp:
         try:
             subprocess.run(args, check=True, stdout=fp, stderr=fp)
         except subprocess.CalledProcessError:
-            error(msg+" failed. See "+name+".log.")
+            error(msg + " failed. See " + name + ".log.")
 
+
 def is_possible_gap_release_tag(tag):
-    return re.fullmatch( r"v[1-9]+\.[0-9]+\.[0-9]+(-.+)?", tag) != None
+    return re.fullmatch(r"v[1-9]+\.[0-9]+\.[0-9]+(-.+)?", tag) is not None
 
+
 def verify_is_possible_gap_release_tag(tag):
     if not is_possible_gap_release_tag(tag):
         error(f"{tag} does not look like the tag of a GAP release version")
 
+
 # Error checked git fetch of tags
 def safe_git_fetch_tags():
     try:
         subprocess.run(["git", "fetch", "--tags"], check=True)
     except subprocess.CalledProcessError:
-        error('failed to fetch tags, you may have to do \n'
-              + 'git fetch --tags -f')
+        error("failed to fetch tags, you may have to do \n" + "git fetch --tags -f")
 
+
 # lightweight vs annotated
 # https://stackoverflow.com/questions/40479712/how-can-i-tell-if-a-given-git-tag-is-annotated-or-lightweight#40499437
 def is_annotated_git_tag(tag):
-    res = subprocess.run(["git", "for-each-ref", "refs/tags/" + tag],
-                         capture_output=True, text=True)
+    res = subprocess.run(
+        ["git", "for-each-ref", "refs/tags/" + tag],
+        capture_output=True,
+        text=True,
+        check=False,
+    )
     return res.returncode == 0 and res.stdout.split()[1] == "tag"
 
+
 def check_git_tag_for_release(tag):
     if not is_annotated_git_tag(tag):
         error(f"There is no annotated tag {tag}")
     # check that tag points to HEAD
-    tag_commit = subprocess.run(["git", "rev-parse", tag + "^{}"],
-                                check=True, capture_output=True, text=True).stdout.strip()
-    head = subprocess.run(["git", "rev-parse", "HEAD"],
-                          check=True, capture_output=True, text=True).stdout.strip()
+    tag_commit = subprocess.run(
+        ["git", "rev-parse", tag + "^{}"], check=True, capture_output=True, text=True
+    ).stdout.strip()
+    head = subprocess.run(
+        ["git", "rev-parse", "HEAD"], check=True, capture_output=True, text=True
+    ).stdout.strip()
     if tag_commit != head:
-        error(f"The tag {tag} does not point to the current commit {head} but"
-              + f" instead points to {tag_commit}")
+        error(
+            f"The tag {tag} does not point to the current commit {head} but"
+            + f" instead points to {tag_commit}"
+        )
diff --git a/dev/releases/utils_github.py b/dev/releases/utils_github.py
index 89e2b70e86..f1f99d0235 100644
--- a/dev/releases/utils_github.py
+++ b/dev/releases/utils_github.py
@@ -7,39 +7,48 @@
 ##
 ##  SPDX-License-Identifier: GPL-2.0-or-later
 ##
-import contextlib
-import hashlib
 import os
-import re
-import shutil
 import subprocess
-import sys
 
 import github
 
+from utils import notice, error, sha256file, verify_via_checksumfile
+
 CURRENT_REPO_NAME = os.environ.get("GITHUB_REPOSITORY", "gap-system/gap")
 
# Initialized by initialize_github
 GITHUB_INSTANCE = None
 CURRENT_REPO = None
 
+
 # sets the global variables GITHUB_INSTANCE and CURRENT_REPO
 # If no token is provided, this uses the value of the environment variable
 # GITHUB_TOKEN.
 def initialize_github(token=None):
     global GITHUB_INSTANCE, CURRENT_REPO
-    if GITHUB_INSTANCE != None or CURRENT_REPO != None:
-        error("Global variables GITHUB_INSTANCE and CURRENT_REPO"
-              + " are already initialized.")
-    if token == None and "GITHUB_TOKEN" in os.environ:
+    if GITHUB_INSTANCE is not None or CURRENT_REPO is not None:
+        error(
+            "Global variables GITHUB_INSTANCE and CURRENT_REPO"
+            + " are already initialized."
+        )
+    if token is None and "GITHUB_TOKEN" in os.environ:
         token = os.environ["GITHUB_TOKEN"]
-    if token == None:
-        temp = subprocess.run(["git", "config", "--get", "github.token"], text=True, capture_output=True)
+    if token is None:
+        temp = subprocess.run(
+            ["git", "config", "--get", "github.token"],
+            text=True,
+            capture_output=True,
+            check=False,
+        )
         if temp.returncode == 0:
             token = temp.stdout.strip()
-    if token == None and os.path.isfile(os.path.expanduser('~') + '/.github_shell_token'):
-        with open(os.path.expanduser('~') + '/.github_shell_token', 'r') as token_file:
+    if token is None and os.path.isfile(
+        os.path.expanduser("~") + "/.github_shell_token"
+    ):
+        with open(
+            os.path.expanduser("~") + "/.github_shell_token", "r", encoding="utf-8"
+        ) as token_file:
             token = token_file.read().strip()
-    if token == None:
+    if token is None:
         error("Error: no access token found or provided")
     g = github.Github(token)
     GITHUB_INSTANCE = g
@@ -49,6 +58,7 @@ def initialize_github(token=None):
     except github.GithubException:
         error("Error: the access token may be incorrect")
 
+
 # Given the <filename> of a file that does not end with .sha256, create or get
 # the corresponding sha256 checksum file <filename>.sha256, (comparing checksums
 # just to be safe, in the latter case). Then upload the files and
@@ -70,7 +80,7 @@ def upload_asset_with_checksum(release, filename):
         verify_via_checksumfile(filename)
     else:
         notice("Writing new checksum file")
-        with open(checksum_filename, "w") as checksumfile:
+        with open(checksum_filename, "w", encoding="utf-8") as checksumfile:
             checksumfile.write(sha256file(filename))
 
     for file in [filename, checksum_filename]: