From feab44803c257a742f7a7512add16e468d823ae1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 21 Sep 2022 08:35:04 -0700
Subject: [PATCH 01/13] Bump typing-extensions from 4.2.0 to 4.3.0 in
 /hail/python (#12200)

Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.2.0 to 4.3.0.
- [Release notes](https://github.com/python/typing_extensions/releases)
- [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md)
- [Commits](https://github.com/python/typing_extensions/compare/4.2.0...4.3.0)

---
updated-dependencies:
- dependency-name: typing-extensions
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 hail/python/dev/pinned-requirements.txt | 2 +-
 hail/python/pinned-requirements.txt     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/hail/python/dev/pinned-requirements.txt b/hail/python/dev/pinned-requirements.txt
index 05a5dac9553..6f3111e6363 100644
--- a/hail/python/dev/pinned-requirements.txt
+++ b/hail/python/dev/pinned-requirements.txt
@@ -404,7 +404,7 @@ types-urllib3==1.26.15
     # via
     #   -r python/dev/requirements.txt
     #   types-requests
-typing-extensions==4.2.0
+typing-extensions==4.3.0
     # via
     #   argon2-cffi
     #   astroid
diff --git a/hail/python/pinned-requirements.txt b/hail/python/pinned-requirements.txt
index ef35c51b4ea..9d9c4caa27e 100644
--- a/hail/python/pinned-requirements.txt
+++ b/hail/python/pinned-requirements.txt
@@ -217,7 +217,7 @@ tornado==6.1
     # via bokeh
 tqdm==4.64.0
     # via -r python/requirements.txt
-typing-extensions==4.2.0
+typing-extensions==4.3.0
     # via
     #   aiohttp
    #   async-timeout
From 776bd8a799635757298266f3e628852f65458809 Mon Sep 17 00:00:00 2001
From: jigold
Date: Wed, 21 Sep 2022 13:03:25 -0400
Subject: [PATCH 02/13] [batch] Add index on jobs for update id (#12208)

* [batch] Add index on jobs for update id

* fix
---
 batch/sql/add-jobs-update-id-index.sql | 1 +
 batch/sql/estimated-current.sql        | 1 +
 build.yaml                             | 3 +++
 3 files changed, 5 insertions(+)
 create mode 100644 batch/sql/add-jobs-update-id-index.sql

diff --git a/batch/sql/add-jobs-update-id-index.sql b/batch/sql/add-jobs-update-id-index.sql
new file mode 100644
index 00000000000..7d931771dbf
--- /dev/null
+++ b/batch/sql/add-jobs-update-id-index.sql
@@ -0,0 +1 @@
+CREATE INDEX `jobs_batch_id_update_id` ON `jobs` (`batch_id`, `update_id`);
diff --git a/batch/sql/estimated-current.sql b/batch/sql/estimated-current.sql
index db8bb680d0a..fdce83615a5 100644
--- a/batch/sql/estimated-current.sql
+++ b/batch/sql/estimated-current.sql
@@ -254,6 +254,7 @@ CREATE TABLE IF NOT EXISTS `jobs` (
 ) ENGINE = InnoDB;
 CREATE INDEX `jobs_batch_id_state_always_run_inst_coll_cancelled` ON `jobs` (`batch_id`, `state`, `always_run`, `inst_coll`, `cancelled`);
 CREATE INDEX `jobs_batch_id_state_always_run_cancelled` ON `jobs` (`batch_id`, `state`, `always_run`, `cancelled`);
+CREATE INDEX `jobs_batch_id_update_id` ON `jobs` (`batch_id`, `update_id`);
 
 CREATE TABLE IF NOT EXISTS `batch_bunches` (
   `batch_id` BIGINT NOT NULL,
diff --git a/build.yaml b/build.yaml
index 15e71c2a258..200114f9cdc 100644
--- a/build.yaml
+++ b/build.yaml
@@ -2073,6 +2073,9 @@ steps:
           - name: cleanup-add-batch-updates
             script: /io/sql/cleanup-add-batch-updates.sql
             online: true
+          - name: add-jobs-update-id-index
+            script: /io/sql/add-jobs-update-id-index.sql
+            online: true
         inputs:
           - from: /repo/batch/sql
            to: /io/sql
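The composite index added above serves point lookups that constrain jobs by both batch and update. A hypothetical query of the shape it accelerates (columns as defined in `estimated-current.sql`; values illustrative):

```sql
-- Without `jobs_batch_id_update_id`, MySQL must scan the batch's jobs;
-- with it, the (batch_id, update_id) predicate is resolved directly
-- from the index.
SELECT `job_id`, `state`
FROM `jobs`
WHERE `batch_id` = 42
  AND `update_id` = 3;
```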
From 4818c93396c634e6f34a888bf50d7ecefd4c4b36 Mon Sep 17 00:00:00 2001
From: Dan King
Date: Wed, 21 Sep 2022 12:49:57 -0700
Subject: [PATCH 03/13] [query] deprecate the batch_size parameter for
 gvcf_batch_size (#12213)

CHANGELOG: The `batch_size` parameter of `vds.new_combiner` is deprecated in favor of `gvcf_batch_size`. This avoids a confusing error message: https://dev.hail.is/t/vds-new-combiner/269/4?u=dking.
---
 .../vds/combiner/variant_dataset_combiner.py | 23 ++++++++++++++++---
 1 file changed, 20 insertions(+), 3 deletions(-)

diff --git a/hail/python/hail/vds/combiner/variant_dataset_combiner.py b/hail/python/hail/vds/combiner/variant_dataset_combiner.py
index 39caf3ed3c8..68fd6fde3c7 100644
--- a/hail/python/hail/vds/combiner/variant_dataset_combiner.py
+++ b/hail/python/hail/vds/combiner/variant_dataset_combiner.py
@@ -519,7 +519,8 @@ def new_combiner(*,
                  gvcf_reference_entry_fields_to_keep: Optional[Collection[str]] = None,
                  branch_factor: int = VariantDatasetCombiner._default_branch_factor,
                  target_records: int = VariantDatasetCombiner._default_target_records,
-                 batch_size: int = VariantDatasetCombiner._default_gvcf_batch_size,
+                 gvcf_batch_size: Optional[int] = None,
+                 batch_size: Optional[int] = None,
                  reference_genome: Union[str, hl.ReferenceGenome] = 'default',
                  contig_recoding: Optional[Dict[str, str]] = None,
                  force: bool = False,
@@ -540,6 +541,22 @@ def new_combiner(*,
         raise ValueError("'gvcf_sample_names' and 'gvcf_paths' must have the same length "
                          f'{len(gvcf_sample_names)} != {len(gvcf_paths)}')
 
+    if batch_size is None:
+        if gvcf_batch_size is None:
+            gvcf_batch_size = VariantDatasetCombiner._default_gvcf_batch_size
+        else:
+            pass
+    else:
+        if gvcf_batch_size is None:
+            warning('The batch_size parameter is deprecated. '
+                    'The batch_size parameter will be removed in a future version of Hail. '
+                    'Please use gvcf_batch_size instead.')
+            gvcf_batch_size = batch_size
+        else:
+            raise ValueError('Specify only one of batch_size and gvcf_batch_size. '
+                             f'Received {batch_size} and {gvcf_batch_size}.')
+    del batch_size
+
     n_partition_args = (int(intervals is not None)
                         + int(import_interval_size is not None)
                         + int(use_genome_default_intervals)
@@ -563,7 +580,7 @@ def maybe_load_from_saved_path(save_path: str) -> Optional[VariantDatasetCombine
             # is a failure due to branch factor being too large)
             combiner.branch_factor = branch_factor
             combiner.target_records = target_records
-            combiner.gvcf_batch_size = batch_size
+            combiner.gvcf_batch_size = gvcf_batch_size
             return combiner
         except (ValueError, TypeError, OSError, KeyError):
             warning(f'file exists at {save_path}, but it is not a valid combiner plan, overwriting')
@@ -656,7 +673,7 @@ def maybe_load_from_saved_path(save_path: str) -> Optional[VariantDatasetCombine
                                       reference_genome=reference_genome,
                                       branch_factor=branch_factor,
                                       target_records=target_records,
-                                      gvcf_batch_size=batch_size,
+                                      gvcf_batch_size=gvcf_batch_size,
                                       contig_recoding=contig_recoding,
                                       vdses=vdses,
                                      gvcfs=gvcf_paths,
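The shim above is a standard dual-parameter deprecation. A minimal standalone sketch of the same resolution logic, with simplified names (this is not Hail's actual API surface):

```python
import warnings

DEFAULT_GVCF_BATCH_SIZE = 100  # stand-in for VariantDatasetCombiner's default


def resolve_batch_size(*, gvcf_batch_size=None, batch_size=None):
    # Resolve the deprecated alias once, up front, then forget it existed.
    if batch_size is not None:
        if gvcf_batch_size is not None:
            raise ValueError('Specify only one of batch_size and gvcf_batch_size. '
                             f'Received {batch_size} and {gvcf_batch_size}.')
        warnings.warn('batch_size is deprecated; use gvcf_batch_size instead.',
                      DeprecationWarning)
        gvcf_batch_size = batch_size
    elif gvcf_batch_size is None:
        gvcf_batch_size = DEFAULT_GVCF_BATCH_SIZE
    return gvcf_batch_size


assert resolve_batch_size(batch_size=50) == 50          # warns, still works
assert resolve_batch_size(gvcf_batch_size=50) == 50     # preferred spelling
assert resolve_batch_size() == DEFAULT_GVCF_BATCH_SIZE  # default applies
```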
From e03edd41d248b91d435f6fb87eee6060158867f9 Mon Sep 17 00:00:00 2001
From: Daniel Goldstein
Date: Wed, 21 Sep 2022 17:03:31 -0400
Subject: [PATCH 04/13] [ci] Accept multiple registries in the docker auth
 script (#12210)

---
 .../convert-cloud-credentials-to-docker-auth-config | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/ci/buildkit/convert-cloud-credentials-to-docker-auth-config b/ci/buildkit/convert-cloud-credentials-to-docker-auth-config
index d6bf6b2212b..cb168e1e91f 100644
--- a/ci/buildkit/convert-cloud-credentials-to-docker-auth-config
+++ b/ci/buildkit/convert-cloud-credentials-to-docker-auth-config
@@ -13,4 +13,11 @@ elif [ ! -z "${AZURE_APPLICATION_CREDENTIALS}" ]; then
 fi
 
 USER_PASS=$(echo -n "$USERNAME:$PASSWORD" | base64 | tr -d \\n)
-echo '{"auths": { "'$REGISTRY'": { "auth": "'$USER_PASS'"}}}' > $HOME/.docker/config.json
+
+registry_auths='{}'
+for registry in ${REGISTRIES:=$REGISTRY}
+do
+    registry_auths=$(echo $registry_auths | jq --arg registry "$registry" --arg userpass "$USER_PASS" '. + { ($registry): { "auth": $userpass }}')
+done
+
+echo '{"auths": '$registry_auths'}' > $HOME/.docker/config.json
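For concreteness: with a hypothetical `REGISTRIES="gcr.io myregistry.azurecr.io"`, the `jq` loop accumulates one `auths` entry per registry (`${REGISTRIES:=$REGISTRY}` keeps the old single-registry interface working), writing a config like:

```json
{
  "auths": {
    "gcr.io": { "auth": "dXNlcjpwYXNz" },
    "myregistry.azurecr.io": { "auth": "dXNlcjpwYXNz" }
  }
}
```

(`dXNlcjpwYXNz` is base64 of `user:pass`; all registry names and credentials above are illustrative.)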
From 6ce031726cd20057221564fe5d6318903f436013 Mon Sep 17 00:00:00 2001
From: jigold
Date: Wed, 21 Sep 2022 18:31:06 -0400
Subject: [PATCH 05/13] [batch] Get rid of default standing workers in dev
 (#12202)

* [batch] Get rid of default standing workers in dev

* fixes
---
 .../sql/no_dev_standing_workers_by_default.py | 22 +++++++++++++++++++
 build.yaml                                    |  3 +++
 2 files changed, 25 insertions(+)
 create mode 100644 batch/sql/no_dev_standing_workers_by_default.py

diff --git a/batch/sql/no_dev_standing_workers_by_default.py b/batch/sql/no_dev_standing_workers_by_default.py
new file mode 100644
index 00000000000..05387ca24c5
--- /dev/null
+++ b/batch/sql/no_dev_standing_workers_by_default.py
@@ -0,0 +1,22 @@
+import os
+import asyncio
+from gear import Database
+
+
+async def main():
+    if os.environ['HAIL_SCOPE'] != 'dev':
+        return
+
+    db = Database()
+    await db.async_init()
+
+    await db.execute_update(
+        '''
+UPDATE pools
+SET enable_standing_worker = 0
+'''
+    )
+
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(main())
diff --git a/build.yaml b/build.yaml
index 200114f9cdc..dac456bf2df 100644
--- a/build.yaml
+++ b/build.yaml
@@ -2076,6 +2076,9 @@ steps:
           - name: add-jobs-update-id-index
             script: /io/sql/add-jobs-update-id-index.sql
             online: true
+          - name: no-dev-standing-workers-by-default
+            script: /io/sql/no_dev_standing_workers_by_default.py
+            online: true
         inputs:
           - from: /repo/batch/sql
             to: /io/sql

From 02166f797d7edb4960fbbe618dde82a3e6403005 Mon Sep 17 00:00:00 2001
From: Daniel Goldstein
Date: Wed, 21 Sep 2022 19:44:04 -0400
Subject: [PATCH 06/13] [batch] Make frozen 503 a middleware for POST and
 PATCH (#12145)

---
 batch/batch/front_end/front_end.py | 22 ++++------------------
 batch/batch/utils.py               |  7 +++++++
 2 files changed, 11 insertions(+), 18 deletions(-)

diff --git a/batch/batch/front_end/front_end.py b/batch/batch/front_end/front_end.py
index 2c1ec19eb63..b32f248f537 100644
--- a/batch/batch/front_end/front_end.py
+++ b/batch/batch/front_end/front_end.py
@@ -72,7 +72,7 @@
 from ..globals import BATCH_FORMAT_VERSION, HTTP_CLIENT_MAX_SIZE
 from ..inst_coll_config import InstanceCollectionConfigs
 from ..spec_writer import SpecWriter
-from ..utils import query_billing_projects
+from ..utils import query_billing_projects, unavailable_if_frozen
 from .validate import ValidationError, validate_and_clean_jobs, validate_batch
 
 # import uvloop
@@ -677,10 +677,6 @@ def check_service_account_permissions(user, sa):
 async def create_jobs(request: aiohttp.web.Request, userdata: dict):
     app = request.app
 
-    if app['frozen']:
-        log.info('ignoring batch create request; batch is frozen')
-        raise web.HTTPServiceUnavailable()
-
     batch_id = int(request.match_info['batch_id'])
     job_specs = await request.json()
     return await _create_jobs(userdata, job_specs, batch_id, 1, app)
@@ -1105,10 +1101,6 @@ async def create_batch_fast(request, userdata):
     app = request.app
     db: Database = app['db']
 
-    if app['frozen']:
-        log.info('ignoring batch create request; batch is frozen')
-        raise web.HTTPServiceUnavailable()
-
     user = userdata['username']
     batch_and_bunch = await request.json()
     batch_spec = batch_and_bunch['batch']
@@ -1132,10 +1124,6 @@ async def create_batch(request, userdata):
     app = request.app
     db: Database = app['db']
 
-    if app['frozen']:
-        log.info('ignoring batch create jobs request; batch is frozen')
-        raise web.HTTPServiceUnavailable()
-
     batch_spec = await request.json()
     id = await _create_batch(batch_spec, userdata, db)
     n_jobs = batch_spec['n_jobs']
@@ -1372,10 +1360,6 @@ async def close_batch(request, userdata):
     app = request.app
     db: Database = app['db']
 
-    if app['frozen']:
-        log.info('ignoring batch close request; batch is frozen')
-        raise web.HTTPServiceUnavailable()
-
     record = await db.select_and_fetchone(
         '''
 SELECT 1 FROM batches
@@ -2459,7 +2443,9 @@ async def on_cleanup(app):
 
 
 def run():
-    app = web.Application(client_max_size=HTTP_CLIENT_MAX_SIZE, middlewares=[monitor_endpoints_middleware])
+    app = web.Application(
+        client_max_size=HTTP_CLIENT_MAX_SIZE, middlewares=[unavailable_if_frozen, monitor_endpoints_middleware]
+    )
 
     setup_aiohttp_session(app)
     setup_aiohttp_jinja2(app, 'batch.front_end')
diff --git a/batch/batch/utils.py b/batch/batch/utils.py
index 3662ed9dcf3..4a04f4b204e 100644
--- a/batch/batch/utils.py
+++ b/batch/batch/utils.py
@@ -13,6 +13,13 @@
 log = logging.getLogger('utils')
 
 
+@web.middleware
+async def unavailable_if_frozen(request: web.Request, handler):
+    if request.method in ("POST", "PATCH") and request.app['frozen']:
+        raise web.HTTPServiceUnavailable()
+    return await handler(request)
+
+
 def authorization_token(request):
     auth_header = request.headers.get('Authorization')
    if not auth_header:
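A self-contained sketch of the middleware pattern this patch adopts, with toy handlers rather than Batch's real routes — one check now guards every mutating endpoint instead of being repeated per handler:

```python
from aiohttp import web


@web.middleware
async def unavailable_if_frozen(request: web.Request, handler):
    # Reject mutating requests while the service is frozen; reads pass through.
    if request.method in ("POST", "PATCH") and request.app['frozen']:
        raise web.HTTPServiceUnavailable()
    return await handler(request)


async def create_thing(request: web.Request) -> web.Response:
    return web.json_response({'created': True})


app = web.Application(middlewares=[unavailable_if_frozen])
app['frozen'] = True  # toggled by an operator endpoint in the real service
app.add_routes([web.post('/things', create_thing)])
# web.run_app(app)  # POST /things now returns 503 until 'frozen' is cleared
```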
From 172e19cad7934dce9ad89a4aa2bd5e3cff41e52a Mon Sep 17 00:00:00 2001
From: Iris Rademacher <84595986+iris-garden@users.noreply.github.com>
Date: Wed, 21 Sep 2022 21:12:52 -0400
Subject: [PATCH 07/13] [batch] reject local paths as input when using
 ServiceBackend (#12186)

* [batch] rejects local paths as input when using ServiceBackend
* add test case
* types
* no abstract
* capitalization
* dont validate job resource groups
* unstatic
* fix method name
* move validate call
* cleanup
* fix args
* types
* lint
* import
* contextmanager syntax
* camelcase
* update test
---
 hail/python/hailtop/aiotools/router_fs.py    | 22 +++++++++-----------
 hail/python/hailtop/batch/backend.py         | 14 ++++++++++++-
 hail/python/hailtop/batch/batch.py           |  1 +
 hail/python/test/hailtop/batch/test_batch.py |  9 ++++++++
 4 files changed, 33 insertions(+), 13 deletions(-)

diff --git a/hail/python/hailtop/aiotools/router_fs.py b/hail/python/hailtop/aiotools/router_fs.py
index dbd423f3c4a..6812524eb5a 100644
--- a/hail/python/hailtop/aiotools/router_fs.py
+++ b/hail/python/hailtop/aiotools/router_fs.py
@@ -1,6 +1,6 @@
 from typing import Any, Optional, List, Set, AsyncIterator, Dict, AsyncContextManager, Callable
 import asyncio
-import urllib.parse
+import urllib
 
 from ..aiocloud import aioaws, aioazure, aiogoogle
 from .fs import (AsyncFS, MultiPartCreate, FileStatus, FileListEntry, ReadableStream,
@@ -35,6 +35,12 @@ def __init__(self,
         self._azure_kwargs = azure_kwargs or {}
         self._s3_kwargs = s3_kwargs or {}
 
+    def get_scheme(self, uri: str) -> str:
+        scheme = urllib.parse.urlparse(uri).scheme or self._default_scheme
+        if not scheme:
+            raise ValueError(f"no default scheme and URL has no scheme: {uri}")
+        return scheme
+
     def parse_url(self, url: str) -> AsyncFSURL:
         return self._get_fs(url).parse_url(url)
 
@@ -59,19 +65,11 @@ def _load_fs(self, scheme: str):
         self._scheme_fs[scheme] = fs
         self._filesystems.append(fs)
 
-    def _get_fs(self, url: str) -> AsyncFS:
-        parsed = urllib.parse.urlparse(url)
-        if not parsed.scheme:
-            if self._default_scheme:
-                parsed = parsed._replace(scheme=self._default_scheme)
-            else:
-                raise ValueError(f"no default scheme and URL has no scheme: {url}")
-
-        scheme = parsed.scheme
+    def _get_fs(self, uri: str) -> AsyncFS:
+        scheme = self.get_scheme(uri)
         if scheme not in self._scheme_fs:
             self._load_fs(scheme)
-
-        fs = self._scheme_fs.get(parsed.scheme)
+        fs = self._scheme_fs.get(scheme)
         assert fs is not None
         return fs
diff --git a/hail/python/hailtop/batch/backend.py b/hail/python/hailtop/batch/backend.py
index 9d6c0d75de4..d8ca8e32cc9 100644
--- a/hail/python/hailtop/batch/backend.py
+++ b/hail/python/hailtop/batch/backend.py
@@ -79,6 +79,9 @@ def close(self):  # pylint: disable=R0201
         self._close()
         self._closed = True
 
+    def validate_file_scheme(self, uri: str) -> None:
+        pass
+
     def __del__(self):
         self.close()
 
@@ -446,7 +449,7 @@ def __init__(self,
         self.remote_tmpdir = remote_tmpdir
 
         gcs_kwargs = {'project': google_project}
-        self.__fs: AsyncFS = RouterAsyncFS(default_scheme='file', gcs_kwargs=gcs_kwargs)
+        self.__fs: RouterAsyncFS = RouterAsyncFS(default_scheme='file', gcs_kwargs=gcs_kwargs)
 
     @property
     def _fs(self):
@@ -728,3 +731,12 @@ async def compile_job(job):
         status = batch_handle.wait()
         print(f'batch {batch_handle.id} complete: {status["state"]}')
         return batch_handle
+
+    def validate_file_scheme(self, uri: str) -> None:
+        scheme = self.__fs.get_scheme(uri)
+        if scheme == "file":
+            raise ValueError(
+                f"Local filepath detected: '{uri}'. "
+                "ServiceBackend does not support the use of local filepaths. "
+                "Please specify a remote URI instead (e.g. gs://bucket/folder)."
+            )
diff --git a/hail/python/hailtop/batch/batch.py b/hail/python/hailtop/batch/batch.py
index f5f03fb5432..e3c808cd041 100644
--- a/hail/python/hailtop/batch/batch.py
+++ b/hail/python/hailtop/batch/batch.py
@@ -300,6 +300,7 @@ def _new_job_resource_file(self, source, value=None):
         return jrf
 
     def _new_input_resource_file(self, input_path, value=None):
+        self._backend.validate_file_scheme(input_path)
         if value is None:
             value = f'{secret_alnum_string(5)}/{os.path.basename(input_path.rstrip("/"))}'
         irf = _resource.InputResourceFile(value)
diff --git a/hail/python/test/hailtop/batch/test_batch.py b/hail/python/test/hailtop/batch/test_batch.py
index ec0d4e201a4..5b7f95b9a22 100644
--- a/hail/python/test/hailtop/batch/test_batch.py
+++ b/hail/python/test/hailtop/batch/test_batch.py
@@ -1,6 +1,7 @@
 import asyncio
 import secrets
 import unittest
+import pytest
 import os
 import subprocess as sp
 import tempfile
@@ -579,6 +580,14 @@ def test_file_name_space(self):
         res_status = res.status()
         assert res_status['state'] == 'success', str((res_status, res.debug_info()))
 
+    def test_local_paths_error(self):
+        b = self.batch()
+        j = b.new_job()
+        for input in ["hi.txt", "~/hello.csv", "./hey.tsv", "/sup.json", "file://yo.yaml"]:
+            with pytest.raises(ValueError) as e:
+                b.read_input(input)
+            assert str(e.value).startswith("Local filepath detected")
+
     def test_dry_run(self):
         b = self.batch()
        j = b.new_job()
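The user-visible effect, sketched below; the backend construction and bucket name are illustrative (a real ServiceBackend needs a configured billing project and remote temporary directory):

```python
import hailtop.batch as hb

b = hb.Batch(backend=hb.ServiceBackend())  # jobs will run remotely

b.read_input('gs://my-bucket/data.csv')    # accepted: remote URI
b.read_input('./data.csv')                 # raises ValueError beginning
                                           # "Local filepath detected: ..."
```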
From dd606177285851ff9be02314a760a0d1f5620da6 Mon Sep 17 00:00:00 2001
From: Daniel Goldstein
Date: Thu, 22 Sep 2022 12:12:47 -0400
Subject: [PATCH 08/13] [docker] Add script for mirroring dockerhub images
 (#12204)

* [docker] Add script for mirroring dockerhub images

* fix
---
 build.yaml                                    | 10 ++---
 docker/copy_image.sh                          | 13 ++++++
 docker/hailgenetics/Makefile                  | 11 +++++
 docker/{ => hailgenetics}/genetics/Dockerfile |  0
 docker/{ => hailgenetics}/hail/Dockerfile     |  0
 docker/hailgenetics/mirror_images.sh          | 42 +++++++++++++++++++
 .../{ => hailgenetics}/python-dill/Dockerfile |  3 +-
 docker/hailgenetics/python-dill/push.sh       | 17 ++++++++
 docker/python-dill/Makefile                   |  4 --
 docker/python-dill/README.md                  |  1 -
 docker/python-dill/push.sh                    | 23 ----------
 docker/third-party/copy_images.sh             | 17 +-------
 infra/bootstrap_utils.sh                      |  8 ++--
 13 files changed, 95 insertions(+), 54 deletions(-)
 create mode 100644 docker/copy_image.sh
 create mode 100644 docker/hailgenetics/Makefile
 rename docker/{ => hailgenetics}/genetics/Dockerfile (100%)
 rename docker/{ => hailgenetics}/hail/Dockerfile (100%)
 create mode 100755 docker/hailgenetics/mirror_images.sh
 rename docker/{ => hailgenetics}/python-dill/Dockerfile (81%)
 create mode 100644 docker/hailgenetics/python-dill/push.sh
 delete mode 100644 docker/python-dill/Makefile
 delete mode 100644 docker/python-dill/README.md
 delete mode 100644 docker/python-dill/push.sh

diff --git a/build.yaml b/build.yaml
index dac456bf2df..b20f914b722 100644
--- a/build.yaml
+++ b/build.yaml
@@ -2690,14 +2690,14 @@ steps:
     - create_ci_test_repo
   - kind: buildImage2
     name: hailgenetics_hail_image
-    dockerFile: /io/docker/hail/Dockerfile
-    contextPath: /io/docker/hail/
+    dockerFile: /io/docker/hailgenetics/hail/Dockerfile
+    contextPath: /io/docker/hailgenetics/hail
     publishAs: hailgenetics/hail
     inputs:
-      - from: /repo/docker/hail
-        to: /io/docker/hail
+      - from: /repo/docker/hailgenetics/hail
+        to: /io/docker/hailgenetics/hail
       - from: /just-wheel/wheel-container.tar
-        to: /io/docker/hail/wheel-container.tar
+        to: /io/docker/hailgenetics/hail/wheel-container.tar
     dependsOn:
     - merge_code
    - build_hail_jar_and_wheel_only
diff --git a/docker/copy_image.sh b/docker/copy_image.sh
new file mode 100644
index 00000000000..c14b2c331ab
--- /dev/null
+++ b/docker/copy_image.sh
@@ -0,0 +1,13 @@
+if command -v skopeo
+then
+    copy_image() {
+        skopeo copy --override-os linux --override-arch amd64 docker://docker.io/$1 docker://$2
+    }
+else
+    echo Could not find skopeo, falling back to docker which will be slower.
+    copy_image() {
+        docker pull $1
+        docker tag $1 $2
+        docker push $2
+    }
+fi
diff --git a/docker/hailgenetics/Makefile b/docker/hailgenetics/Makefile
new file mode 100644
index 00000000000..cf780af1a27
--- /dev/null
+++ b/docker/hailgenetics/Makefile
@@ -0,0 +1,11 @@
+include ../../config.mk
+
+.PHONY: publish-python-dill
+
+publish-python-dill:
+	DOCKER_PREFIX=$(DOCKER_PREFIX) bash python-dill/push.sh
+
+mirror-dockerhub-images:
+	DOCKER_PREFIX=$(DOCKER_PREFIX) \
+	HAIL_PIP_VERSION=$(shell cat ../../hail/python/hail/hail_pip_version) \
+	./mirror_images.sh
diff --git a/docker/genetics/Dockerfile b/docker/hailgenetics/genetics/Dockerfile
similarity index 100%
rename from docker/genetics/Dockerfile
rename to docker/hailgenetics/genetics/Dockerfile
diff --git a/docker/hail/Dockerfile b/docker/hailgenetics/hail/Dockerfile
similarity index 100%
rename from docker/hail/Dockerfile
rename to docker/hailgenetics/hail/Dockerfile
diff --git a/docker/hailgenetics/mirror_images.sh b/docker/hailgenetics/mirror_images.sh
new file mode 100755
index 00000000000..b6393f71dc6
--- /dev/null
+++ b/docker/hailgenetics/mirror_images.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+set -ex
+
+source ../copy_image.sh
+
+if [[ -z "${DOCKER_PREFIX}" ]];
+then
+    echo "Env variable DOCKER_PREFIX must be set"
+    exit 1
+fi
+
+if [[ -z "${HAIL_PIP_VERSION}" ]];
+then
+    echo "Env variable HAIL_PIP_VERSION must be set"
+    exit 1
+fi
+
+python_dill_images=(
+    "python-dill:3.7"
+    "python-dill:3.7-slim"
+    "python-dill:3.8"
+    "python-dill:3.8-slim"
+    "python-dill:3.9"
+    "python-dill:3.9-slim"
+    "python-dill:3.10"
+    "python-dill:3.10-slim"
+)
+
+for image in "${python_dill_images[@]}"
+do
+    copy_image "hailgenetics/${image}" "${DOCKER_PREFIX}/hailgenetics/${image}"
+done
+
+pip_release_images=(
+    "hail:${HAIL_PIP_VERSION}"
+    "genetics:${HAIL_PIP_VERSION}"
+)
+for image in "${pip_release_images[@]}"
+do
+    copy_image "hailgenetics/${image}" "${DOCKER_PREFIX}/hailgenetics/${image}"
+done
"genetics:${HAIL_PIP_VERSION}" +) +for image in "${pip_release_images[@]}" +do + copy_image "hailgenetics/${image}" "${DOCKER_PREFIX}/hailgenetics/${image}" +done diff --git a/docker/python-dill/Dockerfile b/docker/hailgenetics/python-dill/Dockerfile similarity index 81% rename from docker/python-dill/Dockerfile rename to docker/hailgenetics/python-dill/Dockerfile index 6a145538f01..d42ddf99b5f 100644 --- a/docker/python-dill/Dockerfile +++ b/docker/hailgenetics/python-dill/Dockerfile @@ -1,4 +1,5 @@ -FROM python:@PYTHON_VERSION@ +ARG PYTHON_VERSION +FROM python:${PYTHON_VERSION} RUN pip install --upgrade --no-cache-dir dill numpy scipy sklearn && \ python3 -m pip check && \ apt-get update && \ diff --git a/docker/hailgenetics/python-dill/push.sh b/docker/hailgenetics/python-dill/push.sh new file mode 100644 index 00000000000..97bee301732 --- /dev/null +++ b/docker/hailgenetics/python-dill/push.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -ex + +for version in 3.7 3.7-slim 3.8 3.8-slim 3.9 3.9-slim 3.10 3.10-slim +do + public=hailgenetics/python-dill:$version + + DOCKER_BUILDKIT=1 docker build \ + --build-arg PYTHON_VERSION=$version \ + --file Dockerfile.out \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ${public} \ + . + + time DOCKER_BUILDKIT=1 docker push ${public} +done diff --git a/docker/python-dill/Makefile b/docker/python-dill/Makefile deleted file mode 100644 index 76f2a48f70c..00000000000 --- a/docker/python-dill/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -include ../../config.mk - -push: - DOCKER_PREFIX=$(DOCKER_PREFIX) bash push.sh diff --git a/docker/python-dill/README.md b/docker/python-dill/README.md deleted file mode 100644 index a9118ff6795..00000000000 --- a/docker/python-dill/README.md +++ /dev/null @@ -1 +0,0 @@ -These are used by the BatchPoolExecutor. diff --git a/docker/python-dill/push.sh b/docker/python-dill/push.sh deleted file mode 100644 index 862aef208b7..00000000000 --- a/docker/python-dill/push.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -for version in 3.7 3.7-slim 3.8 3.8-slim 3.9 3.9-slim 3.10 3.10-slim -do - sed "s/@PYTHON_VERSION@/$version/g" Dockerfile > Dockerfile.out - - public=hailgenetics/python-dill:$version - private=${DOCKER_PREFIX}/python-dill:$version - cache=${DOCKER_PREFIX}/python-dill:cache - - DOCKER_BUILDKIT=1 docker build \ - --file Dockerfile.out \ - --cache-from ${cache} \ - --build-arg BUILDKIT_INLINE_CACHE=1 \ - --tag ${public} \ - --tag ${private} \ - --tag ${cache} \ - . - - time DOCKER_BUILDKIT=1 docker push ${public} - time DOCKER_BUILDKIT=1 docker push ${private} - time DOCKER_BUILDKIT=1 docker push ${cache} -done diff --git a/docker/third-party/copy_images.sh b/docker/third-party/copy_images.sh index 92d2282da6c..f890d721470 100755 --- a/docker/third-party/copy_images.sh +++ b/docker/third-party/copy_images.sh @@ -2,6 +2,8 @@ set -ex +source ../copy_image.sh + images=$(cat images.txt) if [ -z "${DOCKER_PREFIX}" ] @@ -10,21 +12,6 @@ then exit 1 fi -if command -v skopeo -then - copy_image() { - skopeo copy --override-os linux --override-arch amd64 docker://docker.io/$1 docker://$2 - } -else - echo Could not find skopeo, falling back to docker which will be slower. 
From c7f532fe2e73de1d8c6bef10c3a31e533d818cbd Mon Sep 17 00:00:00 2001
From: Daniel Goldstein
Date: Thu, 22 Sep 2022 13:49:38 -0400
Subject: [PATCH 09/13] [batch] Don't transfer outputs in the local backend if
 there aren't any (#12214)

---
 hail/python/hailtop/batch/backend.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/hail/python/hailtop/batch/backend.py b/hail/python/hailtop/batch/backend.py
index d8ca8e32cc9..8769a3fffeb 100644
--- a/hail/python/hailtop/batch/backend.py
+++ b/hail/python/hailtop/batch/backend.py
@@ -316,9 +316,10 @@ def transfer_dicts_for_resource_file(res_file: Union[resource.ResourceFile, reso
             transfer_dict
             for output_resource in job._external_outputs
             for transfer_dict in transfer_dicts_for_resource_file(output_resource)]
-        output_transfers = orjson.dumps(output_transfer_dicts).decode('utf-8')
 
-        code += [f'python3 -m hailtop.aiotools.copy {shq(requester_pays_project_json)} {shq(output_transfers)}']
+        if output_transfer_dicts:
+            output_transfers = orjson.dumps(output_transfer_dicts).decode('utf-8')
+            code += [f'python3 -m hailtop.aiotools.copy {shq(requester_pays_project_json)} {shq(output_transfers)}']
 
         code += ['\n']
        run_code(code)
From ef14a51d23f0a6c507f12217d596b8d64d766283 Mon Sep 17 00:00:00 2001
From: Dan King
Date: Thu, 22 Sep 2022 16:38:02 -0400
Subject: [PATCH 10/13] [query] fix install-on-cluster to handle lines with
 spaces (#12216)

CHANGELOG: Fix bug that caused make install-on-cluster to fail with a message about sys_platform.

Co-authored-by: ryerobinson <39314627+ryerobinson@users.noreply.github.com>
---
 hail/Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hail/Makefile b/hail/Makefile
index 1da8b638fd1..872d4b82621 100644
--- a/hail/Makefile
+++ b/hail/Makefile
@@ -256,7 +256,7 @@ install: $(WHEEL)
 
 .PHONY: install-on-cluster
 install-on-cluster: $(WHEEL)
-	sed '/^pyspark/d' python/requirements.txt | grep -v '^#' | xargs $(PIP) install -U
+	sed '/^pyspark/d' python/requirements.txt | grep -v '^#' | tr '\n' '\0' | xargs -0 $(PIP) install -U
 	-$(PIP) uninstall -y hail
 	$(PIP) install $(WHEEL) --no-deps
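The failure mode being fixed: requirements lines may carry environment markers containing spaces, and plain `xargs` splits on any whitespace, so pip was handed `sys_platform` as if it were a package name. A runnable sketch of the difference, with an illustrative requirement line and `echo` standing in for `$(PIP) install`:

```bash
printf "uvloop==0.16.0; sys_platform != 'win32'\n" > /tmp/reqs.txt

# Old behavior: whitespace-split into several bogus arguments.
xargs echo pip install -U < /tmp/reqs.txt

# New behavior: each line becomes exactly one NUL-delimited argument.
tr '\n' '\0' < /tmp/reqs.txt | xargs -0 echo pip install -U
```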
"shape": ("marker_symbol", None), + "shape_legend": ("name", None), } - def __init__(self, aes, color=None, size=None, alpha=None): + def __init__(self, aes, color=None, size=None, alpha=None, shape=None): super().__init__(aes) self.color = color self.size = size self.alpha = alpha + self.shape = shape def apply_to_fig(self, parent, grouped_data, fig_so_far, precomputed, facet_row, facet_col, legend_cache): def plot_group(df): @@ -119,17 +129,17 @@ def get_stat(self): return StatIdentity() -def geom_point(mapping=aes(), *, color=None, size=None, alpha=None): +def geom_point(mapping=aes(), *, color=None, size=None, alpha=None, shape=None): """Create a scatter plot. - Supported aesthetics: ``x``, ``y``, ``color``, ``alpha``, ``tooltip`` + Supported aesthetics: ``x``, ``y``, ``color``, ``alpha``, ``tooltip``, ``shape`` Returns ------- :class:`FigureAttribute` The geom to be applied. """ - return GeomPoint(mapping, color=color, size=size, alpha=alpha) + return GeomPoint(mapping, color=color, size=size, alpha=alpha, shape=shape) class GeomLine(GeomLineBasic): diff --git a/hail/python/hail/ggplot/ggplot.py b/hail/python/hail/ggplot/ggplot.py index 0ce69e1c94e..3c5d1c509ca 100644 --- a/hail/python/hail/ggplot/ggplot.py +++ b/hail/python/hail/ggplot/ggplot.py @@ -10,7 +10,7 @@ from .labels import Labels from .scale import Scale, ScaleContinuous, ScaleDiscrete, scale_x_continuous, scale_x_genomic, scale_y_continuous, \ scale_x_discrete, scale_y_discrete, scale_color_discrete, scale_color_continuous, scale_fill_discrete, \ - scale_fill_continuous + scale_fill_continuous, scale_shape_auto from .aes import Aesthetic, aes from .facets import Faceter from .utils import is_continuous_type, is_genomic_type, check_scale_continuity @@ -91,6 +91,13 @@ def add_default_scales(self, aesthetic): self.scales["fill"] = scale_fill_discrete() elif aesthetic_str == "fill" and is_continuous: self.scales["fill"] = scale_fill_continuous() + elif aesthetic_str == "shape" and not is_continuous: + self.scales["shape"] = scale_shape_auto() + elif aesthetic_str == "shape" and is_continuous: + raise ValueError( + "The 'shape' aesthetic does not support continuous " + "types. Specify values of a discrete type instead." 
diff --git a/hail/python/hail/ggplot/ggplot.py b/hail/python/hail/ggplot/ggplot.py
index 0ce69e1c94e..3c5d1c509ca 100644
--- a/hail/python/hail/ggplot/ggplot.py
+++ b/hail/python/hail/ggplot/ggplot.py
@@ -10,7 +10,7 @@
 from .labels import Labels
 from .scale import Scale, ScaleContinuous, ScaleDiscrete, scale_x_continuous, scale_x_genomic, scale_y_continuous, \
     scale_x_discrete, scale_y_discrete, scale_color_discrete, scale_color_continuous, scale_fill_discrete, \
-    scale_fill_continuous
+    scale_fill_continuous, scale_shape_auto
 from .aes import Aesthetic, aes
 from .facets import Faceter
 from .utils import is_continuous_type, is_genomic_type, check_scale_continuity
@@ -91,6 +91,13 @@ def add_default_scales(self, aesthetic):
                     self.scales["fill"] = scale_fill_discrete()
                 elif aesthetic_str == "fill" and is_continuous:
                     self.scales["fill"] = scale_fill_continuous()
+                elif aesthetic_str == "shape" and not is_continuous:
+                    self.scales["shape"] = scale_shape_auto()
+                elif aesthetic_str == "shape" and is_continuous:
+                    raise ValueError(
+                        "The 'shape' aesthetic does not support continuous "
+                        "types. Specify values of a discrete type instead."
+                    )
                 else:
                     if is_continuous:
                        self.scales[aesthetic_str] = ScaleContinuous(aesthetic_str)
diff --git a/hail/python/hail/ggplot/scale.py b/hail/python/hail/ggplot/scale.py
index 89a14637bcc..331c53a21a9 100644
--- a/hail/python/hail/ggplot/scale.py
+++ b/hail/python/hail/ggplot/scale.py
@@ -1,14 +1,16 @@
 import abc
-from .geoms import FigureAttribute
-
-from hail.context import get_reference
-from hail import tstr
-from .utils import categorical_strings_to_colors, continuous_nums_to_colors, is_continuous_type, is_discrete_type
+from collections.abc import Mapping
 
 import plotly.express as px
 import plotly
 
+from hail.context import get_reference
+from hail import tstr
+
+from .geoms import FigureAttribute
+from .utils import continuous_nums_to_colors, is_continuous_type, is_discrete_type
+
 
 class Scale(FigureAttribute):
     def __init__(self, aesthetic_name):
@@ -150,6 +152,9 @@ class ScaleDiscrete(Scale):
     def __init__(self, aesthetic_name):
         super().__init__(aesthetic_name)
 
+    def get_values(self, categories):
+        return None
+
     def transform_data(self, field_expr):
         return field_expr
 
@@ -162,30 +167,50 @@ def is_continuous(self):
     def valid_dtype(self, dtype):
         return is_discrete_type(dtype)
 
-
-class ScaleColorManual(ScaleDiscrete):
-
-    def __init__(self, aesthetic_name, values):
-        super().__init__(aesthetic_name)
-        self.values = values
-
     def create_local_transformer(self, groups_of_dfs):
-        categorical_strings = set()
+        categories = set()
         for group_of_dfs in groups_of_dfs:
             for df in group_of_dfs:
                 if self.aesthetic_name in df.attrs:
-                    categorical_strings.add(df.attrs[self.aesthetic_name])
-
-        unique_color_mapping = categorical_strings_to_colors(categorical_strings, self.values)
+                    categories.add(df.attrs[self.aesthetic_name])
+
+        values = self.get_values(categories)
+
+        if values is None:
+            return super().create_local_transformer(groups_of_dfs)
+        elif isinstance(values, Mapping):
+            mapping = values
+        elif isinstance(values, list):
+            if len(categories) > len(values):
+                raise ValueError(
+                    f"Not enough scale values specified. Found {len(categories)} "
+                    f"distinct categories in {categories} and only {len(values)} "
+                    f"scale values were provided in {values}."
+                )
+            mapping = dict(zip(categories, values))
+        else:
+            raise TypeError(
+                "Expected scale values to be a Mapping or list, but received a(n) "
+                f"{type(values)}: {values}."
+            )
 
         def transform(df):
             df.attrs[f"{self.aesthetic_name}_legend"] = df.attrs[self.aesthetic_name]
-            df.attrs[self.aesthetic_name] = unique_color_mapping[df.attrs[self.aesthetic_name]]
+            df.attrs[self.aesthetic_name] = mapping[df.attrs[self.aesthetic_name]]
             return df
 
         return transform
 
 
+class ScaleDiscreteManual(ScaleDiscrete):
+    def __init__(self, aesthetic_name, values):
+        super().__init__(aesthetic_name)
+        self.values = values
+
+    def get_values(self, categories):
+        return self.values
+
+
 class ScaleColorContinuous(ScaleContinuous):
 
     def create_local_transformer(self, groups_of_dfs):
@@ -217,26 +242,71 @@ def transform(df):
 
 
 class ScaleColorHue(ScaleDiscrete):
-    def create_local_transformer(self, groups_of_dfs):
-        categorical_strings = set()
-        for group_of_dfs in groups_of_dfs:
-            for df in group_of_dfs:
-                if self.aesthetic_name in df.attrs:
-                    categorical_strings.add(df.attrs[self.aesthetic_name])
-
-        num_categories = len(categorical_strings)
+    def get_values(self, categories):
+        num_categories = len(categories)
         step = 1.0 / num_categories
         interpolation_values = [step * i for i in range(num_categories)]
         hsv_scale = px.colors.get_colorscale("HSV")
-        colors = px.colors.sample_colorscale(hsv_scale, interpolation_values)
-        unique_color_mapping = dict(zip(categorical_strings, colors))
-
-        def transform(df):
-            df.attrs[f"{self.aesthetic_name}_legend"] = df.attrs[self.aesthetic_name]
-            df.attrs[self.aesthetic_name] = unique_color_mapping[df.attrs[self.aesthetic_name]]
-            return df
-
-        return transform
+        return px.colors.sample_colorscale(hsv_scale, interpolation_values)
+
+
+class ScaleShapeAuto(ScaleDiscrete):
+    def get_values(self, categories):
+        return [
+            "circle",
+            "square",
+            "diamond",
+            "cross",
+            "x",
+            "triangle-up",
+            "triangle-down",
+            "triangle-left",
+            "triangle-right",
+            "triangle-ne",
+            "triangle-se",
+            "triangle-sw",
+            "triangle-nw",
+            "pentagon",
+            "hexagon",
+            "hexagon2",
+            "octagon",
+            "star",
+            "hexagram",
+            "star-triangle-up",
+            "star-triangle-down",
+            "star-square",
+            "star-diamond",
+            "diamond-tall",
+            "diamond-wide",
+            "hourglass",
+            "bowtie",
+            "circle-cross",
+            "circle-x",
+            "square-cross",
+            "square-x",
+            "diamond-cross",
+            "diamond-x",
+            "cross-thin",
+            "x-thin",
+            "asterisk",
+            "hash",
+            "y-up",
+            "y-down",
+            "y-left",
+            "y-right",
+            "line-ew",
+            "line-ns",
+            "line-ne",
+            "line-nw",
+            "arrow-up",
+            "arrow-down",
+            "arrow-left",
+            "arrow-right",
+            "arrow-bar-up",
+            "arrow-bar-down",
+            "arrow-bar-left",
+            "arrow-bar-right",
+        ]
 
 
 class ScaleColorContinuousIdentity(ScaleContinuous):
@@ -469,7 +539,7 @@ def scale_color_manual(*, values):
     :class:`.FigureAttribute`
         The scale to be applied.
     """
-    return ScaleColorManual("color", values=values)
+    return ScaleDiscreteManual("color", values=values)
 
 
 def scale_fill_discrete():
@@ -531,4 +601,31 @@ def scale_fill_manual(*, values):
     :class:`.FigureAttribute`
         The scale to be applied.
     """
-    return ScaleColorManual("fill", values=values)
+    return ScaleDiscreteManual("fill", values=values)
+
+
+def scale_shape_manual(*, values):
+    """A scale that assigns shapes to discrete aesthetics. See `the plotly documentation `__ for a list of supported shapes.
+
+    Parameters
+    ----------
+    values: :class:`list` of :class:`str`
+        The shapes from which to choose.
+
+    Returns
+    -------
+    :class:`.FigureAttribute`
+        The scale to be applied.
+    """
+    return ScaleDiscreteManual("shape", values=values)
+
+
+def scale_shape_auto():
+    """A scale that automatically assigns shapes to discrete aesthetics.
+
+    Returns
+    -------
+    :class:`.FigureAttribute`
+        The scale to be applied.
+    """
+    return ScaleShapeAuto("shape")
diff --git a/hail/python/hail/ggplot/utils.py b/hail/python/hail/ggplot/utils.py
index 5a1c21842b5..35daef50167 100644
--- a/hail/python/hail/ggplot/utils.py
+++ b/hail/python/hail/ggplot/utils.py
@@ -30,23 +30,6 @@ def should_use_scale_for_grouping(scale):
     return (scale.aesthetic_name not in excluded_from_grouping) and scale.is_discrete()
 
 
-# Map strings to numbers that will index into a color scale.
-def categorical_strings_to_colors(string_set, color_values):
-
-    if isinstance(color_values, list):
-        if len(string_set) > len(color_values):
-            print(f"Not enough colors specified. Found {len(string_set)} distinct values of color aesthetic and only {len(color_values)} colors were provided.")
-        color_dict = {}
-        for idx, element in enumerate(string_set):
-            if element not in color_dict:
-                color_dict[element] = color_values[idx]
-
-    else:
-        color_dict = color_values
-
-    return color_dict
-
-
 def continuous_nums_to_colors(min_color, max_color, continuous_color_scale):
     def adjust_color(input_color):
        return (input_color - min_color) / max_color - min_color
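A short usage sketch of the new aesthetic; the table and fields are illustrative, and running it requires an initialized Hail context:

```python
import hail as hl
from hail.ggplot import ggplot, aes, geom_point

ht = hl.utils.range_table(30)
ht = ht.annotate(group=hl.str(ht.idx % 3))  # a discrete (string) field

# Each distinct value of `group` receives its own marker symbol via the
# implicitly added scale_shape_auto; mapping `shape` to a continuous field
# instead raises the ValueError introduced in ggplot.py above.
fig = ggplot(ht, aes(x=ht.idx, y=ht.idx % 5, shape=ht.group)) + geom_point()
fig.show()
```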
From af6ba20835940adb2929f8cfd5cab9a6f8b7291c Mon Sep 17 00:00:00 2001
From: Tim Poterba
Date: Fri, 23 Sep 2022 13:38:51 -0400
Subject: [PATCH 12/13] [query] Log FASTA downloads (#12217)

---
 hail/src/main/scala/is/hail/io/reference/FASTAReader.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/hail/src/main/scala/is/hail/io/reference/FASTAReader.scala b/hail/src/main/scala/is/hail/io/reference/FASTAReader.scala
index 02cd2a2dc8b..7f4ac39d5ec 100644
--- a/hail/src/main/scala/is/hail/io/reference/FASTAReader.scala
+++ b/hail/src/main/scala/is/hail/io/reference/FASTAReader.scala
@@ -38,6 +38,7 @@ object FASTAReader {
 
   def setup(tmpdir: String, fs: FS, fastaFile: String, indexFile: String): String = {
     val localFastaFile = ExecuteContext.createTmpPathNoCleanup(tmpdir, "fasta-reader", "fasta")
+    log.info(s"copying FASTA file at $fastaFile to $localFastaFile")
     fs.copyRecode(fastaFile, localFastaFile)
 
     val localIndexFile = localFastaFile + ".fai"
From 2ea2615a797a5aff72d20d9d12a2609342846a07 Mon Sep 17 00:00:00 2001
From: Iris Rademacher <84595986+iris-garden@users.noreply.github.com>
Date: Fri, 23 Sep 2022 15:48:43 -0400
Subject: [PATCH 13/13] [release] update changelog for 0.2.100 (#12220)

---
 hail/Makefile                       |  2 +-
 hail/python/hail/docs/change_log.md | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/hail/Makefile b/hail/Makefile
index 872d4b82621..893d878b697 100644
--- a/hail/Makefile
+++ b/hail/Makefile
@@ -14,7 +14,7 @@ BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
 SCALA_VERSION ?= 2.12.13
 SPARK_VERSION ?= 3.1.3
 HAIL_MAJOR_MINOR_VERSION := 0.2
-HAIL_PATCH_VERSION := 99
+HAIL_PATCH_VERSION := 100
 HAIL_PIP_VERSION := $(HAIL_MAJOR_MINOR_VERSION).$(HAIL_PATCH_VERSION)
 HAIL_VERSION := $(HAIL_PIP_VERSION)-$(SHORT_REVISION)
 ELASTIC_MAJOR_VERSION ?= 7
diff --git a/hail/python/hail/docs/change_log.md b/hail/python/hail/docs/change_log.md
index 0aab41cdfd4..d337e961f4d 100644
--- a/hail/python/hail/docs/change_log.md
+++ b/hail/python/hail/docs/change_log.md
@@ -24,6 +24,25 @@ an earlier version of Hail to read files written in a later version.
 
 ---
 
+## Version 0.2.100
+
+Released 2022-09-23
+
+### New Features
+
+- (hail#12207) Add support for the `shape` aesthetic to `hail.ggplot.geom_point`.
+
+### Deprecations
+
+- (hail#12213) The `batch_size` parameter of `vds.new_combiner` is deprecated in favor of `gvcf_batch_size`.
+
+### Bug fixes
+
+- (hail#12216) Fix bug that caused `make install-on-cluster` to fail with a message about `sys_platform`.
+- (hail#12164) Fix bug that caused Query on Batch pipelines to fail on datasets with indexes greater than 2GiB.
+
+---
+
 ## Version 0.2.99
 
 Released 2022-09-13