diff --git a/.gitignore b/.gitignore
index 05d29448..fa358d22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,5 +110,7 @@ Pipfile.lock
 # minimalkv
 store/
+old/
+
 # Exploratory code
-exp.py
+exploration_scripts/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bf26523c..642a254a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -27,6 +27,7 @@ repos:
           - types-setuptools
           - types-redis
           - types-boto
+          - boto3-stubs
   - repo: https://github.com/Quantco/pre-commit-mirrors-pyupgrade
     rev: 3.1.0
     hooks:
diff --git a/docs/changes.rst b/docs/changes.rst
index 0900478c..80def5c5 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -1,8 +1,13 @@
 Changelog
 *********
 
-1.6.1
+1.7.0
 =====
 
+* Deprecated ``get_store``, ``url2dict``, and ``extract_params``.
+
+  * ``get_store_from_url`` should be used to create stores from a URL
+
+* Added ``from_url`` and ``from_parsed_url`` to each store.
 * Made the SQLAlchemyStore compatible with SQLAlchemy 2.0.
diff --git a/minimalkv/_boto.py b/minimalkv/_boto.py
index 77e85c4d..cf6dacc3 100644
--- a/minimalkv/_boto.py
+++ b/minimalkv/_boto.py
@@ -10,13 +10,21 @@ def _get_s3bucket(
     from boto.s3.connection import S3ResponseError  # type: ignore
     from boto.s3.connection import OrdinaryCallingFormat, S3Connection
 
-    s3con = S3Connection(
-        aws_access_key_id=access_key,
-        aws_secret_access_key=secret_key,
-        host=host,
-        is_secure=False,
-        calling_format=OrdinaryCallingFormat(),
-    )
+    s3_connection_params = {
+        "aws_access_key_id": access_key,
+        "aws_secret_access_key": secret_key,
+        "is_secure": False,
+        "calling_format": OrdinaryCallingFormat(),
+    }
+
+    # Split up the host into host and port.
+    if ":" in host:
+        host, port = host.split(":")
+        s3_connection_params["port"] = int(port)
+    s3_connection_params["host"] = host
+
+    s3con = S3Connection(**s3_connection_params)
+
     # add access key prefix to bucket name, unless explicitly prohibited
     if force_bucket_suffix and not bucket.lower().endswith("-" + access_key.lower()):
         bucket = bucket + "-" + access_key.lower()
diff --git a/minimalkv/_get_store.py b/minimalkv/_get_store.py
index c665ff24..bea57fd8 100644
--- a/minimalkv/_get_store.py
+++ b/minimalkv/_get_store.py
@@ -1,11 +1,15 @@
 from functools import reduce
-from typing import Any
+from typing import Any, Dict, List, Optional, Type
+
+from uritools import SplitResult, urisplit
 
 from minimalkv._key_value_store import KeyValueStore
 from minimalkv._urls import url2dict
 
 
-def get_store_from_url(url: str) -> KeyValueStore:
+def get_store_from_url(
+    url: str, store_cls: Optional[Type[KeyValueStore]] = None
+) -> KeyValueStore:
     """
     Take a URL and return a minimalkv store according to the parameters in the URL.
 
@@ -13,11 +17,14 @@ def get_store_from_url(url: str) -> KeyValueStore:
     ----------
     url : str
         Access-URL, see below for supported formats.
+    store_cls : Optional[Type[KeyValueStore]]
+        The class of the store to create.
+        If the URL scheme doesn't match the class, a ValueError is raised.
 
     Returns
     -------
     store : KeyValueStore
-        Value Store as described in url.
+        KeyValueStore as described in url.
 
     Notes
     -----
@@ -41,18 +48,100 @@ def get_store_from_url(url: str) -> KeyValueStore:
     * AzureBlockBlockStorage (SAS): ``azure://account_name:shared_access_signature@container?use_sas&create_if_missing=false[?max_connections=2&socket_timeout=(20,100)]``
     * AzureBlockBlockStorage (SAS): ``azure://account_name:shared_access_signature@container?use_sas&create_if_missing=false[?max_connections=2&socket_timeout=(20,100)][?max_block_size=4*1024*1024&max_single_put_size=64*1024*1024]``
     * GoogleCloudStorage: ``gcs://@bucket_name[?create_if_missing=true][&bucket_creation_location=EUROPE-WEST1]``
+    * S3FSStore: ``s3://access_key:secret_key@endpoint/bucket[?create_if_missing=true]``
+
+    See the respective store's :func:`_from_parsed_url` function for more details.
+
+    """
+    from minimalkv._hstores import HS3FSStore
+    from minimalkv.net.s3fsstore import S3FSStore
+
+    scheme_to_store: Dict[str, Type[KeyValueStore]] = {
+        "s3": S3FSStore,
+        "hs3": HS3FSStore,
+        "boto": HS3FSStore,
+    }
+
+    parsed_url = urisplit(url)
+    # Wrappers can be used to add functionality to a store, e.g. encryption.
+    # See the documentation of _extract_wrappers for details.
+    wrappers = _extract_wrappers(parsed_url)
+
+    # Remove wrappers from scheme
+    scheme_parts = parsed_url.getscheme().split("+")
+    # pop off the type of the store
+    scheme = scheme_parts[0]
+
+    if scheme not in scheme_to_store:
+        # If we can't find the scheme, we fall back to the old creation methods
+        return get_store(**url2dict(url))
+
+    store_cls_from_url = scheme_to_store[scheme]
+    if store_cls is not None and store_cls_from_url != store_cls:
+        raise ValueError(
+            f"URL scheme {scheme} does not match store class {store_cls.__name__}"
+        )
+
+    query_listdict: Dict[str, List[str]] = parsed_url.getquerydict()
+    # We will just use the last occurrence for each key
+    query = {k: v[-1] for k, v in query_listdict.items()}
+
+    store = store_cls_from_url._from_parsed_url(parsed_url, query)
+
+    # apply wrappers/decorators:
+    from minimalkv._store_decoration import decorate_store
+
+    wrapped_store = reduce(decorate_store, wrappers, store)
+
+    return wrapped_store
+
+
+def _extract_wrappers(parsed_url: SplitResult) -> List[str]:
+    """
+    Extract wrappers from a parsed URL.
 
-    Get the encoded credentials as string like so:
+    Wrappers allow you to add additional functionality to a store, e.g. encryption.
+    They can be specified in two ways:
+    1. As the fragment part of the URL, e.g. "s3://...#wrap:readonly+urlencode"
+    2. As part of the scheme, e.g. "s3+readonly+urlencode://..."
 
-    .. code-block:: python
+    The two methods cannot be mixed in the same URL.
 
-        from pathlib import Path
-        import base64
-        json_as_bytes = Path().read_bytes()
-        json_b64_encoded = base64.urlsafe_b64encode(b).decode()
+    Parameters
+    ----------
+    parsed_url: SplitResult
+        The parsed URL.
 
+    Returns
+    -------
+    wrappers: List[str]
+        The list of wrappers.
     """
-    return get_store(**url2dict(url))
+    # Find wrappers in scheme, looking like this: "s3+readonly+urlencode://..."
+    parts = parsed_url.getscheme().split("+")
+    # pop off the type of the store
+    parts.pop(0)
+    scheme_wrappers = list(reversed(parts))
+
+    # Find fragment wrappers, looking like this: "s3://...#wrap:readonly+urlencode"
+    fragment = parsed_url.getfragment()
+    fragments = fragment.split("#") if fragment else []
+    wrap_spec = [s for s in fragments if s.startswith("wrap:")]
+    if wrap_spec:
+        fragment_without_wrap = wrap_spec[-1].partition("wrap:")[
+            2
+        ]  # remove the 'wrap:' part
+        fragment_wrappers = list(fragment_without_wrap.split("+"))
+    else:
+        fragment_wrappers = []
+
+    # can't have both:
+    if scheme_wrappers and fragment_wrappers:
+        raise ValueError(
+            "Adding store wrappers via both the scheme and the fragment is not allowed."
+        )
+
+    return scheme_wrappers + fragment_wrappers
 
 
 def get_store(
diff --git a/minimalkv/_hstores.py b/minimalkv/_hstores.py
index aa428e39..88589f0f 100644
--- a/minimalkv/_hstores.py
+++ b/minimalkv/_hstores.py
@@ -5,8 +5,10 @@
 from minimalkv.memory import DictStore
 from minimalkv.memory.redisstore import RedisStore
 from minimalkv.net.azurestore import AzureBlockBlobStore
+from minimalkv.net.boto3store import Boto3Store
 from minimalkv.net.botostore import BotoStore
 from minimalkv.net.gcstore import GoogleCloudStore
+from minimalkv.net.s3fsstore import S3FSStore
 
 
 class HDictStore(ExtendedKeyspaceMixin, DictStore):  # noqa D
@@ -39,6 +41,14 @@ def size(self, key: str) -> bytes:
         return k.size
 
 
+class HS3FSStore(ExtendedKeyspaceMixin, S3FSStore):  # noqa D
+    pass
+
+
+class HBoto3Store(ExtendedKeyspaceMixin, Boto3Store):  # noqa D
+    pass
+
+
 class HGoogleCloudStore(ExtendedKeyspaceMixin, GoogleCloudStore):  # noqa D
     pass
diff --git a/minimalkv/_key_value_store.py b/minimalkv/_key_value_store.py
index 30adaa2d..6f645be4 100644
--- a/minimalkv/_key_value_store.py
+++ b/minimalkv/_key_value_store.py
@@ -1,6 +1,8 @@
 from io import BytesIO
 from types import TracebackType
-from typing import IO, Iterable, Iterator, List, Optional, Type, Union
+from typing import IO, Dict, Iterable, Iterator, List, Optional, Type, Union
+
+from uritools import SplitResult
 
 from minimalkv._constants import VALID_KEY_RE
 from minimalkv._mixins import UrlMixin
@@ -98,7 +100,7 @@ def get_file(self, key: str, file: Union[str, IO]) -> str:
         implement a specialized function if data needs to be written to disk or streamed.
 
         If ``file`` is a string, contents of ``key`` are written to a newly created file
-        with the filename ``file``. Otherwise the data will be written using the
+        with the filename ``file``. Otherwise, the data will be written using the
         ``write`` method of ``file``.
 
         Parameters
         ----------
@@ -462,6 +464,29 @@ def __exit__(
         """
         self.close()
 
+    @classmethod
+    def _from_parsed_url(
+        cls, parsed_url: SplitResult, query: Dict[str, str]
+    ) -> "KeyValueStore":
+        """
+        Build a ``KeyValueStore`` from a parsed URL.
+
+        To build a ``KeyValueStore`` from a URL, use :func:`get_store_from_url`.
+
+        Parameters
+        ----------
+        parsed_url: SplitResult
+            The parsed URL.
+        query: Dict[str, str]
+            Query parameters from the URL.
+
+        Returns
+        -------
+        store : KeyValueStore
+            The created KeyValueStore.
+        """
+        raise NotImplementedError
+
 
 class UrlKeyValueStore(UrlMixin, KeyValueStore):
     """Class is deprecated. Use the :class:`.UrlMixin` instead.
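The two ways of requesting store wrappers handled by ``_extract_wrappers`` above are equivalent; a small sketch (illustrative only, not part of the patch; it uses the in-memory store and the ``readonly`` wrapper exercised by the ``test_urls.py`` changes at the end of this diff):

.. code-block:: python

    from minimalkv._get_store import get_store_from_url

    # Wrapper requested via the scheme ...
    store_a = get_store_from_url("memory+readonly://")
    # ... or via the fragment; both return a read-only decorated in-memory store.
    store_b = get_store_from_url("memory://#wrap:readonly")

    # Mixing both forms in one URL raises a ValueError.
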
diff --git a/minimalkv/_store_creation.py b/minimalkv/_store_creation.py
index 5643a991..9616ad9a 100644
--- a/minimalkv/_store_creation.py
+++ b/minimalkv/_store_creation.py
@@ -1,6 +1,7 @@
 import os
 import os.path
 from typing import TYPE_CHECKING, Any, Dict
+from warnings import warn
 
 from minimalkv.fs import FilesystemStore
 
@@ -10,6 +11,14 @@ def create_store(type: str, params: Dict[str, Any]) -> "KeyValueStore":
     """Create store of type ``type`` with ``params``."""
+    warn(
+        """
+        create_store will be removed in the next major release.
+        If you want to create a KeyValueStore from a URL, use get_store_from_url.
+        """,
+        DeprecationWarning,
+        stacklevel=2,
+    )
     # TODO: More detailed docstring
     if type in ("azure", "hazure"):
         return _create_store_azure(type, params)
diff --git a/minimalkv/_url_utils.py b/minimalkv/_url_utils.py
new file mode 100644
index 00000000..27aca9d8
--- /dev/null
+++ b/minimalkv/_url_utils.py
@@ -0,0 +1,17 @@
+from typing import Optional
+
+from uritools import SplitResult
+
+
+def _get_username(split_result: SplitResult) -> Optional[str]:
+    userinfo = split_result.getuserinfo()
+    if not userinfo:
+        return None
+    return userinfo.split(":")[0]
+
+
+def _get_password(split_result: SplitResult) -> Optional[str]:
+    userinfo = split_result.getuserinfo()
+    if not userinfo or ":" not in userinfo:
+        return None
+    return userinfo.split(":", 1)[1]
diff --git a/minimalkv/_urls.py b/minimalkv/_urls.py
index 5d287062..3df619d3 100644
--- a/minimalkv/_urls.py
+++ b/minimalkv/_urls.py
@@ -1,5 +1,6 @@
 import base64
 from typing import Any, Dict, List
+from warnings import warn
 
 from uritools import urisplit
 
@@ -32,8 +33,16 @@ def url2dict(url: str, raise_on_extra_params: bool = False) -> Dict[str, Any]:
     ``azure://account_name:shared_access_signature@container?use_sas&create_if_missing=false[?max_connections=2&socket_timeout=(20,100)]``
     ``azure://account_name:shared_access_signature@container?use_sas&create_if_missing=false[?max_connections=2&socket_timeout=(20,100)][?max_block_size=4*1024*1024&max_single_put_size=64*1024*1024]``
     ``gcs://@bucket_name[?create_if_missing=true][?bucket_creation_location=EUROPE-WEST1]``
-
     """
+    warn(
+        """
+        url2dict will be removed in the next major release.
+        If you want to create a KeyValueStore from a URL, use get_store_from_url.
+        """,
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
     u = urisplit(url)
     parsed = dict(
         scheme=u.getscheme(),
@@ -68,6 +77,15 @@ def url2dict(url: str, raise_on_extra_params: bool = False) -> Dict[str, Any]:
 
 
 def extract_params(scheme, host, port, path, query, userinfo):  # noqa D
+    warn(
+        """
+        extract_params will be removed in the next major release.
+        If you want to create a KeyValueStore from a URL, use get_store_from_url.
+        """,
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
     # We want to ignore wrappers here
     store_type = scheme.split("+")[0]
 
@@ -126,7 +144,7 @@ def extract_params(scheme, host, port, path, query, userinfo):  # noqa D
 
 
 def _parse_userinfo(userinfo: str) -> List[str]:
-    """Try to split the URL's userinfo into fields separated by :.
+    """Try to split the URL's userinfo into fields separated by `:`.
 
     The user info is the part between ``://`` and ``@``. If anything looks wrong,
     remind the user to percent-encode values.
@@ -140,8 +158,15 @@ def _parse_userinfo(userinfo: str) -> List[str]:
     Returns
     -------
     parts: list of str
         URL-encoded user-info split at ``:``.
-
     """
+    warn(
+        """
+        _parse_userinfo will be removed in the next major release.
+        If you want to create a KeyValueStore from a URL, use get_store_from_url.
+        """,
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if hasattr(userinfo, "split"):
         parts = userinfo.split(":", 1)
diff --git a/minimalkv/fsspecstore.py b/minimalkv/fsspecstore.py
index 2443d775..8f28915b 100644
--- a/minimalkv/fsspecstore.py
+++ b/minimalkv/fsspecstore.py
@@ -2,12 +2,13 @@
 import warnings
 from typing import IO, TYPE_CHECKING, Iterator, Optional, Union
 
+from minimalkv.net._net_common import LAZY_PROPERTY_ATTR_PREFIX, lazy_property
+
 if TYPE_CHECKING:
     from fsspec import AbstractFileSystem
     from fsspec.spec import AbstractBufferedFile
 
 from minimalkv import KeyValueStore
-from minimalkv.net._net_common import LAZY_PROPERTY_ATTR_PREFIX, lazy_property
 
 # The complete path of the key is structured as follows:
 # /Users/simon/data/mykvstore/file1
diff --git a/minimalkv/net/boto3store.py b/minimalkv/net/boto3store.py
index 53a2fa3d..12e4e453 100644
--- a/minimalkv/net/boto3store.py
+++ b/minimalkv/net/boto3store.py
@@ -103,10 +103,11 @@ def __init__(
         reduced_redundancy=False,
         public=False,
         metadata=None,
+        create_if_missing=False,
     ):
-        if isinstance(bucket, str):
-            import boto3
+        import boto3
 
+        if isinstance(bucket, str):
             s3_resource = boto3.resource("s3")
             bucket = s3_resource.Bucket(bucket)
             if bucket not in s3_resource.buckets.all():
@@ -245,3 +246,23 @@ def _url_for(self, key):
             Params={"Bucket": self.bucket.name, "Key": key},
             ExpiresIn=self.url_valid_time,
         )
+
+    def __eq__(self, other):
+        """
+        Return whether two ``Boto3Store``s are equal.
+
+        The bucket name and other configuration parameters are compared.
+        See :func:`from_url` for details on the connection parameters.
+        Does NOT compare the credentials or the contents of the bucket!
+        """
+        return (
+            isinstance(other, Boto3Store)
+            and self.bucket.name == other.bucket.name
+            and self.bucket.meta.client.meta.endpoint_url
+            == other.bucket.meta.client.meta.endpoint_url
+            and self._object_prefix == other._object_prefix
+            and self.url_valid_time == other.url_valid_time
+            and self.reduced_redundancy == other.reduced_redundancy
+            and self.public == other.public
+            and self.metadata == other.metadata
+        )
diff --git a/minimalkv/net/s3fsstore.py b/minimalkv/net/s3fsstore.py
index 4e1cc1ba..496496ad 100644
--- a/minimalkv/net/s3fsstore.py
+++ b/minimalkv/net/s3fsstore.py
@@ -1,6 +1,12 @@
+import os
 import warnings
+from typing import Dict
+
+import boto3
+from uritools import SplitResult
 
 from minimalkv import UrlMixin
+from minimalkv._url_utils import _get_password, _get_username
 from minimalkv.fsspecstore import FSSpecStore
 
 try:
@@ -76,3 +82,112 @@ def _url_for(self, key) -> str:
         return self._fs.url(
             f"{self.bucket.name}/{self.object_prefix}{key}", expires=self.url_valid_time
         )
+
+    @classmethod
+    def _from_parsed_url(
+        cls, parsed_url: SplitResult, query: Dict[str, str]
+    ) -> "S3FSStore":  # noqa D
+        """
+        Build an ``S3FSStore`` from a parsed URL.
+        To build an ``S3FSStore`` from a URL, use :func:`get_store_from_url`.
+
+        URL format:
+        ``s3://access_key_id:secret_access_key@endpoint/bucket[?]``
+
+        **Positional arguments**:
+
+        ``access_key_id``: The access key ID of the S3 user.
+
+        ``secret_access_key``: The secret access key of the S3 user.
+
+        ``endpoint``: The endpoint of the S3 service. Leave empty for standard AWS.
+
+        ``bucket``: The name of the bucket.
+
+        **Query arguments**:
+
+        ``force_bucket_suffix`` (default: ``True``): If set, it is ensured that
+        the bucket name ends with ``-`` followed by the access key ID,
+        by appending this suffix if necessary.
+        If ``False``, the bucket name is used as-is.
+
+        ``create_if_missing`` (default: ``True``): If set, creates the bucket if it does not exist;
+        otherwise, try to retrieve the bucket and fail with an ``IOError``.
+
+        **Notes**:
+
+        If the scheme is ``hs3``, an ``HS3FSStore`` is returned which allows ``/`` in key names.
+
+        Parameters
+        ----------
+        parsed_url: SplitResult
+            The parsed URL.
+        query: Dict[str, str]
+            Query parameters from the URL.
+
+        Returns
+        -------
+        store : S3FSStore
+            The created S3FSStore.
+        """
+
+        url_access_key_id = _get_username(parsed_url)
+        url_secret_access_key = _get_password(parsed_url)
+
+        if url_access_key_id is None:
+            url_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
+        else:
+            os.environ["AWS_ACCESS_KEY_ID"] = url_access_key_id
+
+        if url_secret_access_key is None:
+            url_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
+        else:
+            os.environ["AWS_SECRET_ACCESS_KEY"] = url_secret_access_key
+
+        boto3_params = {
+            "aws_access_key_id": url_access_key_id,
+            "aws_secret_access_key": url_secret_access_key,
+        }
+        host = parsed_url.gethost()
+        port = parsed_url.getport()
+
+        is_secure = query.get("is_secure", "true").lower() == "true"
+        endpoint_scheme = "https" if is_secure else "http"
+
+        if host is None:
+            endpoint_url = None
+        elif port is None:
+            endpoint_url = f"{endpoint_scheme}://{host}"
+        else:
+            endpoint_url = f"{endpoint_scheme}://{host}:{port}"
+
+        boto3_params["endpoint_url"] = endpoint_url
+
+        # Remove Nones from boto3_params
+        boto3_params = {k: v for k, v in boto3_params.items() if v is not None}
+
+        bucket_name = parsed_url.getpath().lstrip("/")
+
+        resource = boto3.resource("s3", **boto3_params)  # type: ignore
+
+        force_bucket_suffix = query.get("force_bucket_suffix", "true").lower() == "true"
+        if force_bucket_suffix:
+            # Try to find access key in env
+            if url_access_key_id is None:
+                access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
+            else:
+                access_key_id = url_access_key_id
+
+            if access_key_id is None:
+                raise ValueError(
+                    "Cannot find access key in URL or environment variable AWS_ACCESS_KEY_ID"
+                )
+
+            if not bucket_name.lower().endswith("-" + access_key_id.lower()):
+                bucket_name += "-" + access_key_id.lower()
+
+        # We only create a reference to the bucket here.
+        # The bucket will be created in the `create_filesystem` method if it doesn't exist.
+        bucket = resource.Bucket(bucket_name)
+
+        return cls(bucket)
diff --git a/tests/bucket_manager.py b/tests/bucket_manager.py
index 78aec179..c2f8f78a 100644
--- a/tests/bucket_manager.py
+++ b/tests/bucket_manager.py
@@ -2,6 +2,7 @@
 from contextlib import contextmanager
 from uuid import uuid4 as uuid
 
+import boto3
 import pytest
 
 boto = pytest.importorskip("boto")
@@ -54,9 +55,42 @@ def boto3_bucket(
     is_secure=None,
     **kwargs,
 ):
-    import os
+    """
+    Create a boto3 bucket.
+
+    The bucket is deleted after the consuming function returns.
+    """
+    bucket = boto3_bucket_reference(
+        host=host,
+        bucket_name=bucket_name,
+        port=port,
+        is_secure=is_secure,
+        access_key=access_key,
+        secret_key=secret_key,
+    )
+    bucket.create()
+
+    yield bucket
+
+    for key in bucket.objects.all():
+        key.delete()
+    bucket.delete()
 
-    import boto3
+
+def boto3_bucket_reference(
+    access_key=None,
+    secret_key=None,
+    host=None,
+    bucket_name=None,
+    port=None,
+    is_secure=None,
+):
+    """
+    Create a boto3 bucket reference.
+
+    The bucket is not created.
+    """
+    import os
 
     # Set environment variables for boto3
     os.environ["AWS_ACCESS_KEY_ID"] = access_key
@@ -73,35 +107,24 @@ def boto3_bucket(
     name = bucket_name or f"testrun-bucket-{uuid()}"
 
     # We only set the endpoint url if we're testing against a non-aws host
-    if port != 80:
-        s3_client = boto3.client(
-            "s3",
-            endpoint_url=endpoint_url,
-        )
-    else:
-        s3_client = boto3.client(
-            "s3",
-        )
-
-    s3_client.create_bucket(Bucket=name)
-
     if port != 80:
         s3_resource = boto3.resource(
             "s3",
             endpoint_url=endpoint_url,
+            aws_access_key_id=access_key,
+            aws_secret_access_key=secret_key,
+            region_name="us-east-1",
         )
     else:
         s3_resource = boto3.resource(
             "s3",
+            aws_access_key_id=access_key,
+            aws_secret_access_key=secret_key,
+            region_name="us-east-1",
        )
 
     bucket = s3_resource.Bucket(name)
-
-    yield bucket
-
-    for key in bucket.objects.all():
-        key.delete()
-    bucket.delete()
+    return bucket
 
 
 def load_boto_credentials():
diff --git a/tests/store_creation/test_creation_boto3store.py b/tests/store_creation/test_creation_boto3store.py
new file mode 100644
index 00000000..044431d2
--- /dev/null
+++ b/tests/store_creation/test_creation_boto3store.py
@@ -0,0 +1,61 @@
+import pytest
+from bucket_manager import boto3_bucket_reference
+
+from minimalkv._get_store import get_store, get_store_from_url
+from minimalkv._urls import url2dict
+from minimalkv.net.s3fsstore import S3FSStore
+
+s3fs = pytest.importorskip("s3fs")
+
+S3_URL = "s3://minio:miniostorage@127.0.0.1:9000/bucketname?create_if_missing=true&is_secure=false"
+
+"""
+When using the `s3` scheme in a URL, the new store creation returns an `S3FSStore`.
+The old store creation returns a `BotoStore`.
+To compare these two implementations, the following tests are run.
+"""
+
+
+def test_new_s3fs_creation():
+    expected = S3FSStore(
+        bucket=boto3_bucket_reference(
+            access_key="minio",
+            secret_key="miniostorage",
+            host="127.0.0.1",
+            port=9000,
+            bucket_name="bucketname-minio",
+            is_secure=False,
+        ),
+    )
+
+    actual = get_store_from_url(S3_URL)
+    assert s3fsstores_equal(actual, expected)
+
+
+def test_equal_access():
+    new_store = get_store_from_url(S3_URL)
+    old_store = get_store(**url2dict(S3_URL))
+
+    new_store.put("key", b"value")
+    assert old_store.get("key") == b"value"
+
+
+def s3fsstores_equal(store1, store2):
+    """
+    Return whether two ``S3FSStore``s are equal.
+
+    The bucket name and other configuration parameters are compared.
+    See :func:`from_url` for details on the connection parameters.
+    Does NOT compare the credentials or the contents of the bucket!
+    """
+    return (
+        isinstance(store2, S3FSStore)
+        and store1.bucket.name == store2.bucket.name
+        and store1.bucket.meta.client.meta.endpoint_url
+        == store2.bucket.meta.client.meta.endpoint_url
+        and store1.object_prefix == store2.object_prefix
+        and store1.url_valid_time == store2.url_valid_time
+        and store1.reduced_redundancy == store2.reduced_redundancy
+        and store1.public == store2.public
+        and store1.metadata == store2.metadata
+    )
diff --git a/tests/storefact/test_store_creation.py b/tests/store_creation/test_creation_from_params.py
similarity index 100%
rename from tests/storefact/test_store_creation.py
rename to tests/store_creation/test_creation_from_params.py
diff --git a/tests/storefact/test_urls.py b/tests/storefact/test_urls.py
index 7a0ceb6d..606212c7 100644
--- a/tests/storefact/test_urls.py
+++ b/tests/storefact/test_urls.py
@@ -88,11 +88,11 @@ def test_bad_url2dict(url, raises):
         url2dict(url)
 
 
-def test_wrapper_old_style():
+def test_wrapper_scheme():
     assert isinstance(get_store_from_url("memory+readonly://"), ReadOnlyDecorator)
 
 
-def test_wrapper_new_style():
+def test_wrapper_fragment():
     assert isinstance(
         get_store_from_url("memory://#wrap:readonly"),
         ReadOnlyDecorator,
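
Taken together, a store can now be built directly from a URL. A minimal usage sketch, reusing the MinIO-style example URL from ``tests/store_creation/test_creation_boto3store.py`` above (the ``minio``/``miniostorage`` credentials, host, and port are just the test fixture values, not real credentials):

.. code-block:: python

    from minimalkv._get_store import get_store_from_url
    from minimalkv.net.s3fsstore import S3FSStore

    # The URL scheme selects the store type; store_cls is optional and only
    # validates that the scheme matches the expected class.
    store = get_store_from_url(
        "s3://minio:miniostorage@127.0.0.1:9000/bucketname"
        "?create_if_missing=true&is_secure=false",
        store_cls=S3FSStore,
    )

    store.put("key", b"value")
    assert store.get("key") == b"value"

Unknown schemes still fall back to ``get_store(**url2dict(url))``, so existing URLs keep working while ``url2dict`` itself now only emits a ``DeprecationWarning``.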