diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index d681940f..c09d7495 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -113,7 +113,9 @@ jobs: python-version: ['3.8', '3.11'] test-type: ['standalone', 'cluster'] connection-type: ['hiredis', 'plain'] - protocol: ['3'] + exclude: + - test-type: 'cluster' + connection-type: 'hiredis' env: ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] @@ -132,9 +134,33 @@ jobs: pip install hiredis fi invoke devenv - sleep 5 # time to settle - invoke ${{matrix.test-type}}-tests - invoke ${{matrix.test-type}}-tests --uvloop + sleep 10 # time to settle + invoke ${{matrix.test-type}}-tests --protocol=3 + invoke ${{matrix.test-type}}-tests --uvloop --protocol=3 + + - uses: actions/upload-artifact@v4 + if: success() || failure() + with: + name: pytest-results-${{matrix.test-type}}-${{matrix.connection-type}}-${{matrix.python-version}}-resp3 + path: '${{matrix.test-type}}*results.xml' + + - name: Upload codecov coverage + uses: codecov/codecov-action@v4 + with: + fail_ci_if_error: false + + - name: View Test Results + uses: dorny/test-reporter@v1 + if: success() || failure() + continue-on-error: true + with: + name: Test Results ${{matrix.python-version}} ${{matrix.test-type}}-${{matrix.connection-type}}-resp3 + path: '*.xml' + reporter: java-junit + list-suites: all + list-tests: all + max-annotations: 10 + fail-on-error: 'false' build_and_test_package: name: Validate building and installing the package diff --git a/dev_requirements.txt b/dev_requirements.txt index 48ec278d..b1e10f96 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,18 +1,18 @@ -click==8.0.4 black==24.3.0 cachetools -flake8==5.0.4 +click==8.0.4 flake8-isort==6.0.0 +flake8==5.0.4 flynt~=0.69.0 +invoke==1.7.3 mock==4.0.3 packaging>=20.4 -pytest==7.2.0 -pytest-timeout==2.1.0 -pytest-asyncio>=0.20.2 -invoke==1.7.3 -pytest-cov>=4.0.0 -vulture>=2.3.0 +pytest +pytest-asyncio +pytest-cov +pytest-timeout ujson>=4.2.0 -wheel>=0.30.0 urllib3<2 uvloop +vulture>=2.3.0 +wheel>=0.30.0 diff --git a/setup.py b/setup.py index 69a0d35a..6bf34fc5 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ long_description_content_type="text/markdown", keywords=["Valkey", "key-value store", "database"], license="MIT", - version="5.1.0b6", + version="5.1.0b7", packages=find_packages( include=[ "valkey", diff --git a/tests/test_asyncio/conftest.py b/tests/test_asyncio/conftest.py index 19d35e1f..c84fe79d 100644 --- a/tests/test_asyncio/conftest.py +++ b/tests/test_asyncio/conftest.py @@ -5,13 +5,11 @@ import pytest_asyncio import valkey.asyncio as valkey from tests.conftest import VALKEY_INFO -from valkey._parsers import _AsyncHiredisParser, _AsyncRESP2Parser from valkey.asyncio import Sentinel from valkey.asyncio.client import Monitor from valkey.asyncio.connection import Connection, parse_url from valkey.asyncio.retry import Retry from valkey.backoff import NoBackoff -from valkey.utils import HIREDIS_AVAILABLE from .compat import mock @@ -26,41 +24,21 @@ async def _get_info(valkey_url): @pytest_asyncio.fixture( params=[ pytest.param( - (True, _AsyncRESP2Parser), + (True,), marks=pytest.mark.skipif( 'config.VALKEY_INFO["cluster_enabled"]', reason="cluster mode enabled" ), ), - (False, _AsyncRESP2Parser), - pytest.param( - (True, _AsyncHiredisParser), - marks=[ - pytest.mark.skipif( - 'config.VALKEY_INFO["cluster_enabled"]', - reason="cluster mode 
enabled", - ), - pytest.mark.skipif( - not HIREDIS_AVAILABLE, reason="hiredis is not installed" - ), - ], - ), - pytest.param( - (False, _AsyncHiredisParser), - marks=pytest.mark.skipif( - not HIREDIS_AVAILABLE, reason="hiredis is not installed" - ), - ), + (False,), ], ids=[ - "single-python-parser", - "pool-python-parser", - "single-hiredis", - "pool-hiredis", + "single", + "pool", ], ) async def create_valkey(request): """Wrapper around valkey.create_valkey.""" - single_connection, parser_cls = request.param + (single_connection,) = request.param teardown_clients = [] @@ -76,10 +54,9 @@ async def client_factory( cluster_mode = VALKEY_INFO["cluster_enabled"] if not cluster_mode: single = kwargs.pop("single_connection_client", False) or single_connection - parser_class = kwargs.pop("parser_class", None) or parser_cls url_options = parse_url(url) url_options.update(kwargs) - pool = valkey.ConnectionPool(parser_class=parser_class, **url_options) + pool = valkey.ConnectionPool(**url_options) client = cls(connection_pool=pool) else: client = valkey.ValkeyCluster.from_url(url, **kwargs) diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index b374321f..dd29cc31 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -2919,6 +2919,31 @@ async def test_xinfo_stream(self, r: valkey.Valkey): assert info["first-entry"] == await get_stream_message(r, stream, m1) assert info["last-entry"] == await get_stream_message(r, stream, m2) + await r.xtrim(stream, 0) + info = await r.xinfo_stream(stream) + assert info["length"] == 0 + assert info["first-entry"] is None + assert info["last-entry"] is None + + @skip_if_server_version_lt("6.0.0") + async def test_xinfo_stream_full(self, r: valkey.Valkey): + stream = "stream" + group = "group" + + await r.xadd(stream, {"foo": "bar"}) + info = await r.xinfo_stream(stream, full=True) + assert info["length"] == 1 + assert len(info["groups"]) == 0 + + await r.xgroup_create(stream, group, 0) + info = await r.xinfo_stream(stream, full=True) + assert info["length"] == 1 + + await r.xreadgroup(group, "consumer", streams={stream: ">"}) + info = await r.xinfo_stream(stream, full=True) + consumer = info["groups"][0]["consumers"][0] + assert isinstance(consumer, dict) + @skip_if_server_version_lt("5.0.0") async def test_xlen(self, r: valkey.Valkey): stream = "stream" diff --git a/tests/test_asyncio/test_json.py b/tests/test_asyncio/test_json.py index e5227aa6..4aacd305 100644 --- a/tests/test_asyncio/test_json.py +++ b/tests/test_asyncio/test_json.py @@ -16,7 +16,7 @@ async def test_json_setbinarykey(decoded_r: valkey.Valkey): async def test_json_setgetdeleteforget(decoded_r: valkey.Valkey): assert await decoded_r.json().set("foo", Path.root_path(), "bar") - assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" assert await decoded_r.json().get("baz") is None assert await decoded_r.json().delete("foo") == 1 assert await decoded_r.json().forget("foo") == 0 # second delete @@ -25,12 +25,12 @@ async def test_json_setgetdeleteforget(decoded_r: valkey.Valkey): async def test_jsonget(decoded_r: valkey.Valkey): await decoded_r.json().set("foo", Path.root_path(), "bar") - assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" async def test_json_get_jset(decoded_r: valkey.Valkey): assert await decoded_r.json().set("foo", Path.root_path(), "bar") - 
assert_resp_response(decoded_r, await decoded_r.json().get("foo"), "bar", [["bar"]]) + assert await decoded_r.json().get("foo") == "bar" assert await decoded_r.json().get("baz") is None assert 1 == await decoded_r.json().delete("foo") assert await decoded_r.exists("foo") == 0 @@ -38,10 +38,7 @@ async def test_json_get_jset(decoded_r: valkey.Valkey): async def test_nonascii_setgetdelete(decoded_r: valkey.Valkey): assert await decoded_r.json().set("notascii", Path.root_path(), "hyvää-élève") - res = "hyvää-élève" - assert_resp_response( - decoded_r, await decoded_r.json().get("notascii", no_escape=True), res, [[res]] - ) + assert await decoded_r.json().get("notascii", no_escape=True) == "hyvää-élève" assert 1 == await decoded_r.json().delete("notascii") assert await decoded_r.exists("notascii") == 0 @@ -173,8 +170,7 @@ async def test_toggle(decoded_r: valkey.Valkey): async def test_strappend(decoded_r: valkey.Valkey): await decoded_r.json().set("jsonkey", Path.root_path(), "foo") assert 6 == await decoded_r.json().strappend("jsonkey", "bar") - res = await decoded_r.json().get("jsonkey", Path.root_path()) - assert_resp_response(decoded_r, res, "foobar", [["foobar"]]) + assert "foobar" == await decoded_r.json().get("jsonkey", Path.root_path()) async def test_strlen(decoded_r: valkey.Valkey): @@ -207,14 +203,12 @@ async def test_arrindex(decoded_r: valkey.Valkey): async def test_arrinsert(decoded_r: valkey.Valkey): await decoded_r.json().set("arr", Path.root_path(), [0, 4]) assert 5 == await decoded_r.json().arrinsert("arr", Path.root_path(), 1, *[1, 2, 3]) - res = [0, 1, 2, 3, 4] - assert_resp_response(decoded_r, await decoded_r.json().get("arr"), res, [[res]]) + assert await decoded_r.json().get("arr") == [0, 1, 2, 3, 4] # test prepends await decoded_r.json().set("val2", Path.root_path(), [5, 6, 7, 8, 9]) await decoded_r.json().arrinsert("val2", Path.root_path(), 0, ["some", "thing"]) - res = [["some", "thing"], 5, 6, 7, 8, 9] - assert_resp_response(decoded_r, await decoded_r.json().get("val2"), res, [[res]]) + assert await decoded_r.json().get("val2") == [["some", "thing"], 5, 6, 7, 8, 9] async def test_arrlen(decoded_r: valkey.Valkey): @@ -230,7 +224,7 @@ async def test_arrpop(decoded_r: valkey.Valkey): assert 3 == await decoded_r.json().arrpop("arr", Path.root_path(), -1) assert 2 == await decoded_r.json().arrpop("arr", Path.root_path()) assert 0 == await decoded_r.json().arrpop("arr", Path.root_path(), 0) - assert_resp_response(decoded_r, await decoded_r.json().get("arr"), [1], [[[1]]]) + assert [1] == await decoded_r.json().get("arr") # test out of bounds await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -244,8 +238,7 @@ async def test_arrpop(decoded_r: valkey.Valkey): async def test_arrtrim(decoded_r: valkey.Valkey): await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) assert 3 == await decoded_r.json().arrtrim("arr", Path.root_path(), 1, 3) - res = await decoded_r.json().get("arr") - assert_resp_response(decoded_r, res, [1, 2, 3], [[[1, 2, 3]]]) + assert [1, 2, 3] == await decoded_r.json().get("arr") # <0 test, should be 0 equivalent await decoded_r.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -326,15 +319,14 @@ async def test_json_delete_with_dollar(decoded_r: valkey.Valkey): doc1 = {"a": 1, "nested": {"a": 2, "b": 3}} assert await decoded_r.json().set("doc1", "$", doc1) assert await decoded_r.json().delete("doc1", "$..a") == 2 - res = [{"nested": {"b": 3}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, 
[res]) + assert await decoded_r.json().get("doc1", "$") == [{"nested": {"b": 3}}] doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert await decoded_r.json().set("doc2", "$", doc2) assert await decoded_r.json().delete("doc2", "$..a") == 1 - res = await decoded_r.json().get("doc2", "$") - res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc2", "$"), res, [res]) + assert await decoded_r.json().get("doc2", "$") == [ + {"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]} + ] doc3 = [ { @@ -365,8 +357,7 @@ async def test_json_delete_with_dollar(decoded_r: valkey.Valkey): } ] ] - res = await decoded_r.json().get("doc3", "$") - assert_resp_response(decoded_r, res, doc3val, [doc3val]) + assert await decoded_r.json().get("doc3", "$") == doc3val # Test async default path assert await decoded_r.json().delete("doc3") == 1 @@ -379,14 +370,14 @@ async def test_json_forget_with_dollar(decoded_r: valkey.Valkey): doc1 = {"a": 1, "nested": {"a": 2, "b": 3}} assert await decoded_r.json().set("doc1", "$", doc1) assert await decoded_r.json().forget("doc1", "$..a") == 2 - res = [{"nested": {"b": 3}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == [{"nested": {"b": 3}}] doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert await decoded_r.json().set("doc2", "$", doc2) assert await decoded_r.json().forget("doc2", "$..a") == 1 - res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc2", "$"), res, [res]) + assert await decoded_r.json().get("doc2", "$") == [ + {"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]} + ] doc3 = [ { @@ -417,8 +408,7 @@ async def test_json_forget_with_dollar(decoded_r: valkey.Valkey): } ] ] - res = await decoded_r.json().get("doc3", "$") - assert_resp_response(decoded_r, res, doc3val, [doc3val]) + assert await decoded_r.json().get("doc3", "$") == doc3val # Test async default path assert await decoded_r.json().forget("doc3") == 1 @@ -441,14 +431,8 @@ async def test_json_mget_dollar(decoded_r: valkey.Valkey): {"a": 4, "b": 5, "nested": {"a": 6}, "c": None, "nested2": {"a": [None]}}, ) # Compare also to single JSON.GET - res = [1, 3, None] - assert_resp_response( - decoded_r, await decoded_r.json().get("doc1", "$..a"), res, [res] - ) - res = [4, 6, [None]] - assert_resp_response( - decoded_r, await decoded_r.json().get("doc2", "$..a"), res, [res] - ) + assert await decoded_r.json().get("doc1", "$..a") == [1, 3, None] + assert await decoded_r.json().get("doc2", "$..a") == [4, 6, [None]] # Test mget with single path await decoded_r.json().mget("doc1", "$..a") == [1, 3, None] @@ -506,7 +490,9 @@ async def test_numby_commands_dollar(decoded_r: valkey.Valkey): await decoded_r.json().set( "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]} ) - await decoded_r.json().numincrby("doc1", ".b[0].a", 3) == 5 + assert_resp_response( + decoded_r, await decoded_r.json().numincrby("doc1", ".b[0].a", 3), 5, [5] + ) # Test legacy NUMMULTBY await decoded_r.json().set( @@ -514,7 +500,9 @@ async def test_numby_commands_dollar(decoded_r: valkey.Valkey): ) with pytest.deprecated_call(): - await decoded_r.json().nummultby("doc1", ".b[0].a", 3) == 6 + assert_resp_response( + decoded_r, await decoded_r.json().nummultby("doc1", ".b[0].a", 3), 6, [6] + ) async def test_strappend_dollar(decoded_r: 
valkey.Valkey): @@ -525,13 +513,13 @@ async def test_strappend_dollar(decoded_r: valkey.Valkey): await decoded_r.json().strappend("doc1", "bar", "$..a") == [6, 8, None] res = [{"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single await decoded_r.json().strappend("doc1", "baz", "$.nested1.a") == [11] res = [{"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -540,7 +528,7 @@ async def test_strappend_dollar(decoded_r: valkey.Valkey): # Test multi await decoded_r.json().strappend("doc1", "bar", ".*.a") == 8 res = [{"a": "foobar", "nested1": {"a": "hellobarbazbar"}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing path with pytest.raises(exceptions.ResponseError): @@ -586,7 +574,7 @@ async def test_arrappend_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrappend("doc1", "$.nested1.a", "baz") == [6] @@ -597,7 +585,7 @@ async def test_arrappend_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -623,7 +611,7 @@ async def test_arrappend_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrappend("doc1", ".nested1.a", "baz") == 6 res = [ @@ -633,7 +621,7 @@ async def test_arrappend_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -661,7 +649,7 @@ async def test_arrinsert_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrinsert("doc1", "$.nested1.a", -2, "baz") == [6] res = [ @@ -671,7 +659,7 @@ async def test_arrinsert_dollar(decoded_r: valkey.Valkey): "nested2": {"a": 31}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -738,7 +726,7 @@ async def test_arrpop_dollar(decoded_r: valkey.Valkey): assert await decoded_r.json().arrpop("doc1", "$..a", 1) == ['"foo"', None, None] res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res 
# Test missing key with pytest.raises(exceptions.ResponseError): @@ -757,7 +745,7 @@ async def test_arrpop_dollar(decoded_r: valkey.Valkey): # Test multi (all paths are updated, but return result of last path) await decoded_r.json().arrpop("doc1", "..a", "1") is None res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # # Test missing key with pytest.raises(exceptions.ResponseError): @@ -777,15 +765,15 @@ async def test_arrtrim_dollar(decoded_r: valkey.Valkey): # Test multi assert await decoded_r.json().arrtrim("doc1", "$..a", "1", -1) == [0, 2, None] res = [{"a": [], "nested1": {"a": [None, "world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res assert await decoded_r.json().arrtrim("doc1", "$..a", "1", "1") == [0, 1, None] res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single assert await decoded_r.json().arrtrim("doc1", "$.nested1.a", 1, 0) == [0] res = [{"a": [], "nested1": {"a": []}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -808,7 +796,7 @@ async def test_arrtrim_dollar(decoded_r: valkey.Valkey): # Test single assert await decoded_r.json().arrtrim("doc1", ".nested1.a", "1", "1") == 1 res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -934,7 +922,7 @@ async def test_clear_dollar(decoded_r: valkey.Valkey): res = [ {"nested1": {"a": {}}, "a": [], "nested2": {"a": "claro"}, "nested3": {"a": {}}} ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test single await decoded_r.json().set( @@ -956,13 +944,11 @@ async def test_clear_dollar(decoded_r: valkey.Valkey): "nested3": {"a": {"baz": 50}}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing path (async defaults to root) assert await decoded_r.json().clear("doc1") == 1 - assert_resp_response( - decoded_r, await decoded_r.json().get("doc1", "$"), [{}], [[{}]] - ) + assert await decoded_r.json().get("doc1", "$") == [{}] # Test missing key with pytest.raises(exceptions.ResponseError): @@ -990,7 +976,7 @@ async def test_toggle_dollar(decoded_r: valkey.Valkey): "nested3": {"a": False}, } ] - assert_resp_response(decoded_r, await decoded_r.json().get("doc1", "$"), res, [res]) + assert await decoded_r.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): diff --git a/tests/test_asyncio/test_timeseries.py b/tests/test_asyncio/test_timeseries.py index 53de9527..c5a7096e 100644 --- a/tests/test_asyncio/test_timeseries.py +++ b/tests/test_asyncio/test_timeseries.py @@ -743,3 +743,111 @@ async def test_uncompressed(decoded_r: 
valkey.Valkey): assert compressed_info.memory_usage != uncompressed_info.memory_usage else: assert compressed_info["memoryUsage"] != uncompressed_info["memoryUsage"] + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +async def test_create_with_insertion_filters(decoded_r: valkey.Valkey): + await decoded_r.ts().create( + "time-series-1", + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + assert 1000 == await decoded_r.ts().add("time-series-1", 1000, 1.0) + assert 1010 == await decoded_r.ts().add("time-series-1", 1010, 11.0) + assert 1010 == await decoded_r.ts().add("time-series-1", 1013, 10.0) + assert 1020 == await decoded_r.ts().add("time-series-1", 1020, 11.5) + assert 1021 == await decoded_r.ts().add("time-series-1", 1021, 22.0) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response( + decoded_r, + data_points, + [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)], + [[1000, 1.0], [1010, 11.0], [1020, 11.5], [1021, 22.0]], + ) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +async def test_alter_with_insertion_filters(decoded_r: valkey.Valkey): + assert 1000 == await decoded_r.ts().add("time-series-1", 1000, 1.0) + assert 1010 == await decoded_r.ts().add("time-series-1", 1010, 11.0) + assert 1013 == await decoded_r.ts().add("time-series-1", 1013, 10.0) + + await decoded_r.ts().alter( + "time-series-1", + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1013 == await decoded_r.ts().add("time-series-1", 1015, 11.5) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response( + decoded_r, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10.0)], + [[1000, 1.0], [1010, 11.0], [1013, 10.0]], + ) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +async def test_add_with_insertion_filters(decoded_r: valkey.Valkey): + assert 1000 == await decoded_r.ts().add( + "time-series-1", + 1000, + 1.0, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == await decoded_r.ts().add("time-series-1", 1004, 3.0) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response(decoded_r, data_points, [(1000, 1.0)], [[1000, 1.0]]) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +async def test_incrby_with_insertion_filters(decoded_r: valkey.Valkey): + assert 1000 == await decoded_r.ts().incrby( + "time-series-1", + 1.0, + timestamp=1000, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == await decoded_r.ts().incrby("time-series-1", 3.0, timestamp=1000) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response(decoded_r, data_points, [(1000, 1.0)], [[1000, 1.0]]) + + assert 1000 == await decoded_r.ts().incrby("time-series-1", 10.1, timestamp=1000) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response(decoded_r, data_points, [(1000, 11.1)], [[1000, 11.1]]) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +async def test_decrby_with_insertion_filters(decoded_r: valkey.Valkey): + assert 1000 == await decoded_r.ts().decrby( + "time-series-1", + 1.0, + timestamp=1000, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == await decoded_r.ts().decrby("time-series-1", 3.0, timestamp=1000) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response(decoded_r, 
data_points, [(1000, -1.0)], [[1000, -1.0]]) + + assert 1000 == await decoded_r.ts().decrby("time-series-1", 10.1, timestamp=1000) + + data_points = await decoded_r.ts().range("time-series-1", "-", "+") + assert_resp_response(decoded_r, data_points, [(1000, -11.1)], [[1000, -11.1]]) diff --git a/tests/test_command_parser.py b/tests/test_command_parser.py index ae2fa8d6..80a8b694 100644 --- a/tests/test_command_parser.py +++ b/tests/test_command_parser.py @@ -1,9 +1,13 @@ import pytest from valkey._parsers import CommandsParser +from valkey.utils import HIREDIS_AVAILABLE from .conftest import assert_resp_response, skip_if_server_version_lt +# The response to COMMAND contains maps inside sets, which are not handled +# by the hiredis-py parser (see https://github.com/redis/hiredis-py/issues/188) +@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") class TestCommandsParser: def test_init_commands(self, r): commands_parser = CommandsParser(r) diff --git a/tests/test_commands.py b/tests/test_commands.py index 38bfa422..67286626 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -18,6 +18,7 @@ parse_info, ) from valkey.client import EMPTY_RESPONSE, NEVER_DECODE +from valkey.utils import HIREDIS_AVAILABLE from .conftest import ( _get_client, @@ -1800,7 +1801,14 @@ def test_tfunction_list(self, r): assert len(functions) == 3 expected_names = [b"lib1", b"lib2", b"lib3"] - actual_names = [functions[0][13], functions[1][13], functions[2][13]] + if is_resp2_connection(r): + actual_names = [functions[0][13], functions[1][13], functions[2][13]] + else: + actual_names = [ + functions[0][b"name"], + functions[1][b"name"], + functions[2][b"name"], + ] assert sorted(expected_names) == sorted(actual_names) assert r.tfunction_delete("lib1") @@ -4392,14 +4400,23 @@ def test_xinfo_stream(self, r): assert info["entries-added"] == 2 assert info["recorded-first-entry-id"] == m1 + r.xtrim(stream, 0) + info = r.xinfo_stream(stream) + assert info["length"] == 0 + assert info["first-entry"] is None + assert info["last-entry"] is None + @skip_if_server_version_lt("6.0.0") def test_xinfo_stream_full(self, r): stream = "stream" group = "group" m1 = r.xadd(stream, {"foo": "bar"}) - r.xgroup_create(stream, group, 0) info = r.xinfo_stream(stream, full=True) + assert info["length"] == 1 + assert len(info["groups"]) == 0 + r.xgroup_create(stream, group, 0) + info = r.xinfo_stream(stream, full=True) assert info["length"] == 1 assert_resp_response_in( r, @@ -4409,6 +4426,11 @@ def test_xinfo_stream_full(self, r): ) assert len(info["groups"]) == 1 + r.xreadgroup(group, "consumer", streams={stream: ">"}) + info = r.xinfo_stream(stream, full=True) + consumer = info["groups"][0]["consumers"][0] + assert isinstance(consumer, dict) + @skip_if_server_version_lt("5.0.0") def test_xlen(self, r): stream = "stream" @@ -4919,6 +4941,9 @@ def test_command_getkeys(self, r): r, res, ["key1", "key2", "key3"], [b"key1", b"key2", b"key3"] ) + # The response to COMMAND contains maps inside sets, which are not handled + # by the hiredis-py parser (see https://github.com/redis/hiredis-py/issues/188) + @pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @skip_if_server_version_lt("2.8.13") def test_command(self, r): res = r.command() diff --git a/tests/test_json.py b/tests/test_json.py index 15de1538..aed5695e 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -25,7 +25,7 @@ def test_json_setbinarykey(client): def test_json_setgetdeleteforget(client): assert client.json().set("foo", 
Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" assert client.json().get("baz") is None assert client.json().delete("foo") == 1 assert client.json().forget("foo") == 0 # second delete @@ -34,12 +34,12 @@ def test_json_setgetdeleteforget(client): def test_jsonget(client): client.json().set("foo", Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" def test_json_get_jset(client): assert client.json().set("foo", Path.root_path(), "bar") - assert_resp_response(client, client.json().get("foo"), "bar", [["bar"]]) + assert client.json().get("foo") == "bar" assert client.json().get("baz") is None assert 1 == client.json().delete("foo") assert client.exists("foo") == 0 @@ -79,10 +79,7 @@ def test_json_merge(client): def test_nonascii_setgetdelete(client): assert client.json().set("notascii", Path.root_path(), "hyvää-élève") - res = "hyvää-élève" - assert_resp_response( - client, client.json().get("notascii", no_escape=True), res, [[res]] - ) + assert client.json().get("notascii", no_escape=True) == "hyvää-élève" assert 1 == client.json().delete("notascii") assert client.exists("notascii") == 0 @@ -178,9 +175,7 @@ def test_toggle(client): def test_strappend(client): client.json().set("jsonkey", Path.root_path(), "foo") assert 6 == client.json().strappend("jsonkey", "bar") - assert_resp_response( - client, client.json().get("jsonkey", Path.root_path()), "foobar", [["foobar"]] - ) + assert "foobar" == client.json().get("jsonkey", Path.root_path()) # # def test_debug(client): @@ -221,14 +216,12 @@ def test_arrindex(client): def test_arrinsert(client): client.json().set("arr", Path.root_path(), [0, 4]) assert 5 - -client.json().arrinsert("arr", Path.root_path(), 1, *[1, 2, 3]) - res = [0, 1, 2, 3, 4] - assert_resp_response(client, client.json().get("arr"), res, [[res]]) + assert client.json().get("arr") == [0, 1, 2, 3, 4] # test prepends client.json().set("val2", Path.root_path(), [5, 6, 7, 8, 9]) client.json().arrinsert("val2", Path.root_path(), 0, ["some", "thing"]) - res = [["some", "thing"], 5, 6, 7, 8, 9] - assert_resp_response(client, client.json().get("val2"), res, [[res]]) + assert client.json().get("val2") == [["some", "thing"], 5, 6, 7, 8, 9] def test_arrlen(client): @@ -244,7 +237,7 @@ def test_arrpop(client): assert 3 == client.json().arrpop("arr", Path.root_path(), -1) assert 2 == client.json().arrpop("arr", Path.root_path()) assert 0 == client.json().arrpop("arr", Path.root_path(), 0) - assert_resp_response(client, client.json().get("arr"), [1], [[[1]]]) + assert [1] == client.json().get("arr") # test out of bounds client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -258,7 +251,7 @@ def test_arrpop(client): def test_arrtrim(client): client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) assert 3 == client.json().arrtrim("arr", Path.root_path(), 1, 3) - assert_resp_response(client, client.json().get("arr"), [1, 2, 3], [[[1, 2, 3]]]) + assert [1, 2, 3] == client.json().get("arr") # <0 test, should be 0 equivalent client.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4]) @@ -316,7 +309,7 @@ def test_json_commands_in_pipeline(client): p.set("foo", Path.root_path(), "bar") p.get("foo") p.delete("foo") - assert_resp_response(client, p.execute(), [True, "bar", 1], [True, [["bar"]], 1]) + assert p.execute() == [True, "bar", 1] assert client.keys() == [] assert client.get("foo") is None @@ -329,7 
+322,7 @@ def test_json_commands_in_pipeline(client): p.jsonget("foo") p.exists("notarealkey") p.delete("foo") - assert_resp_response(client, p.execute(), [True, d, 0, 1], [True, [[d]], 0, 1]) + assert p.execute() == [True, d, 0, 1] assert client.keys() == [] assert client.get("foo") is None @@ -339,13 +332,13 @@ def test_json_delete_with_dollar(client): assert client.json().set("doc1", "$", doc1) assert client.json().delete("doc1", "$..a") == 2 res = [{"nested": {"b": 3}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert client.json().set("doc2", "$", doc2) assert client.json().delete("doc2", "$..a") == 1 res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(client, client.json().get("doc2", "$"), res, [res]) + assert client.json().get("doc2", "$") == res doc3 = [ { @@ -376,7 +369,7 @@ def test_json_delete_with_dollar(client): } ] ] - assert_resp_response(client, client.json().get("doc3", "$"), doc3val, [doc3val]) + assert client.json().get("doc3", "$") == doc3val # Test default path assert client.json().delete("doc3") == 1 @@ -390,13 +383,13 @@ def test_json_forget_with_dollar(client): assert client.json().set("doc1", "$", doc1) assert client.json().forget("doc1", "$..a") == 2 res = [{"nested": {"b": 3}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}} assert client.json().set("doc2", "$", doc2) assert client.json().forget("doc2", "$..a") == 1 res = [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}] - assert_resp_response(client, client.json().get("doc2", "$"), res, [res]) + assert client.json().get("doc2", "$") == res doc3 = [ { @@ -427,7 +420,7 @@ def test_json_forget_with_dollar(client): } ] ] - assert_resp_response(client, client.json().get("doc3", "$"), doc3val, [doc3val]) + assert client.json().get("doc3", "$") == doc3val # Test default path assert client.json().forget("doc3") == 1 @@ -450,9 +443,9 @@ def test_json_mget_dollar(client): ) # Compare also to single JSON.GET res = [1, 3, None] - assert_resp_response(client, client.json().get("doc1", "$..a"), res, [res]) + assert client.json().get("doc1", "$..a") == res res = [4, 6, [None]] - assert_resp_response(client, client.json().get("doc2", "$..a"), res, [res]) + assert client.json().get("doc2", "$..a") == res # Test mget with single path client.json().mget("doc1", "$..a") == [1, 3, None] @@ -499,13 +492,15 @@ def test_numby_commands_dollar(client): # Test legacy NUMINCRBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) - client.json().numincrby("doc1", ".b[0].a", 3) == 5 + assert_resp_response(client, client.json().numincrby("doc1", ".b[0].a", 3), 5, [5]) # Test legacy NUMMULTBY client.json().set("doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}) with pytest.deprecated_call(): - client.json().nummultby("doc1", ".b[0].a", 3) == 6 + assert_resp_response( + client, client.json().nummultby("doc1", ".b[0].a", 3), 6, [6] + ) def test_strappend_dollar(client): @@ -515,23 +510,23 @@ def test_strappend_dollar(client): # Test multi client.json().strappend("doc1", "bar", "$..a") == [6, 8, None] - # res = [{"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] - # assert_resp_response(client, client.json().get("doc1", "$"), res, 
[res]) + res = [{"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test single client.json().strappend("doc1", "baz", "$.nested1.a") == [11] - # res = [{"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}] - # assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + res = [{"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}] + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test missing key with pytest.raises(exceptions.ResponseError): client.json().strappend("non_existing_doc", "$..a", "err") # Test multi - client.json().strappend("doc1", "bar", ".*.a") == 8 - # res = [{"a": "foo", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}] - # assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().strappend("doc1", "bar", ".*.a") == 14 + res = [{"a": "foobar", "nested1": {"a": "hellobarbazbar"}, "nested2": {"a": 31}}] + assert_resp_response(client, client.json().get("doc1", "$"), res, res) # Test missing path with pytest.raises(exceptions.ResponseError): @@ -577,7 +572,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrappend("doc1", "$.nested1.a", "baz") == [6] @@ -588,7 +583,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -614,7 +609,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrappend("doc1", ".nested1.a", "baz") == 6 @@ -625,7 +620,7 @@ def test_arrappend_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -652,7 +647,7 @@ def test_arrinsert_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrinsert("doc1", "$.nested1.a", -2, "baz") == [6] @@ -663,7 +658,7 @@ def test_arrinsert_dollar(client): "nested2": {"a": 31}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -733,7 +728,7 @@ def test_arrpop_dollar(client): assert client.json().arrpop("doc1", "$..a", 1) == ['"foo"', None, None] res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -752,7 +747,7 @@ def test_arrpop_dollar(client): # Test multi (all paths are updated, but return result of last path) client.json().arrpop("doc1", "..a", "1") is None res = [{"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + 
assert client.json().get("doc1", "$") == res # # Test missing key with pytest.raises(exceptions.ResponseError): @@ -772,16 +767,16 @@ def test_arrtrim_dollar(client): # Test multi assert client.json().arrtrim("doc1", "$..a", "1", -1) == [0, 2, None] res = [{"a": [], "nested1": {"a": [None, "world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res assert client.json().arrtrim("doc1", "$..a", "1", "1") == [0, 1, None] res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single assert client.json().arrtrim("doc1", "$.nested1.a", 1, 0) == [0] res = [{"a": [], "nested1": {"a": []}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -804,7 +799,7 @@ def test_arrtrim_dollar(client): # Test single assert client.json().arrtrim("doc1", ".nested1.a", "1", "1") == 1 res = [{"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -928,7 +923,7 @@ def test_clear_dollar(client): res = [ {"nested1": {"a": {}}, "a": [], "nested2": {"a": "claro"}, "nested3": {"a": {}}} ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test single client.json().set( @@ -950,11 +945,11 @@ def test_clear_dollar(client): "nested3": {"a": {"baz": 50}}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing path (defaults to root) assert client.json().clear("doc1") == 1 - assert_resp_response(client, client.json().get("doc1", "$"), [{}], [[{}]]) + assert client.json().get("doc1", "$") == [{}] # Test missing key with pytest.raises(exceptions.ResponseError): @@ -982,7 +977,7 @@ def test_toggle_dollar(client): "nested3": {"a": False}, } ] - assert_resp_response(client, client.json().get("doc1", "$"), res, [res]) + assert client.json().get("doc1", "$") == res # Test missing key with pytest.raises(exceptions.ResponseError): @@ -1282,12 +1277,10 @@ def test_arrindex_dollar(client): }, ) - assert_resp_response( - client, - client.json().get("store", "$.store.book[?(@.price<10)].size"), - [[10, 20, 30, 40], [5, 10, 20, 30]], - [[[10, 20, 30, 40], [5, 10, 20, 30]]], - ) + assert client.json().get("store", "$.store.book[?(@.price<10)].size") == [ + [10, 20, 30, 40], + [5, 10, 20, 30], + ] assert client.json().arrindex( "store", "$.store.book[?(@.price<10)].size", "20" @@ -1316,7 +1309,7 @@ def test_arrindex_dollar(client): "3", [], ] - assert_resp_response(client, client.json().get("test_num", "$..arr"), res, [res]) + assert client.json().get("test_num", "$..arr") == res assert client.json().arrindex("test_num", "$..arr", 3) == [3, 2, -1, None, -1] @@ -1349,7 +1342,7 @@ def test_arrindex_dollar(client): "3", [], ] - assert_resp_response(client, client.json().get("test_string", "$..arr"), res, [res]) + assert client.json().get("test_string", "$..arr") == res assert client.json().arrindex("test_string", "$..arr", "baz") == [ 3, @@ -1442,7 +1435,7 @@ def test_arrindex_dollar(client): None, [], ] 
- assert_resp_response(client, client.json().get("test_None", "$..arr"), res, [res]) + assert client.json().get("test_None", "$..arr") == res # Test with none-scalar value assert client.json().arrindex( @@ -1481,7 +1474,7 @@ def test_custom_decoder(client): cj = client.json(encoder=ujson, decoder=ujson) assert cj.set("foo", Path.root_path(), "bar") - assert_resp_response(client, cj.get("foo"), "bar", [["bar"]]) + assert cj.get("foo") == "bar" assert cj.get("baz") is None assert 1 == cj.delete("foo") assert client.exists("foo") == 0 @@ -1502,7 +1495,7 @@ def test_set_file(client): nojsonfile.write(b"Hello World") assert client.json().set_file("test", Path.root_path(), jsonfile.name) - assert_resp_response(client, client.json().get("test"), obj, [[obj]]) + assert client.json().get("test") == obj with pytest.raises(json.JSONDecodeError): client.json().set_file("test2", Path.root_path(), nojsonfile.name) @@ -1524,6 +1517,4 @@ def test_set_path(client): result = {jsonfile: True, nojsonfile: False} assert client.json().set_path(Path.root_path(), root) == result res = {"hello": "world"} - assert_resp_response( - client, client.json().get(jsonfile.rsplit(".")[0]), res, [[res]] - ) + assert client.json().get(jsonfile.rsplit(".")[0]) == res diff --git a/tests/test_parsers/test_helpers.py b/tests/test_parsers/test_helpers.py index 5986370a..4a1b2497 100644 --- a/tests/test_parsers/test_helpers.py +++ b/tests/test_parsers/test_helpers.py @@ -33,3 +33,31 @@ def test_parse_info(): assert info["search_version"] == "99.99.99" assert info["search_redis_version"] == "7.2.2 - oss" assert info["search_query_timeout_ms"] == 500 + + +def test_parse_info_list(): + info_output = """ +list_one:a, +list_two:a b,,c,10,1.1 + """ + info = parse_info(info_output) + + assert isinstance(info["list_one"], list) + assert info["list_one"] == ["a"] + + assert isinstance(info["list_two"], list) + assert info["list_two"] == ["a b", "c", 10, 1.1] + + +def test_parse_info_list_dict_mixed(): + info_output = """ +list_one:a,b=1 +list_two:a b=foo,,c,d=bar,e, + """ + info = parse_info(info_output) + + assert isinstance(info["list_one"], dict) + assert info["list_one"] == {"a": True, "b": 1} + + assert isinstance(info["list_two"], dict) + assert info["list_two"] == {"a b": "foo", "c": True, "d": "bar", "e": True} diff --git a/tests/test_timeseries.py b/tests/test_timeseries.py index 02e08000..41bb3f91 100644 --- a/tests/test_timeseries.py +++ b/tests/test_timeseries.py @@ -971,4 +971,160 @@ def test_uncompressed(client): if is_resp2_connection(client): assert compressed_info.memory_usage != uncompressed_info.memory_usage else: - assert compressed_info["memoryUsage"] != uncompressed_info["memoryUsage"] + assert compressed_info["memoryUsage"] < uncompressed_info["memoryUsage"] + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_create_with_insertion_filters(client): + client.ts().create( + "time-series-1", + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + assert 1000 == client.ts().add("time-series-1", 1000, 1.0) + assert 1010 == client.ts().add("time-series-1", 1010, 11.0) + assert 1010 == client.ts().add("time-series-1", 1013, 10.0) + assert 1020 == client.ts().add("time-series-1", 1020, 11.5) + assert 1021 == client.ts().add("time-series-1", 1021, 22.0) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1020, 11.5), (1021, 22.0)], + [[1000, 1.0], [1010, 11.0], [1020, 11.5], [1021, 22.0]], + ) + + 
+@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_create_with_insertion_filters_other_duplicate_policy(client): + client.ts().create( + "time-series-1", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + assert 1000 == client.ts().add("time-series-1", 1000, 1.0) + assert 1010 == client.ts().add("time-series-1", 1010, 11.0) + # Still accepted because the duplicate_policy is not `last`. + assert 1013 == client.ts().add("time-series-1", 1013, 10.0) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10)], + [[1000, 1.0], [1010, 11.0], [1013, 10]], + ) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_alter_with_insertion_filters(client): + assert 1000 == client.ts().add("time-series-1", 1000, 1.0) + assert 1010 == client.ts().add("time-series-1", 1010, 11.0) + assert 1013 == client.ts().add("time-series-1", 1013, 10.0) + + client.ts().alter( + "time-series-1", + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1013 == client.ts().add("time-series-1", 1015, 11.5) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response( + client, + data_points, + [(1000, 1.0), (1010, 11.0), (1013, 10.0)], + [[1000, 1.0], [1010, 11.0], [1013, 10.0]], + ) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_add_with_insertion_filters(client): + assert 1000 == client.ts().add( + "time-series-1", + 1000, + 1.0, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == client.ts().add("time-series-1", 1004, 3.0) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response(client, data_points, [(1000, 1.0)], [[1000, 1.0]]) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_incrby_with_insertion_filters(client): + assert 1000 == client.ts().incrby( + "time-series-1", + 1.0, + timestamp=1000, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == client.ts().incrby("time-series-1", 3.0, timestamp=1000) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response(client, data_points, [(1000, 1.0)], [[1000, 1.0]]) + + assert 1000 == client.ts().incrby("time-series-1", 10.1, timestamp=1000) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response(client, data_points, [(1000, 11.1)], [[1000, 11.1]]) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_decrby_with_insertion_filters(client): + assert 1000 == client.ts().decrby( + "time-series-1", + 1.0, + timestamp=1000, + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + + assert 1000 == client.ts().decrby("time-series-1", 3.0, timestamp=1000) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response(client, data_points, [(1000, -1.0)], [[1000, -1.0]]) + + assert 1000 == client.ts().decrby("time-series-1", 10.1, timestamp=1000) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response(client, data_points, [(1000, -11.1)], [[1000, -11.1]]) + + +@skip_ifmodversion_lt("1.12.0", "timeseries") +def test_madd_with_insertion_filters(client): + client.ts().create( + "time-series-1", + duplicate_policy="last", + ignore_max_time_diff=5, + ignore_max_val_diff=10.0, + ) + assert 1010 == client.ts().add("time-series-1", 1010, 1.0) + assert [1010, 1010, 1020, 1021] == client.ts().madd( + [ + 
("time-series-1", 1011, 11.0), + ("time-series-1", 1013, 10.0), + ("time-series-1", 1020, 2.0), + ("time-series-1", 1021, 22.0), + ] + ) + + data_points = client.ts().range("time-series-1", "-", "+") + assert_resp_response( + client, + data_points, + [(1010, 1.0), (1020, 2.0), (1021, 22.0)], + [[1010, 1.0], [1020, 2.0], [1021, 22.0]], + ) diff --git a/valkey/_parsers/helpers.py b/valkey/_parsers/helpers.py index 1e89c38d..8fbd2e82 100644 --- a/valkey/_parsers/helpers.py +++ b/valkey/_parsers/helpers.py @@ -46,11 +46,18 @@ def get_value(value): return int(value) except ValueError: return value + elif "=" not in value: + return [get_value(v) for v in value.split(",") if v] else: sub_dict = {} for item in value.split(","): - k, v = item.rsplit("=", 1) - sub_dict[k] = get_value(v) + if not item: + continue + if "=" in item: + k, v = item.rsplit("=", 1) + sub_dict[k] = get_value(v) + else: + sub_dict[item] = True return sub_dict for line in response.splitlines(): @@ -268,17 +275,22 @@ def parse_xinfo_stream(response, **options): data = {str_if_bytes(k): v for k, v in response.items()} if not options.get("full", False): first = data.get("first-entry") - if first is not None: + if first is not None and first[0] is not None: data["first-entry"] = (first[0], pairs_to_dict(first[1])) last = data["last-entry"] - if last is not None: + if last is not None and last[0] is not None: data["last-entry"] = (last[0], pairs_to_dict(last[1])) else: data["entries"] = {_id: pairs_to_dict(entry) for _id, entry in data["entries"]} - if isinstance(data["groups"][0], list): + if len(data["groups"]) > 0 and isinstance(data["groups"][0], list): data["groups"] = [ pairs_to_dict(group, decode_keys=True) for group in data["groups"] ] + for g in data["groups"]: + if g["consumers"] and g["consumers"][0] is not None: + g["consumers"] = [ + pairs_to_dict(c, decode_keys=True) for c in g["consumers"] + ] else: data["groups"] = [ {str_if_bytes(k): v for k, v in group.items()} diff --git a/valkey/_parsers/hiredis.py b/valkey/_parsers/hiredis.py index dd381179..37634ba9 100644 --- a/valkey/_parsers/hiredis.py +++ b/valkey/_parsers/hiredis.py @@ -19,6 +19,11 @@ SERVER_CLOSED_CONNECTION_ERROR, ) +# Used to signal that hiredis-py does not have enough data to parse. +# Using `False` or `None` is not reliable, given that the parser can +# return `False` or `None` for legitimate reasons from RESP payloads. 
+NOT_ENOUGH_DATA = object() + class _HiredisReaderArgs(TypedDict, total=False): protocolError: Callable[[str], Exception] @@ -51,25 +56,26 @@ def on_connect(self, connection, **kwargs): "protocolError": InvalidResponse, "replyError": self.parse_error, "errors": connection.encoder.encoding_errors, + "notEnoughData": NOT_ENOUGH_DATA, } if connection.encoder.decode_responses: kwargs["encoding"] = connection.encoder.encoding self._reader = hiredis.Reader(**kwargs) - self._next_response = False + self._next_response = NOT_ENOUGH_DATA def on_disconnect(self): self._sock = None self._reader = None - self._next_response = False + self._next_response = NOT_ENOUGH_DATA def can_read(self, timeout): if not self._reader: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) - if self._next_response is False: + if self._next_response is NOT_ENOUGH_DATA: self._next_response = self._reader.gets() - if self._next_response is False: + if self._next_response is NOT_ENOUGH_DATA: return self.read_from_socket(timeout=timeout, raise_on_timeout=False) return True @@ -108,9 +114,9 @@ def read_response(self, disable_decoding=False): raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) # _next_response might be cached from a can_read() call - if self._next_response is not False: + if self._next_response is not NOT_ENOUGH_DATA: response = self._next_response - self._next_response = False + self._next_response = NOT_ENOUGH_DATA return response if disable_decoding: @@ -118,7 +124,7 @@ def read_response(self, disable_decoding=False): else: response = self._reader.gets() - while response is False: + while response is NOT_ENOUGH_DATA: self.read_from_socket() if disable_decoding: response = self._reader.gets(False) @@ -156,6 +162,7 @@ def on_connect(self, connection): kwargs: _HiredisReaderArgs = { "protocolError": InvalidResponse, "replyError": self.parse_error, + "notEnoughData": NOT_ENOUGH_DATA, } if connection.encoder.decode_responses: kwargs["encoding"] = connection.encoder.encoding @@ -170,7 +177,7 @@ def on_disconnect(self): async def can_read_destructive(self): if not self._connected: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) - if self._reader.gets(): + if self._reader.gets() is not NOT_ENOUGH_DATA: return True try: async with async_timeout(0): @@ -200,7 +207,7 @@ async def read_response( response = self._reader.gets(False) else: response = self._reader.gets() - while response is False: + while response is NOT_ENOUGH_DATA: await self.read_from_socket() if disable_decoding: response = self._reader.gets(False)
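
An illustrative sketch, not part of the diff above, of why the hiredis hunks introduce a dedicated NOT_ENOUGH_DATA sentinel rather than reusing False or None. It assumes a hiredis-py release that accepts the notEnoughData keyword, which is exactly what the new on_connect() kwargs rely on; False and None cannot mean "no data yet" because both are legitimate parsed RESP replies.

    import hiredis

    NOT_ENOUGH_DATA = object()  # unique sentinel, never equal to any parsed reply
    reader = hiredis.Reader(notEnoughData=NOT_ENOUGH_DATA)

    reader.feed(b"$-1\r\n")                  # a RESP nil bulk string parses to None
    assert reader.gets() is None             # ...which is a real reply, not "no data"
    assert reader.gets() is NOT_ENOUGH_DATA  # buffer now empty: only the sentinel says so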