From 700045c7a7db6d986df6610d61239375a2211053 Mon Sep 17 00:00:00 2001 From: Vladyslav Vildanov <117659936+vladvildanov@users.noreply.github.com> Date: Mon, 7 Oct 2024 16:11:56 +0300 Subject: [PATCH 1/2] Backport 5.1 changes into master (#3406) * Updated package version (#3389) * Fix bug with Redis Set commands returns List instead of Set (#3399) * Fix bug with Redis Set commands returns List instead of Set in RESP2 * Removed fixture, codestyle fixes * Fixed tests for async * Fixed asyncio cluster test cases * Added Sets alignment for RESP2 and RESP3 * Updated doctests * Fix bug with partial Hiredis availability (#3400) * Fix bug with partial Hiredis availability * Added yes flag * Codestyl fixes * Removed redundant check * Removed redundant checks associated with pack command * Updated condition to check the actual flag * Removed unused import * Fix bug with async pipeline and cluster fails with some commands (#3402) * Fix bug with async pipeline fails with some commands * Codestyl changes * Remove keys option in cluster context * Updated package version (#3403) --- .github/workflows/integration.yaml | 33 ++++++++++++++++++++++++++++ doctests/dt_set.py | 34 ++++++++++++++--------------- redis/_parsers/helpers.py | 6 +++++ redis/asyncio/client.py | 4 ++++ redis/cluster.py | 4 ++++ redis/connection.py | 3 +-- redis/utils.py | 4 ++-- setup.py | 2 +- tests/test_asyncio/test_cluster.py | 20 ++++++++--------- tests/test_asyncio/test_commands.py | 20 ++++++++--------- tests/test_asyncio/test_pipeline.py | 10 +++++++++ tests/test_cache.py | 2 +- tests/test_cluster.py | 24 ++++++++++---------- tests/test_commands.py | 20 ++++++++--------- tests/test_encoding.py | 18 --------------- tests/test_pipeline.py | 10 +++++++++ 16 files changed, 131 insertions(+), 83 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index b10edf2fb4..ed969d11f1 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ 
-90,6 +90,21 @@ jobs: invoke ${{matrix.test-type}}-tests ls -1 + - name: Run tests against hiredis < 3.0.0 + if: ${{ matrix.connection-type == 'hiredis' && matrix.python-version == '3.12'}} + run: | + pip uninstall hiredis -y + pip install -U setuptools wheel + pip install -r requirements.txt + pip install -r dev_requirements.txt + if [ "${{matrix.connection-type}}" == "hiredis" ]; then + pip install "hiredis<3.0.0" + fi + invoke devenv + sleep 10 # time to settle + invoke ${{matrix.test-type}}-tests + ls -1 + - name: Upload test results and profiling data uses: actions/upload-artifact@v4 with: @@ -145,6 +160,24 @@ jobs: invoke ${{matrix.test-type}}-tests --protocol=3 fi + - name: Run tests against hiredis < 3.0.0 + if: ${{ matrix.connection-type == 'hiredis' && matrix.python-version == '3.12'}} + run: | + pip uninstall hiredis -y + pip install -U setuptools wheel + pip install -r requirements.txt + pip install -r dev_requirements.txt + if [ "${{matrix.connection-type}}" == "hiredis" ]; then + pip install "hiredis<3.0.0" + fi + invoke devenv + sleep 10 # time to settle + if [ "${{matrix.event-loop}}" == "uvloop" ]; then + invoke ${{matrix.test-type}}-tests --uvloop --protocol=3 + else + invoke ${{matrix.test-type}}-tests --protocol=3 + fi + - name: Upload test results and profiling data uses: actions/upload-artifact@v4 with: diff --git a/doctests/dt_set.py b/doctests/dt_set.py index 0c0562ac80..fc66410b45 100644 --- a/doctests/dt_set.py +++ b/doctests/dt_set.py @@ -58,11 +58,11 @@ r.sadd("bikes:racing:usa", "bike:1", "bike:4") # HIDE_END res7 = r.sinter("bikes:racing:france", "bikes:racing:usa") -print(res7) # >>> ['bike:1'] +print(res7) # >>> {'bike:1'} # STEP_END # REMOVE_START -assert res7 == ["bike:1"] +assert res7 == {"bike:1"} # REMOVE_END # STEP_START scard @@ -83,12 +83,12 @@ print(res9) # >>> 3 res10 = r.smembers("bikes:racing:france") -print(res10) # >>> ['bike:1', 'bike:2', 'bike:3'] +print(res10) # >>> {'bike:1', 'bike:2', 'bike:3'} # STEP_END # 
REMOVE_START assert res9 == 3 -assert res10 == ['bike:1', 'bike:2', 'bike:3'] +assert res10 == {'bike:1', 'bike:2', 'bike:3'} # REMOVE_END # STEP_START smismember @@ -109,11 +109,11 @@ r.sadd("bikes:racing:usa", "bike:1", "bike:4") res13 = r.sdiff("bikes:racing:france", "bikes:racing:usa") -print(res13) # >>> ['bike:2', 'bike:3'] +print(res13) # >>> {'bike:2', 'bike:3'} # STEP_END # REMOVE_START -assert res13 == ['bike:2', 'bike:3'] +assert res13 == {'bike:2', 'bike:3'} r.delete("bikes:racing:france") r.delete("bikes:racing:usa") # REMOVE_END @@ -124,27 +124,27 @@ r.sadd("bikes:racing:italy", "bike:1", "bike:2", "bike:3", "bike:4") res13 = r.sinter("bikes:racing:france", "bikes:racing:usa", "bikes:racing:italy") -print(res13) # >>> ['bike:1'] +print(res13) # >>> {'bike:1'} res14 = r.sunion("bikes:racing:france", "bikes:racing:usa", "bikes:racing:italy") -print(res14) # >>> ['bike:1', 'bike:2', 'bike:3', 'bike:4'] +print(res14) # >>> {'bike:1', 'bike:2', 'bike:3', 'bike:4'} res15 = r.sdiff("bikes:racing:france", "bikes:racing:usa", "bikes:racing:italy") -print(res15) # >>> [] +print(res15) # >>> set() res16 = r.sdiff("bikes:racing:usa", "bikes:racing:france") -print(res16) # >>> ['bike:4'] +print(res16) # >>> {'bike:4'} res17 = r.sdiff("bikes:racing:france", "bikes:racing:usa") -print(res17) # >>> ['bike:2', 'bike:3'] +print(res17) # >>> {'bike:2', 'bike:3'} # STEP_END # REMOVE_START -assert res13 == ['bike:1'] -assert res14 == ['bike:1', 'bike:2', 'bike:3', 'bike:4'] -assert res15 == [] -assert res16 == ['bike:4'] -assert res17 == ['bike:2', 'bike:3'] +assert res13 == {'bike:1'} +assert res14 == {'bike:1', 'bike:2', 'bike:3', 'bike:4'} +assert res15 == set() +assert res16 == {'bike:4'} +assert res17 == {'bike:2', 'bike:3'} r.delete("bikes:racing:france") r.delete("bikes:racing:usa") r.delete("bikes:racing:italy") @@ -160,7 +160,7 @@ print(res19) # >>> bike:3 res20 = r.smembers("bikes:racing:france") -print(res20) # >>> ['bike:2', 'bike:4', 'bike:5'] +print(res20) # >>> 
{'bike:2', 'bike:4', 'bike:5'} res21 = r.srandmember("bikes:racing:france") print(res21) # >>> bike:4 diff --git a/redis/_parsers/helpers.py b/redis/_parsers/helpers.py index 7494c79210..6832100bb6 100644 --- a/redis/_parsers/helpers.py +++ b/redis/_parsers/helpers.py @@ -785,6 +785,9 @@ def string_keys_to_dict(key_string, callback): _RedisCallbacksRESP2 = { + **string_keys_to_dict( + "SDIFF SINTER SMEMBERS SUNION", lambda r: r and set(r) or set() + ), **string_keys_to_dict( "ZDIFF ZINTER ZPOPMAX ZPOPMIN ZRANGE ZRANGEBYSCORE ZRANK ZREVRANGE " "ZREVRANGEBYSCORE ZREVRANK ZUNION", @@ -829,6 +832,9 @@ def string_keys_to_dict(key_string, callback): _RedisCallbacksRESP3 = { + **string_keys_to_dict( + "SDIFF SINTER SMEMBERS SUNION", lambda r: r and set(r) or set() + ), **string_keys_to_dict( "ZRANGE ZINTER ZPOPMAX ZPOPMIN ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE " "ZUNION HGETALL XREADGROUP", diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index 039ebfdfae..9508849703 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -1423,6 +1423,10 @@ async def _execute_transaction( # noqa: C901 if not isinstance(r, Exception): args, options = cmd command_name = args[0] + + # Remove keys entry, it needs only for cache. + options.pop("keys", None) + if command_name in self.response_callbacks: r = self.response_callbacks[command_name](r, **options) if inspect.isawaitable(r): diff --git a/redis/cluster.py b/redis/cluster.py index fbf5428d40..9dcbad7fc1 100644 --- a/redis/cluster.py +++ b/redis/cluster.py @@ -1163,6 +1163,10 @@ def _execute_command(self, target_node, *args, **kwargs): asking = False connection.send_command(*args, **kwargs) response = redis_node.parse_response(connection, command, **kwargs) + + # Remove keys entry, it needs only for cache. 
+ kwargs.pop("keys", None) + if command in self.cluster_response_callbacks: response = self.cluster_response_callbacks[command]( response, **kwargs diff --git a/redis/connection.py b/redis/connection.py index 6aae2101c2..40f2d29722 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -38,7 +38,6 @@ from .utils import ( CRYPTOGRAPHY_AVAILABLE, HIREDIS_AVAILABLE, - HIREDIS_PACK_AVAILABLE, SSL_AVAILABLE, compare_versions, ensure_string, @@ -314,7 +313,7 @@ def __del__(self): def _construct_command_packer(self, packer): if packer is not None: return packer - elif HIREDIS_PACK_AVAILABLE: + elif HIREDIS_AVAILABLE: return HiredisRespSerializer() else: return PythonRespSerializer(self._buffer_cutoff, self.encoder.encode) diff --git a/redis/utils.py b/redis/utils.py index b4e9afb054..8693fb3c8f 100644 --- a/redis/utils.py +++ b/redis/utils.py @@ -8,10 +8,10 @@ # Only support Hiredis >= 3.0: HIREDIS_AVAILABLE = int(hiredis.__version__.split(".")[0]) >= 3 - HIREDIS_PACK_AVAILABLE = hasattr(hiredis, "pack_command") + if not HIREDIS_AVAILABLE: + raise ImportError("hiredis package should be >= 3.0.0") except ImportError: HIREDIS_AVAILABLE = False - HIREDIS_PACK_AVAILABLE = False try: import ssl # noqa diff --git a/setup.py b/setup.py index 0c968a4d9f..8036b64066 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ long_description_content_type="text/markdown", keywords=["Redis", "key-value store", "database"], license="MIT", - version="5.1.0b7", + version="5.1.1", packages=find_packages( include=[ "redis", diff --git a/tests/test_asyncio/test_cluster.py b/tests/test_asyncio/test_cluster.py index e480db332b..f3b76b80c9 100644 --- a/tests/test_asyncio/test_cluster.py +++ b/tests/test_asyncio/test_cluster.py @@ -1752,38 +1752,38 @@ async def test_cluster_rpoplpush(self, r: RedisCluster) -> None: async def test_cluster_sdiff(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "1", "2", "3") - assert set(await r.sdiff("{foo}a", "{foo}b")) == {b"1", b"2", b"3"} + assert 
await r.sdiff("{foo}a", "{foo}b") == {b"1", b"2", b"3"} await r.sadd("{foo}b", "2", "3") - assert await r.sdiff("{foo}a", "{foo}b") == [b"1"] + assert await r.sdiff("{foo}a", "{foo}b") == {b"1"} async def test_cluster_sdiffstore(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "1", "2", "3") assert await r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 3 - assert set(await r.smembers("{foo}c")) == {b"1", b"2", b"3"} + assert await r.smembers("{foo}c") == {b"1", b"2", b"3"} await r.sadd("{foo}b", "2", "3") assert await r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 1 - assert await r.smembers("{foo}c") == [b"1"] + assert await r.smembers("{foo}c") == {b"1"} async def test_cluster_sinter(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "1", "2", "3") - assert await r.sinter("{foo}a", "{foo}b") == [] + assert await r.sinter("{foo}a", "{foo}b") == set() await r.sadd("{foo}b", "2", "3") - assert set(await r.sinter("{foo}a", "{foo}b")) == {b"2", b"3"} + assert await r.sinter("{foo}a", "{foo}b") == {b"2", b"3"} async def test_cluster_sinterstore(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "1", "2", "3") assert await r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 0 - assert await r.smembers("{foo}c") == [] + assert await r.smembers("{foo}c") == set() await r.sadd("{foo}b", "2", "3") assert await r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 2 - assert set(await r.smembers("{foo}c")) == {b"2", b"3"} + assert await r.smembers("{foo}c") == {b"2", b"3"} async def test_cluster_smove(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "a1", "a2") await r.sadd("{foo}b", "b1", "b2") assert await r.smove("{foo}a", "{foo}b", "a1") - assert await r.smembers("{foo}a") == [b"a2"] - assert set(await r.smembers("{foo}b")) == {b"b1", b"b2", b"a1"} + assert await r.smembers("{foo}a") == {b"a2"} + assert await r.smembers("{foo}b") == {b"b1", b"b2", b"a1"} async def test_cluster_sunion(self, r: RedisCluster) -> None: await r.sadd("{foo}a", "1", "2") diff --git 
a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 28c3094cdb..f6ed07fab5 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -1415,34 +1415,34 @@ async def test_scard(self, r: redis.Redis): @pytest.mark.onlynoncluster async def test_sdiff(self, r: redis.Redis): await r.sadd("a", "1", "2", "3") - assert set(await r.sdiff("a", "b")) == {b"1", b"2", b"3"} + assert await r.sdiff("a", "b") == {b"1", b"2", b"3"} await r.sadd("b", "2", "3") - assert await r.sdiff("a", "b") == [b"1"] + assert await r.sdiff("a", "b") == {b"1"} @pytest.mark.onlynoncluster async def test_sdiffstore(self, r: redis.Redis): await r.sadd("a", "1", "2", "3") assert await r.sdiffstore("c", "a", "b") == 3 - assert set(await r.smembers("c")) == {b"1", b"2", b"3"} + assert await r.smembers("c") == {b"1", b"2", b"3"} await r.sadd("b", "2", "3") assert await r.sdiffstore("c", "a", "b") == 1 - assert await r.smembers("c") == [b"1"] + assert await r.smembers("c") == {b"1"} @pytest.mark.onlynoncluster async def test_sinter(self, r: redis.Redis): await r.sadd("a", "1", "2", "3") - assert await r.sinter("a", "b") == [] + assert await r.sinter("a", "b") == set() await r.sadd("b", "2", "3") - assert set(await r.sinter("a", "b")) == {b"2", b"3"} + assert await r.sinter("a", "b") == {b"2", b"3"} @pytest.mark.onlynoncluster async def test_sinterstore(self, r: redis.Redis): await r.sadd("a", "1", "2", "3") assert await r.sinterstore("c", "a", "b") == 0 - assert await r.smembers("c") == [] + assert await r.smembers("c") == set() await r.sadd("b", "2", "3") assert await r.sinterstore("c", "a", "b") == 2 - assert set(await r.smembers("c")) == {b"2", b"3"} + assert await r.smembers("c") == {b"2", b"3"} async def test_sismember(self, r: redis.Redis): await r.sadd("a", "1", "2", "3") @@ -1460,8 +1460,8 @@ async def test_smove(self, r: redis.Redis): await r.sadd("a", "a1", "a2") await r.sadd("b", "b1", "b2") assert await r.smove("a", "b", "a1") - 
assert await r.smembers("a") == [b"a2"] - assert set(await r.smembers("b")) == {b"b1", b"b2", b"a1"} + assert await r.smembers("a") == {b"a2"} + assert await r.smembers("b") == {b"b1", b"b2", b"a1"} async def test_spop(self, r: redis.Redis): s = [b"1", b"2", b"3"] diff --git a/tests/test_asyncio/test_pipeline.py b/tests/test_asyncio/test_pipeline.py index 4b29360d72..31759d84a3 100644 --- a/tests/test_asyncio/test_pipeline.py +++ b/tests/test_asyncio/test_pipeline.py @@ -417,3 +417,13 @@ async def test_pipeline_discard(self, r): response = await pipe.execute() assert response[0] assert await r.get("foo") == b"bar" + + @pytest.mark.onlynoncluster + async def test_send_set_commands_over_async_pipeline(self, r: redis.asyncio.Redis): + pipe = r.pipeline() + pipe.hset("hash:1", "foo", "bar") + pipe.hset("hash:1", "bar", "foo") + pipe.hset("hash:1", "baz", "bar") + pipe.hgetall("hash:1") + resp = await pipe.execute() + assert resp == [1, 1, 1, {b"bar": b"foo", b"baz": b"bar", b"foo": b"bar"}] diff --git a/tests/test_cache.py b/tests/test_cache.py index 1803646094..67733dc9af 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -41,7 +41,7 @@ def r(request): @pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only") @pytest.mark.onlynoncluster -# @skip_if_resp_version(2) +@skip_if_resp_version(2) @skip_if_server_version_lt("7.4.0") class TestCache: @pytest.mark.parametrize( diff --git a/tests/test_cluster.py b/tests/test_cluster.py index c4b3188050..fe5852d1fb 100644 --- a/tests/test_cluster.py +++ b/tests/test_cluster.py @@ -1865,49 +1865,49 @@ def test_cluster_rpoplpush(self, r): def test_cluster_sdiff(self, r): r.sadd("{foo}a", "1", "2", "3") - assert set(r.sdiff("{foo}a", "{foo}b")) == {b"1", b"2", b"3"} + assert r.sdiff("{foo}a", "{foo}b") == {b"1", b"2", b"3"} r.sadd("{foo}b", "2", "3") - assert r.sdiff("{foo}a", "{foo}b") == [b"1"] + assert r.sdiff("{foo}a", "{foo}b") == {b"1"} def test_cluster_sdiffstore(self, r): r.sadd("{foo}a", "1", "2", "3") 
assert r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 3 - assert set(r.smembers("{foo}c")) == {b"1", b"2", b"3"} + assert r.smembers("{foo}c") == {b"1", b"2", b"3"} r.sadd("{foo}b", "2", "3") assert r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 1 - assert r.smembers("{foo}c") == [b"1"] + assert r.smembers("{foo}c") == {b"1"} def test_cluster_sinter(self, r): r.sadd("{foo}a", "1", "2", "3") - assert r.sinter("{foo}a", "{foo}b") == [] + assert r.sinter("{foo}a", "{foo}b") == set() r.sadd("{foo}b", "2", "3") - assert set(r.sinter("{foo}a", "{foo}b")) == {b"2", b"3"} + assert r.sinter("{foo}a", "{foo}b") == {b"2", b"3"} def test_cluster_sinterstore(self, r): r.sadd("{foo}a", "1", "2", "3") assert r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 0 - assert r.smembers("{foo}c") == [] + assert r.smembers("{foo}c") == set() r.sadd("{foo}b", "2", "3") assert r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 2 - assert set(r.smembers("{foo}c")) == {b"2", b"3"} + assert r.smembers("{foo}c") == {b"2", b"3"} def test_cluster_smove(self, r): r.sadd("{foo}a", "a1", "a2") r.sadd("{foo}b", "b1", "b2") assert r.smove("{foo}a", "{foo}b", "a1") - assert r.smembers("{foo}a") == [b"a2"] - assert set(r.smembers("{foo}b")) == {b"b1", b"b2", b"a1"} + assert r.smembers("{foo}a") == {b"a2"} + assert r.smembers("{foo}b") == {b"b1", b"b2", b"a1"} def test_cluster_sunion(self, r): r.sadd("{foo}a", "1", "2") r.sadd("{foo}b", "2", "3") - assert set(r.sunion("{foo}a", "{foo}b")) == {b"1", b"2", b"3"} + assert r.sunion("{foo}a", "{foo}b") == {b"1", b"2", b"3"} def test_cluster_sunionstore(self, r): r.sadd("{foo}a", "1", "2") r.sadd("{foo}b", "2", "3") assert r.sunionstore("{foo}c", "{foo}a", "{foo}b") == 3 - assert set(r.smembers("{foo}c")) == {b"1", b"2", b"3"} + assert r.smembers("{foo}c") == {b"1", b"2", b"3"} @skip_if_server_version_lt("6.2.0") def test_cluster_zdiff(self, r): diff --git a/tests/test_commands.py b/tests/test_commands.py index 74e9c1c88e..4cad4c14b6 100644 --- a/tests/test_commands.py 
+++ b/tests/test_commands.py @@ -2247,25 +2247,25 @@ def test_scard(self, r): @pytest.mark.onlynoncluster def test_sdiff(self, r): r.sadd("a", "1", "2", "3") - assert set(r.sdiff("a", "b")) == {b"1", b"2", b"3"} + assert r.sdiff("a", "b") == {b"1", b"2", b"3"} r.sadd("b", "2", "3") - assert r.sdiff("a", "b") == [b"1"] + assert r.sdiff("a", "b") == {b"1"} @pytest.mark.onlynoncluster def test_sdiffstore(self, r): r.sadd("a", "1", "2", "3") assert r.sdiffstore("c", "a", "b") == 3 - assert set(r.smembers("c")) == {b"1", b"2", b"3"} + assert r.smembers("c") == {b"1", b"2", b"3"} r.sadd("b", "2", "3") assert r.sdiffstore("c", "a", "b") == 1 - assert r.smembers("c") == [b"1"] + assert r.smembers("c") == {b"1"} @pytest.mark.onlynoncluster def test_sinter(self, r): r.sadd("a", "1", "2", "3") - assert r.sinter("a", "b") == [] + assert r.sinter("a", "b") == set() r.sadd("b", "2", "3") - assert set(r.sinter("a", "b")) == {b"2", b"3"} + assert r.sinter("a", "b") == {b"2", b"3"} @pytest.mark.onlynoncluster @skip_if_server_version_lt("7.0.0") @@ -2280,10 +2280,10 @@ def test_sintercard(self, r): def test_sinterstore(self, r): r.sadd("a", "1", "2", "3") assert r.sinterstore("c", "a", "b") == 0 - assert r.smembers("c") == [] + assert r.smembers("c") == set() r.sadd("b", "2", "3") assert r.sinterstore("c", "a", "b") == 2 - assert set(r.smembers("c")) == {b"2", b"3"} + assert r.smembers("c") == {b"2", b"3"} def test_sismember(self, r): r.sadd("a", "1", "2", "3") @@ -2308,8 +2308,8 @@ def test_smove(self, r): r.sadd("a", "a1", "a2") r.sadd("b", "b1", "b2") assert r.smove("a", "b", "a1") - assert r.smembers("a") == [b"a2"] - assert set(r.smembers("b")) == {b"b1", b"b2", b"a1"} + assert r.smembers("a") == {b"a2"} + assert r.smembers("b") == {b"b1", b"b2", b"a1"} def test_spop(self, r): s = [b"1", b"2", b"3"] diff --git a/tests/test_encoding.py b/tests/test_encoding.py index 331cd5108c..0fcb256cfb 100644 --- a/tests/test_encoding.py +++ b/tests/test_encoding.py @@ -1,7 +1,5 @@ import 
pytest import redis -from redis.connection import Connection -from redis.utils import HIREDIS_PACK_AVAILABLE from .conftest import _get_client @@ -75,22 +73,6 @@ def test_replace(self, request): assert r.get("a") == "foo\ufffd" -@pytest.mark.skipif( - HIREDIS_PACK_AVAILABLE, - reason="Packing via hiredis does not preserve memoryviews", -) -class TestMemoryviewsAreNotPacked: - def test_memoryviews_are_not_packed(self): - c = Connection() - arg = memoryview(b"some_arg") - arg_list = ["SOME_COMMAND", arg] - cmd = c.pack_command(*arg_list) - assert cmd[1] is arg - cmds = c.pack_commands([arg_list, arg_list]) - assert cmds[1] is arg - assert cmds[3] is arg - - class TestCommandsAreNotEncoded: @pytest.fixture() def r(self, request): diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py index 7f10fcad4f..be7784ad0b 100644 --- a/tests/test_pipeline.py +++ b/tests/test_pipeline.py @@ -412,3 +412,13 @@ def test_pipeline_discard(self, r): response = pipe.execute() assert response[0] assert r.get("foo") == b"bar" + + @pytest.mark.onlynoncluster + def test_send_set_commands_over_pipeline(self, r: redis.Redis): + pipe = r.pipeline() + pipe.hset("hash:1", "foo", "bar") + pipe.hset("hash:1", "bar", "foo") + pipe.hset("hash:1", "baz", "bar") + pipe.hgetall("hash:1") + resp = pipe.execute() + assert resp == [1, 1, 1, {b"bar": b"foo", b"baz": b"bar", b"foo": b"bar"}] From 17db62e3c9ea796f5705d2857f49e52799057af7 Mon Sep 17 00:00:00 2001 From: David Dougherty Date: Tue, 8 Oct 2024 22:38:12 -0700 Subject: [PATCH 2/2] DOC-4200: add TCEs to the aggregation query page (#3381) --- doctests/query_agg.py | 103 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 doctests/query_agg.py diff --git a/doctests/query_agg.py b/doctests/query_agg.py new file mode 100644 index 0000000000..4fa8f14b84 --- /dev/null +++ b/doctests/query_agg.py @@ -0,0 +1,103 @@ +# EXAMPLE: query_agg +# HIDE_START +import json +import redis +from redis.commands.json.path 
import Path +from redis.commands.search import Search +from redis.commands.search.aggregation import AggregateRequest +from redis.commands.search.field import NumericField, TagField +from redis.commands.search.indexDefinition import IndexDefinition, IndexType +import redis.commands.search.reducers as reducers + +r = redis.Redis(decode_responses=True) + +# create index +schema = ( + TagField("$.condition", as_name="condition"), + NumericField("$.price", as_name="price"), +) + +index = r.ft("idx:bicycle") +index.create_index( + schema, + definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), +) + +# load data +with open("data/query_em.json") as f: + bicycles = json.load(f) + +pipeline = r.pipeline(transaction=False) +for bid, bicycle in enumerate(bicycles): + pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) +pipeline.execute() +# HIDE_END + +# STEP_START agg1 +search = Search(r, index_name="idx:bicycle") +aggregate_request = AggregateRequest(query='@condition:{new}') \ + .load('__key', 'price') \ + .apply(discounted='@price - (@price * 0.1)') +res = search.aggregate(aggregate_request) +print(len(res.rows)) # >>> 5 +print(res.rows) # >>> [['__key', 'bicycle:0', ... 
+#[['__key', 'bicycle:0', 'price', '270', 'discounted', '243'], +# ['__key', 'bicycle:5', 'price', '810', 'discounted', '729'], +# ['__key', 'bicycle:6', 'price', '2300', 'discounted', '2070'], +# ['__key', 'bicycle:7', 'price', '430', 'discounted', '387'], +# ['__key', 'bicycle:8', 'price', '1200', 'discounted', '1080']] +# REMOVE_START +assert len(res.rows) == 5 +# REMOVE_END +# STEP_END + +# STEP_START agg2 +search = Search(r, index_name="idx:bicycle") +aggregate_request = AggregateRequest(query='*') \ + .load('price') \ + .apply(price_category='@price<1000') \ + .group_by('@condition', reducers.sum('@price_category').alias('num_affordable')) +res = search.aggregate(aggregate_request) +print(len(res.rows)) # >>> 3 +print(res.rows) # >>> +#[['condition', 'refurbished', 'num_affordable', '1'], +# ['condition', 'used', 'num_affordable', '1'], +# ['condition', 'new', 'num_affordable', '3']] +# REMOVE_START +assert len(res.rows) == 3 +# REMOVE_END +# STEP_END + +# STEP_START agg3 +search = Search(r, index_name="idx:bicycle") +aggregate_request = AggregateRequest(query='*') \ + .apply(type="'bicycle'") \ + .group_by('@type', reducers.count().alias('num_total')) +res = search.aggregate(aggregate_request) +print(len(res.rows)) # >>> 1 +print(res.rows) # >>> [['type', 'bicycle', 'num_total', '10']] +# REMOVE_START +assert len(res.rows) == 1 +# REMOVE_END +# STEP_END + +# STEP_START agg4 +search = Search(r, index_name="idx:bicycle") +aggregate_request = AggregateRequest(query='*') \ + .load('__key') \ + .group_by('@condition', reducers.tolist('__key').alias('bicycles')) +res = search.aggregate(aggregate_request) +print(len(res.rows)) # >>> 3 +print(res.rows) # >>> +#[['condition', 'refurbished', 'bicycles', ['bicycle:9']], +# ['condition', 'used', 'bicycles', ['bicycle:1', 'bicycle:2', 'bicycle:3', 'bicycle:4']], +# ['condition', 'new', 'bicycles', ['bicycle:5', 'bicycle:6', 'bicycle:7', 'bicycle:0', 'bicycle:8']]] +# REMOVE_START +assert len(res.rows) == 3 +# REMOVE_END 
+# STEP_END + +# REMOVE_START +# destroy index and data +r.ft("idx:bicycle").dropindex(delete_documents=True) +# REMOVE_END