Bugfix/initial compressed urls #79

Merged · 3 commits · Sep 26, 2023
4 changes: 4 additions & 0 deletions README.rst
@@ -216,6 +216,10 @@ Or to run it without ``tox`` you can simply run::
 Changelog
 ---------
 
+1.3.1
+* Fix a bug whereby the ``FANCY_COMPRESS_REMEMBERED_URLS`` setting
+  raises a ``TypeError`` when it is first enabled.
+
 1.3.0
 * Enable ``FANCY_COMPRESS_REMEMBERED_URLS`` setting to compress
   ``remembered_urls`` dictionary when ``FANCY_REMEMBER_ALL_URLS``
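For context on the changelog entry: when ``FANCY_COMPRESS_REMEMBERED_URLS`` is switched on for the first time, the value already stored in the cache is still a plain dict, and passing a dict to ``zlib.decompress()`` raises the ``TypeError`` in question. A minimal standalone reproduction (my sketch, not code from this PR):

```python
import json
import zlib

# Simulate the cache entry written before compression was enabled:
# a plain dict rather than zlib-compressed bytes.
remembered_urls = {"/page1.html": ("cache-key-1", None)}

try:
    # What the 1.3.0 code did unconditionally whenever
    # COMPRESS_REMEMBERED_URLS was true:
    json.loads(zlib.decompress(remembered_urls).decode())
except TypeError as exc:
    print(exc)  # a bytes-like object is required, not 'dict'

# The fix: only decompress when the cached value is not already a dict.
if not isinstance(remembered_urls, dict):
    remembered_urls = json.loads(zlib.decompress(remembered_urls).decode())
```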
2 changes: 1 addition & 1 deletion fancy_cache/__init__.py
@@ -1,3 +1,3 @@
 from .cache_page import cache_page  # NOQA
 
-__version__ = "1.3.0"
+__version__ = "1.3.1"
19 changes: 12 additions & 7 deletions fancy_cache/memory.py
@@ -53,7 +53,10 @@ def find_urls(
     else:
         remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
     if COMPRESS_REMEMBERED_URLS:
-        remembered_urls = json.loads(zlib.decompress(remembered_urls).decode())
+        if not isinstance(remembered_urls, dict):
+            remembered_urls = json.loads(
+                zlib.decompress(remembered_urls).decode()
+            )
     keys_to_delete = []
     if urls:
         regexes = _urls_to_regexes(urls)
@@ -110,9 +113,10 @@ def find_urls(
 
         remembered_urls = cache.get(REMEMBERED_URLS_KEY, {})
         if COMPRESS_REMEMBERED_URLS:
-            remembered_urls = json.loads(
-                zlib.decompress(remembered_urls).decode()
-            )
+            if not isinstance(remembered_urls, dict):
+                remembered_urls = json.loads(
+                    zlib.decompress(remembered_urls).decode()
+                )
         remembered_urls = delete_keys(keys_to_delete, remembered_urls)
         if COMPRESS_REMEMBERED_URLS:
             remembered_urls = zlib.compress(
@@ -130,9 +134,10 @@ def delete_keys_cas(keys_to_delete: typing.List[str]) -> bool:
         return False
 
     if COMPRESS_REMEMBERED_URLS:
-        remembered_urls = json.loads(
-            zlib.decompress(remembered_urls).decode()
-        )
+        if not isinstance(remembered_urls, dict):
+            remembered_urls = json.loads(
+                zlib.decompress(remembered_urls).decode()
+            )
 
     remembered_urls = delete_keys(keys_to_delete, remembered_urls)
     if COMPRESS_REMEMBERED_URLS:
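The same ``isinstance`` guard appears at every decompression site in ``fancy_cache/memory.py``. Factored out as a standalone helper, the pattern looks roughly like this (the helper names are mine, not part of the PR; the write path mirrors the ``zlib.compress`` call visible in the diff):

```python
import json
import typing
import zlib


def _maybe_decompress(value: typing.Union[bytes, dict]) -> dict:
    """Hypothetical helper: return the remembered-urls dict, decompressing
    only if the cached value is still zlib-compressed bytes.

    On the first run after COMPRESS_REMEMBERED_URLS is enabled, the cache
    still holds the uncompressed dict written by the previous version, so
    decompression must be skipped.
    """
    if isinstance(value, dict):
        return value
    return json.loads(zlib.decompress(value).decode())


def _compress(remembered_urls: dict) -> bytes:
    """Hypothetical mirror of the write path: JSON-encode, then compress."""
    return zlib.compress(json.dumps(remembered_urls).encode())


# Round trip: a dict passes through unchanged; compressed bytes decode back
# (JSON turns the tuple value into a list).
urls = {"/page1.html": ("key1", None)}
assert _maybe_decompress(urls) == urls
assert _maybe_decompress(_compress(urls)) == {"/page1.html": ["key1", None]}
```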
14 changes: 8 additions & 6 deletions fancy_cache/middleware.py
@@ -207,9 +207,10 @@ def remember_url(self, request, cache_key: str, timeout: int) -> None:
 
         remembered_urls = self.cache.get(REMEMBERED_URLS_KEY, {})
         if COMPRESS_REMEMBERED_URLS:
-            remembered_urls = json.loads(
-                zlib.decompress(remembered_urls).decode()
-            )
+            if not isinstance(remembered_urls, dict):
+                remembered_urls = json.loads(
+                    zlib.decompress(remembered_urls).decode()
+                )
         remembered_urls = filter_remembered_urls(remembered_urls)
         remembered_urls[url] = (cache_key, expiration_time)
         if COMPRESS_REMEMBERED_URLS:
@@ -238,9 +239,10 @@ def _remember_url_cas(
             return False
 
         if COMPRESS_REMEMBERED_URLS:
-            remembered_urls = json.loads(
-                zlib.decompress(remembered_urls).decode()
-            )
+            if not isinstance(remembered_urls, dict):
+                remembered_urls = json.loads(
+                    zlib.decompress(remembered_urls).decode()
+                )
 
         remembered_urls = filter_remembered_urls(remembered_urls)
 
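The ``*_cas`` variants above exist because a plain read-modify-write of the shared ``remembered_urls`` dict can race between processes; memcached's check-and-set protocol accepts the write only if the key is unchanged since it was read. A rough sketch of such a loop, assuming a python-memcached-style client with ``gets``/``cas`` (this helper is illustrative, not code from the PR):

```python
def update_remembered_urls_cas(client, key, transform) -> bool:
    """Hypothetical CAS update loop: retry until the write lands cleanly."""
    while True:
        value = client.gets(key)  # read value and remember its CAS token
        if value is None:
            return False  # nothing cached yet; mirrors `return False` above
        if client.cas(key, transform(value)):
            return True  # no concurrent writer interfered; value stored
        # otherwise another process updated the key first: re-read and retry
```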
17 changes: 17 additions & 0 deletions fancy_tests/tests/test_memory.py
@@ -57,6 +57,23 @@ def test_find_and_purge_all_urls_with_zlib_compression(self):
         found = list(find_urls([]))
         eq_(len(found), 0)
 
+    @mock.patch("fancy_cache.memory.COMPRESS_REMEMBERED_URLS", True)
+    def test_find_and_purge_all_urls_with_zlib_compression_first_time(self):
+        """
+        When enabling zlib compression, the existing REMEMBERED_URLS
+        will not be compressed yet. This test ensures that the transition to
+        compressed REMEMBERED_URLS is seamless.
+        """
+        remembered_urls = cache.get(REMEMBERED_URLS_KEY)
+        cache.set(REMEMBERED_URLS_KEY, remembered_urls, 5)
+        found = list(find_urls([], purge=True))
+        eq_(len(found), 4)
+        for key, value in self.urls.items():
+            pair = (key, value[0], None)
+            ok_(pair in found)
+        found = list(find_urls([]))
+        eq_(len(found), 0)
+
     def test_find_one_url(self):
         found = list(find_urls(["/page1.html"]))
         eq_(len(found), 1)