Skip to content

Commit

Permalink
Merge pull request freelawproject#4417 from freelawproject/fix-test-profile-urls-failing-test
Browse files Browse the repository at this point in the history

Added prefix to the search micro-cache key to avoid affecting other tests
  • Loading branch information
mlissner authored Sep 4, 2024
2 parents 6025c3b + c0dc7b0 commit 2b20b2d
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 9 deletions.
7 changes: 6 additions & 1 deletion cl/search/tests/tests_es_recap.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,6 @@ def setUpTestData(cls):
)
# Index parties in ES.
index_docket_parties_in_es.delay(cls.de.docket.pk)
cache.clear()

async def _test_article_count(self, params, expected_count, field_name):
r = await self.async_client.get("/", params)
Expand Down Expand Up @@ -2576,6 +2575,12 @@ def test_initial_complaint_button(self) -> None:
async def test_micro_cache_for_search_results(self, mock_fetch_es) -> None:
"""Assert micro-cache for search results behaves properly."""

# Clean search_results_cache before starting the test.
r = get_redis_interface("CACHE")
keys = r.keys("search_results_cache")
if keys:
r.delete(*keys)

mock_fetch_es.side_effect = lambda *args, **kwargs: fetch_es_results(
*args, **kwargs
)
Expand Down
18 changes: 10 additions & 8 deletions cl/search/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -871,9 +871,9 @@ def retrieve_cached_search_results(
Retrieve cached search results based on the GET parameters.
:param get_params: The GET parameters provided by the user.
:return: A two-tuple containing either the cached search results and a hash
of the get parameters, or None and the query parameters hash if no cached
results were found.
:return: A two-tuple containing either the cached search results and the
cache key based on a prefix and the get parameters, or None and the cache key
if no cached results were found.
"""

params = get_params.copy()
Expand All @@ -883,11 +883,13 @@ def retrieve_cached_search_results(
params.setdefault("page", "1")
params.setdefault("q", "")
sorted_params = dict(sorted(params.items()))
key_prefix = "search_results_cache:"
params_hash = sha256(pickle.dumps(sorted_params))
cached_results = cache.get(params_hash)
cache_key = f"{key_prefix}{params_hash}"
cached_results = cache.get(cache_key)
if cached_results:
return pickle.loads(cached_results), params_hash
return None, params_hash
return pickle.loads(cached_results), cache_key
return None, cache_key


def fetch_and_paginate_results(
Expand Down Expand Up @@ -918,7 +920,7 @@ def fetch_and_paginate_results(
return results, 0, False, None, None

# Check micro-cache for all other search requests.
results_dict, get_params_hash = retrieve_cached_search_results(get_params)
results_dict, micro_cache_key = retrieve_cached_search_results(get_params)
if results_dict:
# Return results and counts. Set query time to 1ms.
return (
Expand Down Expand Up @@ -972,7 +974,7 @@ def fetch_and_paginate_results(
}
serialized_data = pickle.dumps(results_dict)
cache.set(
get_params_hash,
micro_cache_key,
serialized_data,
settings.SEARCH_RESULTS_MICRO_CACHE,
)
Expand Down

0 comments on commit 2b20b2d

Please sign in to comment.