Clear hot cache on origin miss (#45)
* Clear hot cache on origin miss

## Motivation / Description
While rare, a hot entry can get deleted at the origin, so we need to clear
the hot cache when the origin returns a miss and the revalidation window
has elapsed. A minimal sketch of this flow follows below.

* bump version
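
For illustration only (not part of this commit's diff): a minimal, hypothetical sketch of a hot-cache wrapper that drops its local entry once the origin reports a miss and the entry is past its window. `TinyHotCache`, `HotEntry`, and the plain-dict origin are assumptions made for this example, not the meta-memcache API.

```python
import time
from dataclasses import dataclass
from typing import Any, Dict, Optional


@dataclass
class HotEntry:
    value: Any
    expiration: float
    extended: bool = False


class TinyHotCache:
    """Toy hot cache in front of a dict 'origin' (hypothetical example)."""

    def __init__(self, origin: Dict[str, Any], cache_ttl: float = 60.0) -> None:
        self._origin = origin  # stand-in for the real memcache client
        self._store: Dict[str, HotEntry] = {}
        self._cache_ttl = cache_ttl

    def _clear_hot_cache_if_expired(self, key: str) -> bool:
        # Drop the local entry once it is past its expiration.
        entry = self._store.get(key)
        if entry and time.time() > entry.expiration:
            del self._store[key]
            return True
        return False

    def get(self, key: str) -> Optional[Any]:
        now = time.time()
        entry = self._store.get(key)
        if entry and entry.expiration > now:
            return entry.value  # fresh hot hit, origin not consulted
        value = self._origin.get(key)  # consult the origin
        if value is None:
            # Origin miss: the key was deleted upstream, so make sure the
            # hot cache stops serving the old value once its window is over.
            self._clear_hot_cache_if_expired(key)
            return None
        self._store[key] = HotEntry(value=value, expiration=now + self._cache_ttl)
        return value
```

In this toy version, deleting the key from the origin dict and letting the local entry expire makes the next get() both return None and evict the stale hot entry, which is the behavior this commit adds to ProbabilisticHotCache.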
bisho authored Oct 11, 2023
1 parent d944014 commit fd3da3e
Showing 4 changed files with 156 additions and 7 deletions.
pyproject.toml: 2 changes (1 addition, 1 deletion)
@@ -1,6 +1,6 @@
[tool.poetry]
name = "meta-memcache"
version = "1.0.3"
version = "1.0.4"
description = "Modern, pure python, memcache client with support for new meta commands."
license = "MIT"
readme = "README.md"
src/meta_memcache/__init__.py: 2 changes (1 addition, 1 deletion)
@@ -1,4 +1,4 @@
__version__ = "1.0.3"
__version__ = "1.0.4"

from meta_memcache.cache_client import CacheClient
from meta_memcache.configuration import (
src/meta_memcache/extras/probabilistic_hot_cache.py: 22 changes (20 additions, 2 deletions)
@@ -83,7 +83,10 @@ def _lookup_hot_cache(
ttl = found.expiration - now
if ttl > 0:
is_found = True
elif not found.extended and ttl < self._max_stale_while_revalidate_seconds:
elif (
not found.extended
and abs(ttl) < self._max_stale_while_revalidate_seconds
):
# Expired but the value is still fresh enough. We will try to
# use stale-while-revalidate to avoid thundering herds. Only
# one thread will get to refresh the cache.
@@ -100,8 +103,11 @@ def _lookup_hot_cache(
found.extended = True
is_found = False
else:
# Expired and the value is too stale to use.
# Expired and the value is too stale to use. No longer hot.
self._clear_hot_cache_if_necessary(key)
is_found = False
is_hot = False
value = None
else:
# Not found so not hot
is_found = False
@@ -140,6 +146,16 @@ def _store_in_hot_cache_if_necessary(
)
self._metrics and self._metrics.gauge_set("item_count", len(self._store))

def _clear_hot_cache_if_necessary(self, key: Key) -> bool:
if found := self._store.get(key.key):
if time.time() > found.expiration:
del self._store[key.key]
self._metrics and self._metrics.gauge_set(
"item_count", len(self._store)
)
return True
return False

def get(
self,
key: Union[Key, str],
@@ -165,6 +181,7 @@ def get(
)
if result is None:
allowed and self._metrics and self._metrics.metric_inc("candidate_misses")
is_hot and self._clear_hot_cache_if_necessary(key)
return None
else:
self._store_in_hot_cache_if_necessary(key, result, is_hot, allowed)
@@ -204,6 +221,7 @@ def multi_get(
allowed and self._metrics and self._metrics.metric_inc(
"candidate_misses"
)
is_hot and self._clear_hot_cache_if_necessary(key)
values[key] = None
else:
if allowed:
tests/probabilistic_hot_cache_test.py: 137 changes (134 additions, 3 deletions)
@@ -147,7 +147,7 @@ def test_get_without_prefixes(
time.time.return_value = 120

# mimic cache error, so we can see the hot cache extended by the winner thread
originsl_meta_get = client.meta_get.side_effect
original_meta_get = client.meta_get.side_effect
client.meta_get.side_effect = MemcacheError("mimic cache error")

with pytest.raises(MemcacheError):
@@ -181,7 +181,7 @@ def test_get_without_prefixes(
client.meta_get.reset_mock()

# restore the original meta_get, and check hot cache is again updated and used
client.meta_get.side_effect = originsl_meta_get
client.meta_get.side_effect = original_meta_get

assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") == CachedValue(value=1, expiration=190, extended=False)
@@ -196,7 +196,7 @@ def test_get_without_prefixes(
"test_hot_cache_misses": 8,
"test_hot_cache_skips": 0,
"test_hot_cache_item_count": 1,
"test_hot_cache_hot_candidates": 1,
"test_hot_cache_hot_candidates": 2,
"test_hot_cache_hot_skips": 0,
"test_hot_cache_candidate_misses": 0,
}
@@ -324,6 +324,9 @@ def test_multi_get(
client: Mock,
) -> None:
store = {}
metrics_collector = PrometheusMetricsCollector(
namespace="test", registry=CollectorRegistry()
)
hot_cache = ProbabilisticHotCache(
client=client,
store=store,
@@ -332,6 +335,7 @@
probability_factor=1,
max_stale_while_revalidate_seconds=10,
allowed_prefixes=["allowed:"],
metrics_collector=metrics_collector,
)

time.time.return_value = 0
@@ -393,3 +397,130 @@ def test_multi_get(
],
**DEFAULT_FLAGS,
)
assert metrics_collector.get_counters() == {
"test_hot_cache_hits": 1,
"test_hot_cache_misses": 5,
"test_hot_cache_skips": 6,
"test_hot_cache_item_count": 1,
"test_hot_cache_hot_candidates": 1,
"test_hot_cache_hot_skips": 2,
"test_hot_cache_candidate_misses": 2,
}


def test_hot_miss_invalidates_hot_cache(
time: Mock,
client: Mock,
) -> None:
store = {}
metrics_collector = PrometheusMetricsCollector(
namespace="test", registry=CollectorRegistry()
)
hot_cache = ProbabilisticHotCache(
client=client,
store=store,
cache_ttl=60,
max_last_access_age_seconds=10,
probability_factor=1,
max_stale_while_revalidate_seconds=10,
allowed_prefixes=None,
metrics_collector=metrics_collector,
)

time.time.return_value = 0

# Request a key that is hot, it is stored in the hot cache
assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") == CachedValue(value=1, expiration=60, extended=False)
client.meta_get.assert_called_once_with(key=Key(key="foo_hot"), **DEFAULT_FLAGS)
client.meta_get.reset_mock()

# Time goes by
time.time.return_value = 60

# mimic cache miss, so we can see the hot cache extended by the winner thread
client.meta_get.side_effect = lambda *args, **kwargs: Miss()

# First call gets the miss, extends the hot cache and returns None
assert hot_cache.get(key="foo_hot") is None
assert store.get("foo_hot") == CachedValue(value=1, expiration=70, extended=True)
client.meta_get.assert_called_once_with(key=Key(key="foo_hot"), **DEFAULT_FLAGS)
client.meta_get.reset_mock()

# Second call gets the cached Value, that hasn't been cleared from hot cache
assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") == CachedValue(value=1, expiration=70, extended=True)
client.meta_get.assert_not_called()
client.meta_get.reset_mock()

# Time goes by
time.time.return_value = 71

# If the origin cache is still miss, the value will be cleared from hot cache
assert hot_cache.get(key="foo_hot") is None
assert store.get("foo_hot") is None
client.meta_get.assert_called_once_with(key=Key(key="foo_hot"), **DEFAULT_FLAGS)
client.meta_get.reset_mock()

assert metrics_collector.get_counters() == {
"test_hot_cache_hits": 1,
"test_hot_cache_misses": 3,
"test_hot_cache_skips": 0,
"test_hot_cache_item_count": 0,
"test_hot_cache_hot_candidates": 1,
"test_hot_cache_hot_skips": 0,
"test_hot_cache_candidate_misses": 2,
}


def test_stale_expires(
time: Mock,
client: Mock,
) -> None:
store = {}
metrics_collector = PrometheusMetricsCollector(
namespace="test", registry=CollectorRegistry()
)
hot_cache = ProbabilisticHotCache(
client=client,
store=store,
cache_ttl=60,
max_last_access_age_seconds=10,
probability_factor=1,
max_stale_while_revalidate_seconds=10,
allowed_prefixes=None,
metrics_collector=metrics_collector,
)

time.time.return_value = 0

# Request a key that is hot, it is stored in the hot cache
assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") == CachedValue(value=1, expiration=60, extended=False)
client.meta_get.assert_called_once_with(key=Key(key="foo_hot"), **DEFAULT_FLAGS)
client.meta_get.reset_mock()

# Time goes by
time.time.return_value = 10
assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") == CachedValue(value=1, expiration=60, extended=False)
client.meta_get.assert_not_called()
client.meta_get.reset_mock()

# Time goes by so much that the item will expire
time.time.return_value = 300
# And the item is no longer hot
client.meta_get.side_effect = lambda *args, **kwargs: Value(
size=1,
value=1,
int_flags={
IntFlag.HIT_AFTER_WRITE: 1,
IntFlag.LAST_READ_AGE: 9999,
},
)

# The item will no longer be in the hot cache
assert hot_cache.get(key="foo_hot") == 1
assert store.get("foo_hot") is None
client.meta_get.assert_called_once_with(key=Key(key="foo_hot"), **DEFAULT_FLAGS)
client.meta_get.reset_mock()
