Type annotations for LruCache #8562
Changes from 2 commits
changelog.d/8562.misc
@@ -0,0 +1 @@
+Add type annotations for `LruCache`.
synapse/util/caches/lrucache.py
@@ -15,12 +15,30 @@

 import threading
 from functools import wraps
-from typing import Callable, Optional, Type, Union
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Iterable,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+
+from typing_extensions import Literal

 from synapse.config import cache as cache_config
 from synapse.util.caches import CacheMetric, register_cache
 from synapse.util.caches.treecache import TreeCache

+T = TypeVar("T")
+FT = TypeVar("FT", bound=Callable[..., Any])
+KT = TypeVar("KT")
+VT = TypeVar("VT")
+
+
 def enumerate_leaves(node, depth):
     if depth == 0:

Review comment (on the new `TypeVar` block): There's enough different types here that these could probably use some comments.
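In that spirit, a sketch of what such comments might look like; the roles below are inferred from how each type variable is used in this diff, not taken from the PR:

```python
from typing import Any, Callable, TypeVar

T = TypeVar("T")    # type of a caller-supplied `default` in cache_get/cache_pop
FT = TypeVar("FT", bound=Callable[..., Any])  # any callable; used by @synchronized
KT = TypeVar("KT")  # cache key type
VT = TypeVar("VT")  # cache value type
```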
@@ -42,7 +60,7 @@ def __init__(self, prev_node, next_node, key, value, callbacks=set()):
         self.callbacks = callbacks


-class LruCache:
+class LruCache(Generic[KT, VT]):
     """
     Least-recently-used cache, supporting prometheus metrics and invalidation callbacks.
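Making the class `Generic[KT, VT]` lets call sites pin down the key and value types. A hypothetical usage sketch (names and values are illustrative, not from the PR):

```python
# Declaring the type parameters lets mypy check every get/set against them.
cache: LruCache[str, int] = LruCache(max_size=100)
cache.set("one", 1)
value = cache.get("one")  # mypy infers Optional[int] via the overloads below
```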
@@ -128,13 +146,13 @@ def evict():
             if metrics:
                 metrics.inc_evictions(evicted_len)

-        def synchronized(f):
+        def synchronized(f: FT) -> FT:
             @wraps(f)
             def inner(*args, **kwargs):
                 with lock:
                     return f(*args, **kwargs)

-            return inner
+            return cast(FT, inner)

         cached_cache_len = [0]
         if size_callback is not None:
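This is the standard typed-decorator pattern: binding `FT` to `Callable[..., Any]` and annotating the decorator as `(f: FT) -> FT` tells mypy that whatever precise callable type goes in comes back out, so wrapped functions keep their signatures. The `cast` is needed because `inner` itself is only known as `Callable[..., Any]`. A standalone sketch of the same pattern:

```python
import threading
from functools import wraps
from typing import Any, Callable, TypeVar, cast

FT = TypeVar("FT", bound=Callable[..., Any])
lock = threading.Lock()

def synchronized(f: FT) -> FT:
    @wraps(f)
    def inner(*args: Any, **kwargs: Any) -> Any:
        with lock:
            return f(*args, **kwargs)

    # `inner` is typed Callable[..., Any]; the cast restores f's precise type.
    return cast(FT, inner)

@synchronized
def add(a: int, b: int) -> int:
    return a + b

# Under mypy, `add` still has type "def (a: int, b: int) -> int".
```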
@@ -188,8 +206,31 @@ def delete_node(node):
                 node.callbacks.clear()
             return deleted_len

+        @overload
+        def cache_get(
+            key: KT,
+            default: Literal[None] = None,
+            callbacks: Iterable[Callable[[], None]] = ...,
+            update_metrics: bool = ...,
+        ) -> Optional[VT]:
+            ...
+
+        @overload
+        def cache_get(
+            key: KT,
+            default: T,
+            callbacks: Iterable[Callable[[], None]] = ...,
+            update_metrics: bool = ...,
+        ) -> Union[T, VT]:
+            ...
+
         @synchronized
-        def cache_get(key, default=None, callbacks=[], update_metrics=True):
+        def cache_get(
+            key: KT,
+            default=None,
+            callbacks: Iterable[Callable[[], None]] = [],
+            update_metrics: bool = True,
+        ):
             node = cache.get(key, None)
             if node is not None:
                 move_node_to_front(node)

Review comment (on lines +218 to +219, the `...` defaults): I did not realize you could use […]

Review comment (on the untyped `default` parameter): Does making this […]
Reply: no, though I'm not sure it does anything.
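How the two overloads resolve at call sites, as an illustrative sketch; the inferred types are shown as comments and assume a hypothetical `cache: LruCache[str, int]`:

```python
cache: LruCache[str, int] = LruCache(max_size=10)

a = cache.get("k")         # first overload (default is None): Optional[int]
b = cache.get("k", -1)     # second overload, T = int: Union[int, int] == int
c = cache.get("k", "n/a")  # second overload, T = str: Union[str, int]
```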
@@ -203,7 +244,7 @@ def cache_get(key, default=None, callbacks=[], update_metrics=True):
                 return default

         @synchronized
-        def cache_set(key, value, callbacks=[]):
+        def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = []):
             node = cache.get(key, None)
             if node is not None:
                 # We sometimes store large objects, e.g. dicts, which cause
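The `callbacks` annotation pins down the invalidation-callback contract: zero-argument callables returning `None`. A hypothetical usage sketch:

```python
def on_invalidated() -> None:
    # Fired when the entry is invalidated, e.g. replaced or evicted.
    print("entry for 'k' is gone")

cache.set("k", 42, callbacks=[on_invalidated])
```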
@@ -232,7 +273,7 @@ def cache_set(key, value, callbacks=[]):
                 evict()

         @synchronized
-        def cache_set_default(key, value):
+        def cache_set_default(key: KT, value: VT) -> VT:
             node = cache.get(key, None)
             if node is not None:
                 return node.value
@@ -241,8 +282,16 @@ def cache_set_default(key, value):
                 evict()
             return value

+        @overload
+        def cache_pop(key: KT, default: Literal[None] = None) -> Union[None, VT]:
+            ...
+
+        @overload
+        def cache_pop(key: KT, default: T) -> Union[T, VT]:
+            ...
+
         @synchronized
-        def cache_pop(key, default=None):
+        def cache_pop(key: KT, default=None):
             node = cache.get(key, None)
             if node:
                 delete_node(node)

Review comment (on the untyped `default`): Same question as with […]
Reply: same answer :)
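The `cache_pop` overloads mirror `cache_get`; note that `Union[None, VT]` is just another spelling of `Optional[VT]`. Illustrative call sites, assuming the same hypothetical `cache: LruCache[str, int]`:

```python
cache.set("k", 1)
x = cache.pop("k")     # Union[None, int], i.e. Optional[int]; removes the entry
y = cache.pop("k", 0)  # Union[int, int] == int; key is absent now, so returns 0
```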
@@ -252,18 +301,18 @@ def cache_pop(key, default=None):
                 return default

         @synchronized
-        def cache_del_multi(key):
+        def cache_del_multi(key: KT) -> None:
             """
             This will only work if constructed with cache_type=TreeCache
             """
             popped = cache.pop(key)
             if popped is None:
                 return
-            for leaf in enumerate_leaves(popped, keylen - len(key)):
+            for leaf in enumerate_leaves(popped, keylen - len(cast(tuple, key))):
                 delete_node(leaf)

         @synchronized
-        def cache_clear():
+        def cache_clear() -> None:
             list_root.next_node = list_root
             list_root.prev_node = list_root
             for node in cache.values():
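The `cast(tuple, key)` exists because `KT` is unconstrained, so mypy cannot prove `key` supports `len()`; the cast encodes the docstring's precondition that this path is only valid for tuple keys (`cache_type=TreeCache`). A minimal sketch of the pattern, with a hypothetical helper name:

```python
from typing import TypeVar, cast

KT = TypeVar("KT")

def remaining_depth(key: KT, keylen: int) -> int:
    # len(key) alone fails type checking: KT is not known to support len().
    # The cast asserts the TreeCache invariant that keys here are tuples.
    return keylen - len(cast(tuple, key))
```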
@@ -274,7 +323,7 @@ def cache_clear():
                 cached_cache_len[0] = 0

         @synchronized
-        def cache_contains(key):
+        def cache_contains(key: KT) -> bool:
             return key in cache

         self.sentinel = object()