Decorators¶
aiocache comes with a couple of decorators for caching results from asynchronous functions. Do not use these decorators on synchronous functions; doing so may lead to unexpected behavior.
cached¶
import asyncio
from collections import namedtuple
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer
Result = namedtuple('Result', "content, status")
# Cache the coroutine's return value in Redis for 10 seconds under the
# fixed key "key". PickleSerializer is used so the namedtuple survives the
# round trip through Redis intact.
@cached(
    ttl=10,
    cache=Cache.REDIS,
    key="key",
    serializer=PickleSerializer(),
    port=6379,
    namespace="main",
)
async def cached_call():
    """Build a Result; repeat calls within the TTL are served from cache."""
    response = Result("content", 200)
    return response
def test_cached():
    """Verify that calling ``cached_call`` stores its result in Redis.

    Connects with the same endpoint/port/namespace as the decorator, so the
    existence check looks at the exact entry the decorator wrote, then
    removes the key and closes the connection.
    """
    cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")

    async def run():
        await cached_call()
        # The decorator stored the result under the fixed key "key".
        assert await cache.exists("key") is True
        await cache.delete("key")
        await cache.close()

    # asyncio.run replaces the deprecated get_event_loop()/run_until_complete
    # pattern: get_event_loop() emits a DeprecationWarning since Python 3.10
    # when called with no running event loop.
    asyncio.run(run())


if __name__ == "__main__":
    test_cached()
multi_cached¶
import asyncio
from aiocache import multi_cached, Cache
# Backing data for the decorated functions below: each single-letter id
# maps to a fixed single-letter value.
DICT = dict(a="Z", b="Y", c="X", d="W")
# Cache each requested id as its own Redis entry; on later calls only the
# ids missing from the cache reach the function body.
@multi_cached("ids", cache=Cache.REDIS, namespace="main")
async def multi_cached_ids(ids=None):
    """Return a mapping from each requested id to its value in DICT."""
    return {key: DICT[key] for key in ids}
# Same as multi_cached_ids, but the decorator reads the keys to cache from
# the "keys" argument instead of "ids".
@multi_cached("keys", cache=Cache.REDIS, namespace="main")
async def multi_cached_keys(keys=None):
    """Return a mapping from each requested key to its value in DICT."""
    return {key: DICT[key] for key in keys}
# Module-level cache client pointing at the same Redis namespace the
# decorators write to; the test below uses it to inspect and clean up keys.
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")
def test_multi_cached():
    """Exercise both multi_cached functions and verify each id was cached.

    Every id passed to the decorated functions should end up as its own
    Redis key; afterwards all keys are removed and the connection closed.
    """

    async def run():
        await multi_cached_ids(ids=['a', 'b'])
        # 'a' is already cached from the previous call, so only 'c'
        # reaches the function body here.
        await multi_cached_ids(ids=['a', 'c'])
        await multi_cached_keys(keys=['d'])

        for key in ('a', 'b', 'c', 'd'):
            assert await cache.exists(key)

        for key in ('a', 'b', 'c', 'd'):
            await cache.delete(key)
        await cache.close()

    # asyncio.run replaces the deprecated get_event_loop()/run_until_complete
    # pattern: get_event_loop() emits a DeprecationWarning since Python 3.10
    # when called with no running event loop.
    asyncio.run(run())


if __name__ == "__main__":
    test_multi_cached()