Why cache?
A data cache stores computed results so they can serve subsequent requests, speeding up data retrieval. By caching the data in a dictionary, the next time the data is needed it can be fetched directly from the dictionary.
Implement and test a cache class
class MyCache(object):
    """A tiny in-memory cache with a fixed capacity.

    Each entry records the timestamp of its last write; when the cache is
    full, the entry with the oldest timestamp is evicted to make room.
    """

    def __init__(self):
        # key -> {"date_accessed": datetime, "value": value}
        self.cache = {}
        self.max_cache_size = 10

    def __contains__(self, key):
        """Return True if *key* is currently in the cache, else False.

        :param key: cache key to look up
        :return: bool
        """
        return key in self.cache

    def update(self, key, value):
        """Insert or overwrite *key*, evicting the oldest entry if full.

        :param key: cache key
        :param value: value to store
        :return: None
        """
        # Evict only when adding a brand-new key to a full cache;
        # overwriting an existing key never changes the size.
        if key not in self.cache and len(self.cache) >= self.max_cache_size:
            self.remove_oldest()
        self.cache[key] = {
            'date_accessed': datetime.datetime.now(),
            "value": value,
        }

    def remove_oldest(self):
        """Evict the entry with the earliest ``date_accessed`` timestamp.

        No-op on an empty cache (the original raised KeyError via
        ``pop(None)`` in that case).
        :return: None
        """
        if not self.cache:
            return
        # min() scans for the earliest timestamp; on ties it keeps the
        # first key in insertion order, i.e. the oldest entry.
        oldest_entry = min(
            self.cache, key=lambda k: self.cache[k]["date_accessed"]
        )
        del self.cache[oldest_entry]

    @property
    def size(self):
        """Current number of cached entries.

        :return: int
        """
        return len(self.cache)
if __name__ == "__main__":
    # `random` was used below but never imported anywhere in this file,
    # which made the demo crash with NameError.
    import random

    # Exercise the cache: insert 11 distinct keys into a 10-slot cache and
    # watch the reported size stop growing once capacity is reached.
    keys = ['test', 'red', 'fox', 'fence', 'junk', 'other', 'alpha', 'bravo', 'cal', 'devo', 'ele']
    s = "abcdefghijklmnop"
    cache = MyCache()
    for i, key in enumerate(keys):
        if key in cache:
            continue
        # Random dotted string as a stand-in payload. `_` avoids shadowing
        # the enclosing loop index `i` (the original reused `i` here).
        value = ".".join(random.choice(s) for _ in range(20))
        cache.update(key, value)
        print(i+1, "------> ", cache.size)
Use lru_cache decorator
Python 3.2 introduced a very elegant caching mechanism: the lru_cache decorator in the functools module.
@functools.lru_cache(maxsize=None, typed=False)
Using the functools module's lru_cache decorator, up to maxsize results of a function's calls can be cached, improving program efficiency —
especially suitable for time-consuming functions. The maxsize parameter is the maximum number of cached results; if it is None there is no limit, and setting it to a power of 2 gives the best performance.
If typed=True (note that this parameter does not exist in functools32), calls whose arguments differ in type are cached separately — for example, f(3) and f(3.0).
A function decorated with lru_cache gains two methods, cache_clear and cache_info, used to clear the cache and to inspect cache statistics respectively.
Here is a simple example demonstrating the effect of lru_cache.
Note: the third-party module fastcache is a C implementation that is faster, is compatible with both Python 2 and Python 3, and provides the same functionality.
from functools import lru_cache
@lru_cache(None)
def add(x, y):
    """Return x + y, memoized: the body runs only on a cache miss."""
    print(f"calculating: {x} + {y}")
    total = x + y
    return total
# Exercise the memoized function: the first call executes the body,
# an identical repeat call would be served straight from the cache.
first_sum = add(1, 2)
print(first_sum)
print(add.cache_info())  # show cache statistics
add.cache_clear()  # empty the cache
# Because the cache was just cleared, the call below misses and the
# function body actually executes again before the result is printed.
print(add(1, 2))
print(add(2, 3))
web cache
In web applications, a common practice is to wrap a Redis client in a package of your own and implement different kinds of data caches through different key prefixes:
code is as follows:
# Exercise the caching features of python-redis.
import json
import pprint
from redis import Redis
# from flask_restful import current_app
# NOTE(review): the URL selects db 1 but the explicit db=0 keyword takes
# precedence in redis-py, so this actually connects to db 0 — confirm
# which database was intended.
myredis = Redis.from_url("redis://localhost:6379/1", db=0)
class RedisCache(object):
    """Thin Redis wrapper that namespaces keys with a prefix and
    optionally encodes/decodes values on the way in and out."""

    def __init__(self, key_prefix="", encoder=None, decoder=None):
        """
        Initialize the Redis-backed store.

        :param key_prefix: prefix prepended to every key
        :param encoder: callable applied to values before storing
            (e.g. ``json.dumps``)
        :param decoder: callable applied to values after loading
            (e.g. ``json.loads``)
        """
        self._key_prefix = key_prefix
        self.encoder = encoder
        self.decoder = decoder
        # Underlying Redis client, resolved lazily by the ``rds`` property.
        self._r = None

    @property
    def rds(self):
        """Return the underlying Redis client, resolving it lazily.

        :return: a Redis client instance
        """
        if self._r is None:
            # Falls back to the module-level connection; a Flask app could
            # instead pull the client off ``current_app`` here.
            self._r = myredis
        return self._r

    def key(self, key):
        """Return *key* with the configured prefix prepended.

        :param key: unprefixed key
        :return: str
        """
        return f"{self._key_prefix}{key}"

    def cache(self, key, value, timeout=None):
        """
        Store *value* under the prefixed *key*.

        :param key: unprefixed key
        :param value: value to store (encoded first if an encoder is set)
        :param timeout: optional expiry in seconds (Redis ``ex``)
        :return: None
        """
        if self.encoder:
            value = self.encoder(value)
        if timeout is not None:
            self.rds.set(self.key(key), value, timeout)
        else:
            self.rds.set(self.key(key), value)

    def get(self, key):
        """
        Fetch the value for *key*, decoding it if a decoder is set.

        :param key: unprefixed key
        :return: the decoded value, or None when the key is absent
        """
        key = self.key(key)
        value = self.rds.get(key)
        if (value is not None) and self.decoder:
            value = self.decoder(value)
        return value

    def delete(self, key):
        """
        Remove *key* from the store.

        :param key: unprefixed key
        :return: None
        """
        self.rds.delete(self.key(key))

    def get_keys(self, *keys):
        """
        Fetch several keys in a single round trip via a pipeline.

        Fix: each non-None result is now run through the configured
        decoder, making the return values consistent with :meth:`get`
        (the original returned raw bytes even when a decoder was set).

        :param keys: unprefixed keys
        :return: list of decoded values (None for absent keys)
        """
        pl = self.rds.pipeline()
        for key in keys:
            pl.get(self.key(key))
        values = pl.execute()
        if self.decoder:
            values = [v if v is None else self.decoder(v) for v in values]
        return values
# Sample payload: two "forked strategy" records. The values mix None,
# strings and ints — JSON-serializable, which is why the cache below is
# configured with json.dumps / json.loads.
forked_infos = [
    {"annualized_returns": None,
     "create_time": 1562038393,
     "desc": "失败的MACD策略",
     "forked_id": "None",
     "max_drawdown": "---",
     "origin": None,
     "parent": None
     },
    {"annualized_returns": None,
     "create_time": 1562060612,
     "desc": "失败的MACD策略",
     "forked_id": "5d1b2744b264566d3f3f3632",
     "max_drawdown": None,
     "origin": "5d1ad079e86117f3883f361e",
     "parent": "5d1ad079e86117f3883f361e"
     }
]
mycache = RedisCache("DEMO", encoder=json.dumps, decoder=json.loads)
# The JSON encoder is required: redis-py cannot store a list directly and
# would raise:
# "Invalid input of type: 'list'. Convert to a byte, string or number first."
mycache.cache("5d1ad079e86117f3883f361e", forked_infos)
print(pprint.pformat(mycache.get("5d1ad079e86117f3883f361e")))