functools主要有3个函数:partial, wraps, lru_cache


partial: 利用一个类把原函数和参数保存起来, 后续调用的时候再补充和更新参数

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
class partial:
    """Freeze positional and keyword arguments of *func*.

    Calling the resulting object invokes *func* with the frozen positional
    arguments first, followed by any new positionals; frozen keyword
    arguments are merged with (and overridden by) call-time keywords.
    """

    def __init__(self, func, *args, **kwargs):
        if not callable(func):
            raise TypeError("func must be callable")

        self.func = func
        self.args = args
        self.kwargs = kwargs

    def __call__(self, *args, **kwargs):
        # Frozen positionals precede call-time positionals.
        args = self.args + args
        # Copy so repeated calls never mutate the stored keyword dict.
        new_kwargs = self.kwargs.copy()
        new_kwargs.update(kwargs)
        # BUG FIX: the original discarded the result; propagate func's return value.
        return self.func(*args, **new_kwargs)

wraps: 把被装饰的函数的一些属性(如 __name__ 等)复制给装饰器函数

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
# Attributes copied wholesale from the wrapped function onto the wrapper.
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
                       '__annotations__')
# Mapping attributes merged (updated in place) rather than replaced.
WRAPPER_UPDATES = ('__dict__',)


def update_wrapper(wrapper, wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES):
    """Make *wrapper* look like *wrapped*.

    Each attribute named in *assigned* is copied over when *wrapped* has it;
    each mapping named in *updated* is merged into the wrapper's own mapping.
    A ``__wrapped__`` back-reference to the original is recorded, and
    *wrapper* itself is returned for convenience.
    """
    for name in assigned:
        if hasattr(wrapped, name):
            setattr(wrapper, name, getattr(wrapped, name))

    for name in updated:
        getattr(wrapper, name).update(getattr(wrapped, name, {}))

    wrapper.__wrapped__ = wrapped
    return wrapper


def wraps(wrapped):
    """Decorator factory: the returned decorator copies *wrapped*'s
    metadata onto the function it decorates via ``update_wrapper``."""
    def decorator(wrapper):
        return update_wrapper(wrapper, wrapped)
    return decorator

lru_cache (least recently used): 最近最少使用淘汰机制缓存

方案一: list + dict

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
def lru_cache_1(max_size=128):
    """LRU cache, variant 1: plain dict for storage plus a list that
    records recency order (head = oldest, tail = most recent).

    NOTE: relies on a module-level ``make_key(args, kwargs)`` helper.
    """
    store = {}
    order = []

    def deco(func):
        def wrapper(*args, **kwargs):
            k = make_key(args, kwargs)

            if k in store:
                # Cache hit: refresh recency by moving the key to the tail.
                # (list.remove is O(n) — that's the known cost of this variant.)
                order.remove(k)
                order.append(k)
                return store[k]

            # Cache miss: compute, evicting the oldest entry if we're full.
            result = func(*args, **kwargs)
            if len(store) >= max_size:
                store.pop(order.pop(0))
            order.append(k)
            store[k] = result
            return result
        return wrapper
    return deco

方案二: OrderedDict

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
from collections import OrderedDict
def lru_cache_2(max_size=128):
    """LRU cache, variant 2: a single OrderedDict carries both the
    values and the recency order (front = oldest, back = most recent).

    NOTE: relies on a module-level ``make_key(args, kwargs)`` helper.
    """
    data = OrderedDict()

    def deco(func):
        def wrapper(*args, **kwargs):
            k = make_key(args, kwargs)
            try:
                result = data[k]
            except KeyError:
                # Miss: compute, evict the oldest entry if at capacity.
                result = func(*args, **kwargs)
                if len(data) >= max_size:
                    data.popitem(last=False)
                data[k] = result
            else:
                # Hit: refresh recency by moving the key to the back.
                data.move_to_end(k)
            return result
        return wrapper
    return deco
方案三: class双向链表 + dict
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
class Link:
    """Node of a circular doubly linked list: prev/next pointers plus
    the cached key and value.  __slots__ keeps per-node memory small."""
    __slots__ = 'prev', 'next', 'key', 'value'


def lru_cache_3(max_size=128):
    """LRU cache, variant 3: circular doubly linked list of Link nodes
    (root.next = oldest, root.prev = most recent) plus a key->Link dict.

    NOTE: relies on a module-level ``make_key(args, kwargs)`` helper.
    """
    # Sentinel root node; an empty list points at itself in both directions.
    root = Link()
    root.prev, root.next, root.key, root.value = root, root, None, None
    key_link_map = {}

    def decorator(func):
        def wrapper(*args, **kwargs):
            key = make_key(args, kwargs)
            link = key_link_map.get(key)
            if link is not None:
                # Hit: unlink the node and re-insert it just before root
                # (i.e. at the most-recently-used end).
                link.prev.next = link.next
                link.next.prev = link.prev

                last = root.prev
                last.next = root.prev = link
                link.prev = last
                link.next = root

                return link.value

            value = func(*args, **kwargs)
            link = Link()
            link.key = key
            link.value = value

            if len(key_link_map) >= max_size:
                # BUG FIX: evict the OLDEST entry (root.next).  The original
                # did key_link_map.pop(key) with the brand-new key, which is
                # not in the map and would raise KeyError.
                old_link = root.next
                del key_link_map[old_link.key]
                root.next = old_link.next
                root.next.prev = root

            # Insert the new node at the most-recently-used end.
            last = root.prev
            last.next = root.prev = link
            link.prev = last
            link.next = root
            # BUG FIX: the original never stored the link, so lookups
            # always missed and nothing was ever cached.
            key_link_map[key] = link
            return value
        return wrapper
    return decorator
方案四:list双向链表 + dict (标准库使用方式)
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
# Field indices of a link cell: each node is [prev_link, next_link, key, value].
PREV, NEXT, KEY, VALUE = 0, 1, 2, 3
def lru_cache_4(max_size=128):
    # LRU cache, variant 4 (the CPython stdlib technique): a circular
    # doubly linked list built from plain 4-element lists, plus a dict
    # mapping key -> link.  root[NEXT] is the oldest entry, root[PREV]
    # the most recent.  NOTE: relies on a module-level make_key helper.
    root = []
    # A one-node circular list: the sentinel root points at itself.
    root[:] = root, root, None, None
    cache = {}
    # Bind hot lookups to locals once (stdlib micro-optimization).
    cache_get = cache.get
    cache_len = cache.__len__
    def decorator(func):
        def wrapper(*args, **kwargs):
            # `root` is rebound below when the full-cache path recycles it.
            nonlocal root
            key = make_key(args, kwargs)
            link = cache_get(key)
            if link is not None:
                # Hit: splice the link out of its current position...
                link_prev, link_next, _, value = link

                link_prev[NEXT] = link_next
                link_next[PREV] = link_prev

                # ...and re-insert it just before root (most-recent end).
                last = root[PREV]
                last[NEXT] = root[PREV] = link
                link[PREV] = last
                link[NEXT] = root

                return value

            value = func(*args, **kwargs)

            if cache_len() >= max_size:
                # Full: recycle the current root cell as the node for the
                # new entry (it is already in the right list position)...
                old_root = root
                old_root[KEY] = key
                old_root[VALUE] = value

                # ...then promote the oldest node to be the new sentinel
                # root, which simultaneously evicts its entry.
                root = old_root[NEXT]
                old_key = root[KEY]
                del cache[old_key]
                cache[key] = old_root
                root[KEY] = root[VALUE] = None

            else:
                # Not full: append a fresh link at the most-recent end.
                last = root[PREV]
                link = [last, root, key, value]
                last[NEXT] = root[PREV] = cache[key] = link

            return value
        return wrapper
    return decorator
缓存池污染

对于类似 MySQL 中的 LRU 缓存, 可能因为一次全表扫描导致热点数据失效。可将链表按一定比例分为新、老生代两部分: 最近被访问的数据先进入老生代, 只有在一定时间内被多次访问才会晋升到新生代, 这样保证新生代的热点数据不会被污染。

参考