"""functools.py - Tools for working with functions and callable objects
"""

# Python module wrapper for _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>,
# Raymond Hettinger <python at rcn.com>,
# and Łukasz Langa <lukasz at langa.pl>.
# Copyright (C) 2006-2013 Python Software Foundation.
# See C source code for _functools credits/copyright
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
           'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
           'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
           'cached_property']
from abc import get_cache_token
from collections import namedtuple
# import types, weakref # Deferred to single_dispatch()
from reprlib import recursive_repr
from _thread import RLock
from types import GenericAlias
################################################################################
### update_wrapper() and wraps() decorator
################################################################################
# update_wrapper() and wraps() are tools to help write
# wrapper functions that can handle naive introspection
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
                       '__annotations__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
"""Update a wrapper function to look like the wrapped function
wrapper is the function to be updated
wrapped is the original function
assigned is a tuple naming the attributes assigned directly
from the wrapped function to the wrapper function (defaults to
functools.WRAPPER_ASSIGNMENTS)
updated is a tuple naming the attributes of the wrapper that
are updated with the corresponding attribute from the wrapped
       function (defaults to functools.WRAPPER_UPDATES)
    """
    for attr in assigned:
        try:
            value = getattr(wrapped, attr)
        except AttributeError:
            pass
        else:
            setattr(wrapper, attr, value)
    for attr in updated:
        getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
# Issue #17482: set __wrapped__ last so we don't inadvertently copy it
# from the wrapped function when updating __dict__
wrapper.__wrapped__ = wrapped
    # Return the wrapper so this can be used as a decorator via partial()
    return wrapper

def wraps(wrapped,
assigned = WRAPPER_ASSIGNMENTS,
updated = WRAPPER_UPDATES):
"""Decorator factory to apply update_wrapper() to a wrapper function
Returns a decorator that invokes update_wrapper() with the decorated
function as the wrapper argument and the arguments to wraps() as the
remaining arguments. Default arguments are as for update_wrapper().
       This is a convenience function to simplify applying partial() to
       update_wrapper().
    """
return partial(update_wrapper, wrapped=wrapped,
assigned=assigned, updated=updated)
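# Illustrative usage sketch (not part of the module): a typical decorator that
# applies @wraps so the wrapper preserves the wrapped function's metadata.
# The names `log_calls` and `greet` are hypothetical.
#
#     def log_calls(func):
#         @wraps(func)
#         def wrapper(*args, **kwargs):
#             print(f"calling {func.__name__}")
#             return func(*args, **kwargs)
#         return wrapper
#
#     @log_calls
#     def greet(name):
#         """Say hello."""
#         return f"hello {name}"
#
#     # greet.__name__ == 'greet' and greet.__doc__ == 'Say hello.',
#     # rather than the wrapper's own metadata.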
################################################################################
### total_ordering class decorator
################################################################################
# The total ordering functions all invoke the root magic method directly
# rather than using the corresponding operator. This avoids possible
# infinite recursion that could occur when the operator dispatch logic
# detects a NotImplemented result and then calls a reflected method.
def _gt_from_lt(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).'
op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result and self != other
def _le_from_lt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result or self == other
def _ge_from_lt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a < b).'
op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result
def _ge_from_le(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).'
op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result or self == other
def _lt_from_le(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).'
op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result and self != other
def _gt_from_le(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a <= b).'
op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result
def _lt_from_gt(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).'
op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result and self != other
def _ge_from_gt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result or self == other
def _le_from_gt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a > b).'
op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result
def _le_from_ge(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).'
op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result or self == other
def _gt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).'
op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result and self != other
def _lt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a >= b).'
op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result

_convert = {
    '__lt__': [('__gt__', _gt_from_lt),
               ('__le__', _le_from_lt),
               ('__ge__', _ge_from_lt)],
    '__le__': [('__ge__', _ge_from_le),
               ('__lt__', _lt_from_le),
               ('__gt__', _gt_from_le)],
    '__gt__': [('__lt__', _lt_from_gt),
               ('__ge__', _ge_from_gt),
               ('__le__', _le_from_gt)],
    '__ge__': [('__le__', _le_from_ge),
               ('__gt__', _gt_from_ge),
               ('__lt__', _lt_from_ge)]
}

def total_ordering(cls):
"""Class decorator that fills in missing ordering methods"""
# Find user-defined comparisons (not those inherited from object).
roots = {op for op in _convert if getattr(cls, op, None) is not getattr(object, op, None)}
    if not roots:
        raise ValueError('must define at least one ordering operation: < > <= >=')
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
for opname, opfunc in _convert[root]:
        if opname not in roots:
            opfunc.__name__ = opname
            setattr(cls, opname, opfunc)
    return cls
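# Illustrative usage sketch (not part of the module): defining __eq__ plus one
# ordering method lets @total_ordering supply the remaining three.  The class
# name `Version` is hypothetical.
#
#     @total_ordering
#     class Version:
#         def __init__(self, major, minor):
#             self.major, self.minor = major, minor
#         def __eq__(self, other):
#             return (self.major, self.minor) == (other.major, other.minor)
#         def __lt__(self, other):
#             return (self.major, self.minor) < (other.major, other.minor)
#
#     # Version(1, 2) >= Version(1, 1) now works via the generated __ge__.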
################################################################################
### cmp_to_key() function converter
################################################################################

def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function"""
    class K(object):
        __slots__ = ['obj']
        def __init__(self, obj):
            self.obj = obj
        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0
        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0
        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0
        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0
        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0
        __hash__ = None
    return K

from _functools import cmp_to_key
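# Illustrative usage sketch (not part of the module): adapting an old-style
# three-way comparison function for use with sorted().  `numeric_compare` is a
# hypothetical cmp-style function.
#
#     def numeric_compare(x, y):
#         return (x > y) - (x < y)      # -1, 0, or 1, like the old cmp()
#
#     sorted([5, 2, 4, 1, 3], key=cmp_to_key(numeric_compare))
#     # -> [1, 2, 3, 4, 5]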
################################################################################
### reduce() sequence to a single item
################################################################################
_initial_missing = object()
def reduce(function, sequence, initial=_initial_missing):
    """
    reduce(function, sequence[, initial]) -> value
Apply a function of two arguments cumulatively to the items of a sequence,
from left to right, so as to reduce the sequence to a single value.
For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
((((1+2)+3)+4)+5). If initial is present, it is placed before the items
    of the sequence in the calculation, and serves as a default when the
    sequence is empty.
    """
    it = iter(sequence)
    if initial is _initial_missing:
        try:
            value = next(it)
        except StopIteration:
            raise TypeError("reduce() of empty sequence with no initial value") from None
    else:
        value = initial
    for element in it:
        value = function(value, element)
    return value
from _functools import reduce
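# Illustrative usage sketch (not part of the module): folding a sequence with
# and without an initial value.
#
#     reduce(lambda x, y: x + y, [1, 2, 3, 4, 5])    # -> 15
#     reduce(lambda x, y: x + y, [], 0)              # -> 0 (initial value is
#                                                    #    used for empty input)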
################################################################################
### partial() argument application
################################################################################
# Purely functional, no descriptor behaviour
class partial:
    """New function with partial application of the given arguments
    and keywords.
    """
__slots__ = "func", "args", "keywords", "__dict__", "__weakref__"
def __new__(cls, func, /, *args, **keywords):
        if not callable(func):
            raise TypeError("the first argument must be callable")

        if hasattr(func, "func"):
            args = func.args + args
            keywords = {**func.keywords, **keywords}
            func = func.func

        self = super(partial, cls).__new__(cls)
        self.func = func
        self.args = args
        self.keywords = keywords
        return self
def __call__(self, /, *args, **keywords):
keywords = {**self.keywords, **keywords}
return self.func(*self.args, *args, **keywords)
    @recursive_repr()
    def __repr__(self):
        qualname = type(self).__qualname__
        args = [repr(self.func)]
        args.extend(repr(x) for x in self.args)
args.extend(f"{k}={v!r}" for (k, v) in self.keywords.items())
if type(self).__module__ == "functools":
return f"functools.{qualname}({', '.join(args)})"
return f"{qualname}({', '.join(args)})"
    def __reduce__(self):
        return type(self), (self.func,), (self.func, self.args,
self.keywords or None, self.__dict__ or None)
def __setstate__(self, state):
if not isinstance(state, tuple):
raise TypeError("argument to __setstate__ must be a tuple")
        if len(state) != 4:
            raise TypeError(f"expected 4 items in state, got {len(state)}")
func, args, kwds, namespace = state
if (not callable(func) or not isinstance(args, tuple) or
(kwds is not None and not isinstance(kwds, dict)) or
(namespace is not None and not isinstance(namespace, dict))):
raise TypeError("invalid partial state")
        args = tuple(args)  # just in case it's a subclass
        if kwds is None:
            kwds = {}
        elif type(kwds) is not dict:  # XXX does it need to be *exactly* dict?
            kwds = dict(kwds)
        if namespace is None:
            namespace = {}

        self.__dict__ = namespace
        self.func = func
        self.args = args
        self.keywords = kwds
from _functools import partial
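# Illustrative usage sketch (not part of the module): freezing arguments with
# partial().  The name `basetwo` is hypothetical.
#
#     basetwo = partial(int, base=2)
#     basetwo("10010")          # -> 18, equivalent to int("10010", base=2)
#     basetwo.func, basetwo.args, basetwo.keywords
#     # -> (<class 'int'>, (), {'base': 2})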
class partialmethod(object):
    """Method descriptor with partial application of the given arguments
    and keywords.

    Supports wrapping existing descriptors and handles non-descriptor
    callables as instance methods.
    """
def __init__(self, func, /, *args, **keywords):
if not callable(func) and not hasattr(func, "__get__"):
            raise TypeError("{!r} is not callable or a descriptor"
                            .format(func))
# func could be a descriptor like classmethod which isn't callable,
# so we can't inherit from partial (it verifies func is callable)
if isinstance(func, partialmethod):
            # flattening is mandatory in order to place cls/self before all
            # other arguments
# it's also more efficient since only one function will be called
            self.func = func.func
            self.args = func.args + args
            self.keywords = {**func.keywords, **keywords}
        else:
            self.func = func
            self.args = args
            self.keywords = keywords
    def __repr__(self):
        args = ", ".join(map(repr, self.args))
keywords = ", ".join("{}={!r}".format(k, v)
for k, v in self.keywords.items())
format_string = "{module}.{cls}({func}, {args}, {keywords})"
return format_string.format(module=self.__class__.__module__,
                                    cls=self.__class__.__qualname__,
                                    func=self.func,
                                    args=args,
                                    keywords=keywords)
def _make_unbound_method(self):
def _method(cls_or_self, /, *args, **keywords):
keywords = {**self.keywords, **keywords}
return self.func(cls_or_self, *self.args, *args, **keywords)
_method.__isabstractmethod__ = self.__isabstractmethod__
        _method._partialmethod = self
        return _method
def __get__(self, obj, cls=None):
        get = getattr(self.func, "__get__", None)
        result = None
        if get is not None:
            new_func = get(obj, cls)
if new_func is not self.func:
# Assume __get__ returning something new indicates the
# creation of an appropriate callable
result = partial(new_func, *self.args, **self.keywords)
                try:
                    result.__self__ = new_func.__self__
                except AttributeError:
                    pass
        if result is None:
            # If the underlying descriptor didn't do anything, treat this
            # like an instance method
            result = self._make_unbound_method().__get__(obj, cls)
        return result
    @property
    def __isabstractmethod__(self):
return getattr(self.func, "__isabstractmethod__", False)
__class_getitem__ = classmethod(GenericAlias)
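# Illustrative usage sketch (not part of the module): partialmethod() pre-fills
# the trailing arguments of a method; self is still passed first.  The class
# and attribute names are hypothetical.
#
#     class Cell:
#         def __init__(self):
#             self._alive = False
#         @property
#         def alive(self):
#             return self._alive
#         def set_state(self, state):
#             self._alive = bool(state)
#         set_alive = partialmethod(set_state, True)
#         set_dead = partialmethod(set_state, False)
#
#     c = Cell()
#     c.set_alive()
#     c.alive                   # -> True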
def _unwrap_partial(func):
    while isinstance(func, partial):
        func = func.func
    return func
################################################################################
### LRU Cache function decorator
################################################################################
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])

class _HashedSeq(list):
    """ This class guarantees that hash() will be called no more than once
per element. This is important because the lru_cache() will hash
        the key multiple times on a cache miss.

    """

    __slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue
def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str},
              tuple=tuple, type=type, len=len):
"""Make a cache key from optionally typed positional and keyword arguments
The key is constructed in a way that is flat as possible rather than
as a nested structure that would take more memory.
If there is only a single argument and its data type is known to cache
its hash value, then that argument is returned without a wrapper. This
    saves space and improves lookup speed.

    """
# All of code below relies on kwds preserving the order input by the user.
# Formerly, we sorted() the kwds before looping. The new way is *much*
# faster; however, it means that f(x=1, y=2) will now be treated as a
# distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
        key += kwd_mark
        for item in kwds.items():
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)
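# Illustrative sketch (not part of the module) of what the internal key looks
# like.  These values assume the default kwd_mark/fasttypes above and are for
# explanation only; _make_key is private and its layout may change.
#
#     _make_key((1, 2), {}, False)        # flat key: _HashedSeq((1, 2))
#     _make_key((1,), {}, False)          # single fast-type arg: returned as 1
#     _make_key((1,), {'a': 2}, False)    # (1, <kwd_mark>, 'a', 2) in a
#                                         # _HashedSeq
#     _make_key((1,), {}, True)           # typed: _HashedSeq((1, int))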
def lru_cache(maxsize=128, typed=False):
"""Least-recently-used cache decorator.
    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.
If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize)
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
    See:  https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """
# Users should only access the lru_cache through its public API:
# cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).