X7ROOT File Manager
Current Path: /opt/cloudlinux/venv/lib/python3.11/site-packages/astroid/brain
📁 ..
📄 __init__.py (0 B)
📁 __pycache__
📄 brain_argparse.py (1.52 KB)
📄 brain_attrs.py (2.97 KB)
📄 brain_boto3.py (1012 B)
📄 brain_builtin_inference.py (33.45 KB)
📄 brain_collections.py (4.31 KB)
📄 brain_crypt.py (863 B)
📄 brain_ctypes.py (2.6 KB)
📄 brain_curses.py (3.4 KB)
📄 brain_dataclasses.py (21.57 KB)
📄 brain_dateutil.py (767 B)
📄 brain_fstrings.py (2.41 KB)
📄 brain_functools.py (5.85 KB)
📄 brain_gi.py (7.37 KB)
📄 brain_hashlib.py (2.75 KB)
📄 brain_http.py (10.39 KB)
📄 brain_hypothesis.py (1.69 KB)
📄 brain_io.py (1.49 KB)
📄 brain_mechanize.py (2.58 KB)
📄 brain_multiprocessing.py (3.14 KB)
📄 brain_namedtuple_enum.py (22.19 KB)
📄 brain_nose.py (2.27 KB)
📄 brain_numpy_core_einsumfunc.py (825 B)
📄 brain_numpy_core_fromnumeric.py (732 B)
📄 brain_numpy_core_function_base.py (1.27 KB)
📄 brain_numpy_core_multiarray.py (4.14 KB)
📄 brain_numpy_core_numeric.py (1.59 KB)
📄 brain_numpy_core_numerictypes.py (8.35 KB)
📄 brain_numpy_core_umath.py (4.78 KB)
📄 brain_numpy_ma.py (896 B)
📄 brain_numpy_ndarray.py (8.79 KB)
📄 brain_numpy_random_mtrand.py (3.36 KB)
📄 brain_numpy_utils.py (2.58 KB)
📄 brain_pathlib.py (1.51 KB)
📄 brain_pkg_resources.py (2.15 KB)
📄 brain_pytest.py (2.17 KB)
📄 brain_qt.py (2.74 KB)
📄 brain_random.py (2.82 KB)
📄 brain_re.py (2.8 KB)
📄 brain_regex.py (3.28 KB)
📄 brain_responses.py (1.82 KB)
📄 brain_scipy_signal.py (2.22 KB)
📄 brain_signal.py (3.79 KB)
📄 brain_six.py (7.42 KB)
📄 brain_sqlalchemy.py (1009 B)
📄 brain_ssl.py (6.4 KB)
📄 brain_subprocess.py (2.93 KB)
📄 brain_threading.py (870 B)
📄 brain_type.py (2.42 KB)
📄 brain_typing.py (13.98 KB)
📄 brain_unittest.py (1.12 KB)
📄 brain_uuid.py (667 B)
📄 helpers.py (908 B)
Editing: brain_functools.py
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
# Copyright (c) https://github.com/PyCQA/astroid/blob/main/CONTRIBUTORS.txt

"""Astroid hooks for understanding functools library module."""

from __future__ import annotations

from collections.abc import Iterator
from functools import partial
from itertools import chain

from astroid import BoundMethod, arguments, extract_node, helpers, nodes, objects
from astroid.context import InferenceContext
from astroid.exceptions import InferenceError, UseInferenceDefault
from astroid.inference_tip import inference_tip
from astroid.interpreter import objectmodel
from astroid.manager import AstroidManager
from astroid.nodes.node_classes import AssignName, Attribute, Call, Name
from astroid.nodes.scoped_nodes import FunctionDef
from astroid.util import UninferableBase

LRU_CACHE = "functools.lru_cache"


class LruWrappedModel(objectmodel.FunctionModel):
    """Special attribute model for functions decorated with functools.lru_cache.

    The said decorators patches at decoration time some functions onto
    the decorated function.
    """

    @property
    def attr___wrapped__(self):
        return self._instance

    @property
    def attr_cache_info(self):
        cache_info = extract_node(
            """
            from functools import _CacheInfo
            _CacheInfo(0, 0, 0, 0)
            """
        )

        class CacheInfoBoundMethod(BoundMethod):
            def infer_call_result(
                self, caller, context: InferenceContext | None = None
            ):
                yield helpers.safe_infer(cache_info)

        return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)

    @property
    def attr_cache_clear(self):
        node = extract_node("""def cache_clear(self): pass""")
        return BoundMethod(proxy=node, bound=self._instance.parent.scope())


def _transform_lru_cache(node, context: InferenceContext | None = None) -> None:
    # TODO: this is not ideal, since the node should be immutable,
    # but due to https://github.com/PyCQA/astroid/issues/354,
    # there's not much we can do now.
    # Replacing the node would work partially, because,
    # in pylint, the old node would still be available, leading
    # to spurious false positives.
    node.special_attributes = LruWrappedModel()(node)


def _functools_partial_inference(
    node: nodes.Call, context: InferenceContext | None = None
) -> Iterator[objects.PartialFunction]:
    call = arguments.CallSite.from_call(node, context=context)
    number_of_positional = len(call.positional_arguments)
    if number_of_positional < 1:
        raise UseInferenceDefault("functools.partial takes at least one argument")
    if number_of_positional == 1 and not call.keyword_arguments:
        raise UseInferenceDefault(
            "functools.partial needs at least to have some filled arguments"
        )

    partial_function = call.positional_arguments[0]
    try:
        inferred_wrapped_function = next(partial_function.infer(context=context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc
    if isinstance(inferred_wrapped_function, UninferableBase):
        raise UseInferenceDefault("Cannot infer the wrapped function")
    if not isinstance(inferred_wrapped_function, FunctionDef):
        raise UseInferenceDefault("The wrapped function is not a function")

    # Determine if the passed keywords into the callsite are supported
    # by the wrapped function.
    if not inferred_wrapped_function.args:
        function_parameters = []
    else:
        function_parameters = chain(
            inferred_wrapped_function.args.args or (),
            inferred_wrapped_function.args.posonlyargs or (),
            inferred_wrapped_function.args.kwonlyargs or (),
        )
    parameter_names = {
        param.name for param in function_parameters if isinstance(param, AssignName)
    }
    if set(call.keyword_arguments) - parameter_names:
        raise UseInferenceDefault("wrapped function received unknown parameters")

    partial_function = objects.PartialFunction(
        call,
        name=inferred_wrapped_function.name,
        lineno=inferred_wrapped_function.lineno,
        col_offset=inferred_wrapped_function.col_offset,
        parent=node.parent,
    )
    partial_function.postinit(
        args=inferred_wrapped_function.args,
        body=inferred_wrapped_function.body,
        decorators=inferred_wrapped_function.decorators,
        returns=inferred_wrapped_function.returns,
        type_comment_returns=inferred_wrapped_function.type_comment_returns,
        type_comment_args=inferred_wrapped_function.type_comment_args,
        doc_node=inferred_wrapped_function.doc_node,
    )
    return iter((partial_function,))


def _looks_like_lru_cache(node) -> bool:
    """Check if the given function node is decorated with lru_cache."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        if not isinstance(decorator, Call):
            continue
        if _looks_like_functools_member(decorator, "lru_cache"):
            return True
    return False


def _looks_like_functools_member(node, member) -> bool:
    """Check if the given Call node is a functools.partial call."""
    if isinstance(node.func, Name):
        return node.func.name == member
    if isinstance(node.func, Attribute):
        return (
            node.func.attrname == member
            and isinstance(node.func.expr, Name)
            and node.func.expr.name == "functools"
        )
    return False


_looks_like_partial = partial(_looks_like_functools_member, member="partial")


AstroidManager().register_transform(
    FunctionDef, _transform_lru_cache, _looks_like_lru_cache
)

AstroidManager().register_transform(
    Call,
    inference_tip(_functools_partial_inference),
    _looks_like_partial,
)
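For context, a minimal sketch of what this brain module enables when using astroid directly: astroid registers its brain plugins on import, so a functools.partial call over a known function should infer to an objects.PartialFunction rather than remaining uninferable. The snippet below is illustrative only; it assumes an installed astroid and uses extract_node's #@ marker to select the call node.

import astroid

# With the brain_functools inference tip registered (done automatically when
# astroid is imported), the partial(...) call infers to a PartialFunction.
call_node = astroid.extract_node(
    """
    import functools

    def add(a, b):
        return a + b

    functools.partial(add, 1)  #@
    """
)

inferred = next(call_node.infer())
print(type(inferred).__name__)  # expected: PartialFunction
print(inferred.name)            # expected: add (the wrapped function's name)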