# protocols.py
  1. # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
  2. # For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
  3. # Copyright (c) https://github.com/PyCQA/astroid/blob/main/CONTRIBUTORS.txt
  4. """This module contains a set of functions to handle python protocols for nodes
  5. where it makes sense.
  6. """
  7. from __future__ import annotations
  8. import collections
  9. import itertools
  10. import operator as operator_mod
  11. from collections.abc import Callable, Generator, Iterator, Sequence
  12. from typing import Any, TypeVar
  13. from astroid import arguments, bases, decorators, helpers, nodes, util
  14. from astroid.const import Context
  15. from astroid.context import InferenceContext, copy_context
  16. from astroid.exceptions import (
  17. AstroidIndexError,
  18. AstroidTypeError,
  19. AttributeInferenceError,
  20. InferenceError,
  21. NoDefault,
  22. )
  23. from astroid.nodes import node_classes
  24. from astroid.typing import (
  25. ConstFactoryResult,
  26. InferenceResult,
  27. SuccessfulInferenceResult,
  28. )
# Imported lazily to break import cycles at module load time.
raw_building = util.lazy_import("raw_building")
objects = util.lazy_import("objects")

# Type variable constrained to the two sequence node types that share the
# tuple/list protocol implementations below.
_TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List)
  32. def _reflected_name(name) -> str:
  33. return "__r" + name[2:]
  34. def _augmented_name(name) -> str:
  35. return "__i" + name[2:]
# Qualified name of the stdlib decorator recognized when inferring
# generator-based context managers.
_CONTEXTLIB_MGR = "contextlib.contextmanager"

# Binary operator token -> dunder method implementing it on the left operand.
BIN_OP_METHOD = {
    "+": "__add__",
    "-": "__sub__",
    "/": "__truediv__",
    "//": "__floordiv__",
    "*": "__mul__",
    "**": "__pow__",
    "%": "__mod__",
    "&": "__and__",
    "|": "__or__",
    "^": "__xor__",
    "<<": "__lshift__",
    ">>": "__rshift__",
    "@": "__matmul__",
}

# Same operators dispatched on the right operand: "+" -> "__radd__", ...
REFLECTED_BIN_OP_METHOD = {
    key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items()
}
# Augmented-assignment forms: "+=" -> "__iadd__", ...
AUGMENTED_OP_METHOD = {
    key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items()
}

# Unary operator token -> dunder method name ("not" has no dunder).
UNARY_OP_METHOD = {
    "+": "__pos__",
    "-": "__neg__",
    "~": "__invert__",
    "not": None,  # XXX not '__nonzero__'
}
# Unary operator token -> callable evaluating it on a concrete Python value.
_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}
  70. def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult:
  71. """Perform unary operation on `obj`, unless it is `NotImplemented`.
  72. Can raise TypeError if operation is unsupported.
  73. """
  74. if obj is NotImplemented:
  75. value = obj
  76. else:
  77. func = _UNARY_OPERATORS[op]
  78. value = func(obj)
  79. return nodes.const_factory(value)
# Register unary-op inference on node types whose contents can be turned
# into the equivalent concrete Python value before delegating.
nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)
  85. # Binary operations
  86. BIN_OP_IMPL = {
  87. "+": lambda a, b: a + b,
  88. "-": lambda a, b: a - b,
  89. "/": lambda a, b: a / b,
  90. "//": lambda a, b: a // b,
  91. "*": lambda a, b: a * b,
  92. "**": lambda a, b: a**b,
  93. "%": lambda a, b: a % b,
  94. "&": lambda a, b: a & b,
  95. "|": lambda a, b: a | b,
  96. "^": lambda a, b: a ^ b,
  97. "<<": lambda a, b: a << b,
  98. ">>": lambda a, b: a >> b,
  99. "@": operator_mod.matmul,
  100. }
  101. for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
  102. BIN_OP_IMPL[_KEY + "="] = _IMPL
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(
    self: nodes.Const,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    _: SuccessfulInferenceResult,
) -> Generator[ConstFactoryResult | util.UninferableBase, None, None]:
    """Infer a binary operation whose left operand is a Const.

    Yields a Const wrapping the computed value, a Const wrapping
    ``NotImplemented`` when the operation does not apply, or Uninferable
    when evaluation fails for an unexpected reason.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        # Refuse to evaluate huge exponentiations: they could take an
        # effectively unbounded amount of time/memory.
        if (
            operator == "**"
            and isinstance(self.value, (int, float))
            and isinstance(other.value, (int, float))
            and (self.value > 1e5 or other.value > 1e5)
        ):
            yield not_implemented
            return
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented


nodes.Const.infer_binary_op = const_infer_binary_op
  139. def _multiply_seq_by_int(
  140. self: _TupleListNodeT,
  141. opnode: nodes.AugAssign | nodes.BinOp,
  142. value: int,
  143. context: InferenceContext,
  144. ) -> _TupleListNodeT:
  145. node = self.__class__(parent=opnode)
  146. if value > 1e8:
  147. node.elts = [util.Uninferable]
  148. return node
  149. filtered_elts = (
  150. helpers.safe_infer(elt, context) or util.Uninferable
  151. for elt in self.elts
  152. if not isinstance(elt, util.UninferableBase)
  153. )
  154. node.elts = list(filtered_elts) * value
  155. return node
  156. def _filter_uninferable_nodes(
  157. elts: Sequence[InferenceResult], context: InferenceContext
  158. ) -> Iterator[SuccessfulInferenceResult]:
  159. for elt in elts:
  160. if isinstance(elt, util.UninferableBase):
  161. yield nodes.Unknown()
  162. else:
  163. for inferred in elt.infer(context):
  164. if not isinstance(inferred, util.UninferableBase):
  165. yield inferred
  166. else:
  167. yield nodes.Unknown()
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase, None, None]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'
    """
    # For tuples and list the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        # Concatenation of two sequences of the same type.
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        # Repetition by an integer constant.
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other.value, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        elif not isinstance(as_index.value, int):  # pragma: no cover
            # already checked by class_instance_as_index() but faster than casting
            raise AssertionError("Please open a bug report.")
        else:
            yield _multiply_seq_by_int(self, opnode, as_index.value, context)
    else:
        yield not_implemented


nodes.Tuple.infer_binary_op = tl_infer_binary_op
nodes.List.infer_binary_op = tl_infer_binary_op
  213. @decorators.yes_if_nothing_inferred
  214. def instance_class_infer_binary_op(
  215. self: bases.Instance | nodes.ClassDef,
  216. opnode: nodes.AugAssign | nodes.BinOp,
  217. operator: str,
  218. other: InferenceResult,
  219. context: InferenceContext,
  220. method: SuccessfulInferenceResult,
  221. ) -> Generator[InferenceResult, None, None]:
  222. return method.infer_call_result(self, context)
  223. bases.Instance.infer_binary_op = instance_class_infer_binary_op
  224. nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op
# assignment ##################################################################
"""The assigned_stmts method is responsible for returning the assigned statement
(e.g. not inferred) according to the assignment type.

The `assign_path` argument is used to record the lhs path of the original node.
For instance if we want assigned statements for 'c' in 'a, (b, c)', assign_path
will be [1, 1] once arrived to the Assign node.

The `context` argument is the current inference context which should be given
to any intermediary inference necessary.
"""
def _resolve_looppart(parts, assign_path, context):
    """Recursive function to resolve multiple assignments on loops.

    `parts` are the inferred values of the iterable being looped over and
    `assign_path` the remaining indices leading to the target of interest
    inside the loop's unpacking pattern.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if isinstance(part, util.UninferableBase):
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        try:
            if isinstance(itered[index], (nodes.Const, nodes.Name)):
                # Scalar item at this index: treat the container itself as
                # the single candidate to index into below.
                itered = [part]
        except IndexError:
            pass
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError, AstroidTypeError, AstroidIndexError):
                continue
            if not assign_path:
                # the assignment path is fully resolved,
                # don't infer the last part
                yield assigned
            elif isinstance(assigned, util.UninferableBase):
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except InferenceError:
                    break
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(
    self: nodes.For | nodes.Comprehension,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the values bound to the target of a For/Comprehension.

    The trailing dict return value is consumed by @raise_if_nothing_inferred
    (via StopIteration.value) to build a useful error when nothing was
    yielded.
    """
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return {
            "node": self,
            "unknown": node,
            "assign_path": assign_path,
            "context": context,
        }
    if assign_path is None:
        # Simple target: every element of the inferred iterable is a candidate.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        # Nested unpacking target: walk down the recorded index path.
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


nodes.For.assigned_stmts = for_assigned_stmts
nodes.Comprehension.assigned_stmts = for_assigned_stmts
  302. def sequence_assigned_stmts(
  303. self: nodes.Tuple | nodes.List,
  304. node: node_classes.AssignedStmtsPossibleNode = None,
  305. context: InferenceContext | None = None,
  306. assign_path: list[int] | None = None,
  307. ) -> Any:
  308. if assign_path is None:
  309. assign_path = []
  310. try:
  311. index = self.elts.index(node) # type: ignore[arg-type]
  312. except ValueError as exc:
  313. raise InferenceError(
  314. "Tried to retrieve a node {node!r} which does not exist",
  315. node=self,
  316. assign_path=assign_path,
  317. context=context,
  318. ) from exc
  319. assign_path.insert(0, index)
  320. return self.parent.assigned_stmts(
  321. node=self, context=context, assign_path=assign_path
  322. )
# Register the sequence protocol on both container target types.
nodes.Tuple.assigned_stmts = sequence_assigned_stmts
nodes.List.assigned_stmts = sequence_assigned_stmts
  325. def assend_assigned_stmts(
  326. self: nodes.AssignName | nodes.AssignAttr,
  327. node: node_classes.AssignedStmtsPossibleNode = None,
  328. context: InferenceContext | None = None,
  329. assign_path: list[int] | None = None,
  330. ) -> Any:
  331. return self.parent.assigned_stmts(node=self, context=context)
# Register the protocol on both assignment-target node types.
nodes.AssignName.assigned_stmts = assend_assigned_stmts
nodes.AssignAttr.assigned_stmts = assend_assigned_stmts
def _arguments_infer_argname(
    self, name: str | None, context: InferenceContext
) -> Generator[InferenceResult, None, None]:
    """Infer the possible values of the parameter `name` of this Arguments
    node: implicit self/cls, call-site values, vararg/kwarg containers, or
    the declared default, falling back to Uninferable.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.arguments or self.vararg or self.kwarg):
        yield util.Uninferable
        return

    functype = self.parent.type
    # first argument of instance/class method
    if (
        self.arguments
        and getattr(self.arguments[0], "name", None) == name
        and functype != "staticmethod"
    ):
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        # Only use the call site when the call actually targets this function
        # (unwrap proxies to compare the underlying callee).
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        # *args: an (empty) tuple node parented to this Arguments node.
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not self.arguments and self.parent.name == "__init__":
            # ``def __init__(*args)``: seed with an instance of the class.
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs: an empty dict node parented to this Arguments node.
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable
def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the value(s) assigned to the parameter `node` of this function,
    preferring the active call site's arguments when one matches."""
    try:
        node_name = node.name  # type: ignore[union-attr]
    except AttributeError:
        # Added to handle edge cases where node.name is not defined.
        # https://github.com/PyCQA/astroid/pull/1644#discussion_r901545816
        node_name = None  # pragma: no cover

    if context and context.callcontext:
        # Unwrap proxies to reach the actually-called function.
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        return _arguments_infer_argname(self, node_name, context)
    if node and getattr(callee, "name", None) == node.frame(future=True).name:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node_name, context)
    return _arguments_infer_argname(self, node_name, context)


nodes.Arguments.assigned_stmts = arguments_assigned_stmts
  417. @decorators.raise_if_nothing_inferred
  418. def assign_assigned_stmts(
  419. self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign,
  420. node: node_classes.AssignedStmtsPossibleNode = None,
  421. context: InferenceContext | None = None,
  422. assign_path: list[int] | None = None,
  423. ) -> Any:
  424. if not assign_path:
  425. yield self.value
  426. return None
  427. yield from _resolve_assignment_parts(
  428. self.value.infer(context), assign_path, context
  429. )
  430. return {
  431. "node": self,
  432. "unknown": node,
  433. "assign_path": assign_path,
  434. "context": context,
  435. }
  436. def assign_annassigned_stmts(
  437. self: nodes.AnnAssign,
  438. node: node_classes.AssignedStmtsPossibleNode = None,
  439. context: InferenceContext | None = None,
  440. assign_path: list[int] | None = None,
  441. ) -> Any:
  442. for inferred in assign_assigned_stmts(self, node, context, assign_path):
  443. if inferred is None:
  444. yield util.Uninferable
  445. else:
  446. yield inferred
# Register the assignment protocols; AugAssign shares the plain-Assign one.
nodes.Assign.assigned_stmts = assign_assigned_stmts
nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
nodes.AugAssign.assigned_stmts = assign_assigned_stmts
def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments.

    `parts` are candidate rhs values; `assign_path` is the remaining list
    of indices leading to the target inside a nested unpacking pattern.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return
        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return

        if not assigned:
            return

        if not assign_path:
            # the assignment path is fully resolved, don't infer the
            # last part
            yield assigned
        elif isinstance(assigned, util.UninferableBase):
            return
        else:
            # we are not yet on the last part of the path search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return
  485. @decorators.raise_if_nothing_inferred
  486. def excepthandler_assigned_stmts(
  487. self: nodes.ExceptHandler,
  488. node: node_classes.AssignedStmtsPossibleNode = None,
  489. context: InferenceContext | None = None,
  490. assign_path: list[int] | None = None,
  491. ) -> Any:
  492. for assigned in node_classes.unpack_infer(self.type):
  493. if isinstance(assigned, nodes.ClassDef):
  494. assigned = objects.ExceptionInstance(assigned)
  495. yield assigned
  496. return {
  497. "node": self,
  498. "unknown": node,
  499. "assign_path": assign_path,
  500. "context": context,
  501. }
  502. nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
def _infer_context_manager(self, mgr, context):
    """Infer the value bound by ``with mgr as ...``.

    Supports generator functions decorated with ``contextlib.contextmanager``
    (the bound value is what the generator yields) and regular instances
    (the bound value is what ``__enter__`` returns).
    """
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )

        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            # The ``as`` value is the generator's yield type.
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e

    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)
  537. @decorators.raise_if_nothing_inferred
  538. def with_assigned_stmts(
  539. self: nodes.With,
  540. node: node_classes.AssignedStmtsPossibleNode = None,
  541. context: InferenceContext | None = None,
  542. assign_path: list[int] | None = None,
  543. ) -> Any:
  544. """Infer names and other nodes from a *with* statement.
  545. This enables only inference for name binding in a *with* statement.
  546. For instance, in the following code, inferring `func` will return
  547. the `ContextManager` class, not whatever ``__enter__`` returns.
  548. We are doing this intentionally, because we consider that the context
  549. manager result is whatever __enter__ returns and what it is binded
  550. using the ``as`` keyword.
  551. class ContextManager(object):
  552. def __enter__(self):
  553. return 42
  554. with ContextManager() as f:
  555. pass
  556. # ContextManager().infer() will return ContextManager
  557. # f.infer() will return 42.
  558. Arguments:
  559. self: nodes.With
  560. node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
  561. context: Inference context used for caching already inferred objects
  562. assign_path:
  563. A list of indices, where each index specifies what item to fetch from
  564. the inference results.
  565. """
  566. try:
  567. mgr = next(mgr for (mgr, vars) in self.items if vars == node)
  568. except StopIteration:
  569. return None
  570. if assign_path is None:
  571. yield from _infer_context_manager(self, mgr, context)
  572. else:
  573. for result in _infer_context_manager(self, mgr, context):
  574. # Walk the assign_path and get the item at the final index.
  575. obj = result
  576. for index in assign_path:
  577. if not hasattr(obj, "elts"):
  578. raise InferenceError(
  579. "Wrong type ({targets!r}) for {node!r} assignment",
  580. node=self,
  581. targets=node,
  582. assign_path=assign_path,
  583. context=context,
  584. )
  585. try:
  586. obj = obj.elts[index]
  587. except IndexError as exc:
  588. raise InferenceError(
  589. "Tried to infer a nonexistent target with index {index} "
  590. "in {node!r}.",
  591. node=self,
  592. targets=node,
  593. assign_path=assign_path,
  594. context=context,
  595. ) from exc
  596. except TypeError as exc:
  597. raise InferenceError(
  598. "Tried to unpack a non-iterable value in {node!r}.",
  599. node=self,
  600. targets=node,
  601. assign_path=assign_path,
  602. context=context,
  603. ) from exc
  604. yield obj
  605. return {
  606. "node": self,
  607. "unknown": node,
  608. "assign_path": assign_path,
  609. "context": context,
  610. }
  611. nodes.With.assigned_stmts = with_assigned_stmts
  612. @decorators.raise_if_nothing_inferred
  613. def named_expr_assigned_stmts(
  614. self: nodes.NamedExpr,
  615. node: node_classes.AssignedStmtsPossibleNode,
  616. context: InferenceContext | None = None,
  617. assign_path: list[int] | None = None,
  618. ) -> Any:
  619. """Infer names and other nodes from an assignment expression."""
  620. if self.target == node:
  621. yield from self.value.infer(context=context)
  622. else:
  623. raise InferenceError(
  624. "Cannot infer NamedExpr node {node!r}",
  625. node=self,
  626. assign_path=assign_path,
  627. context=context,
  628. )
  629. nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts
  630. @decorators.yes_if_nothing_inferred
  631. def starred_assigned_stmts( # noqa: C901
  632. self: nodes.Starred,
  633. node: node_classes.AssignedStmtsPossibleNode = None,
  634. context: InferenceContext | None = None,
  635. assign_path: list[int] | None = None,
  636. ) -> Any:
  637. """
  638. Arguments:
  639. self: nodes.Starred
  640. node: a node related to the current underlying Node.
  641. context: Inference context used for caching already inferred objects
  642. assign_path:
  643. A list of indices, where each index specifies what item to fetch from
  644. the inference results.
  645. """
  646. # pylint: disable=too-many-locals,too-many-statements
  647. def _determine_starred_iteration_lookups(
  648. starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]]
  649. ) -> None:
  650. # Determine the lookups for the rhs of the iteration
  651. itered = target.itered()
  652. for index, element in enumerate(itered):
  653. if (
  654. isinstance(element, nodes.Starred)
  655. and element.value.name == starred.value.name
  656. ):
  657. lookups.append((index, len(itered)))
  658. break
  659. if isinstance(element, nodes.Tuple):
  660. lookups.append((index, len(element.itered())))
  661. _determine_starred_iteration_lookups(starred, element, lookups)
  662. stmt = self.statement(future=True)
  663. if not isinstance(stmt, (nodes.Assign, nodes.For)):
  664. raise InferenceError(
  665. "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
  666. node=self,
  667. stmt=stmt,
  668. unknown=node,
  669. context=context,
  670. )
  671. if context is None:
  672. context = InferenceContext()
  673. if isinstance(stmt, nodes.Assign):
  674. value = stmt.value
  675. lhs = stmt.targets[0]
  676. if not isinstance(lhs, nodes.BaseContainer):
  677. yield util.Uninferable
  678. return
  679. if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
  680. raise InferenceError(
  681. "Too many starred arguments in the assignment targets {lhs!r}.",
  682. node=self,
  683. targets=lhs,
  684. unknown=node,
  685. context=context,
  686. )
  687. try:
  688. rhs = next(value.infer(context))
  689. except (InferenceError, StopIteration):
  690. yield util.Uninferable
  691. return
  692. if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"):
  693. yield util.Uninferable
  694. return
  695. try:
  696. elts = collections.deque(rhs.itered()) # type: ignore[union-attr]
  697. except TypeError:
  698. yield util.Uninferable
  699. return
  700. # Unpack iteratively the values from the rhs of the assignment,
  701. # until the find the starred node. What will remain will
  702. # be the list of values which the Starred node will represent
  703. # This is done in two steps, from left to right to remove
  704. # anything before the starred node and from right to left
  705. # to remove anything after the starred node.
  706. for index, left_node in enumerate(lhs.elts):
  707. if not isinstance(left_node, nodes.Starred):
  708. if not elts:
  709. break
  710. elts.popleft()
  711. continue
  712. lhs_elts = collections.deque(reversed(lhs.elts[index:]))
  713. for right_node in lhs_elts:
  714. if not isinstance(right_node, nodes.Starred):
  715. if not elts:
  716. break
  717. elts.pop()
  718. continue
  719. # We're done unpacking.
  720. packed = nodes.List(
  721. ctx=Context.Store,
  722. parent=self,
  723. lineno=lhs.lineno,
  724. col_offset=lhs.col_offset,
  725. )
  726. packed.postinit(elts=list(elts))
  727. yield packed
  728. break
  729. if isinstance(stmt, nodes.For):
  730. try:
  731. inferred_iterable = next(stmt.iter.infer(context=context))
  732. except (InferenceError, StopIteration):
  733. yield util.Uninferable
  734. return
  735. if isinstance(inferred_iterable, util.UninferableBase) or not hasattr(
  736. inferred_iterable, "itered"
  737. ):
  738. yield util.Uninferable
  739. return
  740. try:
  741. itered = inferred_iterable.itered() # type: ignore[union-attr]
  742. except TypeError:
  743. yield util.Uninferable
  744. return
  745. target = stmt.target
  746. if not isinstance(target, nodes.Tuple):
  747. raise InferenceError(
  748. "Could not make sense of this, the target must be a tuple",
  749. context=context,
  750. )
  751. lookups: list[tuple[int, int]] = []
  752. _determine_starred_iteration_lookups(self, target, lookups)
  753. if not lookups:
  754. raise InferenceError(
  755. "Could not make sense of this, needs at least a lookup", context=context
  756. )
  757. # Make the last lookup a slice, since that what we want for a Starred node
  758. last_element_index, last_element_length = lookups[-1]
  759. is_starred_last = last_element_index == (last_element_length - 1)
  760. lookup_slice = slice(
  761. last_element_index,
  762. None if is_starred_last else (last_element_length - last_element_index),
  763. )
  764. last_lookup = lookup_slice
  765. for element in itered:
  766. # We probably want to infer the potential values *for each* element in an
  767. # iterable, but we can't infer a list of all values, when only a list of
  768. # step values are expected:
  769. #
  770. # for a, *b in [...]:
  771. # b
  772. #
  773. # *b* should now point to just the elements at that particular iteration step,
  774. # which astroid can't know about.
  775. found_element = None
  776. for index, lookup in enumerate(lookups):
  777. if not hasattr(element, "itered"):
  778. break
  779. if index + 1 is len(lookups):
  780. cur_lookup: slice | int = last_lookup
  781. else:
  782. # Grab just the index, not the whole length
  783. cur_lookup = lookup[0]
  784. try:
  785. itered_inner_element = element.itered()
  786. element = itered_inner_element[cur_lookup]
  787. except IndexError:
  788. break
  789. except TypeError:
  790. # Most likely the itered() call failed, cannot make sense of this
  791. yield util.Uninferable
  792. return
  793. else:
  794. found_element = element
  795. unpacked = nodes.List(
  796. ctx=Context.Store,
  797. parent=self,
  798. lineno=self.lineno,
  799. col_offset=self.col_offset,
  800. )
  801. unpacked.postinit(elts=found_element or [])
  802. yield unpacked
  803. return
  804. yield util.Uninferable
  805. nodes.Starred.assigned_stmts = starred_assigned_stmts
  806. @decorators.yes_if_nothing_inferred
  807. def match_mapping_assigned_stmts(
  808. self: nodes.MatchMapping,
  809. node: nodes.AssignName,
  810. context: InferenceContext | None = None,
  811. assign_path: None = None,
  812. ) -> Generator[nodes.NodeNG, None, None]:
  813. """Return empty generator (return -> raises StopIteration) so inferred value
  814. is Uninferable.
  815. """
  816. return
  817. yield
  818. nodes.MatchMapping.assigned_stmts = match_mapping_assigned_stmts
  819. @decorators.yes_if_nothing_inferred
  820. def match_star_assigned_stmts(
  821. self: nodes.MatchStar,
  822. node: nodes.AssignName,
  823. context: InferenceContext | None = None,
  824. assign_path: None = None,
  825. ) -> Generator[nodes.NodeNG, None, None]:
  826. """Return empty generator (return -> raises StopIteration) so inferred value
  827. is Uninferable.
  828. """
  829. return
  830. yield
  831. nodes.MatchStar.assigned_stmts = match_star_assigned_stmts
  832. @decorators.yes_if_nothing_inferred
  833. def match_as_assigned_stmts(
  834. self: nodes.MatchAs,
  835. node: nodes.AssignName,
  836. context: InferenceContext | None = None,
  837. assign_path: None = None,
  838. ) -> Generator[nodes.NodeNG, None, None]:
  839. """Infer MatchAs as the Match subject if it's the only MatchCase pattern
  840. else raise StopIteration to yield Uninferable.
  841. """
  842. if (
  843. isinstance(self.parent, nodes.MatchCase)
  844. and isinstance(self.parent.parent, nodes.Match)
  845. and self.pattern is None
  846. ):
  847. yield self.parent.parent.subject
  848. nodes.MatchAs.assigned_stmts = match_as_assigned_stmts