  1. # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
  2. # For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
  3. # Copyright (c) https://github.com/PyCQA/astroid/blob/main/CONTRIBUTORS.txt
  4. """This module contains a set of functions to handle python protocols for nodes
  5. where it makes sense.
  6. """
  7. from __future__ import annotations
  8. import collections
  9. import itertools
  10. import operator as operator_mod
  11. from collections.abc import Callable, Generator, Iterator, Sequence
  12. from typing import Any, TypeVar
  13. from astroid import arguments, bases, decorators, helpers, nodes, util
  14. from astroid.const import Context
  15. from astroid.context import InferenceContext, copy_context
  16. from astroid.exceptions import (
  17. AstroidIndexError,
  18. AstroidTypeError,
  19. AttributeInferenceError,
  20. InferenceError,
  21. NoDefault,
  22. )
  23. from astroid.nodes import node_classes
  24. from astroid.typing import (
  25. ConstFactoryResult,
  26. InferenceResult,
  27. SuccessfulInferenceResult,
  28. )
# Imported lazily to avoid circular imports at module load time.
raw_building = util.lazy_import("raw_building")
objects = util.lazy_import("objects")

# Type variable constrained to the two sequence node types (Tuple, List) that
# share the binary-operation protocol implementations below.
_TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List)
  32. def _reflected_name(name) -> str:
  33. return "__r" + name[2:]
  34. def _augmented_name(name) -> str:
  35. return "__i" + name[2:]
  36. _CONTEXTLIB_MGR = "contextlib.contextmanager"
# Mapping from binary operator symbol to the dunder method implementing it.
BIN_OP_METHOD = {
    "+": "__add__",
    "-": "__sub__",
    "/": "__truediv__",
    "//": "__floordiv__",
    "*": "__mul__",
    "**": "__pow__",
    "%": "__mod__",
    "&": "__and__",
    "|": "__or__",
    "^": "__xor__",
    "<<": "__lshift__",
    ">>": "__rshift__",
    "@": "__matmul__",
}

# "+" -> "__radd__", "-" -> "__rsub__", etc.
REFLECTED_BIN_OP_METHOD = {
    key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items()
}
# "+=" -> "__iadd__", "-=" -> "__isub__", etc.
AUGMENTED_OP_METHOD = {
    key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items()
}

# Mapping from unary operator symbol to the dunder method implementing it.
UNARY_OP_METHOD = {
    "+": "__pos__",
    "-": "__neg__",
    "~": "__invert__",
    "not": None,  # XXX not '__nonzero__'
}
# Concrete callables used to evaluate unary operations on constant values.
_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}
  70. def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult:
  71. """Perform unary operation on `obj`, unless it is `NotImplemented`.
  72. Can raise TypeError if operation is unsupported.
  73. """
  74. if obj is NotImplemented:
  75. value = obj
  76. else:
  77. func = _UNARY_OPERATORS[op]
  78. value = func(obj)
  79. return nodes.const_factory(value)
# Attach the unary-operation protocol to the constant-like node classes; each
# lambda first converts the node's contents to the equivalent Python value.
nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)
# Binary operations

# Concrete implementations used to fold binary operations between constants.
BIN_OP_IMPL = {
    "+": lambda a, b: a + b,
    "-": lambda a, b: a - b,
    "/": lambda a, b: a / b,
    "//": lambda a, b: a // b,
    "*": lambda a, b: a * b,
    "**": lambda a, b: a**b,
    "%": lambda a, b: a % b,
    "&": lambda a, b: a & b,
    "|": lambda a, b: a | b,
    "^": lambda a, b: a ^ b,
    "<<": lambda a, b: a << b,
    ">>": lambda a, b: a >> b,
    "@": operator_mod.matmul,
}
# An augmented operation ("+=") folds exactly like its plain counterpart.
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + "="] = _IMPL
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(
    self: nodes.Const,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    _: SuccessfulInferenceResult,
) -> Generator[ConstFactoryResult | util.UninferableBase, None, None]:
    """Infer a binary operation whose left-hand side is a constant.

    Yields a new Const node with the folded value, ``Const(NotImplemented)``
    when the operation does not apply, or Uninferable when evaluation fails
    in a way we cannot interpret.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        # Don't actually evaluate very large exponentiations: folding them
        # would be arbitrarily expensive.
        if (
            operator == "**"
            and isinstance(self.value, (int, float))
            and isinstance(other.value, (int, float))
            and (self.value > 1e5 or other.value > 1e5)
        ):
            yield not_implemented
            return
        try:
            # Outer lookup may raise TypeError for unhashable operators only
            # in pathological cases; inner call evaluates the operation.
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented


nodes.Const.infer_binary_op = const_infer_binary_op
  139. def _multiply_seq_by_int(
  140. self: _TupleListNodeT,
  141. opnode: nodes.AugAssign | nodes.BinOp,
  142. other: nodes.Const,
  143. context: InferenceContext,
  144. ) -> _TupleListNodeT:
  145. node = self.__class__(parent=opnode)
  146. filtered_elts = (
  147. helpers.safe_infer(elt, context) or util.Uninferable
  148. for elt in self.elts
  149. if not isinstance(elt, util.UninferableBase)
  150. )
  151. node.elts = list(filtered_elts) * other.value
  152. return node
  153. def _filter_uninferable_nodes(
  154. elts: Sequence[InferenceResult], context: InferenceContext
  155. ) -> Iterator[SuccessfulInferenceResult]:
  156. for elt in elts:
  157. if isinstance(elt, util.UninferableBase):
  158. yield nodes.Unknown()
  159. else:
  160. for inferred in elt.infer(context):
  161. if not isinstance(inferred, util.UninferableBase):
  162. yield inferred
  163. else:
  164. yield nodes.Unknown()
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase, None, None]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'
    """
    # For tuples and list the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        # Sequence concatenation: build a new node of the same type with the
        # (safely inferred) elements of both operands.
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        else:
            yield _multiply_seq_by_int(self, opnode, as_index, context)
    else:
        yield not_implemented


nodes.Tuple.infer_binary_op = tl_infer_binary_op
nodes.List.infer_binary_op = tl_infer_binary_op
@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(
    self: bases.Instance | nodes.ClassDef,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[InferenceResult, None, None]:
    # The binary operation on an instance or class is whatever calling the
    # already-resolved dunder method would infer; delegate entirely to it.
    return method.infer_call_result(self, context)


bases.Instance.infer_binary_op = instance_class_infer_binary_op
nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op
# assignment ##################################################################

"""The assigned_stmts method is responsible for returning the assigned statement
(e.g. not inferred) according to the assignment type.

The `assign_path` argument is used to record the lhs path of the original node.
For instance, if we want assigned statements for 'c' in 'a, (b, c)', assign_path
will be [1, 1] by the time we reach the Assign node.

The `context` argument is the current inference context which should be given
to any intermediary inference necessary.
"""
def _resolve_looppart(parts, assign_path, context):
    """Recursive function to resolve multiple assignments on loops.

    *parts* is an iterable of candidate iterables (inference results for the
    loop's iterable); *assign_path* is the list of indices locating the
    target inside a nested tuple/list unpacking.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if isinstance(part, util.UninferableBase):
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        try:
            # A scalar at this index means the iterable yields homogeneous
            # items; index into the container itself instead of its elements.
            if isinstance(itered[index], (nodes.Const, nodes.Name)):
                itered = [part]
        except IndexError:
            pass
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError, AstroidTypeError, AstroidIndexError):
                continue
            if not assign_path:
                # we resolved the whole assignment path,
                # don't infer the last part
                yield assigned
            elif isinstance(assigned, util.UninferableBase):
                break
            else:
                # we are not yet on the last part of the path:
                # recurse on each possibly inferred value
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except InferenceError:
                    break
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(
    self: nodes.For | nodes.Comprehension,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield values bound by a ``for`` loop or comprehension target.

    The dict returned from the generator is consumed by the
    ``raise_if_nothing_inferred`` decorator to build a useful error.
    """
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return {
            "node": self,
            "unknown": node,
            "assign_path": assign_path,
            "context": context,
        }
    if assign_path is None:
        # Simple target: yield the elements of any inferred tuple/list.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        # Nested unpacking target: walk the path into the iterable's items.
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


nodes.For.assigned_stmts = for_assigned_stmts
nodes.Comprehension.assigned_stmts = for_assigned_stmts
def sequence_assigned_stmts(
    self: nodes.Tuple | nodes.List,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Record *node*'s index inside this sequence target and delegate to the
    parent statement.

    Raises InferenceError if *node* is not one of this sequence's elements.
    """
    if assign_path is None:
        assign_path = []
    try:
        index = self.elts.index(node)  # type: ignore[arg-type]
    except ValueError as exc:
        raise InferenceError(
            "Tried to retrieve a node {node!r} which does not exist",
            node=self,
            assign_path=assign_path,
            context=context,
        ) from exc
    # Prepend: the path is built innermost-first as we walk up the tree.
    assign_path.insert(0, index)
    return self.parent.assigned_stmts(
        node=self, context=context, assign_path=assign_path
    )


nodes.Tuple.assigned_stmts = sequence_assigned_stmts
nodes.List.assigned_stmts = sequence_assigned_stmts
def assend_assigned_stmts(
    self: nodes.AssignName | nodes.AssignAttr,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    # An assignment target delegates to its parent statement, passing itself
    # as the node whose assigned values are requested.
    return self.parent.assigned_stmts(node=self, context=context)


nodes.AssignName.assigned_stmts = assend_assigned_stmts
nodes.AssignAttr.assigned_stmts = assend_assigned_stmts
def _arguments_infer_argname(
    self, name: str | None, context: InferenceContext
) -> Generator[InferenceResult, None, None]:
    """Infer the possible values of the argument *name* of this Arguments node.

    Handles the implicit first argument of methods/classmethods, call-site
    argument inference, ``*args``/``**kwargs`` placeholders, and defaults.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.arguments or self.vararg or self.kwarg):
        yield util.Uninferable
        return

    functype = self.parent.type
    # first argument of instance/class method
    if (
        self.arguments
        and getattr(self.arguments[0], "name", None) == name
        and functype != "staticmethod"
    ):
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        # Prefer inferring from the actual call site when the callee matches
        # this function (unwrap proxies first).
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        # *args defaults to an empty tuple placeholder.
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not self.arguments and self.parent.name == "__init__":
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs defaults to an empty dict placeholder.
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable
def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the values assigned to a function parameter *node*.

    Uses the call context when the callee matches the enclosing function;
    otherwise falls back to positional/default inference.
    """
    try:
        node_name = node.name  # type: ignore[union-attr]
    except AttributeError:
        # Added to handle edge cases where node.name is not defined.
        # https://github.com/PyCQA/astroid/pull/1644#discussion_r901545816
        node_name = None  # pragma: no cover

    if context and context.callcontext:
        # Unwrap proxies to reach the concrete callee.
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        return _arguments_infer_argname(self, node_name, context)
    if node and getattr(callee, "name", None) == node.frame(future=True).name:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node_name, context)
    return _arguments_infer_argname(self, node_name, context)


nodes.Arguments.assigned_stmts = arguments_assigned_stmts
@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(
    self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the value assigned by this statement.

    With no *assign_path* the raw rhs is yielded un-inferred; otherwise the
    path is walked into the inferred rhs to reach the nested item.
    """
    if not assign_path:
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )
    # Consumed by raise_if_nothing_inferred to build a useful error.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
  430. def assign_annassigned_stmts(
  431. self: nodes.AnnAssign,
  432. node: node_classes.AssignedStmtsPossibleNode = None,
  433. context: InferenceContext | None = None,
  434. assign_path: list[int] | None = None,
  435. ) -> Any:
  436. for inferred in assign_assigned_stmts(self, node, context, assign_path):
  437. if inferred is None:
  438. yield util.Uninferable
  439. else:
  440. yield inferred
  441. nodes.Assign.assigned_stmts = assign_assigned_stmts
  442. nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
  443. nodes.AugAssign.assigned_stmts = assign_assigned_stmts
def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments.

    *parts* is an iterable of inference results for the rhs; *assign_path*
    locates the target inside nested tuple/list unpacking.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return
        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return
        if not assigned:
            return
        if not assign_path:
            # we achieved to resolved the assignment path, don't infer the
            # last part
            yield assigned
        elif isinstance(assigned, util.UninferableBase):
            return
        else:
            # we are not yet on the last part of the path search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return
@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(
    self: nodes.ExceptHandler,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield what ``except ... as name`` binds: exception *instances*, built
    from each exception class named in the handler's type expression.
    """
    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            assigned = objects.ExceptionInstance(assigned)
        yield assigned
    # Consumed by raise_if_nothing_inferred to build a useful error.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
def _infer_context_manager(self, mgr, context):
    """Yield the value(s) bound by ``with mgr as ...``.

    For a generator decorated with contextlib.contextmanager this is the
    yielded type; for an instance it is the result of calling ``__enter__``.
    Raises InferenceError when *mgr* is not a usable context manager.
    """
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )
        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e
    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)
@decorators.raise_if_nothing_inferred
def with_assigned_stmts(
    self: nodes.With,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and what it is bound
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass

        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        # Find the context-manager expression whose `as` target is *node*.
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise InferenceError(
                        "Tried to unpack a non-iterable value in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    # Consumed by raise_if_nothing_inferred to build a useful error.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


nodes.With.assigned_stmts = with_assigned_stmts
  606. @decorators.raise_if_nothing_inferred
  607. def named_expr_assigned_stmts(
  608. self: nodes.NamedExpr,
  609. node: node_classes.AssignedStmtsPossibleNode,
  610. context: InferenceContext | None = None,
  611. assign_path: list[int] | None = None,
  612. ) -> Any:
  613. """Infer names and other nodes from an assignment expression."""
  614. if self.target == node:
  615. yield from self.value.infer(context=context)
  616. else:
  617. raise InferenceError(
  618. "Cannot infer NamedExpr node {node!r}",
  619. node=self,
  620. assign_path=assign_path,
  621. context=context,
  622. )
  623. nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts
  624. @decorators.yes_if_nothing_inferred
  625. def starred_assigned_stmts( # noqa: C901
  626. self: nodes.Starred,
  627. node: node_classes.AssignedStmtsPossibleNode = None,
  628. context: InferenceContext | None = None,
  629. assign_path: list[int] | None = None,
  630. ) -> Any:
  631. """
  632. Arguments:
  633. self: nodes.Starred
  634. node: a node related to the current underlying Node.
  635. context: Inference context used for caching already inferred objects
  636. assign_path:
  637. A list of indices, where each index specifies what item to fetch from
  638. the inference results.
  639. """
  640. # pylint: disable=too-many-locals,too-many-statements
  641. def _determine_starred_iteration_lookups(
  642. starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]]
  643. ) -> None:
  644. # Determine the lookups for the rhs of the iteration
  645. itered = target.itered()
  646. for index, element in enumerate(itered):
  647. if (
  648. isinstance(element, nodes.Starred)
  649. and element.value.name == starred.value.name
  650. ):
  651. lookups.append((index, len(itered)))
  652. break
  653. if isinstance(element, nodes.Tuple):
  654. lookups.append((index, len(element.itered())))
  655. _determine_starred_iteration_lookups(starred, element, lookups)
  656. stmt = self.statement(future=True)
  657. if not isinstance(stmt, (nodes.Assign, nodes.For)):
  658. raise InferenceError(
  659. "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
  660. node=self,
  661. stmt=stmt,
  662. unknown=node,
  663. context=context,
  664. )
  665. if context is None:
  666. context = InferenceContext()
  667. if isinstance(stmt, nodes.Assign):
  668. value = stmt.value
  669. lhs = stmt.targets[0]
  670. if not isinstance(lhs, nodes.BaseContainer):
  671. yield util.Uninferable
  672. return
  673. if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
  674. raise InferenceError(
  675. "Too many starred arguments in the assignment targets {lhs!r}.",
  676. node=self,
  677. targets=lhs,
  678. unknown=node,
  679. context=context,
  680. )
  681. try:
  682. rhs = next(value.infer(context))
  683. except (InferenceError, StopIteration):
  684. yield util.Uninferable
  685. return
  686. if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"):
  687. yield util.Uninferable
  688. return
  689. try:
  690. elts = collections.deque(rhs.itered()) # type: ignore[union-attr]
  691. except TypeError:
  692. yield util.Uninferable
  693. return
  694. # Unpack iteratively the values from the rhs of the assignment,
  695. # until the find the starred node. What will remain will
  696. # be the list of values which the Starred node will represent
  697. # This is done in two steps, from left to right to remove
  698. # anything before the starred node and from right to left
  699. # to remove anything after the starred node.
  700. for index, left_node in enumerate(lhs.elts):
  701. if not isinstance(left_node, nodes.Starred):
  702. if not elts:
  703. break
  704. elts.popleft()
  705. continue
  706. lhs_elts = collections.deque(reversed(lhs.elts[index:]))
  707. for right_node in lhs_elts:
  708. if not isinstance(right_node, nodes.Starred):
  709. if not elts:
  710. break
  711. elts.pop()
  712. continue
  713. # We're done unpacking.
  714. packed = nodes.List(
  715. ctx=Context.Store,
  716. parent=self,
  717. lineno=lhs.lineno,
  718. col_offset=lhs.col_offset,
  719. )
  720. packed.postinit(elts=list(elts))
  721. yield packed
  722. break
  723. if isinstance(stmt, nodes.For):
  724. try:
  725. inferred_iterable = next(stmt.iter.infer(context=context))
  726. except (InferenceError, StopIteration):
  727. yield util.Uninferable
  728. return
  729. if isinstance(inferred_iterable, util.UninferableBase) or not hasattr(
  730. inferred_iterable, "itered"
  731. ):
  732. yield util.Uninferable
  733. return
  734. try:
  735. itered = inferred_iterable.itered() # type: ignore[union-attr]
  736. except TypeError:
  737. yield util.Uninferable
  738. return
  739. target = stmt.target
  740. if not isinstance(target, nodes.Tuple):
  741. raise InferenceError(
  742. "Could not make sense of this, the target must be a tuple",
  743. context=context,
  744. )
  745. lookups: list[tuple[int, int]] = []
  746. _determine_starred_iteration_lookups(self, target, lookups)
  747. if not lookups:
  748. raise InferenceError(
  749. "Could not make sense of this, needs at least a lookup", context=context
  750. )
  751. # Make the last lookup a slice, since that what we want for a Starred node
  752. last_element_index, last_element_length = lookups[-1]
  753. is_starred_last = last_element_index == (last_element_length - 1)
  754. lookup_slice = slice(
  755. last_element_index,
  756. None if is_starred_last else (last_element_length - last_element_index),
  757. )
  758. last_lookup = lookup_slice
  759. for element in itered:
  760. # We probably want to infer the potential values *for each* element in an
  761. # iterable, but we can't infer a list of all values, when only a list of
  762. # step values are expected:
  763. #
  764. # for a, *b in [...]:
  765. # b
  766. #
  767. # *b* should now point to just the elements at that particular iteration step,
  768. # which astroid can't know about.
  769. found_element = None
  770. for index, lookup in enumerate(lookups):
  771. if not hasattr(element, "itered"):
  772. break
  773. if index + 1 is len(lookups):
  774. cur_lookup: slice | int = last_lookup
  775. else:
  776. # Grab just the index, not the whole length
  777. cur_lookup = lookup[0]
  778. try:
  779. itered_inner_element = element.itered()
  780. element = itered_inner_element[cur_lookup]
  781. except IndexError:
  782. break
  783. except TypeError:
  784. # Most likely the itered() call failed, cannot make sense of this
  785. yield util.Uninferable
  786. return
  787. else:
  788. found_element = element
  789. unpacked = nodes.List(
  790. ctx=Context.Store,
  791. parent=self,
  792. lineno=self.lineno,
  793. col_offset=self.col_offset,
  794. )
  795. unpacked.postinit(elts=found_element or [])
  796. yield unpacked
  797. return
  798. yield util.Uninferable
  799. nodes.Starred.assigned_stmts = starred_assigned_stmts
@decorators.yes_if_nothing_inferred
def match_mapping_assigned_stmts(
    self: nodes.MatchMapping,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG, None, None]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
    """
    return
    yield  # unreachable; only makes this function a generator


nodes.MatchMapping.assigned_stmts = match_mapping_assigned_stmts
@decorators.yes_if_nothing_inferred
def match_star_assigned_stmts(
    self: nodes.MatchStar,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG, None, None]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
    """
    return
    yield  # unreachable; only makes this function a generator


nodes.MatchStar.assigned_stmts = match_star_assigned_stmts
  826. @decorators.yes_if_nothing_inferred
  827. def match_as_assigned_stmts(
  828. self: nodes.MatchAs,
  829. node: nodes.AssignName,
  830. context: InferenceContext | None = None,
  831. assign_path: None = None,
  832. ) -> Generator[nodes.NodeNG, None, None]:
  833. """Infer MatchAs as the Match subject if it's the only MatchCase pattern
  834. else raise StopIteration to yield Uninferable.
  835. """
  836. if (
  837. isinstance(self.parent, nodes.MatchCase)
  838. and isinstance(self.parent.parent, nodes.Match)
  839. and self.pattern is None
  840. ):
  841. yield self.parent.parent.subject
  842. nodes.MatchAs.assigned_stmts = match_as_assigned_stmts