inference.py 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273
  1. # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
  2. # For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
  3. # Copyright (c) https://github.com/PyCQA/astroid/blob/main/CONTRIBUTORS.txt
  4. """This module contains a set of functions to handle inference on astroid trees."""
  5. from __future__ import annotations
  6. import ast
  7. import functools
  8. import itertools
  9. import operator
  10. import typing
  11. from collections.abc import Callable, Generator, Iterable, Iterator
  12. from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union
  13. from astroid import bases, constraint, decorators, helpers, nodes, protocols, util
  14. from astroid.const import PY310_PLUS
  15. from astroid.context import (
  16. CallContext,
  17. InferenceContext,
  18. bind_context_to_node,
  19. copy_context,
  20. )
  21. from astroid.exceptions import (
  22. AstroidBuildingError,
  23. AstroidError,
  24. AstroidIndexError,
  25. AstroidTypeError,
  26. AstroidValueError,
  27. AttributeInferenceError,
  28. InferenceError,
  29. NameInferenceError,
  30. _NonDeducibleTypeHierarchy,
  31. )
  32. from astroid.interpreter import dunder_lookup
  33. from astroid.manager import AstroidManager
  34. from astroid.typing import (
  35. InferenceErrorInfo,
  36. InferenceResult,
  37. SuccessfulInferenceResult,
  38. )
if TYPE_CHECKING:
    from astroid.objects import Property

# Prevents circular imports
objects = util.lazy_import("objects")

# Type variables keeping the inference helpers generic over node types.
_T = TypeVar("_T")
_BaseContainerT = TypeVar("_BaseContainerT", bound=nodes.BaseContainer)
_FunctionDefT = TypeVar("_FunctionDefT", bound=nodes.FunctionDef)

# Signature shared by the factories that build the ordered list of
# binary-operation inference callables (normal and augmented flows):
# (left, left_type, opnode, right, right_type, context, reverse_context)
# -> list of partials, each yielding inference results when called.
GetFlowFactory = typing.Callable[
    [
        InferenceResult,
        Optional[InferenceResult],
        Union[nodes.AugAssign, nodes.BinOp],
        InferenceResult,
        Optional[InferenceResult],
        InferenceContext,
        InferenceContext,
    ],
    "list[functools.partial[Generator[InferenceResult, None, None]]]",
]
  58. # .infer method ###############################################################
  59. def infer_end(
  60. self: _T, context: InferenceContext | None = None, **kwargs: Any
  61. ) -> Iterator[_T]:
  62. """Inference's end for nodes that yield themselves on inference.
  63. These are objects for which inference does not have any semantic,
  64. such as Module or Consts.
  65. """
  66. yield self
# Nodes that infer to themselves all share the terminal ``infer_end``.
# We add ignores to all assignments to methods
# See https://github.com/python/mypy/issues/2427
nodes.Module._infer = infer_end
nodes.ClassDef._infer = infer_end
nodes.Lambda._infer = infer_end  # type: ignore[assignment]
nodes.Const._infer = infer_end  # type: ignore[assignment]
nodes.Slice._infer = infer_end  # type: ignore[assignment]
  74. def _infer_sequence_helper(
  75. node: _BaseContainerT, context: InferenceContext | None = None
  76. ) -> list[SuccessfulInferenceResult]:
  77. """Infer all values based on _BaseContainer.elts."""
  78. values = []
  79. for elt in node.elts:
  80. if isinstance(elt, nodes.Starred):
  81. starred = helpers.safe_infer(elt.value, context)
  82. if not starred:
  83. raise InferenceError(node=node, context=context)
  84. if not hasattr(starred, "elts"):
  85. raise InferenceError(node=node, context=context)
  86. values.extend(_infer_sequence_helper(starred))
  87. elif isinstance(elt, nodes.NamedExpr):
  88. value = helpers.safe_infer(elt.value, context)
  89. if not value:
  90. raise InferenceError(node=node, context=context)
  91. values.append(value)
  92. else:
  93. values.append(elt)
  94. return values
  95. @decorators.raise_if_nothing_inferred
  96. def infer_sequence(
  97. self: _BaseContainerT,
  98. context: InferenceContext | None = None,
  99. **kwargs: Any,
  100. ) -> Iterator[_BaseContainerT]:
  101. has_starred_named_expr = any(
  102. isinstance(e, (nodes.Starred, nodes.NamedExpr)) for e in self.elts
  103. )
  104. if has_starred_named_expr:
  105. values = _infer_sequence_helper(self, context)
  106. new_seq = type(self)(
  107. lineno=self.lineno, col_offset=self.col_offset, parent=self.parent
  108. )
  109. new_seq.postinit(values)
  110. yield new_seq
  111. else:
  112. yield self
  113. nodes.List._infer = infer_sequence # type: ignore[assignment]
  114. nodes.Tuple._infer = infer_sequence # type: ignore[assignment]
  115. nodes.Set._infer = infer_sequence # type: ignore[assignment]
  116. def infer_map(
  117. self: nodes.Dict, context: InferenceContext | None = None
  118. ) -> Iterator[nodes.Dict]:
  119. if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
  120. yield self
  121. else:
  122. items = _infer_map(self, context)
  123. new_seq = type(self)(self.lineno, self.col_offset, self.parent)
  124. new_seq.postinit(list(items.items()))
  125. yield new_seq
  126. def _update_with_replacement(
  127. lhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult],
  128. rhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult],
  129. ) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]:
  130. """Delete nodes that equate to duplicate keys.
  131. Since an astroid node doesn't 'equal' another node with the same value,
  132. this function uses the as_string method to make sure duplicate keys
  133. don't get through
  134. Note that both the key and the value are astroid nodes
  135. Fixes issue with DictUnpack causing duplicate keys
  136. in inferred Dict items
  137. :param lhs_dict: Dictionary to 'merge' nodes into
  138. :param rhs_dict: Dictionary with nodes to pull from
  139. :return : merged dictionary of nodes
  140. """
  141. combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items())
  142. # Overwrite keys which have the same string values
  143. string_map = {key.as_string(): (key, value) for key, value in combined_dict}
  144. # Return to dictionary
  145. return dict(string_map.values())
  146. def _infer_map(
  147. node: nodes.Dict, context: InferenceContext | None
  148. ) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]:
  149. """Infer all values based on Dict.items."""
  150. values: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {}
  151. for name, value in node.items:
  152. if isinstance(name, nodes.DictUnpack):
  153. double_starred = helpers.safe_infer(value, context)
  154. if not double_starred:
  155. raise InferenceError
  156. if not isinstance(double_starred, nodes.Dict):
  157. raise InferenceError(node=node, context=context)
  158. unpack_items = _infer_map(double_starred, context)
  159. values = _update_with_replacement(values, unpack_items)
  160. else:
  161. key = helpers.safe_infer(name, context=context)
  162. safe_value = helpers.safe_infer(value, context=context)
  163. if any(not elem for elem in (key, safe_value)):
  164. raise InferenceError(node=node, context=context)
  165. # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False
  166. values = _update_with_replacement(values, {key: safe_value})
  167. return values
  168. nodes.Dict._infer = infer_map # type: ignore[assignment]
  169. def _higher_function_scope(node: nodes.NodeNG) -> nodes.FunctionDef | None:
  170. """Search for the first function which encloses the given
  171. scope. This can be used for looking up in that function's
  172. scope, in case looking up in a lower scope for a particular
  173. name fails.
  174. :param node: A scope node.
  175. :returns:
  176. ``None``, if no parent function scope was found,
  177. otherwise an instance of :class:`astroid.nodes.scoped_nodes.Function`,
  178. which encloses the given node.
  179. """
  180. current = node
  181. while current.parent and not isinstance(current.parent, nodes.FunctionDef):
  182. current = current.parent
  183. if current and current.parent:
  184. return current.parent # type: ignore[no-any-return]
  185. return None
  186. def infer_name(
  187. self: nodes.Name | nodes.AssignName,
  188. context: InferenceContext | None = None,
  189. **kwargs: Any,
  190. ) -> Generator[InferenceResult, None, None]:
  191. """Infer a Name: use name lookup rules."""
  192. frame, stmts = self.lookup(self.name)
  193. if not stmts:
  194. # Try to see if the name is enclosed in a nested function
  195. # and use the higher (first function) scope for searching.
  196. parent_function = _higher_function_scope(self.scope())
  197. if parent_function:
  198. _, stmts = parent_function.lookup(self.name)
  199. if not stmts:
  200. raise NameInferenceError(
  201. name=self.name, scope=self.scope(), context=context
  202. )
  203. context = copy_context(context)
  204. context.lookupname = self.name
  205. context.constraints[self.name] = constraint.get_constraints(self, frame)
  206. return bases._infer_stmts(stmts, context, frame)
  207. # pylint: disable=no-value-for-parameter
  208. # The order of the decorators here is important
  209. # See https://github.com/PyCQA/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
  210. nodes.Name._infer = decorators.raise_if_nothing_inferred(
  211. decorators.path_wrapper(infer_name)
  212. )
  213. nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper
  214. @decorators.raise_if_nothing_inferred
  215. @decorators.path_wrapper
  216. def infer_call(
  217. self: nodes.Call, context: InferenceContext | None = None, **kwargs: Any
  218. ) -> Generator[InferenceResult, None, InferenceErrorInfo]:
  219. """Infer a Call node by trying to guess what the function returns."""
  220. callcontext = copy_context(context)
  221. callcontext.boundnode = None
  222. if context is not None:
  223. callcontext.extra_context = _populate_context_lookup(self, context.clone())
  224. for callee in self.func.infer(context):
  225. if isinstance(callee, util.UninferableBase):
  226. yield callee
  227. continue
  228. try:
  229. if hasattr(callee, "infer_call_result"):
  230. callcontext.callcontext = CallContext(
  231. args=self.args, keywords=self.keywords, callee=callee
  232. )
  233. yield from callee.infer_call_result(caller=self, context=callcontext)
  234. except InferenceError:
  235. continue
  236. return InferenceErrorInfo(node=self, context=context)
  237. nodes.Call._infer = infer_call # type: ignore[assignment]
  238. @decorators.raise_if_nothing_inferred
  239. @decorators.path_wrapper
  240. def infer_import(
  241. self: nodes.Import,
  242. context: InferenceContext | None = None,
  243. asname: bool = True,
  244. **kwargs: Any,
  245. ) -> Generator[nodes.Module, None, None]:
  246. """Infer an Import node: return the imported module/object."""
  247. context = context or InferenceContext()
  248. name = context.lookupname
  249. if name is None:
  250. raise InferenceError(node=self, context=context)
  251. try:
  252. if asname:
  253. yield self.do_import_module(self.real_name(name))
  254. else:
  255. yield self.do_import_module(name)
  256. except AstroidBuildingError as exc:
  257. raise InferenceError(node=self, context=context) from exc
  258. nodes.Import._infer = infer_import
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_import_from(
    self: nodes.ImportFrom,
    context: InferenceContext | None = None,
    asname: bool = True,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """Infer a ImportFrom node: return the imported module/object.

    :raises InferenceError: when the context carries no lookup name, the
        alias cannot be resolved, the module cannot be built, or the name
        is not found on the imported module.
    """
    context = context or InferenceContext()
    name = context.lookupname
    if name is None:
        raise InferenceError(node=self, context=context)
    if asname:
        try:
            # Map the local alias back to the real imported name.
            name = self.real_name(name)
        except AttributeInferenceError as exc:
            # See https://github.com/PyCQA/pylint/issues/4692
            raise InferenceError(node=self, context=context) from exc
    try:
        module = self.do_import_module()
    except AstroidBuildingError as exc:
        raise InferenceError(node=self, context=context) from exc
    try:
        context = copy_context(context)
        context.lookupname = name
        # Ignore module-local names when importing from the module we are in.
        stmts = module.getattr(name, ignore_locals=module is self.root())
        return bases._infer_stmts(stmts, context)
    except AttributeInferenceError as error:
        raise InferenceError(
            str(error), target=self, attribute=name, context=context
        ) from error


nodes.ImportFrom._infer = infer_import_from  # type: ignore[assignment]
  292. def infer_attribute(
  293. self: nodes.Attribute | nodes.AssignAttr,
  294. context: InferenceContext | None = None,
  295. **kwargs: Any,
  296. ) -> Generator[InferenceResult, None, InferenceErrorInfo]:
  297. """Infer an Attribute node by using getattr on the associated object."""
  298. for owner in self.expr.infer(context):
  299. if isinstance(owner, util.UninferableBase):
  300. yield owner
  301. continue
  302. context = copy_context(context)
  303. old_boundnode = context.boundnode
  304. try:
  305. context.boundnode = owner
  306. if isinstance(owner, (nodes.ClassDef, bases.Instance)):
  307. frame = owner if isinstance(owner, nodes.ClassDef) else owner._proxied
  308. context.constraints[self.attrname] = constraint.get_constraints(
  309. self, frame=frame
  310. )
  311. yield from owner.igetattr(self.attrname, context)
  312. except (
  313. AttributeInferenceError,
  314. InferenceError,
  315. AttributeError,
  316. ):
  317. pass
  318. finally:
  319. context.boundnode = old_boundnode
  320. return InferenceErrorInfo(node=self, context=context)
  321. # The order of the decorators here is important
  322. # See https://github.com/PyCQA/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
  323. nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
  324. decorators.path_wrapper(infer_attribute)
  325. )
  326. # won't work with a path wrapper
  327. nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)
  328. @decorators.raise_if_nothing_inferred
  329. @decorators.path_wrapper
  330. def infer_global(
  331. self: nodes.Global, context: InferenceContext | None = None, **kwargs: Any
  332. ) -> Generator[InferenceResult, None, None]:
  333. if context is None or context.lookupname is None:
  334. raise InferenceError(node=self, context=context)
  335. try:
  336. return bases._infer_stmts(self.root().getattr(context.lookupname), context)
  337. except AttributeInferenceError as error:
  338. raise InferenceError(
  339. str(error), target=self, attribute=context.lookupname, context=context
  340. ) from error
  341. nodes.Global._infer = infer_global # type: ignore[assignment]
  342. _SUBSCRIPT_SENTINEL = object()
  343. def infer_subscript(
  344. self: nodes.Subscript, context: InferenceContext | None = None, **kwargs: Any
  345. ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
  346. """Inference for subscripts.
  347. We're understanding if the index is a Const
  348. or a slice, passing the result of inference
  349. to the value's `getitem` method, which should
  350. handle each supported index type accordingly.
  351. """
  352. found_one = False
  353. for value in self.value.infer(context):
  354. if isinstance(value, util.UninferableBase):
  355. yield util.Uninferable
  356. return None
  357. for index in self.slice.infer(context):
  358. if isinstance(index, util.UninferableBase):
  359. yield util.Uninferable
  360. return None
  361. # Try to deduce the index value.
  362. index_value = _SUBSCRIPT_SENTINEL
  363. if value.__class__ == bases.Instance:
  364. index_value = index
  365. elif index.__class__ == bases.Instance:
  366. instance_as_index = helpers.class_instance_as_index(index)
  367. if instance_as_index:
  368. index_value = instance_as_index
  369. else:
  370. index_value = index
  371. if index_value is _SUBSCRIPT_SENTINEL:
  372. raise InferenceError(node=self, context=context)
  373. try:
  374. assigned = value.getitem(index_value, context)
  375. except (
  376. AstroidTypeError,
  377. AstroidIndexError,
  378. AstroidValueError,
  379. AttributeInferenceError,
  380. AttributeError,
  381. ) as exc:
  382. raise InferenceError(node=self, context=context) from exc
  383. # Prevent inferring if the inferred subscript
  384. # is the same as the original subscripted object.
  385. if self is assigned or isinstance(assigned, util.UninferableBase):
  386. yield util.Uninferable
  387. return None
  388. yield from assigned.infer(context)
  389. found_one = True
  390. if found_one:
  391. return InferenceErrorInfo(node=self, context=context)
  392. return None
  393. # The order of the decorators here is important
  394. # See https://github.com/PyCQA/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
  395. nodes.Subscript._infer = decorators.raise_if_nothing_inferred( # type: ignore[assignment]
  396. decorators.path_wrapper(infer_subscript)
  397. )
  398. nodes.Subscript.infer_lhs = decorators.raise_if_nothing_inferred(infer_subscript)
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_boolop(
    self: nodes.BoolOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
    """Infer a boolean operation (and / or / not).

    The function will calculate the boolean operation
    for all pairs generated through inference for each component
    node.
    """
    values = self.values
    # `or` yields the first truthy value; `and` yields the first falsy one.
    if self.op == "or":
        predicate = operator.truth
    else:
        predicate = operator.not_
    try:
        inferred_values = [value.infer(context=context) for value in values]
    except InferenceError:
        yield util.Uninferable
        return None
    for pair in itertools.product(*inferred_values):
        if any(isinstance(item, util.UninferableBase) for item in pair):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue
        bool_values = [item.bool_value() for item in pair]
        if any(isinstance(item, util.UninferableBase) for item in bool_values):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue
        # Since the boolean operations are short circuited operations,
        # this code yields the first value for which the predicate is True
        # and if no value respected the predicate, then the last value will
        # be returned (or Uninferable if there was no last value).
        # This is conforming to the semantics of `and` and `or`:
        # 1 and 0 -> 1
        # 0 and 1 -> 0
        # 1 or 0 -> 1
        # 0 or 1 -> 1
        value = util.Uninferable
        for value, bool_value in zip(pair, bool_values):
            if predicate(bool_value):
                yield value
                break
        else:
            # No short circuit: the for/else yields the last `value` bound
            # by the loop (or the Uninferable initializer for an empty pair).
            yield value
    return InferenceErrorInfo(node=self, context=context)


nodes.BoolOp._infer = _infer_boolop
  447. # UnaryOp, BinOp and AugAssign inferences
  448. def _filter_operation_errors(
  449. self: _T,
  450. infer_callable: Callable[
  451. [_T, InferenceContext | None],
  452. Generator[InferenceResult | util.BadOperationMessage, None, None],
  453. ],
  454. context: InferenceContext | None,
  455. error: type[util.BadOperationMessage],
  456. ) -> Generator[InferenceResult, None, None]:
  457. for result in infer_callable(self, context):
  458. if isinstance(result, error):
  459. # For the sake of .infer(), we don't care about operation
  460. # errors, which is the job of pylint. So return something
  461. # which shows that we can't infer the result.
  462. yield util.Uninferable
  463. else:
  464. yield result
def _infer_unaryop(
    self: nodes.UnaryOp, context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadUnaryOperationMessage, None, None]:
    """Infer what an UnaryOp should return when evaluated.

    Yields either inference results or BadUnaryOperationMessage markers
    (filtered out later by _filter_operation_errors).
    """
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError as exc:
            # The operand doesn't support this operation.
            yield util.BadUnaryOperationMessage(operand, self.op, exc)
        except AttributeError as exc:
            # The operand has no `infer_unary_op`; fall back to looking up
            # the dunder method for the operator.
            meth = protocols.UNARY_OP_METHOD[self.op]
            if meth is None:
                # `not node`. Determine node's boolean
                # value and negate its result, unless it is
                # Uninferable, which will be returned as is.
                bool_value = operand.bool_value()
                if not isinstance(bool_value, util.UninferableBase):
                    yield nodes.const_factory(not bool_value)
                else:
                    yield util.Uninferable
            else:
                if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
                    # The operation was used on something which
                    # doesn't support it.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                    continue
                try:
                    try:
                        methods = dunder_lookup.lookup(operand, meth)
                    except AttributeInferenceError:
                        yield util.BadUnaryOperationMessage(operand, self.op, exc)
                        continue
                    meth = methods[0]
                    inferred = next(meth.infer(context=context), None)
                    if (
                        isinstance(inferred, util.UninferableBase)
                        or not inferred.callable()
                    ):
                        continue
                    context = copy_context(context)
                    context.boundnode = operand
                    context.callcontext = CallContext(args=[], callee=inferred)
                    call_results = inferred.infer_call_result(self, context=context)
                    result = next(call_results, None)
                    if result is None:
                        # Failed to infer, return the same type.
                        yield operand
                    else:
                        yield result
                except AttributeInferenceError as inner_exc:
                    # The unary operation special method was not found.
                    yield util.BadUnaryOperationMessage(operand, self.op, inner_exc)
                except InferenceError:
                    yield util.Uninferable
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_unaryop(
    self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo]:
    """Infer what an UnaryOp should return when evaluated."""
    # Delegate to _infer_unaryop and replace bad-operation markers with
    # Uninferable; flagging them is pylint's responsibility.
    yield from _filter_operation_errors(
        self, _infer_unaryop, context, util.BadUnaryOperationMessage
    )
    return InferenceErrorInfo(node=self, context=context)


nodes.UnaryOp._infer_unaryop = _infer_unaryop
nodes.UnaryOp._infer = infer_unaryop
  532. def _is_not_implemented(const) -> bool:
  533. """Check if the given const node is NotImplemented."""
  534. return isinstance(const, nodes.Const) and const.value is NotImplemented
  535. def _infer_old_style_string_formatting(
  536. instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext
  537. ) -> tuple[util.UninferableBase | nodes.Const]:
  538. """Infer the result of '"string" % ...'.
  539. TODO: Instead of returning Uninferable we should rely
  540. on the call to '%' to see if the result is actually uninferable.
  541. """
  542. if isinstance(other, nodes.Tuple):
  543. if util.Uninferable in other.elts:
  544. return (util.Uninferable,)
  545. inferred_positional = [helpers.safe_infer(i, context) for i in other.elts]
  546. if all(isinstance(i, nodes.Const) for i in inferred_positional):
  547. values = tuple(i.value for i in inferred_positional)
  548. else:
  549. values = None
  550. elif isinstance(other, nodes.Dict):
  551. values: dict[Any, Any] = {}
  552. for pair in other.items:
  553. key = helpers.safe_infer(pair[0], context)
  554. if not isinstance(key, nodes.Const):
  555. return (util.Uninferable,)
  556. value = helpers.safe_infer(pair[1], context)
  557. if not isinstance(value, nodes.Const):
  558. return (util.Uninferable,)
  559. values[key.value] = value.value
  560. elif isinstance(other, nodes.Const):
  561. values = other.value
  562. else:
  563. return (util.Uninferable,)
  564. try:
  565. return (nodes.const_factory(instance.value % values),)
  566. except (TypeError, KeyError, ValueError):
  567. return (util.Uninferable,)
def _invoke_binop_inference(
    instance: InferenceResult,
    opnode: nodes.AugAssign | nodes.BinOp,
    op: str,
    other: InferenceResult,
    context: InferenceContext,
    method_name: str,
) -> Generator[InferenceResult, None, None]:
    """Invoke binary operation inference on the given instance.

    :raises InferenceError: when the dunder method cannot be inferred, is
        Uninferable, or the instance type does not support infer_binary_op.
    """
    # Look up the dunder first so a missing method fails before any
    # special-casing happens.
    methods = dunder_lookup.lookup(instance, method_name)
    context = bind_context_to_node(context, instance)
    method = methods[0]
    context.callcontext.callee = method
    # Special-case old-style string formatting ("..." % values).
    if (
        isinstance(instance, nodes.Const)
        and isinstance(instance.value, str)
        and op == "%"
    ):
        return iter(_infer_old_style_string_formatting(instance, other, context))
    try:
        inferred = next(method.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=method, context=context) from e
    if isinstance(inferred, util.UninferableBase):
        raise InferenceError
    if not isinstance(
        instance, (nodes.Const, nodes.Tuple, nodes.List, nodes.ClassDef, bases.Instance)
    ):
        raise InferenceError  # pragma: no cover # Used as a failsafe
    return instance.infer_binary_op(opnode, op, other, context, inferred)
  598. def _aug_op(
  599. instance: InferenceResult,
  600. opnode: nodes.AugAssign,
  601. op: str,
  602. other: InferenceResult,
  603. context: InferenceContext,
  604. reverse: bool = False,
  605. ) -> functools.partial[Generator[InferenceResult, None, None]]:
  606. """Get an inference callable for an augmented binary operation."""
  607. method_name = protocols.AUGMENTED_OP_METHOD[op]
  608. return functools.partial(
  609. _invoke_binop_inference,
  610. instance=instance,
  611. op=op,
  612. opnode=opnode,
  613. other=other,
  614. context=context,
  615. method_name=method_name,
  616. )
  617. def _bin_op(
  618. instance: InferenceResult,
  619. opnode: nodes.AugAssign | nodes.BinOp,
  620. op: str,
  621. other: InferenceResult,
  622. context: InferenceContext,
  623. reverse: bool = False,
  624. ) -> functools.partial[Generator[InferenceResult, None, None]]:
  625. """Get an inference callable for a normal binary operation.
  626. If *reverse* is True, then the reflected method will be used instead.
  627. """
  628. if reverse:
  629. method_name = protocols.REFLECTED_BIN_OP_METHOD[op]
  630. else:
  631. method_name = protocols.BIN_OP_METHOD[op]
  632. return functools.partial(
  633. _invoke_binop_inference,
  634. instance=instance,
  635. op=op,
  636. opnode=opnode,
  637. other=other,
  638. context=context,
  639. method_name=method_name,
  640. )
def _bin_op_or_union_type(
    left: bases.UnionType | nodes.ClassDef | nodes.Const,
    right: bases.UnionType | nodes.ClassDef | nodes.Const,
) -> Generator[InferenceResult, None, None]:
    """Create a new UnionType instance for binary or, e.g. int | str."""
    yield bases.UnionType(left, right)
  647. def _get_binop_contexts(context, left, right):
  648. """Get contexts for binary operations.
  649. This will return two inference contexts, the first one
  650. for x.__op__(y), the other one for y.__rop__(x), where
  651. only the arguments are inversed.
  652. """
  653. # The order is important, since the first one should be
  654. # left.__op__(right).
  655. for arg in (right, left):
  656. new_context = context.clone()
  657. new_context.callcontext = CallContext(args=[arg])
  658. new_context.boundnode = None
  659. yield new_context
  660. def _same_type(type1, type2) -> bool:
  661. """Check if type1 is the same as type2."""
  662. return type1.qname() == type2.qname()
def _get_binop_flow(
    left: InferenceResult,
    left_type: InferenceResult | None,
    binary_opnode: nodes.AugAssign | nodes.BinOp,
    right: InferenceResult,
    right_type: InferenceResult | None,
    context: InferenceContext,
    reverse_context: InferenceContext,
) -> list[functools.partial[Generator[InferenceResult, None, None]]]:
    """Get the flow for binary operations.

    The rules are a bit messy:

        * if left and right have the same type, then only one
          method will be called, left.__op__(right)
        * if left and right are unrelated typewise, then first
          left.__op__(right) is tried and if this does not exist
          or returns NotImplemented, then right.__rop__(left) is tried.
        * if left is a subtype of right, then only left.__op__(right)
          is tried.
        * if left is a supertype of right, then right.__rop__(left)
          is first tried and then left.__op__(right)
    """
    op = binary_opnode.op
    if _same_type(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_subtype(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_supertype(left_type, right_type):
        methods = [
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
            _bin_op(left, binary_opnode, op, right, context),
        ]
    else:
        methods = [
            _bin_op(left, binary_opnode, op, right, context),
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
        ]
    # Python 3.10+ PEP 604: `X | Y` on types builds a union.  Note the
    # operator precedence inside each clause: `and` binds tighter than
    # `or`, so an operand qualifies when it is a UnionType/ClassDef, or
    # when it is a Const whose value is None.
    if (
        PY310_PLUS
        and op == "|"
        and (
            isinstance(left, (bases.UnionType, nodes.ClassDef))
            or isinstance(left, nodes.Const)
            and left.value is None
        )
        and (
            isinstance(right, (bases.UnionType, nodes.ClassDef))
            or isinstance(right, nodes.Const)
            and right.value is None
        )
    ):
        methods.extend([functools.partial(_bin_op_or_union_type, left, right)])
    return methods
  715. def _get_aug_flow(
  716. left: InferenceResult,
  717. left_type: InferenceResult | None,
  718. aug_opnode: nodes.AugAssign,
  719. right: InferenceResult,
  720. right_type: InferenceResult | None,
  721. context: InferenceContext,
  722. reverse_context: InferenceContext,
  723. ) -> list[functools.partial[Generator[InferenceResult, None, None]]]:
  724. """Get the flow for augmented binary operations.
  725. The rules are a bit messy:
  726. * if left and right have the same type, then left.__augop__(right)
  727. is first tried and then left.__op__(right).
  728. * if left and right are unrelated typewise, then
  729. left.__augop__(right) is tried, then left.__op__(right)
  730. is tried and then right.__rop__(left) is tried.
  731. * if left is a subtype of right, then left.__augop__(right)
  732. is tried and then left.__op__(right).
  733. * if left is a supertype of right, then left.__augop__(right)
  734. is tried, then right.__rop__(left) and then
  735. left.__op__(right)
  736. """
  737. bin_op = aug_opnode.op.strip("=")
  738. aug_op = aug_opnode.op
  739. if _same_type(left_type, right_type):
  740. methods = [
  741. _aug_op(left, aug_opnode, aug_op, right, context),
  742. _bin_op(left, aug_opnode, bin_op, right, context),
  743. ]
  744. elif helpers.is_subtype(left_type, right_type):
  745. methods = [
  746. _aug_op(left, aug_opnode, aug_op, right, context),
  747. _bin_op(left, aug_opnode, bin_op, right, context),
  748. ]
  749. elif helpers.is_supertype(left_type, right_type):
  750. methods = [
  751. _aug_op(left, aug_opnode, aug_op, right, context),
  752. _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
  753. _bin_op(left, aug_opnode, bin_op, right, context),
  754. ]
  755. else:
  756. methods = [
  757. _aug_op(left, aug_opnode, aug_op, right, context),
  758. _bin_op(left, aug_opnode, bin_op, right, context),
  759. _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
  760. ]
  761. return methods
  762. def _infer_binary_operation(
  763. left: InferenceResult,
  764. right: InferenceResult,
  765. binary_opnode: nodes.AugAssign | nodes.BinOp,
  766. context: InferenceContext,
  767. flow_factory: GetFlowFactory,
  768. ) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
  769. """Infer a binary operation between a left operand and a right operand.
  770. This is used by both normal binary operations and augmented binary
  771. operations, the only difference is the flow factory used.
  772. """
  773. context, reverse_context = _get_binop_contexts(context, left, right)
  774. left_type = helpers.object_type(left)
  775. right_type = helpers.object_type(right)
  776. methods = flow_factory(
  777. left, left_type, binary_opnode, right, right_type, context, reverse_context
  778. )
  779. for method in methods:
  780. try:
  781. results = list(method())
  782. except AttributeError:
  783. continue
  784. except AttributeInferenceError:
  785. continue
  786. except InferenceError:
  787. yield util.Uninferable
  788. return
  789. else:
  790. if any(isinstance(result, util.UninferableBase) for result in results):
  791. yield util.Uninferable
  792. return
  793. if all(map(_is_not_implemented, results)):
  794. continue
  795. not_implemented = sum(
  796. 1 for result in results if _is_not_implemented(result)
  797. )
  798. if not_implemented and not_implemented != len(results):
  799. # Can't infer yet what this is.
  800. yield util.Uninferable
  801. return
  802. yield from results
  803. return
  804. # The operation doesn't seem to be supported so let the caller know about it
  805. yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
  806. def _infer_binop(
  807. self: nodes.BinOp, context: InferenceContext | None = None
  808. ) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
  809. """Binary operation inference logic."""
  810. left = self.left
  811. right = self.right
  812. # we use two separate contexts for evaluating lhs and rhs because
  813. # 1. evaluating lhs may leave some undesired entries in context.path
  814. # which may not let us infer right value of rhs
  815. context = context or InferenceContext()
  816. lhs_context = copy_context(context)
  817. rhs_context = copy_context(context)
  818. lhs_iter = left.infer(context=lhs_context)
  819. rhs_iter = right.infer(context=rhs_context)
  820. for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
  821. if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)):
  822. # Don't know how to process this.
  823. yield util.Uninferable
  824. return
  825. try:
  826. yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
  827. except _NonDeducibleTypeHierarchy:
  828. yield util.Uninferable
@decorators.yes_if_nothing_inferred
@decorators.path_wrapper
def infer_binop(
    self: nodes.BinOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    # Public entry point: delegates to _infer_binop and filters out
    # BadBinaryOperationMessage results before they reach callers.
    return _filter_operation_errors(
        self, _infer_binop, context, util.BadBinaryOperationMessage
    )


# Register the inference implementations on the BinOp node class.
nodes.BinOp._infer_binop = _infer_binop
nodes.BinOp._infer = infer_binop
# Maps each comparison operator token to the callable implementing it over
# two concrete (literal) values.
COMPARE_OPS: dict[str, Callable[[Any, Any], bool]] = {
    "==": operator.eq,
    "!=": operator.ne,
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
    "in": lambda a, b: a in b,
    "not in": lambda a, b: a not in b,
}
# Identity comparisons depend on runtime object identity, which static
# inference cannot determine, so these always infer to Uninferable.
UNINFERABLE_OPS = {
    "is",
    "is not",
}
  853. def _to_literal(node: nodes.NodeNG) -> Any:
  854. # Can raise SyntaxError or ValueError from ast.literal_eval
  855. # Can raise AttributeError from node.as_string() as not all nodes have a visitor
  856. # Is this the stupidest idea or the simplest idea?
  857. return ast.literal_eval(node.as_string())
  858. def _do_compare(
  859. left_iter: Iterable[nodes.NodeNG], op: str, right_iter: Iterable[nodes.NodeNG]
  860. ) -> bool | util.UninferableBase:
  861. """
  862. If all possible combinations are either True or False, return that:
  863. >>> _do_compare([1, 2], '<=', [3, 4])
  864. True
  865. >>> _do_compare([1, 2], '==', [3, 4])
  866. False
  867. If any item is uninferable, or if some combinations are True and some
  868. are False, return Uninferable:
  869. >>> _do_compare([1, 3], '<=', [2, 4])
  870. util.Uninferable
  871. """
  872. retval: bool | None = None
  873. if op in UNINFERABLE_OPS:
  874. return util.Uninferable
  875. op_func = COMPARE_OPS[op]
  876. for left, right in itertools.product(left_iter, right_iter):
  877. if isinstance(left, util.UninferableBase) or isinstance(
  878. right, util.UninferableBase
  879. ):
  880. return util.Uninferable
  881. try:
  882. left, right = _to_literal(left), _to_literal(right)
  883. except (SyntaxError, ValueError, AttributeError):
  884. return util.Uninferable
  885. try:
  886. expr = op_func(left, right)
  887. except TypeError as exc:
  888. raise AstroidTypeError from exc
  889. if retval is None:
  890. retval = expr
  891. elif retval != expr:
  892. return util.Uninferable
  893. # (or both, but "True | False" is basically the same)
  894. assert retval is not None
  895. return retval # it was all the same value
  896. def _infer_compare(
  897. self: nodes.Compare, context: InferenceContext | None = None, **kwargs: Any
  898. ) -> Generator[nodes.Const | util.UninferableBase, None, None]:
  899. """Chained comparison inference logic."""
  900. retval: bool | util.UninferableBase = True
  901. ops = self.ops
  902. left_node = self.left
  903. lhs = list(left_node.infer(context=context))
  904. # should we break early if first element is uninferable?
  905. for op, right_node in ops:
  906. # eagerly evaluate rhs so that values can be re-used as lhs
  907. rhs = list(right_node.infer(context=context))
  908. try:
  909. retval = _do_compare(lhs, op, rhs)
  910. except AstroidTypeError:
  911. retval = util.Uninferable
  912. break
  913. if retval is not True:
  914. break # short-circuit
  915. lhs = rhs # continue
  916. if retval is util.Uninferable:
  917. yield retval # type: ignore[misc]
  918. else:
  919. yield nodes.Const(retval)
# Register chained-comparison inference on the Compare node class.
nodes.Compare._infer = _infer_compare  # type: ignore[assignment]
  921. def _infer_augassign(
  922. self: nodes.AugAssign, context: InferenceContext | None = None
  923. ) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
  924. """Inference logic for augmented binary operations."""
  925. context = context or InferenceContext()
  926. rhs_context = context.clone()
  927. lhs_iter = self.target.infer_lhs(context=context)
  928. rhs_iter = self.value.infer(context=rhs_context)
  929. for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
  930. if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)):
  931. # Don't know how to process this.
  932. yield util.Uninferable
  933. return
  934. try:
  935. yield from _infer_binary_operation(
  936. left=lhs,
  937. right=rhs,
  938. binary_opnode=self,
  939. context=context,
  940. flow_factory=_get_aug_flow,
  941. )
  942. except _NonDeducibleTypeHierarchy:
  943. yield util.Uninferable
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_augassign(
    self: nodes.AugAssign, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    # Public entry point: delegates to _infer_augassign and filters out
    # BadBinaryOperationMessage results before they reach callers.
    return _filter_operation_errors(
        self, _infer_augassign, context, util.BadBinaryOperationMessage
    )


# Register the inference implementations on the AugAssign node class.
nodes.AugAssign._infer_augassign = _infer_augassign
nodes.AugAssign._infer = infer_augassign

# End of binary operation inference.
  955. @decorators.raise_if_nothing_inferred
  956. def infer_arguments(
  957. self: nodes.Arguments, context: InferenceContext | None = None, **kwargs: Any
  958. ) -> Generator[InferenceResult, None, None]:
  959. if context is None or context.lookupname is None:
  960. raise InferenceError(node=self, context=context)
  961. return protocols._arguments_infer_argname(self, context.lookupname, context)
# Register argument-name inference on the Arguments node class.
nodes.Arguments._infer = infer_arguments  # type: ignore[assignment]
  963. @decorators.raise_if_nothing_inferred
  964. @decorators.path_wrapper
  965. def infer_assign(
  966. self: nodes.AssignName | nodes.AssignAttr,
  967. context: InferenceContext | None = None,
  968. **kwargs: Any,
  969. ) -> Generator[InferenceResult, None, None]:
  970. """Infer a AssignName/AssignAttr: need to inspect the RHS part of the
  971. assign node.
  972. """
  973. if isinstance(self.parent, nodes.AugAssign):
  974. return self.parent.infer(context)
  975. stmts = list(self.assigned_stmts(context=context))
  976. return bases._infer_stmts(stmts, context)
# Register assignment inference on both assignment-target node classes.
nodes.AssignName._infer = infer_assign
nodes.AssignAttr._infer = infer_assign
  979. @decorators.raise_if_nothing_inferred
  980. @decorators.path_wrapper
  981. def infer_empty_node(
  982. self: nodes.EmptyNode, context: InferenceContext | None = None, **kwargs: Any
  983. ) -> Generator[InferenceResult, None, None]:
  984. if not self.has_underlying_object():
  985. yield util.Uninferable
  986. else:
  987. try:
  988. yield from AstroidManager().infer_ast_from_something(
  989. self.object, context=context
  990. )
  991. except AstroidError:
  992. yield util.Uninferable
# Register inference on the EmptyNode class.
nodes.EmptyNode._infer = infer_empty_node  # type: ignore[assignment]
  994. def _populate_context_lookup(call: nodes.Call, context: InferenceContext | None):
  995. # Allows context to be saved for later
  996. # for inference inside a function
  997. context_lookup: dict[InferenceResult, InferenceContext] = {}
  998. if context is None:
  999. return context_lookup
  1000. for arg in call.args:
  1001. if isinstance(arg, nodes.Starred):
  1002. context_lookup[arg.value] = context
  1003. else:
  1004. context_lookup[arg] = context
  1005. keywords = call.keywords if call.keywords is not None else []
  1006. for keyword in keywords:
  1007. context_lookup[keyword.value] = context
  1008. return context_lookup
  1009. @decorators.raise_if_nothing_inferred
  1010. def infer_ifexp(
  1011. self: nodes.IfExp, context: InferenceContext | None = None, **kwargs: Any
  1012. ) -> Generator[InferenceResult, None, None]:
  1013. """Support IfExp inference.
  1014. If we can't infer the truthiness of the condition, we default
  1015. to inferring both branches. Otherwise, we infer either branch
  1016. depending on the condition.
  1017. """
  1018. both_branches = False
  1019. # We use two separate contexts for evaluating lhs and rhs because
  1020. # evaluating lhs may leave some undesired entries in context.path
  1021. # which may not let us infer right value of rhs.
  1022. context = context or InferenceContext()
  1023. lhs_context = copy_context(context)
  1024. rhs_context = copy_context(context)
  1025. try:
  1026. test = next(self.test.infer(context=context.clone()))
  1027. except (InferenceError, StopIteration):
  1028. both_branches = True
  1029. else:
  1030. if not isinstance(test, util.UninferableBase):
  1031. if test.bool_value():
  1032. yield from self.body.infer(context=lhs_context)
  1033. else:
  1034. yield from self.orelse.infer(context=rhs_context)
  1035. else:
  1036. both_branches = True
  1037. if both_branches:
  1038. yield from self.body.infer(context=lhs_context)
  1039. yield from self.orelse.infer(context=rhs_context)
# Register conditional-expression inference on the IfExp node class.
nodes.IfExp._infer = infer_ifexp  # type: ignore[assignment]
def infer_functiondef(
    self: _FunctionDefT, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[Property | _FunctionDefT, None, InferenceErrorInfo]:
    """Infer a FunctionDef node.

    Yields the function itself, or an ``objects.Property`` wrapper when the
    function is decorated as a property.
    """
    if not self.decorators or not bases._is_property(self):
        yield self
        return InferenceErrorInfo(node=self, context=context)

    # When inferring a property, we instantiate a new `objects.Property` object,
    # which in turn, because it inherits from `FunctionDef`, sets itself in the locals
    # of the wrapping frame. This means that every time we infer a property, the locals
    # are mutated with a new instance of the property. To avoid this, we detect this
    # scenario and avoid passing the `parent` argument to the constructor.
    parent_frame = self.parent.frame(future=True)
    property_already_in_parent_locals = self.name in parent_frame.locals and any(
        isinstance(val, objects.Property) for val in parent_frame.locals[self.name]
    )
    # We also don't want to pass parent if the definition is within a Try node
    if isinstance(self.parent, (nodes.TryExcept, nodes.TryFinally, nodes.If)):
        property_already_in_parent_locals = True

    prop_func = objects.Property(
        function=self,
        name=self.name,
        lineno=self.lineno,
        parent=self.parent if not property_already_in_parent_locals else None,
        col_offset=self.col_offset,
    )
    if property_already_in_parent_locals:
        # Attach the parent manually so the frame's locals are not mutated
        # again by the constructor.
        prop_func.parent = self.parent
    prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node)
    yield prop_func
    return InferenceErrorInfo(node=self, context=context)


nodes.FunctionDef._infer = infer_functiondef