testmerge.py 8.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238
  1. """Test cases for AST merge (used for fine-grained incremental checking)"""
  2. from __future__ import annotations
  3. import os
  4. import shutil
  5. from mypy import build
  6. from mypy.build import BuildResult
  7. from mypy.errors import CompileError
  8. from mypy.modulefinder import BuildSource
  9. from mypy.nodes import (
  10. UNBOUND_IMPORTED,
  11. Expression,
  12. MypyFile,
  13. Node,
  14. SymbolTable,
  15. SymbolTableNode,
  16. TypeInfo,
  17. TypeVarExpr,
  18. Var,
  19. )
  20. from mypy.options import Options
  21. from mypy.server.subexpr import get_subexpressions
  22. from mypy.server.update import FineGrainedBuildManager
  23. from mypy.strconv import StrConv
  24. from mypy.test.config import test_temp_dir
  25. from mypy.test.data import DataDrivenTestCase, DataSuite
  26. from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options
  27. from mypy.types import Type, TypeStrVisitor
  28. from mypy.util import IdMapper, short_type
  29. # Which data structures to dump in a test case?
  30. SYMTABLE = "SYMTABLE"
  31. TYPEINFO = " TYPEINFO"
  32. TYPES = "TYPES"
  33. AST = "AST"
class ASTMergeSuite(DataSuite):
    """Data-driven test suite for AST merge (fine-grained incremental mode).

    Each case builds an initial program, replaces ``target.py`` with
    ``target.py.next``, reprocesses it through the fine-grained build
    manager, and compares textual dumps of the selected data structure
    (AST, symbol table, TypeInfos, or expression types) taken before and
    after the update.
    """

    files = ["merge.test"]

    def setup(self) -> None:
        """Create shared string/type converters with stable node ids."""
        super().setup()
        self.str_conv = StrConv(show_ids=True, options=Options())
        assert self.str_conv.id_mapper is not None
        # Reuse the AST converter's id mapper for type dumps so the same
        # object always gets the same numeric id in both kinds of output.
        self.id_mapper: IdMapper = self.str_conv.id_mapper
        self.type_str_conv = TypeStrVisitor(self.id_mapper, options=Options())

    def run_case(self, testcase: DataDrivenTestCase) -> None:
        """Run one merge test case and compare dumps against expected output."""
        name = testcase.name
        # We use the test case name to decide which data structures to dump.
        # Dumping everything would result in very verbose test cases.
        if name.endswith("_symtable"):
            kind = SYMTABLE
        elif name.endswith("_typeinfo"):
            kind = TYPEINFO
        elif name.endswith("_types"):
            kind = TYPES
        else:
            kind = AST
        main_src = "\n".join(testcase.input)
        result = self.build(main_src, testcase)
        assert result is not None, "cases where CompileError occurred should not be run"
        result.manager.fscache.flush()
        fine_grained_manager = FineGrainedBuildManager(result)
        a: list[str] = []
        if result.errors:
            a.extend(result.errors)
        # Simulate an edit: replace target.py with the ".next" version.
        target_path = os.path.join(test_temp_dir, "target.py")
        shutil.copy(os.path.join(test_temp_dir, "target.py.next"), target_path)
        # Dump before the update, capture old subexpressions, update, dump again.
        a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules))
        old_subexpr = get_subexpressions(result.manager.modules["target"])
        a.append("==>")
        new_file, new_types = self.build_increment(fine_grained_manager, "target", target_path)
        a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules))
        for expr in old_subexpr:
            if isinstance(expr, TypeVarExpr):
                # These are merged so we can't perform the check.
                continue
            # Verify that old AST nodes are removed from the expression type map.
            assert expr not in new_types
        if testcase.normalize_output:
            a = normalize_error_messages(a)
        assert_string_arrays_equal(
            testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})"
        )

    def build(self, source: str, testcase: DataDrivenTestCase) -> BuildResult | None:
        """Type check ``source`` in fine-grained mode.

        Returns None if the build raised CompileError (such cases are
        expected not to be run further; see the assert in run_case).
        """
        options = parse_options(source, testcase, incremental_step=1)
        options.incremental = True
        options.fine_grained_incremental = True
        options.use_builtins_fixtures = True
        options.export_types = True
        options.show_traceback = True
        options.allow_empty_bodies = True
        options.force_uppercase_builtins = True
        main_path = os.path.join(test_temp_dir, "main")
        # Keep the dump converters in sync with the per-case options.
        self.str_conv.options = options
        self.type_str_conv.options = options
        with open(main_path, "w", encoding="utf8") as f:
            f.write(source)
        try:
            result = build.build(
                sources=[BuildSource(main_path, None, None)],
                options=options,
                alt_lib_path=test_temp_dir,
            )
        except CompileError:
            # TODO: Is it okay to return None?
            return None
        return result

    def build_increment(
        self, manager: FineGrainedBuildManager, module_id: str, path: str
    ) -> tuple[MypyFile, dict[Expression, Type]]:
        """Reprocess a changed module; return its new AST and expression type map."""
        manager.flush_cache()
        manager.update([(module_id, path)], [])
        module = manager.manager.modules[module_id]
        type_map = manager.graph[module_id].type_map()
        return module, type_map

    def dump(
        self, manager: FineGrainedBuildManager, kind: str, test_modules: list[str]
    ) -> list[str]:
        """Dump the data structure selected by ``kind`` for the test modules."""
        # Restrict output to modules that belong to the test case itself.
        modules = {
            name: file for name, file in manager.manager.modules.items() if name in test_modules
        }
        if kind == AST:
            return self.dump_asts(modules)
        elif kind == TYPEINFO:
            return self.dump_typeinfos(modules)
        elif kind == SYMTABLE:
            return self.dump_symbol_tables(modules)
        elif kind == TYPES:
            return self.dump_types(modules, manager)
        assert False, f"Invalid kind {kind}"

    def dump_asts(self, modules: dict[str, MypyFile]) -> list[str]:
        """Render each module's AST (with node ids) as a list of lines."""
        a: list[str] = []
        for m in sorted(modules):
            s = modules[m].accept(self.str_conv)
            a.extend(s.splitlines())
        return a

    def dump_symbol_tables(self, modules: dict[str, MypyFile]) -> list[str]:
        """Render the top-level symbol table of every module."""
        a: list[str] = []
        for id in sorted(modules):
            a.extend(self.dump_symbol_table(id, modules[id].names))
        return a

    def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> list[str]:
        """Render one symbol table, skipping dunder/private ("__"-prefixed) names."""
        a = [f"{module_id}:"]
        for name in sorted(symtable):
            if name.startswith("__"):
                continue
            a.append(f"    {name}: {self.format_symbol_table_node(symtable[name])}")
        return a

    def format_symbol_table_node(self, node: SymbolTableNode) -> str:
        """Format a symbol table entry as ``Kind<id>`` plus a type for Vars."""
        if node.node is None:
            if node.kind == UNBOUND_IMPORTED:
                return "UNBOUND_IMPORTED"
            return "None"
        if isinstance(node.node, Node):
            s = f"{str(type(node.node).__name__)}<{self.id_mapper.id(node.node)}>"
        else:
            s = f"? ({type(node.node)})"
        # Typed Vars get their type appended; typing.* vars are skipped to
        # keep expected output free of stub noise.
        if (
            isinstance(node.node, Var)
            and node.node.type
            and not node.node.fullname.startswith("typing.")
        ):
            typestr = self.format_type(node.node.type)
            s += f"({typestr})"
        return s

    def dump_typeinfos(self, modules: dict[str, MypyFile]) -> list[str]:
        """Render all TypeInfos defined (at any nesting depth) in the modules."""
        a: list[str] = []
        for id in sorted(modules):
            a.extend(self.dump_typeinfos_recursive(modules[id].names))
        return a

    def dump_typeinfos_recursive(self, names: SymbolTable) -> list[str]:
        """Recursively render TypeInfos reachable from a symbol table."""
        a: list[str] = []
        for name, node in sorted(names.items(), key=lambda x: x[0]):
            if isinstance(node.node, TypeInfo):
                a.extend(self.dump_typeinfo(node.node))
                # Nested classes live in the TypeInfo's own symbol table.
                a.extend(self.dump_typeinfos_recursive(node.node.names))
        return a

    def dump_typeinfo(self, info: TypeInfo) -> list[str]:
        """Render a single TypeInfo; enum.Enum is suppressed to avoid noise."""
        if info.fullname == "enum.Enum":
            # Avoid noise
            return []
        s = info.dump(str_conv=self.str_conv, type_str_conv=self.type_str_conv)
        return s.splitlines()

    def dump_types(
        self, modules: dict[str, MypyFile], manager: FineGrainedBuildManager
    ) -> list[str]:
        """Render the inferred type of every subexpression, per module."""
        a: list[str] = []
        # To make the results repeatable, we try to generate unique and
        # deterministic sort keys.
        for module_id in sorted(modules):
            all_types = manager.manager.all_types
            # Compute a module type map from the global type map
            tree = manager.graph[module_id].tree
            assert tree is not None
            type_map = {
                node: all_types[node] for node in get_subexpressions(tree) if node in all_types
            }
            if type_map:
                a.append(f"## {module_id}")
                # Sort by (line, node kind, rendered text) so output does not
                # depend on dict iteration order.
                for expr in sorted(
                    type_map,
                    key=lambda n: (
                        n.line,
                        short_type(n),
                        n.str_with_options(self.str_conv.options) + str(type_map[n]),
                    ),
                ):
                    typ = type_map[expr]
                    a.append(f"{short_type(expr)}:{expr.line}: {self.format_type(typ)}")
        return a

    def format_type(self, typ: Type) -> str:
        """Render a type using the shared id-aware type converter."""
        return typ.accept(self.type_str_conv)