stubdoc.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401
  1. """Parsing/inferring signatures from documentation.
  2. This module provides several functions to generate better stubs using
  3. docstrings and Sphinx docs (.rst files).
  4. """
  5. from __future__ import annotations
  6. import contextlib
  7. import io
  8. import re
  9. import tokenize
  10. from typing import Any, Final, MutableMapping, MutableSequence, NamedTuple, Sequence, Tuple
  11. from typing_extensions import TypeAlias as _TypeAlias
# Type alias for signatures strings in format ('func_name', '(arg, opt_arg=False)').
Sig: _TypeAlias = Tuple[str, str]

# Matches a (possibly dotted) type expression; brackets/commas allow e.g. "List[int, str]".
_TYPE_RE: Final = re.compile(r"^[a-zA-Z_][\w\[\], ]*(\.[a-zA-Z_][\w\[\], ]*)*$")
# Matches an argument name, optionally prefixed with '*' or '**'.
_ARG_NAME_RE: Final = re.compile(r"\**[A-Za-z_][A-Za-z0-9_]*$")
  16. def is_valid_type(s: str) -> bool:
  17. """Try to determine whether a string might be a valid type annotation."""
  18. if s in ("True", "False", "retval"):
  19. return False
  20. if "," in s and "[" not in s:
  21. return False
  22. return _TYPE_RE.match(s) is not None
  23. class ArgSig:
  24. """Signature info for a single argument."""
  25. def __init__(self, name: str, type: str | None = None, default: bool = False):
  26. self.name = name
  27. if type and not is_valid_type(type):
  28. raise ValueError("Invalid type: " + type)
  29. self.type = type
  30. # Does this argument have a default value?
  31. self.default = default
  32. def __repr__(self) -> str:
  33. return "ArgSig(name={}, type={}, default={})".format(
  34. repr(self.name), repr(self.type), repr(self.default)
  35. )
  36. def __eq__(self, other: Any) -> bool:
  37. if isinstance(other, ArgSig):
  38. return (
  39. self.name == other.name
  40. and self.type == other.type
  41. and self.default == other.default
  42. )
  43. return False
class FunctionSig(NamedTuple):
    """A function signature reconstructed from documentation."""

    # Function name (unqualified).
    name: str
    # Arguments in declaration order.
    args: list[ArgSig]
    # Return type as a string; "Any" when the docs gave none.
    ret_type: str
# States of the docstring parser.
STATE_INIT: Final = 1  # Scanning for the target function name.
STATE_FUNCTION_NAME: Final = 2  # Saw the function name; expecting '('.
STATE_ARGUMENT_LIST: Final = 3  # Inside the parenthesized argument list.
STATE_ARGUMENT_TYPE: Final = 4  # After ':' — accumulating an argument's type.
STATE_ARGUMENT_DEFAULT: Final = 5  # After '=' — accumulating a default value.
STATE_RETURN_VALUE: Final = 6  # After '->' — accumulating the return type.
STATE_OPEN_BRACKET: Final = 7  # For generic types.
class DocStringParser:
    """Parse function signatures in documentation.

    Tokens from ``tokenize`` are fed one at a time to :meth:`add_token`;
    the parser keeps a stack of STATE_* values and accumulates raw token
    text to build up argument names, types, defaults, and return types.
    """

    def __init__(self, function_name: str) -> None:
        # Only search for signatures of function with this name.
        self.function_name = function_name
        # Parser state stack; the top of the stack drives add_token().
        self.state = [STATE_INIT]
        # Raw text of the token run currently being collected.
        self.accumulator = ""
        self.arg_type: str | None = None
        self.arg_name = ""
        self.arg_default: str | None = None
        self.ret_type = "Any"
        # True once the target function's '(' has been seen on this line.
        self.found = False
        self.args: list[ArgSig] = []
        # Valid signatures found so far.
        self.signatures: list[FunctionSig] = []

    def add_token(self, token: tokenize.TokenInfo) -> None:
        """Process next token from the token stream."""
        if (
            token.type == tokenize.NAME
            and token.string == self.function_name
            and self.state[-1] == STATE_INIT
        ):
            # Possible start of a signature for the target function.
            self.state.append(STATE_FUNCTION_NAME)
        elif (
            token.type == tokenize.OP
            and token.string == "("
            and self.state[-1] == STATE_FUNCTION_NAME
        ):
            # Confirmed: the name is followed by '(' -- start the arg list.
            self.state.pop()
            self.accumulator = ""
            self.found = True
            self.state.append(STATE_ARGUMENT_LIST)
        elif self.state[-1] == STATE_FUNCTION_NAME:
            # Reset state, function name not followed by '('.
            self.state.pop()
        elif (
            token.type == tokenize.OP
            and token.string in ("[", "(", "{")
            and self.state[-1] != STATE_INIT
        ):
            # Opening bracket inside a type/default, e.g. List[int]; nest.
            self.accumulator += token.string
            self.state.append(STATE_OPEN_BRACKET)
        elif (
            token.type == tokenize.OP
            and token.string in ("]", ")", "}")
            and self.state[-1] == STATE_OPEN_BRACKET
        ):
            # Closing bracket of a nested group.
            self.accumulator += token.string
            self.state.pop()
        elif (
            token.type == tokenize.OP
            and token.string == ":"
            and self.state[-1] == STATE_ARGUMENT_LIST
        ):
            # 'name:' -- what follows is the argument's type.
            self.arg_name = self.accumulator
            self.accumulator = ""
            self.state.append(STATE_ARGUMENT_TYPE)
        elif (
            token.type == tokenize.OP
            and token.string == "="
            and self.state[-1] in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_TYPE)
        ):
            # '=' -- what follows is the argument's default value.
            if self.state[-1] == STATE_ARGUMENT_TYPE:
                self.arg_type = self.accumulator
                self.state.pop()
            else:
                self.arg_name = self.accumulator
            self.accumulator = ""
            self.state.append(STATE_ARGUMENT_DEFAULT)
        elif (
            token.type == tokenize.OP
            and token.string in (",", ")")
            and self.state[-1]
            in (STATE_ARGUMENT_LIST, STATE_ARGUMENT_DEFAULT, STATE_ARGUMENT_TYPE)
        ):
            # End of one argument (',') or of the whole list (')').
            if self.state[-1] == STATE_ARGUMENT_DEFAULT:
                self.arg_default = self.accumulator
                self.state.pop()
            elif self.state[-1] == STATE_ARGUMENT_TYPE:
                self.arg_type = self.accumulator
                self.state.pop()
            elif self.state[-1] == STATE_ARGUMENT_LIST:
                self.arg_name = self.accumulator
                if not (
                    token.string == ")" and self.accumulator.strip() == ""
                ) and not _ARG_NAME_RE.match(self.arg_name):
                    # Invalid argument name.
                    self.reset()
                    return
            if token.string == ")":
                self.state.pop()
            # arg_name is empty when there are no args. e.g. func()
            if self.arg_name:
                try:
                    self.args.append(
                        ArgSig(
                            name=self.arg_name, type=self.arg_type, default=bool(self.arg_default)
                        )
                    )
                except ValueError:
                    # wrong type, use Any
                    self.args.append(
                        ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default))
                    )
            self.arg_name = ""
            self.arg_type = None
            self.arg_default = None
            self.accumulator = ""
        elif token.type == tokenize.OP and token.string == "->" and self.state[-1] == STATE_INIT:
            # '->' after the argument list closed; collect the return type.
            self.accumulator = ""
            self.state.append(STATE_RETURN_VALUE)
        # ENDMARKER is necessary for python 3.4 and 3.5.
        elif token.type in (tokenize.NEWLINE, tokenize.ENDMARKER) and self.state[-1] in (
            STATE_INIT,
            STATE_RETURN_VALUE,
        ):
            # End of a signature line: finalize the return type (if any)
            # and record the signature when one was actually found.
            if self.state[-1] == STATE_RETURN_VALUE:
                if not is_valid_type(self.accumulator):
                    self.reset()
                    return
                self.ret_type = self.accumulator
                self.accumulator = ""
                self.state.pop()
            if self.found:
                self.signatures.append(
                    FunctionSig(name=self.function_name, args=self.args, ret_type=self.ret_type)
                )
                self.found = False
            self.args = []
            self.ret_type = "Any"
            # Leave state as INIT.
        else:
            # Any other token becomes part of the current name/type/default.
            self.accumulator += token.string

    def reset(self) -> None:
        # Abandon the signature being parsed; already-recorded signatures stay.
        self.state = [STATE_INIT]
        self.args = []
        self.found = False
        self.accumulator = ""

    def get_signatures(self) -> list[FunctionSig]:
        """Return sorted copy of the list of signatures found so far."""

        def has_arg(name: str, signature: FunctionSig) -> bool:
            return any(x.name == name for x in signature.args)

        def args_kwargs(signature: FunctionSig) -> bool:
            return has_arg("*args", signature) and has_arg("**kwargs", signature)

        # Move functions with (*args, **kwargs) in their signature to last place.
        return list(sorted(self.signatures, key=lambda x: 1 if args_kwargs(x) else 0))
  202. def infer_sig_from_docstring(docstr: str | None, name: str) -> list[FunctionSig] | None:
  203. """Convert function signature to list of TypedFunctionSig
  204. Look for function signatures of function in docstring. Signature is a string of
  205. the format <function_name>(<signature>) -> <return type> or perhaps without
  206. the return type.
  207. Returns empty list, when no signature is found, one signature in typical case,
  208. multiple signatures, if docstring specifies multiple signatures for overload functions.
  209. Return None if the docstring is empty.
  210. Arguments:
  211. * docstr: docstring
  212. * name: name of function for which signatures are to be found
  213. """
  214. if not (isinstance(docstr, str) and docstr):
  215. return None
  216. state = DocStringParser(name)
  217. # Return all found signatures, even if there is a parse error after some are found.
  218. with contextlib.suppress(tokenize.TokenError):
  219. try:
  220. tokens = tokenize.tokenize(io.BytesIO(docstr.encode("utf-8")).readline)
  221. for token in tokens:
  222. state.add_token(token)
  223. except IndentationError:
  224. return None
  225. sigs = state.get_signatures()
  226. def is_unique_args(sig: FunctionSig) -> bool:
  227. """return true if function argument names are unique"""
  228. return len(sig.args) == len({arg.name for arg in sig.args})
  229. # Return only signatures that have unique argument names. Mypy fails on non-unique arg names.
  230. return [sig for sig in sigs if is_unique_args(sig)]
  231. def infer_arg_sig_from_anon_docstring(docstr: str) -> list[ArgSig]:
  232. """Convert signature in form of "(self: TestClass, arg0: str='ada')" to List[TypedArgList]."""
  233. ret = infer_sig_from_docstring("stub" + docstr, "stub")
  234. if ret:
  235. return ret[0].args
  236. return []
  237. def infer_ret_type_sig_from_docstring(docstr: str, name: str) -> str | None:
  238. """Convert signature in form of "func(self: TestClass, arg0) -> int" to their return type."""
  239. ret = infer_sig_from_docstring(docstr, name)
  240. if ret:
  241. return ret[0].ret_type
  242. return None
  243. def infer_ret_type_sig_from_anon_docstring(docstr: str) -> str | None:
  244. """Convert signature in form of "(self: TestClass, arg0) -> int" to their return type."""
  245. return infer_ret_type_sig_from_docstring("stub" + docstr.strip(), "stub")
  246. def parse_signature(sig: str) -> tuple[str, list[str], list[str]] | None:
  247. """Split function signature into its name, positional an optional arguments.
  248. The expected format is "func_name(arg, opt_arg=False)". Return the name of function
  249. and lists of positional and optional argument names.
  250. """
  251. m = re.match(r"([.a-zA-Z0-9_]+)\(([^)]*)\)", sig)
  252. if not m:
  253. return None
  254. name = m.group(1)
  255. name = name.split(".")[-1]
  256. arg_string = m.group(2)
  257. if not arg_string.strip():
  258. # Simple case -- no arguments.
  259. return name, [], []
  260. args = [arg.strip() for arg in arg_string.split(",")]
  261. positional = []
  262. optional = []
  263. i = 0
  264. while i < len(args):
  265. # Accept optional arguments as in both formats: x=None and [x].
  266. if args[i].startswith("[") or "=" in args[i]:
  267. break
  268. positional.append(args[i].rstrip("["))
  269. i += 1
  270. if args[i - 1].endswith("["):
  271. break
  272. while i < len(args):
  273. arg = args[i]
  274. arg = arg.strip("[]")
  275. arg = arg.split("=")[0]
  276. optional.append(arg)
  277. i += 1
  278. return name, positional, optional
  279. def build_signature(positional: Sequence[str], optional: Sequence[str]) -> str:
  280. """Build function signature from lists of positional and optional argument names."""
  281. args: MutableSequence[str] = []
  282. args.extend(positional)
  283. for arg in optional:
  284. if arg.startswith("*"):
  285. args.append(arg)
  286. else:
  287. args.append(f"{arg}=...")
  288. sig = f"({', '.join(args)})"
  289. # Ad-hoc fixes.
  290. sig = sig.replace("(self)", "")
  291. return sig
  292. def parse_all_signatures(lines: Sequence[str]) -> tuple[list[Sig], list[Sig]]:
  293. """Parse all signatures in a given reST document.
  294. Return lists of found signatures for functions and classes.
  295. """
  296. sigs = []
  297. class_sigs = []
  298. for line in lines:
  299. line = line.strip()
  300. m = re.match(r"\.\. *(function|method|class) *:: *[a-zA-Z_]", line)
  301. if m:
  302. sig = line.split("::")[1].strip()
  303. parsed = parse_signature(sig)
  304. if parsed:
  305. name, fixed, optional = parsed
  306. if m.group(1) != "class":
  307. sigs.append((name, build_signature(fixed, optional)))
  308. else:
  309. class_sigs.append((name, build_signature(fixed, optional)))
  310. return sorted(sigs), sorted(class_sigs)
  311. def find_unique_signatures(sigs: Sequence[Sig]) -> list[Sig]:
  312. """Remove names with duplicate found signatures."""
  313. sig_map: MutableMapping[str, list[str]] = {}
  314. for name, sig in sigs:
  315. sig_map.setdefault(name, []).append(sig)
  316. result = []
  317. for name, name_sigs in sig_map.items():
  318. if len(set(name_sigs)) == 1:
  319. result.append((name, name_sigs[0]))
  320. return sorted(result)
  321. def infer_prop_type_from_docstring(docstr: str | None) -> str | None:
  322. """Check for Google/Numpy style docstring type annotation for a property.
  323. The docstring has the format "<type>: <descriptions>".
  324. In the type string, we allow the following characters:
  325. * dot: because sometimes classes are annotated using full path
  326. * brackets: to allow type hints like List[int]
  327. * comma/space: things like Tuple[int, int]
  328. """
  329. if not docstr:
  330. return None
  331. test_str = r"^([a-zA-Z0-9_, \.\[\]]*): "
  332. m = re.match(test_str, docstr)
  333. return m.group(1) if m else None