Discover Python API functions on RNA types (Context.copy, etc.)

Pick up Python functions defined on RNA classes that have RST docstrings
(:rtype: or :type:), indicating they are public API methods rather than
operator/panel implementation details.

Also add Iterable and mathutils imports to bpy.types stub generator
for functions that reference these types in their annotations.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Joseph HENRY 2026-03-31 12:34:19 +02:00
parent cfcb66620a
commit a30e772cf8
2 changed files with 1107 additions and 950 deletions

View File

@ -98,43 +98,21 @@ def collect_all_type_strings(module_data: ModuleData) -> list[str]:
return all_types return all_types
def collect_imports(module_data: ModuleData) -> set[str]: _TOKEN_IMPORTS: tuple[tuple[str, str], ...] = (
"""Collect all import statements needed for the stub file.""" ("Sequence", "from collections.abc import Sequence"),
imports: set[str] = set() ("Iterable", "from collections.abc import Iterable"),
all_types = collect_all_type_strings(module_data) ("Callable", "from collections.abc import Callable"),
joined = " ".join(all_types) ("Iterator", "from collections.abc import Iterator"),
("Literal", "from typing import Literal"),
("Any", "from typing import Any"),
("Generator", "from collections.abc import Generator"),
("Mapping", "from collections.abc import Mapping"),
("Collection", "from collections.abc import Collection"),
("TypeAlias", "from typing import TypeAlias"),
("Generic", "from typing import Generic, TypeVar"),
)
def _has_type(name: str) -> bool: _KNOWN_TYPED_MODULES: tuple[str, ...] = (
return bool(re.search(rf"\b{name}\b", joined))
if _has_type("Sequence"):
imports.add("from collections.abc import Sequence")
if _has_type("Iterable"):
imports.add("from collections.abc import Iterable")
if _has_type("Callable"):
imports.add("from collections.abc import Callable")
if _has_type("Iterator"):
imports.add("from collections.abc import Iterator")
if _has_type("Literal"):
imports.add("from typing import Literal")
if _has_type("Any"):
imports.add("from typing import Any")
if _has_type("Generator"):
imports.add("from collections.abc import Generator")
if _has_type("Mapping"):
imports.add("from collections.abc import Mapping")
if _has_type("Collection"):
imports.add("from collections.abc import Collection")
if "Self" in joined:
imports.add("from typing import Self")
if _has_type("TypeAlias"):
imports.add("from typing import TypeAlias")
if _has_type("Generic"):
imports.add("from typing import Generic, TypeVar")
# Detect module-qualified references (e.g. bpy.types.Object, mathutils.Vector)
# Detect qualified module references
known_modules = [
"bpy.types", "bpy.types",
"bpy.props", "bpy.props",
"bpy.app", "bpy.app",
@ -145,26 +123,62 @@ def collect_imports(module_data: ModuleData) -> set[str]:
"freestyle.types", "freestyle.types",
"bmesh.types", "bmesh.types",
"bmesh", "bmesh",
] )
if "datetime" in joined:
imports.add("import datetime")
if "types.ModuleType" in joined:
imports.add("import types")
if "collections.OrderedDict" in joined:
imports.add("import collections")
if "collections.abc." in joined:
imports.add("import collections.abc")
for mod in known_modules:
if mod + "." in joined or re.search(rf"\b{re.escape(mod)}\b", joined):
imports.add(f"import {mod}")
# fixup_shadowed_builtins will add builtins.X references for structs _SPECIAL_IMPORT_MARKERS: tuple[tuple[str, str], ...] = (
# with property names that shadow builtins — need the import ("datetime", "import datetime"),
("types.ModuleType", "import types"),
("collections.OrderedDict", "import collections"),
("collections.abc.", "import collections.abc"),
)
def _has_token(joined_types: str, token: str) -> bool:
"""Return whether a type token appears as a standalone identifier."""
return bool(re.search(rf"\b{token}\b", joined_types))
def _collect_token_imports(joined_types: str) -> set[str]:
    """Collect import lines triggered by standalone type tokens.

    Scans *joined_types* for each known token from ``_TOKEN_IMPORTS`` and
    returns the corresponding import statements.
    """
    needed = {
        import_line
        for token, import_line in _TOKEN_IMPORTS
        if _has_token(joined_types, token)
    }
    # "Self" is matched as a plain substring (no word boundary), preserving
    # the original matching behavior for this one token.
    if "Self" in joined_types:
        needed.add("from typing import Self")
    return needed
def _collect_module_reference_imports(joined_types: str) -> set[str]:
    """Collect import lines required by module-qualified type references.

    Handles both special-cased markers (``_SPECIAL_IMPORT_MARKERS``) and the
    known typed modules (``_KNOWN_TYPED_MODULES``) referenced by name or by
    a dotted prefix.
    """
    needed: set[str] = set()
    for marker, import_line in _SPECIAL_IMPORT_MARKERS:
        if marker in joined_types:
            needed.add(import_line)
    needed.update(
        f"import {mod}"
        for mod in _KNOWN_TYPED_MODULES
        if f"{mod}." in joined_types
        or re.search(rf"\b{re.escape(mod)}\b", joined_types)
    )
    return needed
def _requires_builtins_import(module_data: ModuleData) -> bool:
    """Return whether builtins import is needed for builtin-name shadowing.

    True when any struct has a property whose name shadows a Python builtin
    (``BUILTIN_NAMES``), since fixup_shadowed_builtins then emits qualified
    ``builtins.X`` references.
    """
    return any(
        {prop["name"] for prop in struct["properties"]} & BUILTIN_NAMES
        for struct in module_data.get("structs", [])
    )
def collect_imports(module_data: ModuleData) -> set[str]:
    """Collect all import statements needed for the stub file.

    Joins every type string in the module into one searchable blob, then
    delegates to the token-based and module-reference collectors, finally
    adding ``import builtins`` when builtin names are shadowed.
    """
    joined = " ".join(collect_all_type_strings(module_data))
    needed = _collect_token_imports(joined)
    needed |= _collect_module_reference_imports(joined)
    if _requires_builtins_import(module_data):
        needed.add("import builtins")
    return needed
@ -216,9 +230,16 @@ def format_docstring(doc: str, indent: str = " ") -> str:
return result return result
def generate_function_stub(func: FunctionData) -> str: def _build_signature_params(
"""Generate a stub for a single function.""" params: list[ParamData],
# Build parameter list, separating by kind and default presence leading_params: list[str] | None = None,
skip_names: set[str] | None = None,
) -> str:
"""Build a function signature parameter list string from ParamData entries."""
filtered_params = [
param for param in params if param["name"] not in (skip_names or set())
]
positional_only_no_default: list[str] = [] positional_only_no_default: list[str] = []
positional_only_with_default: list[str] = [] positional_only_with_default: list[str] = []
positional_no_default: list[str] = [] positional_no_default: list[str] = []
@ -226,7 +247,7 @@ def generate_function_stub(func: FunctionData) -> str:
keyword_params: list[str] = [] keyword_params: list[str] = []
has_positional_only = False has_positional_only = False
for param in func["params"]: for param in filtered_params:
formatted = format_param(param, force_type=True) formatted = format_param(param, force_type=True)
kind = param.get("kind", "POSITIONAL_OR_KEYWORD") kind = param.get("kind", "POSITIONAL_OR_KEYWORD")
if kind == "POSITIONAL_ONLY": if kind == "POSITIONAL_ONLY":
@ -248,7 +269,7 @@ def generate_function_stub(func: FunctionData) -> str:
else: else:
positional_no_default.append(formatted) positional_no_default.append(formatted)
all_params: list[str] = [] all_params: list[str] = list(leading_params or [])
if has_positional_only and not ( if has_positional_only and not (
positional_only_with_default and positional_no_default positional_only_with_default and positional_no_default
): ):
@ -260,21 +281,30 @@ def generate_function_stub(func: FunctionData) -> str:
# Merge positional-only into regular params when / would be invalid # Merge positional-only into regular params when / would be invalid
positional_no_default = positional_only_no_default + positional_no_default positional_no_default = positional_only_no_default + positional_no_default
positional_with_default = positional_only_with_default + positional_with_default positional_with_default = positional_only_with_default + positional_with_default
# Non-default positional params must come before default ones # Non-default positional params must come before default ones
all_params.extend(positional_no_default) all_params.extend(positional_no_default)
all_params.extend(positional_with_default) all_params.extend(positional_with_default)
if keyword_params: if keyword_params:
# Insert * separator if there are keyword-only args and no VAR_POSITIONAL. # Insert * separator if there are keyword-only args and no VAR_POSITIONAL.
# Don't insert * if the only keyword params are **kwargs (already captures all). # Don't insert * if the only keyword params are **kwargs (already captures all).
has_var_positional = any( has_var_positional = any(
p.get("kind") == "VAR_POSITIONAL" for p in func["params"] p.get("kind") == "VAR_POSITIONAL" for p in filtered_params
)
has_named_keyword = any(
p.get("kind") == "KEYWORD_ONLY" for p in filtered_params
) )
has_named_keyword = any(p.get("kind") == "KEYWORD_ONLY" for p in func["params"])
if not has_var_positional and has_named_keyword: if not has_var_positional and has_named_keyword:
all_params.append("*") all_params.append("*")
all_params.extend(keyword_params) all_params.extend(keyword_params)
params_str = ", ".join(all_params) return ", ".join(all_params)
def generate_function_stub(func: FunctionData) -> str:
"""Generate a stub for a single function."""
params_str = _build_signature_params(func["params"])
# Return type # Return type
ret = f" -> {func['return_type']}" if func["return_type"] else " -> None" ret = f" -> {func['return_type']}" if func["return_type"] else " -> None"
@ -303,63 +333,9 @@ def generate_method_stub(
"""Generate a stub for a method inside a class.""" """Generate a stub for a method inside a class."""
is_cls = func.get("is_classmethod", False) is_cls = func.get("is_classmethod", False)
first_param = "cls" if is_cls else "self" first_param = "cls" if is_cls else "self"
params_str = _build_signature_params(
# Categorize params by kind, keeping positional ordering correct func["params"], leading_params=[first_param], skip_names={"cls", "self"}
positional_only_no_default: list[str] = []
positional_only_with_default: list[str] = []
positional_no_default: list[str] = []
positional_with_default: list[str] = []
keyword_params: list[str] = []
has_positional_only = False
for param in func["params"]:
if param["name"] in ("cls", "self"):
continue
formatted = format_param(param, force_type=True)
kind = param.get("kind", "POSITIONAL_OR_KEYWORD")
if kind == "POSITIONAL_ONLY":
has_positional_only = True
if param["default"] is not None:
positional_only_with_default.append(formatted)
else:
positional_only_no_default.append(formatted)
elif kind == "KEYWORD_ONLY":
keyword_params.append(formatted)
elif kind == "VAR_POSITIONAL":
type_ann = f": {param['type']}" if param["type"] else ": object"
positional_no_default.append(f"*{param['name']}{type_ann}")
elif kind == "VAR_KEYWORD":
type_ann = f": {param['type']}" if param["type"] else ": object"
keyword_params.append(f"**{param['name']}{type_ann}")
elif param["default"] is not None:
positional_with_default.append(formatted)
else:
positional_no_default.append(formatted)
all_params: list[str] = [first_param]
if has_positional_only and not (
positional_only_with_default and positional_no_default
):
# Only emit / when it won't cause "non-default follows default" errors
all_params.extend(positional_only_no_default)
all_params.extend(positional_only_with_default)
all_params.append("/")
else:
# Merge positional-only into regular params when / would be invalid
positional_no_default = positional_only_no_default + positional_no_default
positional_with_default = positional_only_with_default + positional_with_default
all_params.extend(positional_no_default)
all_params.extend(positional_with_default)
if keyword_params:
has_var_positional = any(
p.get("kind") == "VAR_POSITIONAL" for p in func["params"]
) )
has_named_keyword = any(p.get("kind") == "KEYWORD_ONLY" for p in func["params"])
if not has_var_positional and has_named_keyword:
all_params.append("*")
all_params.extend(keyword_params)
params_str = ", ".join(all_params)
ret = f" -> {func['return_type']}" if func["return_type"] else " -> None" ret = f" -> {func['return_type']}" if func["return_type"] else " -> None"
decorators = "" decorators = ""
if is_override: if is_override:
@ -583,6 +559,8 @@ def generate_types_stub(
abc_imports.append("Callable") abc_imports.append("Callable")
if "MutableSequence[" in all_type_strs: if "MutableSequence[" in all_type_strs:
abc_imports.append("MutableSequence") abc_imports.append("MutableSequence")
if re.search(r"(?<!\.)\bIterable\[", all_type_strs):
abc_imports.append("Iterable")
# Don't import Sequence directly — bpy.types.Sequence (video sequencer strip) # Don't import Sequence directly — bpy.types.Sequence (video sequencer strip)
# shadows it. Always use collections.abc.Sequence via the qualified import. # shadows it. Always use collections.abc.Sequence via the qualified import.
@ -602,6 +580,8 @@ def generate_types_stub(
imports.append("import collections") imports.append("import collections")
imports.append("import collections.abc") imports.append("import collections.abc")
imports.append("from collections.abc import Sequence as _Sequence") imports.append("from collections.abc import Sequence as _Sequence")
if "mathutils." in all_type_strs:
imports.append("import mathutils")
parts: list[str] = [] parts: list[str] = []
if doc: if doc:
@ -697,6 +677,105 @@ def strip_self_module_prefix(
return content return content
def _apply_generic_bases(module_data: ModuleData) -> None:
    """Add Generic[_T] bases when classes are used as parameterized generics.

    Mutates ``module_data`` in place: any struct whose name appears in a
    parameterized position (``Name[...]``) anywhere in the module's type
    strings gets ``Generic[_T]`` appended to its base, unless it already has
    a Generic base.
    """
    joined_types = " ".join(collect_all_type_strings(module_data))
    for struct in module_data.get("structs", []):
        name = struct["name"]
        # Fix: the original also tested `name in struct_names`, but that set
        # was built from the very list being iterated, so the check was
        # always true — dropped along with the now-unused set.
        if re.search(rf"\b{re.escape(name)}\[", joined_types):
            base = struct.get("base") or ""
            if "Generic[" not in base:
                struct["base"] = f"{base}, Generic[_T]" if base else "Generic[_T]"
def _prepare_module_imports(
    module_data: ModuleData,
    module_name: str,
    submodule_names: list[str],
) -> tuple[set[str], set[str], set[str]]:
    """Collect, filter, and classify imports for a generated module stub.

    Returns ``(usable_imports, clashing_imports, class_names)`` where
    clashing imports are those that would shadow a class defined in this
    module.
    """
    imports = collect_imports(module_data)
    # Re-export submodules; "X as X" marks an intentional re-export in stubs.
    imports.update(f"from . import {sub} as {sub}" for sub in submodule_names)
    # Drop an exact self-import of the module being generated (submodule
    # imports like bmesh.types for bmesh are kept).
    imports.discard(f"import {module_name}")
    class_names = {struct["name"] for struct in module_data.get("structs", [])}
    # NOTE(review): substring match, so "import Foo" also matches inside
    # "import FooBar" — presumably acceptable for the current name set;
    # confirm if class names ever prefix each other.
    clashing = {
        imp
        for imp in imports
        if any(f"import {name}" in imp for name in class_names)
    }
    return imports - clashing, clashing, class_names
def _append_module_members(
    parts: list[str],
    module_data: ModuleData,
    submodule_names: list[str],
) -> None:
    """Append variables, functions, and classes to a module stub body.

    Mutates *parts* in place. Variables whose names collide with submodule
    re-exports are skipped.
    """
    submodules = set(submodule_names)
    variables = module_data["variables"]
    if variables:
        parts.append("")
        parts.extend(
            generate_variable_stub(var)
            for var in variables
            if var["name"] not in submodules
        )
    for func in module_data["functions"]:
        parts.extend(("", generate_function_stub(func)))
    for struct in module_data.get("structs", []):
        parts.extend(("", generate_struct_stub(struct)))
def _qualify_shadowed_builtin_annotations(
result: str,
module_data: ModuleData,
) -> str:
"""Qualify builtin type names when module-level variables shadow them."""
builtins_used_as_types = {"object", "type", "int", "str", "float", "bool", "set"}
var_names = {v["name"] for v in module_data.get("variables", [])}
shadowed = var_names & builtins_used_as_types
if shadowed:
for name in shadowed:
result = re.sub(
rf"(?<=: ){name}\b",
f"builtins.{name}",
result,
)
if "import builtins" not in result:
result = "import builtins\n" + result
return result
def _qualify_clashing_type_names(
result: str,
clashing_imports: set[str],
class_names: set[str],
) -> str:
"""Qualify type references when imported names clash with local class names."""
for clash in clashing_imports:
match = re.search(r"from ([\w.]+) import (\w+)", clash)
if match:
full_module = match.group(1)
name = match.group(2)
if name in class_names:
result = re.sub(
rf"(?<!\w){name}\[",
f"{full_module}.{name}[",
result,
)
if f"import {full_module}" not in result:
result = f"import {full_module}\n" + result
return result
def generate_module_stub( def generate_module_stub(
module_data: ModuleData, module_data: ModuleData,
python_version: str = "3.11", python_version: str = "3.11",
@ -711,41 +790,17 @@ def generate_module_stub(
module_data["structs"], python_version, module_data["doc"] module_data["structs"], python_version, module_data["doc"]
) )
# Detect classes used as generics (e.g. BMElemSeq[BMVert]) and add # Detect classes used as generics and add Generic[_T] base when needed.
# Generic[_T] as base if they don't already have one. _apply_generic_bases(module_data)
# Must happen before collect_imports so Generic/TypeVar get imported.
all_types = collect_all_type_strings(module_data)
joined_types = " ".join(all_types)
struct_names = {s["name"] for s in module_data.get("structs", [])}
for struct in module_data.get("structs", []):
name = struct["name"]
if name in struct_names and re.search(rf"\b{re.escape(name)}\[", joined_types):
base = struct.get("base") or ""
if "Generic[" not in base:
struct["base"] = f"{base}, Generic[_T]" if base else "Generic[_T]"
parts: list[str] = [] parts: list[str] = []
# Module docstring
if module_data["doc"]: if module_data["doc"]:
parts.append(f'"""{module_data["doc"]}"""\n') parts.append(f'"""{module_data["doc"]}"""\n')
# Imports imports, clashing_imports, class_names = _prepare_module_imports(
imports = collect_imports(module_data) module_data, module_name, submodule_names or []
# Re-export submodules so type checkers can resolve e.g. bpy.utils. )
# Use "X as X" pattern to mark as intentional re-export in stubs.
for sub in submodule_names or []:
imports.add(f"from . import {sub} as {sub}")
# Remove exact self-imports (but keep submodule imports like bmesh.types for bmesh)
imports = {i for i in imports if i != f"import {module_name}"}
# Remove imports that clash with class names in this module
class_names = {s["name"] for s in module_data.get("structs", [])}
clashing_imports: set[str] = set()
for i in imports:
for name in class_names:
if f"import {name}" in i:
clashing_imports.add(i)
imports -= clashing_imports
if imports: if imports:
parts.append("") parts.append("")
for imp in sorted(imports): for imp in sorted(imports):
@ -759,57 +814,12 @@ def generate_module_stub(
parts.append('_T = TypeVar("_T")') parts.append('_T = TypeVar("_T")')
parts.append("") parts.append("")
# Variables (skip those that clash with submodule re-exports) _append_module_members(parts, module_data, submodule_names or [])
sub_names = set(submodule_names or [])
if module_data["variables"]:
parts.append("")
for var in module_data["variables"]:
if var["name"] not in sub_names:
parts.append(generate_variable_stub(var))
# Functions
for func in module_data["functions"]:
parts.append("")
parts.append(generate_function_stub(func))
# Classes
for struct in module_data.get("structs", []):
parts.append("")
parts.append(generate_struct_stub(struct))
result = "\n".join(parts) result = "\n".join(parts)
class_names = {s["name"] for s in module_data.get("structs", [])}
result = strip_self_module_prefix(result, module_name, class_names) result = strip_self_module_prefix(result, module_name, class_names)
result = _qualify_shadowed_builtin_annotations(result, module_data)
# If any variable/class name shadows a Python builtin used as a type result = _qualify_clashing_type_names(result, clashing_imports, class_names)
# annotation (e.g. bpy.ops has a variable named "object"), qualify it.
_BUILTINS_USED_AS_TYPES = {"object", "type", "int", "str", "float", "bool", "set"}
var_names = {v["name"] for v in module_data.get("variables", [])}
shadowed = var_names & _BUILTINS_USED_AS_TYPES
if shadowed:
for name in shadowed:
result = re.sub(
rf"(?<=: ){name}\b",
f"builtins.{name}",
result,
)
if "import builtins" not in result:
result = "import builtins\n" + result
# Qualify type names that clash with class names in the same module
for clash in clashing_imports:
match = re.search(r"from ([\w.]+) import (\w+)", clash)
if match:
full_module = match.group(1)
name = match.group(2)
if name in class_names:
result = re.sub(
rf"(?<!\w){name}\[",
f"{full_module}.{name}[",
result,
)
if f"import {full_module}" not in result:
result = f"import {full_module}\n" + result
return prune_unused_imports(result) return prune_unused_imports(result)

File diff suppressed because it is too large Load Diff