Add conformance test files for bpy.ops, bpy.types.Object, bpy.types.Context, bpy.msgbus, and bpy.data from official Blender documentation for both 5.0 and 5.1. Generate a ContextDict TypedDict from Context properties so that Context.copy() returns precisely typed values instead of dict[str, object], fixing type errors when unpacking into temp_override(). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1025 lines
35 KiB
Python
1025 lines
35 KiB
Python
"""Generate PEP 484-compliant .pyi stub files from introspection JSON."""
|
|
|
|
import json
|
|
import os
|
|
import re
|
|
import sys
|
|
from typing import TypedDict
|
|
|
|
import black
|
|
|
|
|
|
from introspect import (
|
|
UNQUALIFIED_TYPES,
|
|
FunctionData,
|
|
ModuleData,
|
|
ParamData,
|
|
PropertyData,
|
|
StructData,
|
|
VariableData,
|
|
)
|
|
|
|
# Map raw type names from introspection to proper Python type annotations.
# Keys are the literal strings emitted by introspection; values are the
# PEP 484 annotations written into the generated stubs (see map_type()).
TYPE_MAP: dict[str, str] = {
    "frozenset": "frozenset[str]",
}
|
|
|
|
|
|
class ParamOverrides(TypedDict, total=False):
    """Type overrides for one function or method, loaded from JSON.

    ``params`` maps a parameter name to its replacement annotation string;
    ``return_type`` replaces the return annotation. Both keys are optional
    (``total=False``).
    """

    params: dict[str, str]
    return_type: str
|
|
|
|
|
|
def map_type(type_str: str) -> str:
    """Map an introspected type string to a proper PEP 484 annotation."""
    # Unmapped types pass through unchanged.
    return TYPE_MAP.get(type_str, type_str)
|
|
|
|
|
|
def _qualify_type(type_str: str) -> str:
    """Qualify bare type names (e.g. Mesh -> bpy.types.Mesh) in a type string.

    This catches RNA type identifiers that bypass clean_type_str during
    introspection. Names that are already qualified (preceded by a dot) or
    embedded in a longer identifier are left untouched.
    """
    for bare, qualified in UNQUALIFIED_TYPES.items():
        # re.escape guards against regex metacharacters in the key; the
        # lookbehinds skip already-qualified (".Mesh") and mid-word matches.
        type_str = re.sub(
            rf"(?<!\.)(?<!\w)\b{re.escape(bare)}\b", qualified, type_str
        )
    return type_str
|
|
|
|
|
|
def qualify_module_types(module_data: ModuleData) -> None:
    """Qualify bare type names in all type strings of a module (in-place).

    RNA introspection returns bare identifiers like 'Mesh' which need to be
    qualified to 'bpy.types.Mesh' for modules outside bpy.types.
    """

    def fix_callable(entry: FunctionData) -> None:
        # Qualify every parameter annotation plus the return annotation.
        for arg in entry["params"]:
            if arg["type"]:
                arg["type"] = _qualify_type(arg["type"])
        if entry["return_type"]:
            entry["return_type"] = _qualify_type(entry["return_type"])

    for fn in module_data["functions"]:
        fix_callable(fn)
    for variable in module_data["variables"]:
        variable["type"] = _qualify_type(variable["type"])
    for cls in module_data.get("structs", []):
        for member in cls["properties"]:
            member["type"] = _qualify_type(member["type"])
        for meth in cls["methods"]:
            fix_callable(meth)
|
|
|
|
|
|
def collect_all_type_strings(module_data: ModuleData) -> list[str]:
    """Collect all type annotation strings from a module's data."""
    collected: list[str] = []

    def add_callable(entry: FunctionData) -> None:
        # Parameter annotations first, then the return annotation.
        collected.extend(arg["type"] for arg in entry["params"] if arg["type"])
        if entry["return_type"]:
            collected.append(entry["return_type"])

    for fn in module_data["functions"]:
        add_callable(fn)
    for variable in module_data["variables"]:
        collected.append(map_type(variable["type"]))
    for cls in module_data.get("structs", []):
        if cls.get("base"):
            collected.append(cls["base"])
        collected.extend(member["type"] for member in cls["properties"])
        for meth in cls["methods"]:
            add_callable(meth)
    return collected
|
|
|
|
|
|
# (token, import line) pairs: when the token appears as a standalone
# identifier anywhere in the collected type strings, the paired import line
# is added to the stub's header (see _collect_token_imports).
_TOKEN_IMPORTS: tuple[tuple[str, str], ...] = (
    ("Sequence", "from collections.abc import Sequence"),
    ("Iterable", "from collections.abc import Iterable"),
    ("Callable", "from collections.abc import Callable"),
    ("Iterator", "from collections.abc import Iterator"),
    ("Literal", "from typing import Literal"),
    ("Any", "from typing import Any"),
    ("Generator", "from collections.abc import Generator"),
    ("Mapping", "from collections.abc import Mapping"),
    ("Collection", "from collections.abc import Collection"),
    ("TypeAlias", "from typing import TypeAlias"),
    ("Generic", "from typing import Generic, TypeVar"),
)

# Modules that may be referenced by qualified type strings (e.g.
# "mathutils.Vector"); such a reference triggers "import <module>"
# (see _collect_module_reference_imports).
_KNOWN_TYPED_MODULES: tuple[str, ...] = (
    "bpy.types",
    "bpy.props",
    "bpy.app",
    "mathutils",
    "gpu.types",
    "imbuf.types",
    "idprop.types",
    "freestyle.types",
    "bmesh.types",
    "bmesh",
)

# (substring marker, import line) pairs for stdlib-qualified references not
# covered by _KNOWN_TYPED_MODULES (matched by plain substring containment).
_SPECIAL_IMPORT_MARKERS: tuple[tuple[str, str], ...] = (
    ("datetime", "import datetime"),
    ("types.ModuleType", "import types"),
    ("collections.OrderedDict", "import collections"),
    ("collections.abc.", "import collections.abc"),
)
|
|
|
|
|
|
def _has_token(joined_types: str, token: str) -> bool:
|
|
"""Return whether a type token appears as a standalone identifier."""
|
|
return bool(re.search(rf"\b{token}\b", joined_types))
|
|
|
|
|
|
def _collect_token_imports(joined_types: str) -> set[str]:
    """Collect import lines triggered by standalone type tokens."""
    needed = {
        import_line
        for token, import_line in _TOKEN_IMPORTS
        if _has_token(joined_types, token)
    }
    # "Self" is checked by plain containment, not as a word-boundary token.
    if "Self" in joined_types:
        needed.add("from typing import Self")
    return needed
|
|
|
|
|
|
def _collect_module_reference_imports(joined_types: str) -> set[str]:
    """Collect import lines required by module-qualified type references."""
    needed: set[str] = set()
    needed.update(
        import_line
        for marker, import_line in _SPECIAL_IMPORT_MARKERS
        if marker in joined_types
    )
    for mod in _KNOWN_TYPED_MODULES:
        # Either a dotted reference ("mathutils.Vector") or the module name
        # appearing as a standalone word triggers the import.
        dotted = mod + "." in joined_types
        if dotted or re.search(rf"\b{re.escape(mod)}\b", joined_types):
            needed.add(f"import {mod}")
    return needed
|
|
|
|
|
|
def _requires_builtins_import(module_data: ModuleData) -> bool:
    """Return whether builtins import is needed for builtin-name shadowing."""
    # Any struct property named like a builtin (e.g. "type") forces type
    # references to be spelled "builtins.X" in the stub.
    return any(
        {p["name"] for p in struct["properties"]} & BUILTIN_NAMES
        for struct in module_data.get("structs", [])
    )
|
|
|
|
|
|
def collect_imports(module_data: ModuleData) -> set[str]:
    """Collect all import statements needed for the stub file."""
    joined = " ".join(collect_all_type_strings(module_data))
    needed = _collect_token_imports(joined)
    needed |= _collect_module_reference_imports(joined)
    if _requires_builtins_import(module_data):
        needed.add("import builtins")
    return needed
|
|
|
|
|
|
def format_param(param: ParamData, force_type: bool = False) -> str:
    """Format a single parameter for a function signature."""
    annotation = param["type"] or ("object" if force_type else "")
    pieces = [param["name"]]
    if annotation:
        pieces.append(f": {annotation}")

    default = param["default"]
    if default is not None:
        # Literal[...] annotations can't accept a bool default; emit "...".
        if annotation and "Literal[" in annotation and default in ("False", "True"):
            default = "..."
        # PEP 8: spaces around "=" only when the parameter is annotated.
        pieces.append(f" = {default}" if annotation else f"={default}")

    return "".join(pieces)
|
|
|
|
|
|
def format_docstring(doc: str, indent: str = "    ") -> str:
    """Format a docstring with proper indentation."""
    if not doc:
        return ""
    # Escape backslashes and embedded triple quotes so the emitted docstring
    # stays valid Python source.
    escaped = doc.replace("\\", "\\\\").replace('"""', r"\"\"\"")
    pieces = escaped.split("\n")

    if len(pieces) == 1:
        only = pieces[0]
        # A trailing quote would merge with the closing triple quote; pad it.
        if only.endswith('"'):
            only += " "
        return f'{indent}"""{only}"""\n'

    out = [f'{indent}"""{pieces[0]}\n']
    for text in pieces[1:]:
        # Blank lines carry no indentation to avoid trailing whitespace.
        out.append(f"{indent}{text}\n" if text.strip() else "\n")
    out.append(f'{indent}"""\n')
    return "".join(out)
|
|
|
|
|
|
def _build_signature_params(
    params: list[ParamData],
    leading_params: list[str] | None = None,
    skip_names: set[str] | None = None,
) -> str:
    """Build a function signature parameter list string from ParamData entries.

    Args:
        params: Introspected parameter records to format.
        leading_params: Pre-formatted names emitted first (e.g. ["self"]).
        skip_names: Parameter names to drop entirely (e.g. {"self", "cls"}).

    Returns:
        The comma-joined parameter list, including "/" and "*" separators
        only where they can be emitted without producing invalid syntax.
    """
    filtered_params = [
        param for param in params if param["name"] not in (skip_names or set())
    ]

    # Buckets preserve Python's required ordering: positional-only first,
    # then positional-or-keyword (non-default before default), then
    # keyword-only parameters.
    positional_only_no_default: list[str] = []
    positional_only_with_default: list[str] = []
    positional_no_default: list[str] = []
    positional_with_default: list[str] = []
    keyword_params: list[str] = []
    has_positional_only = False

    for param in filtered_params:
        formatted = format_param(param, force_type=True)
        kind = param.get("kind", "POSITIONAL_OR_KEYWORD")
        if kind == "POSITIONAL_ONLY":
            has_positional_only = True
            if param["default"] is not None:
                positional_only_with_default.append(formatted)
            else:
                positional_only_no_default.append(formatted)
        elif kind == "KEYWORD_ONLY":
            keyword_params.append(formatted)
        elif kind == "VAR_POSITIONAL":
            # *args itself acts as the keyword-only separator later on.
            type_ann = f": {param['type']}" if param["type"] else ": object"
            positional_no_default.append(f"*{param['name']}{type_ann}")
        elif kind == "VAR_KEYWORD":
            type_ann = f": {param['type']}" if param["type"] else ": object"
            keyword_params.append(f"**{param['name']}{type_ann}")
        elif param["default"] is not None:
            positional_with_default.append(formatted)
        else:
            positional_no_default.append(formatted)

    all_params: list[str] = list(leading_params or [])
    if has_positional_only and not (
        positional_only_with_default and positional_no_default
    ):
        # Only emit / when it won't cause "non-default follows default" errors
        all_params.extend(positional_only_no_default)
        all_params.extend(positional_only_with_default)
        all_params.append("/")
    else:
        # Merge positional-only into regular params when / would be invalid
        positional_no_default = positional_only_no_default + positional_no_default
        positional_with_default = positional_only_with_default + positional_with_default

    # Non-default positional params must come before default ones
    all_params.extend(positional_no_default)
    all_params.extend(positional_with_default)

    if keyword_params:
        # Insert * separator if there are keyword-only args and no VAR_POSITIONAL.
        # Don't insert * if the only keyword params are **kwargs (already captures all).
        has_var_positional = any(
            p.get("kind") == "VAR_POSITIONAL" for p in filtered_params
        )
        has_named_keyword = any(
            p.get("kind") == "KEYWORD_ONLY" for p in filtered_params
        )
        if not has_var_positional and has_named_keyword:
            all_params.append("*")
        all_params.extend(keyword_params)

    return ", ".join(all_params)
|
|
|
|
|
|
def generate_function_stub(func: FunctionData) -> str:
    """Generate a stub for a single function."""
    signature = _build_signature_params(func["params"])
    # Unannotated returns become explicit "-> None".
    return_ann = func["return_type"] or "None"
    header = f"def {func['name']}({signature}) -> {return_ann}:"

    if func["doc"]:
        return header + "\n" + format_docstring(func["doc"])
    return header + " ...\n"
|
|
|
|
|
|
def generate_variable_stub(var: VariableData) -> str:
    """Generate a stub for a module-level variable."""
    annotation = map_type(var["type"])
    if annotation == "TypeAlias":
        # Type aliases keep their value so the alias target survives.
        return f"{var['name']}: TypeAlias = {var['value']}\n"
    return f"{var['name']}: {annotation}\n"
|
|
|
|
|
|
def generate_method_stub(
    func: FunctionData,
    indent: str = "    ",
    is_override: bool = False,
) -> str:
    """Generate a stub for a method inside a class."""
    as_classmethod = func.get("is_classmethod", False)
    receiver = "cls" if as_classmethod else "self"
    signature = _build_signature_params(
        func["params"], leading_params=[receiver], skip_names={"cls", "self"}
    )
    return_ann = func["return_type"] or "None"

    decorator_lines: list[str] = []
    if is_override:
        decorator_lines.append(f"{indent}@override\n")
    if as_classmethod:
        decorator_lines.append(f"{indent}@classmethod\n")
    decorators = "".join(decorator_lines)

    header = f"{decorators}{indent}def {func['name']}({signature}) -> {return_ann}:"
    if func["doc"]:
        return header + "\n" + format_docstring(func["doc"], indent + "    ")
    return header + " ...\n"
|
|
|
|
|
|
def generate_property_stub(
    prop: PropertyData,
    indent: str = "    ",
    property_decorator: str = "@property",
) -> str:
    """Generate a stub for a class property.

    Read-only properties are emitted as getter-only ``@property`` methods.
    Writable properties whose type is a union get an explicit getter/setter
    pair: the getter returns the first union member, the setter accepts the
    whole union (e.g. ``mathutils.Vector | Sequence[float]``). Everything
    else becomes a plain annotated attribute.
    """
    name = prop["name"]
    description = _escape_property_description(prop["description"])

    def getter(return_type: str) -> str:
        # Decorated getter whose body is the docstring, or "..." when the
        # property has no description.
        out = f"{indent}{property_decorator}\n"
        out += f"{indent}def {name}(self) -> {return_type}:\n"
        if description:
            out += f'{indent}    """{description}"""\n'
        else:
            out += f"{indent}    ...\n"
        return out

    if prop["is_readonly"]:
        return getter(prop["type"])

    # Writable properties with different getter/setter types need @property
    # (getter returns the first, canonical type; setter accepts the union).
    if " | " in prop["type"]:
        getter_type = prop["type"].split(" | ")[0].strip()
        result = getter(getter_type)
        result += f"{indent}@{name}.setter\n"
        result += f"{indent}def {name}(self, value: {prop['type']}) -> None: ...\n"
        return result

    # Plain writable attribute with an optional attribute docstring.
    result = f"{indent}{name}: {prop['type']}\n"
    if description:
        result += f'{indent}"""{description}"""\n'
    return result


def _escape_property_description(description: str) -> str:
    """Escape a property description for embedding in a stub docstring.

    Backslashes and triple quotes are escaped, and a trailing quote is
    padded with a space so it cannot merge with the closing triple quote.
    Returns "" for an empty description.
    """
    if not description:
        return ""
    escaped = description.replace("\\", "\\\\").replace('"""', r"\"\"\"")
    if escaped.endswith('"'):
        escaped += " "
    return escaped
|
|
|
|
|
|
# Builtin type names that RNA properties may shadow (e.g. a property named
# "type"). When shadowed, type references inside the same class are
# rewritten to "builtins.X" (see fixup_shadowed_builtins).
BUILTIN_NAMES = {
    "int",
    "float",
    "bool",
    "str",
    "list",
    "dict",
    "set",
    "tuple",
    "type",
    "object",
}
|
|
|
|
|
|
def fixup_shadowed_builtins(
    properties: list[PropertyData],
) -> list[PropertyData]:
    """If a property name shadows a builtin, qualify type references with builtins.

    E.g. when a struct declares a property literally named ``int``, another
    property annotated ``int`` (or ``list[int]``) would resolve to that
    property instead of the builtin; rewriting to ``builtins.int`` avoids it.
    Returns the input list unchanged when nothing is shadowed.
    """
    shadowed = {p["name"] for p in properties} & BUILTIN_NAMES
    if not shadowed:
        return properties

    fixed: list[PropertyData] = []
    for prop in properties:
        new_type = prop["type"]
        for name in shadowed:
            # Replace bare builtin type references with builtins.X
            # e.g. "int" -> "builtins.int", "list[int]" -> "list[builtins.int]"
            new_type = re.sub(rf"\b{name}\b", f"builtins.{name}", new_type)
        # Copy the whole record rather than hand-picking a fixed key set, so
        # any additional PropertyData fields survive the rewrite.
        fixed.append({**prop, "type": new_type})
    return fixed
|
|
|
|
|
|
def generate_struct_stub(
    struct: StructData,
    inherited_methods: set[str] | None = None,
) -> str:
    """Generate a stub for a single RNA struct (class)."""
    inherited = inherited_methods or set()
    header = (
        f"class {struct['name']}({struct['base']}):"
        if struct["base"]
        else f"class {struct['name']}:"
    )

    pieces: list[str] = [header]
    emitted_member = False
    if struct["doc"]:
        pieces.append(format_docstring(struct["doc"]))
        emitted_member = True

    # Properties that collide with a method name are dropped (the method wins).
    method_names = {m["name"] for m in struct["methods"]}
    properties = fixup_shadowed_builtins(struct["properties"])

    # A property literally named "property" shadows the builtin decorator,
    # so readonly getters must spell it out as @builtins.property.
    decorator = (
        "@builtins.property"
        if "property" in {p["name"] for p in properties}
        else "@property"
    )

    for prop in properties:
        if prop["name"] in method_names:
            continue
        pieces.append(generate_property_stub(prop, property_decorator=decorator))
        emitted_member = True

    for method in struct["methods"]:
        pieces.append(
            generate_method_stub(method, is_override=method["name"] in inherited)
        )
        emitted_member = True

    if not emitted_member:
        pieces.append("    ...\n")

    return "\n".join(pieces)
|
|
|
|
|
|
def topological_sort_structs(structs: list[StructData]) -> list[StructData]:
    """Sort structs so that base classes come before subclasses."""
    by_name: dict[str, StructData] = {entry["name"]: entry for entry in structs}
    seen: set[str] = set()
    ordered: list[StructData] = []

    def emit(name: str) -> None:
        # Depth-first emit; marking before recursing makes base cycles and
        # repeat visits terminate immediately.
        if name in seen:
            return
        seen.add(name)
        entry = by_name.get(name)
        if entry is None:
            return
        base = entry["base"]
        if base and base in by_name:
            emit(base)
        ordered.append(entry)

    for entry in structs:
        emit(entry["name"])

    return ordered
|
|
|
|
|
|
class _InheritedInfo:
    """Names a struct inherits from its base-class chain."""

    # Method names declared anywhere above this struct.
    methods: set[str]
    # Readonly property names declared anywhere above this struct.
    readonly_props: set[str]

    def __init__(self, methods: set[str], readonly_props: set[str]):
        self.methods = methods
        self.readonly_props = readonly_props
|
|
def collect_inherited_info(
    structs: list[StructData],
) -> dict[str, _InheritedInfo]:
    """Build a map of struct name -> inherited method and readonly property names.

    Walks each struct's base chain, accumulating the method names and
    readonly property names declared anywhere above it. Results are memoized
    per struct; a malformed base cycle (A -> B -> A) is treated as inheriting
    nothing instead of recursing forever.
    """
    by_name: dict[str, StructData] = {s["name"]: s for s in structs}
    cache: dict[str, _InheritedInfo] = {}
    in_progress: set[str] = set()

    def get_inherited(name: str) -> _InheritedInfo:
        if name in cache:
            return cache[name]
        struct = by_name.get(name)
        if not struct or not struct["base"] or name in in_progress:
            # No base, unknown struct, or a base cycle: nothing inherited.
            # (Without the in_progress guard a cyclic base chain would
            # recurse without bound, since the cache is only filled after
            # the recursive call returns.)
            cache[name] = _InheritedInfo(set(), set())
            return cache[name]
        in_progress.add(name)
        try:
            base = struct["base"]
            if base in by_name:
                base_methods = {m["name"] for m in by_name[base]["methods"]}
                base_ro_props = {
                    p["name"] for p in by_name[base]["properties"] if p["is_readonly"]
                }
            else:
                # Base declared outside this struct list contributes nothing.
                base_methods = set()
                base_ro_props = set()
            parent = get_inherited(base)
            cache[name] = _InheritedInfo(
                base_methods | parent.methods,
                base_ro_props | parent.readonly_props,
            )
        finally:
            in_progress.discard(name)
        return cache[name]

    for struct in structs:
        get_inherited(struct["name"])

    return cache
|
|
|
|
|
|
def _generate_context_dict(context_struct: StructData) -> str:
|
|
"""Generate a ContextDict TypedDict from Context properties.
|
|
|
|
This TypedDict mirrors all Context properties so that Context.copy()
|
|
can return a precisely typed dict instead of dict[str, object].
|
|
"""
|
|
lines: list[str] = ["class ContextDict(TypedDict):"]
|
|
lines.append(' """Dictionary returned by Context.copy() with all context members."""')
|
|
method_names = {m["name"] for m in context_struct["methods"]}
|
|
for prop in context_struct["properties"]:
|
|
if prop["name"] in method_names:
|
|
continue
|
|
lines.append(f" {prop['name']}: {prop['type']}")
|
|
lines.append("")
|
|
return "\n".join(lines)
|
|
|
|
|
|
def _patch_context_copy_return_type(context_struct: StructData) -> None:
|
|
"""Change Context.copy() return type from dict[str, object] to ContextDict."""
|
|
for method in context_struct["methods"]:
|
|
if method["name"] == "copy":
|
|
method["return_type"] = "ContextDict"
|
|
break
|
|
|
|
|
|
def generate_types_stub(
    structs: list[StructData], python_version: str = "3.11", doc: str = ""
) -> str:
    """Generate the complete bpy/types.pyi content.

    Args:
        structs: Introspected RNA struct records for bpy.types.
        python_version: Target version; >= 3.12 uses typing.override,
            otherwise typing_extensions.override.
        doc: Optional module docstring for the generated stub.

    Returns:
        The full stub text (black formatting happens later in write_stubs).
    """
    # Collect all type strings to detect needed imports
    all_type_strs_parts: list[str] = []
    for s in structs:
        for p in s["properties"]:
            all_type_strs_parts.append(p["type"])
        for m in s["methods"]:
            for param in m["params"]:
                if param["type"]:
                    all_type_strs_parts.append(param["type"])
            if m["return_type"]:
                all_type_strs_parts.append(m["return_type"])
    all_type_strs = " ".join(all_type_strs_parts)

    # Generic/TypedDict/TypeVar are unconditionally needed: the module always
    # declares _T and the ContextDict TypedDict below.
    typing_imports = ["Generic", "TypedDict", "TypeVar"]
    if "Literal[" in all_type_strs:
        typing_imports.append("Literal")
    if re.search(r"\bSelf\b", all_type_strs):
        typing_imports.append("Self")

    abc_imports = ["Iterator"]
    if "Callable" in all_type_strs:
        abc_imports.append("Callable")
    if "MutableSequence[" in all_type_strs:
        abc_imports.append("MutableSequence")
    if re.search(r"(?<!\.)\bIterable\[", all_type_strs):
        abc_imports.append("Iterable")
    # Don't import Sequence directly — bpy.types.Sequence (video sequencer strip)
    # shadows it. Always use collections.abc.Sequence via the qualified import.

    imports: list[str] = [
        f"from collections.abc import {', '.join(abc_imports)}",
        f"from typing import {', '.join(typing_imports)}",
        "import builtins",
        (
            # typing.override only exists from Python 3.12 onward.
            "from typing import override"
            if tuple(int(x) for x in python_version.split(".")) >= (3, 12)
            else "from typing_extensions import override"
        ),
    ]
    if "Sequence[" in all_type_strs:
        imports.append("import collections.abc")
    if "mathutils." in all_type_strs:
        imports.append("import mathutils")

    parts: list[str] = []
    if doc:
        parts.append(f'"""{doc}"""\n')
    parts.extend(
        [
            "\n".join(sorted(imports)),
            "",
            '_T = TypeVar("_T")',
        ]
    )

    sorted_structs = topological_sort_structs(structs)
    inherited_map = collect_inherited_info(sorted_structs)

    # Patch Context.copy() return type and generate ContextDict TypedDict
    context_struct = next((s for s in sorted_structs if s["name"] == "Context"), None)
    if context_struct:
        _patch_context_copy_return_type(context_struct)

    for struct in sorted_structs:
        info = inherited_map.get(struct["name"], _InheritedInfo(set(), set()))
        # Force writable properties to readonly when they override a parent's
        # readonly @property (avoids reportIncompatibleMethodOverride)
        for prop in struct["properties"]:
            if not prop["is_readonly"] and prop["name"] in info.readonly_props:
                prop["is_readonly"] = True
        # Emit ContextDict TypedDict right before the Context class
        if struct["name"] == "Context":
            parts.append("")
            parts.append(_generate_context_dict(struct))
            parts.append("")
        parts.append(generate_struct_stub(struct, info.methods))

    result = "\n".join(parts)
    class_names = {s["name"] for s in structs}
    result = strip_self_module_prefix(result, "bpy.types", class_names)
    # Qualify all bare Sequence[ to collections.abc.Sequence[ to avoid
    # shadowing by bpy.types.Sequence (the video sequencer strip type).
    result = re.sub(r"(?<!\.)(?<!\w)\bSequence\[", "collections.abc.Sequence[", result)
    # Qualify bare "object" in type annotations to avoid shadowing by
    # properties named "object" (e.g. Context.object: Object).
    result = re.sub(r"(?<=: )object\b", "builtins.object", result)
    result = re.sub(r"(?<=\| )object\b", "builtins.object", result)
    result = re.sub(r"(?<=\[)object\b", "builtins.object", result)
    result = re.sub(r"(?<=-> )object\b", "builtins.object", result)
    return prune_unused_imports(result)
|
|
|
|
|
|
def prune_unused_imports(content: str) -> str:
    """Remove import lines where the imported name is not used in the body."""
    lines = content.split("\n")

    def body_without(idx: int) -> str:
        # The file text minus the import line under consideration, so a
        # reference inside the import itself doesn't count as usage.
        return "\n".join(text for j, text in enumerate(lines) if j != idx)

    drop: set[int] = set()
    for idx, raw in enumerate(lines):
        stripped = raw.strip()
        # Relative re-exports (from . import X) are intentional; keep them.
        if stripped.startswith("from . import"):
            continue
        if not re.match(r"^(import |from .+ import )", stripped):
            continue

        from_match = re.match(r"from .+ import (\w+)", stripped)
        if from_match:
            # The name counts as used when it appears in an annotation-like
            # position: ": Name", "-> Name", "[Name", "| Name", "(Name",
            # "@Name", or " Name".
            name = from_match.group(1)
            if not re.search(rf"[:>\[|( @]{name}\b", body_without(idx)):
                drop.add(idx)
            continue

        import_match = re.match(r"import ([\w.]+)", stripped)
        if import_match and import_match.group(1) + "." not in body_without(idx):
            drop.add(idx)

    return "\n".join(text for idx, text in enumerate(lines) if idx not in drop)
|
|
|
|
|
|
def strip_self_module_prefix(
    content: str, module_name: str, class_names: set[str]
) -> str:
    """Remove self-referencing module prefixes only for types defined in this module."""
    # Full prefix: bpy_extras.anim_utils.BakeOptions -> BakeOptions
    for local_cls in class_names:
        content = content.replace(f"{module_name}.{local_cls}", local_cls)

    # Short prefix for relative references: anim_utils.BakeOptions ->
    # BakeOptions (when inside bpy_extras.anim_utils). This also covers
    # classes that weren't introspected but are referenced by annotations.
    # The lookbehinds avoid stripping "types." out of fully-qualified names
    # like "bpy.types.Mesh".
    tail = module_name.rsplit(".", 1)[-1]
    if tail != module_name:
        content = re.sub(
            rf"(?<!\w\.)(?<!\w){re.escape(tail)}\.(\w+)", r"\1", content
        )

    return content
|
|
|
|
|
|
def _apply_generic_bases(module_data: ModuleData) -> None:
    """Add Generic[_T] bases when classes are used as parameterized generics."""
    joined_types = " ".join(collect_all_type_strings(module_data))
    structs = module_data.get("structs", [])
    struct_names = {s["name"] for s in structs}
    for struct in structs:
        cls_name = struct["name"]
        # Only classes referenced as "Name[...]" somewhere need the base.
        if cls_name not in struct_names:
            continue
        if not re.search(rf"\b{re.escape(cls_name)}\[", joined_types):
            continue
        current_base = struct.get("base") or ""
        if "Generic[" not in current_base:
            struct["base"] = (
                f"{current_base}, Generic[_T]" if current_base else "Generic[_T]"
            )
|
|
|
|
|
|
def _prepare_module_imports(
    module_data: ModuleData,
    module_name: str,
    submodule_names: list[str],
) -> tuple[set[str], set[str], set[str]]:
    """Collect, filter, and classify imports for a generated module stub."""
    gathered = collect_imports(module_data)
    gathered.update(f"from . import {sub} as {sub}" for sub in submodule_names)
    # A module must not import itself.
    gathered.discard(f"import {module_name}")

    class_names = {s["name"] for s in module_data.get("structs", [])}
    # Imports that would shadow (or be shadowed by) a local class name are
    # split out so the caller can qualify references instead.
    clashing = {
        imp for imp in gathered if any(f"import {cls}" in imp for cls in class_names)
    }
    return gathered - clashing, clashing, class_names
|
|
|
|
|
|
def _append_module_members(
    parts: list[str],
    module_data: ModuleData,
    submodule_names: list[str],
) -> None:
    """Append variables, functions, and classes to a module stub body."""
    submodules = set(submodule_names)

    if module_data["variables"]:
        parts.append("")
        # Submodule names also show up as variables in introspection data;
        # the "from . import sub as sub" re-export already covers them.
        parts.extend(
            generate_variable_stub(var)
            for var in module_data["variables"]
            if var["name"] not in submodules
        )

    for func in module_data["functions"]:
        parts.append("")
        parts.append(generate_function_stub(func))

    for struct in module_data.get("structs", []):
        parts.append("")
        parts.append(generate_struct_stub(struct))
|
|
|
|
|
|
def _qualify_shadowed_builtin_annotations(
|
|
result: str,
|
|
module_data: ModuleData,
|
|
) -> str:
|
|
"""Qualify builtin type names when module-level variables shadow them."""
|
|
builtins_used_as_types = {"object", "type", "int", "str", "float", "bool", "set"}
|
|
var_names = {v["name"] for v in module_data.get("variables", [])}
|
|
shadowed = var_names & builtins_used_as_types
|
|
if shadowed:
|
|
for name in shadowed:
|
|
result = re.sub(
|
|
rf"(?<=: ){name}\b",
|
|
f"builtins.{name}",
|
|
result,
|
|
)
|
|
if "import builtins" not in result:
|
|
result = "import builtins\n" + result
|
|
return result
|
|
|
|
|
|
def _qualify_clashing_type_names(
|
|
result: str,
|
|
clashing_imports: set[str],
|
|
class_names: set[str],
|
|
) -> str:
|
|
"""Qualify type references when imported names clash with local class names."""
|
|
for clash in clashing_imports:
|
|
match = re.search(r"from ([\w.]+) import (\w+)", clash)
|
|
if match:
|
|
full_module = match.group(1)
|
|
name = match.group(2)
|
|
if name in class_names:
|
|
result = re.sub(
|
|
rf"(?<!\w){name}\[",
|
|
f"{full_module}.{name}[",
|
|
result,
|
|
)
|
|
if f"import {full_module}" not in result:
|
|
result = f"import {full_module}\n" + result
|
|
return result
|
|
|
|
|
|
def generate_module_stub(
    module_data: ModuleData,
    python_version: str = "3.11",
    submodule_names: list[str] | None = None,
) -> str:
    """Generate the complete .pyi content for a module."""
    module_name = module_data["module"]
    submodules = submodule_names or []

    # bpy.types uses the specialized types generator
    if module_name == "bpy.types":
        return generate_types_stub(
            module_data["structs"], python_version, module_data["doc"]
        )

    # Detect classes used as generics and add Generic[_T] base when needed.
    _apply_generic_bases(module_data)

    parts: list[str] = []
    if module_data["doc"]:
        parts.append(f'"""{module_data["doc"]}"""\n')

    imports, clashing_imports, class_names = _prepare_module_imports(
        module_data, module_name, submodules
    )
    if imports:
        parts.append("")
        parts.extend(sorted(imports))
        parts.append("")

    # Declare _T when any struct gained a Generic[_T] base above.
    needs_typevar = any(
        "Generic[_T]" in (s.get("base") or "") for s in module_data.get("structs", [])
    )
    if needs_typevar:
        parts.append('_T = TypeVar("_T")')
        parts.append("")

    _append_module_members(parts, module_data, submodules)

    result = "\n".join(parts)
    result = strip_self_module_prefix(result, module_name, class_names)
    result = _qualify_shadowed_builtin_annotations(result, module_data)
    result = _qualify_clashing_type_names(result, clashing_imports, class_names)
    return prune_unused_imports(result)
|
|
|
|
|
|
def load_overrides(overrides_dir: str, module_name: str) -> dict[str, ParamOverrides]:
    """Load type overrides for a module from a versioned overrides directory."""
    override_path = os.path.join(overrides_dir, f"{module_name}.json")
    # Missing file simply means this module has no overrides.
    if not os.path.exists(override_path):
        return {}
    with open(override_path) as f:
        data: dict[str, ParamOverrides] = json.load(f)
    return data
|
|
|
|
|
|
def _apply_func_overrides(func: FunctionData, func_overrides: ParamOverrides) -> None:
|
|
"""Apply overrides to a single function/method."""
|
|
param_overrides = func_overrides.get("params", {})
|
|
for param in func["params"]:
|
|
if param["name"] in param_overrides:
|
|
param["type"] = param_overrides[param["name"]]
|
|
if "return_type" in func_overrides:
|
|
func["return_type"] = func_overrides["return_type"]
|
|
|
|
|
|
def apply_overrides(
    module_data: ModuleData, overrides: dict[str, ParamOverrides]
) -> ModuleData:
    """Apply type overrides to introspected module data.

    Keys can be function names (e.g. ``"my_func"``) for module-level functions,
    or ``"ClassName.method_name"`` for struct methods.
    """
    for func in module_data["functions"]:
        entry = overrides.get(func["name"])
        if entry:
            _apply_func_overrides(func, entry)

    for struct in module_data.get("structs", []):
        for method in struct["methods"]:
            entry = overrides.get(f"{struct['name']}.{method['name']}")
            if entry:
                _apply_func_overrides(method, entry)

    return module_data
|
|
|
|
|
|
def write_stubs(
    modules_data: list[ModuleData],
    output_dir: str,
    overrides_dir: str | None = None,
    python_version: str = "3.11",
) -> list[str]:
    """Write .pyi stub files from introspection data.

    Args:
        modules_data: One ModuleData record per introspected module.
        output_dir: Root directory of the generated stub tree.
        overrides_dir: Optional directory with per-module JSON type overrides.
        python_version: Target version forwarded to the stub generators.

    Returns the list of top-level package directory names created.
    """
    os.makedirs(output_dir, exist_ok=True)

    # Determine which modules are packages (have submodules)
    all_module_names = {m["module"] for m in modules_data}
    package_modules: set[str] = set()
    for name in all_module_names:
        parts = name.split(".")
        for i in range(1, len(parts)):
            package_modules.add(".".join(parts[:i]))

    top_level_packages: set[str] = set()

    for module_data in modules_data:
        module_name = module_data["module"]

        # Apply type overrides if available
        if overrides_dir:
            overrides = load_overrides(overrides_dir, module_name)
            if overrides:
                module_data = apply_overrides(module_data, overrides)

        # Qualify bare RNA type names for modules outside bpy.types
        if module_name != "bpy.types":
            qualify_module_types(module_data)

        parts = module_name.split(".")
        top_level_packages.add(parts[0])

        # Create package directories and __init__.pyi files
        current_dir = output_dir
        for part in parts[:-1]:
            current_dir = os.path.join(current_dir, part)
            os.makedirs(current_dir, exist_ok=True)
            init_pyi = os.path.join(current_dir, "__init__.pyi")
            if not os.path.exists(init_pyi):
                # Touch an empty marker so intermediate packages resolve;
                # it may be overwritten later when the module itself is
                # generated.
                with open(init_pyi, "w") as f:
                    pass

        # Always write as directory/__init__.pyi for consistent packaging
        pkg_dir = os.path.join(current_dir, parts[-1])
        os.makedirs(pkg_dir, exist_ok=True)
        stub_path = os.path.join(pkg_dir, "__init__.pyi")

        # Collect direct child submodule names for re-export in __init__.pyi
        child_submodules: list[str] = []
        if module_name in package_modules:
            prefix = module_name + "."
            child_submodules = sorted(
                {
                    n[len(prefix) :].split(".")[0]
                    for n in all_module_names
                    if n.startswith(prefix)
                }
            )

        content = generate_module_stub(module_data, python_version, child_submodules)
        try:
            content = black.format_str(content, mode=black.Mode(is_pyi=True))
        except Exception as e:
            # Formatting is best-effort: report and write the raw stub anyway.
            print(f" ERROR formatting {module_name}: {e}", file=sys.stderr)
        with open(stub_path, "w") as f:
            f.write(content)

        print(f" {stub_path}")

    return sorted(top_level_packages)
|
|
|
|
|
|
def main() -> None:
    """CLI entry point: read introspection JSON and write stub packages."""
    import argparse

    cli = argparse.ArgumentParser(
        description="Generate .pyi stubs from introspection JSON"
    )
    cli.add_argument("input", help="Path to introspection JSON file")
    cli.add_argument(
        "--output-dir", default="blender-stubs", help="Output directory for stubs"
    )
    args = cli.parse_args()

    with open(args.input) as src:
        modules_data: list[ModuleData] = json.load(src)

    print(f"Generating stubs in {args.output_dir}/")
    write_stubs(modules_data, args.output_dir)
    print("Done.")
|
|
|
|
|
|
if __name__ == "__main__":
    # Allow direct execution: python <this file> <introspection.json>
    main()
|