# NOTE: changelog pasted from the last commit touching this generator; kept for reference.
# - Widen bare mathutils type params (Vector, Euler, etc.) to also accept
#   Sequence[float], matching Blender's mathutils_array_parse C behavior
# - Fix getset_descriptor readonly detection by probing __set__ on the
#   descriptor instead of checking fset (which doesn't exist on C descriptors)
# - Accept int | slice keys in __getitem__/__setitem__/__delitem__
# - Accept Sequence[element_type] values in __setitem__ for slice assignment
# - Add mathutils overrides for Matrix.Translation and Matrix.Scale
# - Extend apply_overrides to support ClassName.method_name keys
# - Add conformance test files from Blender docs examples
# - Disable reportUnusedExpression in conformance checks
# Remaining known conformance issues:
# - draw_handler_add missing from SpaceView3D
# - Vector not nominally Sequence[float] (buffer protocol, swizzle setters)
# Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
"""Generate PEP 484-compliant .pyi stub files from introspection JSON."""
|
|
|
|
import json
|
|
import os
|
|
import re
|
|
import sys
|
|
from typing import TypedDict
|
|
|
|
import black
|
|
|
|
|
|
from introspect import (
|
|
UNQUALIFIED_TYPES,
|
|
FunctionData,
|
|
ModuleData,
|
|
ParamData,
|
|
PropertyData,
|
|
StructData,
|
|
VariableData,
|
|
)
|
|
|
|
# Map raw type names from introspection to proper Python type annotations.
# Keys are the literal type strings produced by the introspection pass;
# values are the PEP 484 spellings that map_type substitutes.
TYPE_MAP: dict[str, str] = {
    # Bare "frozenset" is always widened to frozenset[str] in the stubs.
    "frozenset": "frozenset[str]",
}
|
|
|
|
|
|
class ParamOverrides(TypedDict, total=False):
    """Schema of one override entry loaded from an overrides JSON file.

    Both keys are optional (``total=False``); an empty entry is a no-op.
    """

    # Maps parameter name -> replacement type annotation string.
    params: dict[str, str]
    # Replacement return annotation string.
    return_type: str
|
|
|
|
|
|
def map_type(type_str: str) -> str:
    """Map an introspected type string to a proper PEP 484 annotation."""
    # Unmapped names pass through unchanged.
    return TYPE_MAP.get(type_str, type_str)
|
|
|
|
|
|
def _qualify_type(type_str: str) -> str:
    """Qualify bare type names (e.g. Mesh -> bpy.types.Mesh) in a type string.

    This catches RNA type identifiers that bypass clean_type_str during introspection.
    """
    result = type_str
    for name, dotted in UNQUALIFIED_TYPES.items():
        # The lookbehinds keep already-qualified references (``x.Mesh``) and
        # longer identifiers (``MyMesh``) untouched.
        result = re.sub(rf"(?<!\.)(?<!\w)\b{name}\b", dotted, result)
    return result
|
|
|
|
|
|
def qualify_module_types(module_data: ModuleData) -> None:
    """Qualify bare type names in all type strings of a module (in-place).

    RNA introspection returns bare identifiers like 'Mesh' which need to be
    qualified to 'bpy.types.Mesh' for modules outside bpy.types.
    """

    def _fix_callable(func: FunctionData) -> None:
        # Shared between module-level functions and struct methods.
        for param in func["params"]:
            if param["type"]:
                param["type"] = _qualify_type(param["type"])
        if func["return_type"]:
            func["return_type"] = _qualify_type(func["return_type"])

    for func in module_data["functions"]:
        _fix_callable(func)
    for var in module_data["variables"]:
        var["type"] = _qualify_type(var["type"])
    for struct in module_data.get("structs", []):
        for prop in struct["properties"]:
            prop["type"] = _qualify_type(prop["type"])
        for method in struct["methods"]:
            _fix_callable(method)
|
|
|
|
|
|
def collect_all_type_strings(module_data: ModuleData) -> list[str]:
    """Collect all type annotation strings from a module's data."""
    collected: list[str] = []

    def _add_callable(func: FunctionData) -> None:
        # Parameter annotations first, then the return annotation.
        collected.extend(p["type"] for p in func["params"] if p["type"])
        if func["return_type"]:
            collected.append(func["return_type"])

    for func in module_data["functions"]:
        _add_callable(func)
    # Variable types go through map_type so the mapped spellings are detected.
    collected.extend(map_type(v["type"]) for v in module_data["variables"])
    for struct in module_data.get("structs", []):
        collected.extend(p["type"] for p in struct["properties"])
        for method in struct["methods"]:
            _add_callable(method)
    return collected
|
|
|
|
|
|
def collect_imports(module_data: ModuleData) -> set[str]:
    """Collect all import statements needed for the stub file."""
    joined = " ".join(collect_all_type_strings(module_data))
    imports: set[str] = set()

    def _has_type(name: str) -> bool:
        return bool(re.search(rf"\b{name}\b", joined))

    # Names that, when referenced anywhere, need "from <origin> import <name>".
    _FROM_IMPORTS = {
        "Sequence": "collections.abc",
        "Iterable": "collections.abc",
        "Callable": "collections.abc",
        "Iterator": "collections.abc",
        "Generator": "collections.abc",
        "Mapping": "collections.abc",
        "Collection": "collections.abc",
        "Literal": "typing",
        "Any": "typing",
        "TypeAlias": "typing",
    }
    for name, origin in _FROM_IMPORTS.items():
        if _has_type(name):
            imports.add(f"from {origin} import {name}")
    # Deliberately a plain substring test (historical behavior), not _has_type.
    if "Self" in joined:
        imports.add("from typing import Self")

    # Plain stdlib module references detected by substring.
    if "datetime" in joined:
        imports.add("import datetime")
    if "types.ModuleType" in joined:
        imports.add("import types")
    if "collections.OrderedDict" in joined:
        imports.add("import collections")
    if "collections.abc." in joined:
        imports.add("import collections.abc")

    # Detect module-qualified references (e.g. bpy.types.Object, mathutils.Vector)
    _KNOWN_MODULES = (
        "bpy.types",
        "bpy.props",
        "bpy.app",
        "mathutils",
        "gpu.types",
        "imbuf.types",
        "idprop.types",
        "freestyle.types",
        "bmesh.types",
        "bmesh",
    )
    for mod in _KNOWN_MODULES:
        if mod + "." in joined or re.search(rf"\b{re.escape(mod)}\b", joined):
            imports.add(f"import {mod}")

    # fixup_shadowed_builtins will add builtins.X references for structs
    # with property names that shadow builtins — need the import
    for struct in module_data.get("structs", []):
        if {p["name"] for p in struct["properties"]} & BUILTIN_NAMES:
            imports.add("import builtins")
            break

    return imports
|
|
|
|
|
|
def format_param(param: ParamData, force_type: bool = False) -> str:
    """Format a single parameter for a function signature."""
    annotation = param["type"] or ("object" if force_type else "")
    default = param["default"]

    if annotation and default is not None:
        # PEP 8: spaces around '=' only when an annotation is present.
        return f"{param['name']}: {annotation} = {default}"
    if annotation:
        return f"{param['name']}: {annotation}"
    if default is not None:
        return f"{param['name']}={default}"
    return param["name"]
|
|
|
|
|
|
def format_docstring(doc: str, indent: str = " ") -> str:
    """Format a docstring with proper indentation."""
    if not doc:
        return ""
    # Escape backslashes first, then triple quotes, so the emitted
    # docstring round-trips as valid Python source.
    escaped = doc.replace("\\", "\\\\").replace('"""', r"\"\"\"")
    if "\n" not in escaped:
        # Single-line form; a trailing quote would glue onto the closing
        # delimiter, so pad it with a space.
        if escaped.endswith('"'):
            escaped += " "
        return f'{indent}"""{escaped}"""\n'
    first, *rest = escaped.split("\n")
    pieces = [f'{indent}"""{first}\n']
    for line in rest:
        # Blank lines are emitted without trailing indentation.
        pieces.append(f"{indent}{line}\n" if line.strip() else "\n")
    pieces.append(f'{indent}"""\n')
    return "".join(pieces)
|
|
|
|
|
|
def generate_function_stub(func: FunctionData) -> str:
    """Generate a stub for a single function.

    Buckets each parameter by its inspect-style kind and by whether it has a
    default, then reassembles a legal signature, emitting the ``/`` and ``*``
    markers only where they cannot produce invalid Python.

    Args:
        func: Introspected function data (name, params, return_type, doc).

    Returns:
        Stub source: ``def name(...) -> T: ...`` on one line, or a full def
        with an indented docstring body when ``func["doc"]`` is set.
    """
    # Build parameter list, separating by kind and default presence
    positional_only_no_default: list[str] = []
    positional_only_with_default: list[str] = []
    positional_no_default: list[str] = []
    positional_with_default: list[str] = []
    keyword_params: list[str] = []
    has_positional_only = False

    for param in func["params"]:
        formatted = format_param(param, force_type=True)
        # Introspection may omit "kind"; POSITIONAL_OR_KEYWORD is the default.
        kind = param.get("kind", "POSITIONAL_OR_KEYWORD")
        if kind == "POSITIONAL_ONLY":
            has_positional_only = True
            if param["default"] is not None:
                positional_only_with_default.append(formatted)
            else:
                positional_only_no_default.append(formatted)
        elif kind == "KEYWORD_ONLY":
            keyword_params.append(formatted)
        elif kind == "VAR_POSITIONAL":
            # *args — annotated directly; format_param's default handling
            # does not apply to variadics.
            type_ann = f": {param['type']}" if param["type"] else ": object"
            positional_no_default.append(f"*{param['name']}{type_ann}")
        elif kind == "VAR_KEYWORD":
            # **kwargs always lands at the end of the keyword group.
            type_ann = f": {param['type']}" if param["type"] else ": object"
            keyword_params.append(f"**{param['name']}{type_ann}")
        elif param["default"] is not None:
            positional_with_default.append(formatted)
        else:
            positional_no_default.append(formatted)

    all_params: list[str] = []
    if has_positional_only and not (
        positional_only_with_default and positional_no_default
    ):
        # Only emit / when it won't cause "non-default follows default" errors
        all_params.extend(positional_only_no_default)
        all_params.extend(positional_only_with_default)
        all_params.append("/")
    else:
        # Merge positional-only into regular params when / would be invalid
        positional_no_default = positional_only_no_default + positional_no_default
        positional_with_default = positional_only_with_default + positional_with_default
    # Non-default positional params must come before default ones
    all_params.extend(positional_no_default)
    all_params.extend(positional_with_default)
    if keyword_params:
        # Insert * separator if there are keyword-only args and no VAR_POSITIONAL.
        # Don't insert * if the only keyword params are **kwargs (already captures all).
        has_var_positional = any(
            p.get("kind") == "VAR_POSITIONAL" for p in func["params"]
        )
        has_named_keyword = any(p.get("kind") == "KEYWORD_ONLY" for p in func["params"])
        if not has_var_positional and has_named_keyword:
            all_params.append("*")
        all_params.extend(keyword_params)

    params_str = ", ".join(all_params)

    # Return type: a falsy return_type means "unannotated" -> emit None
    ret = f" -> {func['return_type']}" if func["return_type"] else " -> None"

    # Build the function
    if func["doc"]:
        result = f"def {func['name']}({params_str}){ret}:\n"
        result += format_docstring(func["doc"])
        return result
    return f"def {func['name']}({params_str}){ret}: ...\n"
|
|
|
|
|
|
def generate_variable_stub(var: VariableData) -> str:
    """Generate a stub for a module-level variable."""
    annotation = map_type(var["type"])
    if annotation == "TypeAlias":
        # Aliases keep their value so type checkers see the aliased type.
        return f"{var['name']}: TypeAlias = {var['value']}\n"
    return f"{var['name']}: {annotation}\n"
|
|
|
|
|
|
def generate_method_stub(
    func: FunctionData,
    indent: str = " ",
    is_override: bool = False,
) -> str:
    """Generate a stub for a method inside a class.

    Mirrors generate_function_stub but prepends ``self``/``cls``, indents
    every emitted line, and optionally adds ``@override``/``@classmethod``
    decorators.

    Args:
        func: Introspected method data; ``is_classmethod`` selects ``cls``.
        indent: Leading indentation applied to every emitted line.
        is_override: Emit ``@override`` (the method shadows a base method).
    """
    is_cls = func.get("is_classmethod", False)
    first_param = "cls" if is_cls else "self"

    # Categorize params by kind, keeping positional ordering correct
    positional_only_no_default: list[str] = []
    positional_only_with_default: list[str] = []
    positional_no_default: list[str] = []
    positional_with_default: list[str] = []
    keyword_params: list[str] = []
    has_positional_only = False

    for param in func["params"]:
        # self/cls is added explicitly below; skip it if introspected.
        if param["name"] in ("cls", "self"):
            continue
        formatted = format_param(param, force_type=True)
        kind = param.get("kind", "POSITIONAL_OR_KEYWORD")
        if kind == "POSITIONAL_ONLY":
            has_positional_only = True
            if param["default"] is not None:
                positional_only_with_default.append(formatted)
            else:
                positional_only_no_default.append(formatted)
        elif kind == "KEYWORD_ONLY":
            keyword_params.append(formatted)
        elif kind == "VAR_POSITIONAL":
            type_ann = f": {param['type']}" if param["type"] else ": object"
            positional_no_default.append(f"*{param['name']}{type_ann}")
        elif kind == "VAR_KEYWORD":
            type_ann = f": {param['type']}" if param["type"] else ": object"
            keyword_params.append(f"**{param['name']}{type_ann}")
        elif param["default"] is not None:
            positional_with_default.append(formatted)
        else:
            positional_no_default.append(formatted)

    all_params: list[str] = [first_param]
    if has_positional_only and not (
        positional_only_with_default and positional_no_default
    ):
        # Only emit / when it won't cause "non-default follows default" errors
        all_params.extend(positional_only_no_default)
        all_params.extend(positional_only_with_default)
        all_params.append("/")
    else:
        # Merge positional-only into regular params when / would be invalid
        positional_no_default = positional_only_no_default + positional_no_default
        positional_with_default = positional_only_with_default + positional_with_default
    all_params.extend(positional_no_default)
    all_params.extend(positional_with_default)
    if keyword_params:
        # Same *-separator rule as the module-level function generator:
        # only when keyword-only args exist and *args does not.
        has_var_positional = any(
            p.get("kind") == "VAR_POSITIONAL" for p in func["params"]
        )
        has_named_keyword = any(p.get("kind") == "KEYWORD_ONLY" for p in func["params"])
        if not has_var_positional and has_named_keyword:
            all_params.append("*")
        all_params.extend(keyword_params)

    params_str = ", ".join(all_params)
    ret = f" -> {func['return_type']}" if func["return_type"] else " -> None"
    decorators = ""
    if is_override:
        decorators += f"{indent}@override\n"
    if is_cls:
        decorators += f"{indent}@classmethod\n"
    if func["doc"]:
        result = f"{decorators}{indent}def {func['name']}({params_str}){ret}:\n"
        result += format_docstring(func["doc"], indent + " ")
        return result
    return f"{decorators}{indent}def {func['name']}({params_str}){ret}: ...\n"
|
|
|
|
|
|
def generate_property_stub(
    prop: PropertyData,
    indent: str = " ",
    property_decorator: str = "@property",
) -> str:
    """Generate a stub for a class property.

    Read-only properties become ``@property`` getters so type checkers
    reject assignment; writable ones become plain annotated attributes.

    Args:
        prop: Introspected property data (name, type, is_readonly, description).
        indent: Leading indentation for every emitted line.
        property_decorator: Decorator for read-only getters
            (``@builtins.property`` when a sibling property shadows ``property``).
    """

    def _escaped_description() -> str:
        """Return the description made safe for a triple-quoted literal ('' if none)."""
        desc = prop["description"]
        if not desc:
            return ""
        # Escape backslashes first, then triple quotes.
        desc = desc.replace("\\", "\\\\").replace('"""', r"\"\"\"")
        # A trailing quote would glue onto the closing delimiter.
        if desc.endswith('"'):
            desc += " "
        return desc

    desc = _escaped_description()

    if prop["is_readonly"]:
        result = f"{indent}{property_decorator}\n"
        result += f"{indent}def {prop['name']}(self) -> {prop['type']}:\n"
        if desc:
            # The description doubles as the getter body.
            result += f'{indent} """{desc}"""\n'
        else:
            result += f"{indent} ...\n"
        return result

    result = f"{indent}{prop['name']}: {prop['type']}\n"
    if desc:
        result += f'{indent}"""{desc}"""\n'
    return result
|
|
|
|
|
|
# Builtin names that RNA property names may shadow inside a generated class
# body; fixup_shadowed_builtins and collect_imports use this set to decide
# when type references must be spelled ``builtins.X`` instead.
BUILTIN_NAMES = {
    "int",
    "float",
    "bool",
    "str",
    "list",
    "dict",
    "set",
    "tuple",
    "type",
    "object",
}
|
|
|
|
|
|
def fixup_shadowed_builtins(
    properties: list[PropertyData],
) -> list[PropertyData]:
    """If a property name shadows a builtin, qualify type references with builtins."""
    shadowed = BUILTIN_NAMES & {p["name"] for p in properties}
    if not shadowed:
        # Fast path: nothing shadowed, reuse the input list untouched.
        return properties

    def _requalify(type_str: str) -> str:
        # Replace bare builtin type references with builtins.X for each
        # shadowed name, e.g. "int" -> "builtins.int",
        # "list[int]" -> "list[builtins.int]" (when only int is shadowed).
        for name in shadowed:
            type_str = re.sub(rf"\b{name}\b", f"builtins.{name}", type_str)
        return type_str

    return [
        {
            "name": p["name"],
            "type": _requalify(p["type"]),
            "is_readonly": p["is_readonly"],
            "description": p["description"],
        }
        for p in properties
    ]
|
|
|
|
|
|
def generate_struct_stub(
    struct: StructData,
    inherited_methods: set[str] | None = None,
) -> str:
    """Generate a stub for a single RNA struct (class)."""
    if inherited_methods is None:
        inherited_methods = set()

    header = f"class {struct['name']}:"
    if struct["base"]:
        header = f"class {struct['name']}({struct['base']}):"
    parts: list[str] = [header]
    if struct["doc"]:
        parts.append(format_docstring(struct["doc"]))

    # Methods win over same-named properties, so collect their names first.
    method_names = {m["name"] for m in struct["methods"]}
    properties = fixup_shadowed_builtins(struct["properties"])
    # When a sibling property is literally named "property", the decorator
    # itself must be spelled @builtins.property.
    decorator = "@property"
    if any(p["name"] == "property" for p in properties):
        decorator = "@builtins.property"

    for prop in properties:
        if prop["name"] not in method_names:
            parts.append(generate_property_stub(prop, property_decorator=decorator))
    for method in struct["methods"]:
        parts.append(
            generate_method_stub(
                method, is_override=method["name"] in inherited_methods
            )
        )

    # Only the class header emitted -> the body needs an explicit ellipsis.
    if len(parts) == 1:
        parts.append(" ...\n")
    return "\n".join(parts)
|
|
|
|
|
|
def topological_sort_structs(structs: list[StructData]) -> list[StructData]:
    """Sort structs so that base classes come before subclasses."""
    by_name = {s["name"]: s for s in structs}
    seen: set[str] = set()
    ordered: list[StructData] = []

    def emit(name: str) -> None:
        # Depth-first: emit the base chain before the struct itself.
        if name in seen:
            return
        seen.add(name)
        struct = by_name.get(name)
        if struct is None:
            return
        base = struct["base"]
        # Bases defined outside this batch are skipped, not errors.
        if base and base in by_name:
            emit(base)
        ordered.append(struct)

    for struct in structs:
        emit(struct["name"])
    return ordered
|
|
|
|
|
|
def collect_all_methods(
    structs: list[StructData],
) -> dict[str, set[str]]:
    """Build a map of struct name -> all method names inherited from ancestors."""
    by_name = {s["name"]: s for s in structs}
    inherited: dict[str, set[str]] = {}

    def resolve(name: str) -> set[str]:
        # Memoized walk up the base chain.
        cached = inherited.get(name)
        if cached is not None:
            return cached
        struct = by_name.get(name)
        if struct is None or not struct["base"]:
            inherited[name] = set()
            return inherited[name]
        base = struct["base"]
        # Base's own methods (empty if the base is outside this batch)
        # plus everything the base itself inherits.
        own: set[str] = set()
        if base in by_name:
            own = {m["name"] for m in by_name[base]["methods"]}
        inherited[name] = own | resolve(base)
        return inherited[name]

    for struct in structs:
        resolve(struct["name"])
    return inherited
|
|
|
|
|
|
def generate_types_stub(
    structs: list[StructData], python_version: str = "3.11", doc: str = ""
) -> str:
    """Generate the complete bpy/types.pyi content.

    Scans all annotation strings to decide which imports the header needs,
    emits structs in base-before-subclass order, then strips self-references
    and prunes imports that ended up unused.

    Args:
        structs: All introspected RNA structs for bpy.types.
        python_version: Decides typing vs typing_extensions for ``override``.
        doc: Optional module docstring to emit at the top.
    """
    # Collect all type strings to detect needed imports
    all_type_strs_parts: list[str] = []
    for s in structs:
        for p in s["properties"]:
            all_type_strs_parts.append(p["type"])
        for m in s["methods"]:
            for param in m["params"]:
                if param["type"]:
                    all_type_strs_parts.append(param["type"])
            if m["return_type"]:
                all_type_strs_parts.append(m["return_type"])
    all_type_strs = " ".join(all_type_strs_parts)

    # Generic/TypeVar are always needed for the _T declaration below.
    typing_imports = ["Generic", "TypeVar"]
    if "Literal[" in all_type_strs:
        typing_imports.append("Literal")

    abc_imports = ["Iterator"]
    if "MutableSequence[" in all_type_strs:
        abc_imports.append("MutableSequence")
    # Only import bare Sequence if used without collections.abc. prefix
    if re.search(r"(?<!\.)\bSequence\[", all_type_strs):
        abc_imports.append("Sequence")

    imports: list[str] = [
        f"from collections.abc import {', '.join(abc_imports)}",
        f"from typing import {', '.join(typing_imports)}",
        "import builtins",
        (
            # typing.override only exists from Python 3.12.
            "from typing import override"
            if tuple(int(x) for x in python_version.split(".")) >= (3, 12)
            else "from typing_extensions import override"
        ),
    ]
    if "Sequence[" in all_type_strs:
        # Use fully qualified import to avoid shadowing by bpy.types.Sequence
        # (the video sequencer strip type)
        imports.append("import collections.abc")

    parts: list[str] = []
    if doc:
        parts.append(f'"""{doc}"""\n')
    parts.extend(
        [
            "\n".join(sorted(imports)),
            "",
            '_T = TypeVar("_T")',
        ]
    )

    # Bases must appear before subclasses for forward-reference-free stubs.
    sorted_structs = topological_sort_structs(structs)
    inherited_map = collect_all_methods(sorted_structs)

    for struct in sorted_structs:
        parts.append("")
        parts.append(
            generate_struct_stub(struct, inherited_map.get(struct["name"], set()))
        )

    result = "\n".join(parts)
    class_names = {s["name"] for s in structs}
    # bpy.types.X inside bpy/types.pyi itself becomes a bare X.
    result = strip_self_module_prefix(result, "bpy.types", class_names)
    return prune_unused_imports(result)
|
|
|
|
|
|
def prune_unused_imports(content: str) -> str:
    """Remove import lines where no imported name is used in the body.

    Handles ``import X`` (kept when ``X.`` appears anywhere else) and
    ``from X import a, b as c`` (kept when ANY bound name — ``a`` or the
    alias ``c`` — is referenced in a type-annotation position). Checking
    every bound name fixes the previous behavior of inspecting only the
    first name, which could strip a multi-name import whose later names
    were still in use. Relative re-exports (``from . import X``) are
    always preserved.
    """
    lines = content.split("\n")

    def _bound_names(stmt: str) -> list[str]:
        # "from X import a, b as c" binds ["a", "c"].
        _, _, imported = stmt.partition(" import ")
        names: list[str] = []
        for piece in imported.split(","):
            piece = piece.strip()
            if " as " in piece:
                piece = piece.split(" as ")[-1].strip()
            if piece:
                names.append(piece)
        return names

    to_remove: set[int] = set()
    for i, raw in enumerate(lines):
        line = raw.strip()
        # Skip relative re-exports (from . import X) — these are intentional
        if line.startswith("from . import"):
            continue
        if not re.match(r"^(import |from .+ import )", line):
            continue
        # The body is everything except the import line under test.
        body = "\n".join(lines[j] for j in range(len(lines)) if j != i)
        if re.match(r"from .+ import ", line):
            # A name counts as used only in annotation-like positions:
            # ": Name", "-> Name", "[Name", "| Name", "(Name", " Name", "@Name".
            if not any(
                re.search(rf"[:>\[|( @]{re.escape(name)}\b", body)
                for name in _bound_names(line)
            ):
                to_remove.add(i)
            continue
        m = re.match(r"import ([\w.]+)", line)
        if m and m.group(1) + "." not in body:
            to_remove.add(i)

    return "\n".join(line for i, line in enumerate(lines) if i not in to_remove)
|
|
|
|
|
|
def strip_self_module_prefix(
    content: str, module_name: str, class_names: set[str]
) -> str:
    """Remove self-referencing module prefixes only for types defined in this module."""
    # Fully qualified self-references:
    # bpy_extras.anim_utils.BakeOptions -> BakeOptions
    for cls in class_names:
        content = content.replace(f"{module_name}.{cls}", cls)

    # Short-prefix references (anim_utils.BakeOptions inside
    # bpy_extras.anim_utils) cover classes referenced by annotations but not
    # introspected here.  The lookbehinds keep longer dotted paths such as
    # bpy.types.X intact.
    short = module_name.rsplit(".", 1)[-1]
    if short == module_name:
        # Top-level module: there is no distinct short prefix to strip.
        return content
    return re.sub(rf"(?<!\w\.)(?<!\w){re.escape(short)}\.(\w+)", r"\1", content)
|
|
|
|
|
|
def generate_module_stub(
    module_data: ModuleData,
    python_version: str = "3.11",
    submodule_names: list[str] | None = None,
) -> str:
    """Generate the complete .pyi content for a module.

    Args:
        module_data: Introspected module (functions, variables, structs, doc).
        python_version: Target version; forwarded to the bpy.types generator.
        submodule_names: Direct children to re-export via ``from . import``.

    Returns:
        Unformatted stub source (black is applied later by write_stubs).
    """
    module_name = module_data["module"]

    # bpy.types uses the specialized types generator
    if module_name == "bpy.types":
        return generate_types_stub(
            module_data["structs"], python_version, module_data["doc"]
        )

    parts: list[str] = []

    # Module docstring
    if module_data["doc"]:
        parts.append(f'"""{module_data["doc"]}"""\n')

    # Imports
    imports = collect_imports(module_data)
    # Re-export submodules so type checkers can resolve e.g. bpy.utils.
    # Use "X as X" pattern to mark as intentional re-export in stubs.
    for sub in submodule_names or []:
        imports.add(f"from . import {sub} as {sub}")
    # Remove exact self-imports (but keep submodule imports like bmesh.types for bmesh)
    imports = {i for i in imports if i != f"import {module_name}"}
    # Remove imports that clash with class names in this module
    class_names = {s["name"] for s in module_data.get("structs", [])}
    clashing_imports: set[str] = set()
    for i in imports:
        for name in class_names:
            if f"import {name}" in i:
                clashing_imports.add(i)
    imports -= clashing_imports
    if imports:
        parts.append("")
        for imp in sorted(imports):
            parts.append(imp)
        parts.append("")

    # Variables
    if module_data["variables"]:
        parts.append("")
        for var in module_data["variables"]:
            parts.append(generate_variable_stub(var))

    # Functions
    for func in module_data["functions"]:
        parts.append("")
        parts.append(generate_function_stub(func))

    # Classes
    for struct in module_data.get("structs", []):
        parts.append("")
        parts.append(generate_struct_stub(struct))

    result = "\n".join(parts)
    class_names = {s["name"] for s in module_data.get("structs", [])}
    result = strip_self_module_prefix(result, module_name, class_names)

    # If any variable/class name shadows a Python builtin used as a type
    # annotation (e.g. bpy.ops has a variable named "object"), qualify it.
    _BUILTINS_USED_AS_TYPES = {"object", "type", "int", "str", "float", "bool", "set"}
    var_names = {v["name"] for v in module_data.get("variables", [])}
    shadowed = var_names & _BUILTINS_USED_AS_TYPES
    if shadowed:
        for name in shadowed:
            # Rewrite annotation positions only (": name"), not arbitrary text.
            result = re.sub(
                rf"(?<=: ){name}\b",
                f"builtins.{name}",
                result,
            )
        if "import builtins" not in result:
            result = "import builtins\n" + result

    # Qualify type names that clash with class names in the same module
    for clash in clashing_imports:
        match = re.search(r"from ([\w.]+) import (\w+)", clash)
        if match:
            full_module = match.group(1)
            name = match.group(2)
            if name in class_names:
                # Subscripted uses (Name[...]) refer to the removed import's
                # generic type, not the local class; qualify them fully.
                result = re.sub(
                    rf"(?<!\w){name}\[",
                    f"{full_module}.{name}[",
                    result,
                )
                if f"import {full_module}" not in result:
                    result = f"import {full_module}\n" + result

    return prune_unused_imports(result)
|
|
|
|
|
|
def load_overrides(overrides_dir: str, module_name: str) -> dict[str, ParamOverrides]:
    """Load type overrides for a module from a versioned overrides directory."""
    path = os.path.join(overrides_dir, f"{module_name}.json")
    if not os.path.exists(path):
        # No override file for this module: nothing to apply.
        return {}
    with open(path) as handle:
        data: dict[str, ParamOverrides] = json.load(handle)
    return data
|
|
|
|
|
|
def _apply_func_overrides(func: FunctionData, func_overrides: ParamOverrides) -> None:
    """Apply overrides to a single function/method (mutates ``func`` in place)."""
    replacements = func_overrides.get("params", {})
    for param in func["params"]:
        name = param["name"]
        if name in replacements:
            param["type"] = replacements[name]
    if "return_type" in func_overrides:
        func["return_type"] = func_overrides["return_type"]
|
|
|
|
|
|
def apply_overrides(
    module_data: ModuleData, overrides: dict[str, ParamOverrides]
) -> ModuleData:
    """Apply type overrides to introspected module data.

    Keys can be function names (e.g. ``"my_func"``) for module-level functions,
    or ``"ClassName.method_name"`` for struct methods.
    """
    for func in module_data["functions"]:
        entry = overrides.get(func["name"])
        if entry:
            _apply_func_overrides(func, entry)

    for struct in module_data.get("structs", []):
        for method in struct["methods"]:
            entry = overrides.get(f"{struct['name']}.{method['name']}")
            if entry:
                _apply_func_overrides(method, entry)

    return module_data
|
|
|
|
|
|
def write_stubs(
    modules_data: list[ModuleData],
    output_dir: str,
    overrides_dir: str | None = None,
    python_version: str = "3.11",
) -> list[str]:
    """Write .pyi stub files from introspection data.

    For each module: apply JSON overrides, qualify bare RNA names, generate
    stub source, format it with black (best effort), and write it to
    ``<output_dir>/<pkg>/.../__init__.pyi``.

    Returns the list of top-level package directory names created.
    """
    os.makedirs(output_dir, exist_ok=True)

    # Determine which modules are packages (have submodules)
    all_module_names = {m["module"] for m in modules_data}
    package_modules: set[str] = set()
    for name in all_module_names:
        parts = name.split(".")
        # Every proper dotted prefix of a module name is a package.
        for i in range(1, len(parts)):
            package_modules.add(".".join(parts[:i]))

    top_level_packages: set[str] = set()

    for module_data in modules_data:
        module_name = module_data["module"]

        # Apply type overrides if available
        if overrides_dir:
            overrides = load_overrides(overrides_dir, module_name)
            if overrides:
                module_data = apply_overrides(module_data, overrides)

        # Qualify bare RNA type names for modules outside bpy.types
        if module_name != "bpy.types":
            qualify_module_types(module_data)

        parts = module_name.split(".")
        top_level_packages.add(parts[0])

        # Create package directories and __init__.pyi files
        current_dir = output_dir
        for part in parts[:-1]:
            current_dir = os.path.join(current_dir, part)
            os.makedirs(current_dir, exist_ok=True)
            init_pyi = os.path.join(current_dir, "__init__.pyi")
            if not os.path.exists(init_pyi):
                # Touch an empty marker so intermediate dirs are packages.
                with open(init_pyi, "w") as f:
                    pass

        # Always write as directory/__init__.pyi for consistent packaging
        pkg_dir = os.path.join(current_dir, parts[-1])
        os.makedirs(pkg_dir, exist_ok=True)
        stub_path = os.path.join(pkg_dir, "__init__.pyi")

        # Collect direct child submodule names for re-export in __init__.pyi
        child_submodules: list[str] = []
        if module_name in package_modules:
            prefix = module_name + "."
            child_submodules = sorted(
                {
                    n[len(prefix) :].split(".")[0]
                    for n in all_module_names
                    if n.startswith(prefix)
                }
            )

        content = generate_module_stub(module_data, python_version, child_submodules)
        try:
            content = black.format_str(content, mode=black.Mode(is_pyi=True))
        except Exception as e:
            # Best-effort: report and write the unformatted stub rather
            # than failing the whole run.
            print(f" ERROR formatting {module_name}: {e}", file=sys.stderr)
        with open(stub_path, "w") as f:
            f.write(content)

        print(f" {stub_path}")

    return sorted(top_level_packages)
|
|
|
|
|
|
def main() -> None:
    """CLI entry point: read introspection JSON and emit stub packages."""
    # Imported lazily so importing this module stays side-effect free.
    import argparse

    parser = argparse.ArgumentParser(
        description="Generate .pyi stubs from introspection JSON"
    )
    parser.add_argument("input", help="Path to introspection JSON file")
    parser.add_argument(
        "--output-dir", default="blender-stubs", help="Output directory for stubs"
    )
    args = parser.parse_args()

    with open(args.input) as handle:
        modules_data: list[ModuleData] = json.load(handle)

    print(f"Generating stubs in {args.output_dir}/")
    write_stubs(modules_data, args.output_dir)
    print("Done.")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|