blender-python-stubs/introspect.py
Joseph HENRY e4284ce7d9 Use @property/@setter for writable mathutils properties
Writable mathutils properties (Vector, Euler, etc.) were typed as plain
attributes with a union type, causing the getter to also return the union.
This made `obj.location.x` fail type checking since Sequence[float] has
no `.x`. Now these properties use @property for the getter (returning the
concrete mathutils type) and @setter accepting the wider union.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 15:54:40 +02:00

3096 lines
106 KiB
Python

"""Introspection script that runs inside Blender headless.
Usage: blender --background --factory-startup -noaudio --python introspect.py
"""
import argparse
import importlib
import inspect
import json
import pkgutil
import re
import sys
from collections.abc import Callable
from dataclasses import dataclass
from types import ModuleType
from typing import TypedDict, cast
# Top-level Python modules bundled with Blender that pkgutil can discover
# and that this script introspects.
BLENDER_MODULES = [
    "aud",
    "bl_math",
    "blf",
    "bmesh",
    "bpy",
    "bpy_extras",
    "freestyle",
    "gpu",
    "gpu_extras",
    "idprop",
    "imbuf",
    "mathutils",
]
# Virtual modules not discoverable via pkgutil (C-level or RNA-defined)
EXTRA_MODULES = [
    "bpy.types",
    "bpy.props",
    "bpy.app",
    "bmesh.types",
    "gpu.types",
    "imbuf.types",
    "idprop.types",
]
# Hardcoded types for screen context members that are None in headless mode.
# These are dynamically injected by Blender based on the active editor/mode.
# Keys are bpy.context member names; values are type strings as they should
# appear in the generated stubs (presumably resolved against bpy.types by the
# consumer later in the file — confirm there).
SCREEN_CONTEXT_TYPE_OVERRIDES: dict[str, str] = {
    "active_action": "Action",
    "active_annotation_layer": "GPencilLayer",
    "active_bone": "EditBone",
    "active_editable_fcurve": "FCurve",
    "active_gpencil_frame": "GreasePencilFrame",
    "active_gpencil_layer": "GreasePencilLayer",
    "active_nla_strip": "NlaStrip",
    "active_nla_track": "NlaTrack",
    "active_node": "Node",
    "active_object": "Object",
    "active_operator": "Operator",
    "active_pose_bone": "PoseBone",
    "active_sequence_strip": "Sequence",
    "active_strip": "NlaStrip",
    "annotation_data": "GreasePencil",
    "annotation_data_owner": "ID",
    "edit_object": "Object",
    "editable_bones": "Sequence[EditBone]",
    "editable_gpencil_layers": "Sequence[GPencilLayer]",
    "editable_gpencil_strokes": "Sequence[GPencilStroke]",
    "editable_objects": "Sequence[Object]",
    "gpencil_data": "GreasePencil",
    "gpencil_data_owner": "ID",
    "grease_pencil": "GreasePencil",
    "image_paint_object": "Object",
    "object": "Object",
    "objects_in_mode": "Sequence[Object]",
    "objects_in_mode_unique_data": "Sequence[Object]",
    "particle_edit_object": "Object",
    "pose_object": "Object",
    "property": "tuple[bpy_struct, str, int]",
    "sculpt_object": "Object",
    "selectable_objects": "Sequence[Object]",
    "selected_bones": "Sequence[EditBone]",
    "selected_editable_actions": "Sequence[Action]",
    "selected_editable_bones": "Sequence[EditBone]",
    "selected_editable_fcurves": "Sequence[FCurve]",
    "selected_editable_keyframes": "Sequence[Keyframe]",
    "selected_editable_objects": "Sequence[Object]",
    "selected_editable_sequences": "Sequence[Sequence]",
    "selected_editable_strips": "Sequence[NlaStrip]",
    "selected_movieclip_tracks": "Sequence[MovieTrackingTrack]",
    "selected_nla_strips": "Sequence[NlaStrip]",
    "selected_objects": "Sequence[Object]",
    "selected_pose_bones": "Sequence[PoseBone]",
    "selected_pose_bones_from_active_object": "Sequence[PoseBone]",
    "selected_sequences": "Sequence[Sequence]",
    "selected_strips": "Sequence[NlaStrip]",
    "selected_visible_actions": "Sequence[Action]",
    "selected_visible_fcurves": "Sequence[FCurve]",
    "sequencer_scene": "Scene",
    "sequences": "Sequence[Sequence]",
    "strips": "Sequence[NlaStrip]",
    "ui_list": "UIList",
    "vertex_paint_object": "Object",
    "visible_bones": "Sequence[EditBone]",
    "visible_fcurves": "Sequence[FCurve]",
    "visible_gpencil_layers": "Sequence[GPencilLayer]",
    "visible_objects": "Sequence[Object]",
    "visible_pose_bones": "Sequence[PoseBone]",
    "weight_paint_object": "Object",
    # Buttons context members (Properties editor panels, not in dir() in headless)
    "armature": "Armature",
    "bone": "Bone",
    "brush": "Brush",
    "camera": "Camera",
    "cloth": "ClothModifier",
    "collision": "CollisionModifier",
    "curve": "Curve",
    "dynamic_paint": "DynamicPaintModifier",
    "edit_bone": "EditBone",
    "fluid": "FluidModifier",
    "hair_curves": "Curves",
    "lattice": "Lattice",
    "light": "Light",
    "lightprobe": "LightProbe",
    "line_style": "FreestyleLineStyle",
    "material": "Material",
    "material_slot": "MaterialSlot",
    "mesh": "Mesh",
    "meta_ball": "MetaBall",
    "node": "Node",
    "particle_settings": "ParticleSettings",
    "particle_system": "ParticleSystem",
    "particle_system_editable": "ParticleSystem",
    "pointcloud": "PointCloud",
    "pose_bone": "PoseBone",
    "soft_body": "SoftBodyModifier",
    "speaker": "Speaker",
    "texture": "Texture",
    "texture_node": "Node",
    "texture_slot": "TextureSlot",
    "texture_user": "ID",
    "texture_user_property": "Property",
    "volume": "Volume",
    "world": "World",
}
# Suffix-based heuristics for screen context member types (order matters: longer first)
SCREEN_CONTEXT_NAME_PATTERNS: list[tuple[str, str]] = [
("_objects", "Sequence[Object]"),
("_object", "Object"),
("_bones", "Sequence[EditBone]"),
("_bone", "EditBone"),
("_fcurves", "Sequence[FCurve]"),
("_fcurve", "FCurve"),
("_strips", "Sequence[NlaStrip]"),
("_strip", "NlaStrip"),
("_actions", "Sequence[Action]"),
("_action", "Action"),
("_track", "NlaTrack"),
("_sequences", "Sequence[Sequence]"),
("_nodes", "Sequence[Node]"),
("_node", "Node"),
]
def infer_context_member_type(name: str) -> str | None:
"""Infer a screen context member's type from its name suffix."""
for suffix, type_str in SCREEN_CONTEXT_NAME_PATTERNS:
if name.endswith(suffix):
return type_str
return None
class ParamData(TypedDict):
    """One function/method parameter as recorded for stub generation."""
    name: str
    type: str | None  # cleaned type string; None when it could not be determined
    default: str | None  # sanitized default literal; None when the param is required
    kind: str  # inspect.Parameter kind name, e.g. "KEYWORD_ONLY"
class _FunctionDataOptional(TypedDict, total=False):
    """Optional keys of FunctionData (split out because total=False)."""
    is_override: bool
class FunctionData(_FunctionDataOptional):
    """Introspected metadata for one function or method."""
    name: str
    doc: str
    params: list[ParamData]
    return_type: str | None
    is_classmethod: bool
class VariableData(TypedDict):
    """A module-level variable with its type and repr'd value."""
    name: str
    type: str
    value: str
class _PropertyDataOptional(TypedDict, total=False):
    """Optional keys of PropertyData (split out because total=False)."""
    setter_type: str
class PropertyData(_PropertyDataOptional):
    """Introspected metadata for one struct property."""
    name: str
    type: str
    is_readonly: bool
    description: str
class StructData(TypedDict):
    """Introspected metadata for one struct/class."""
    name: str
    doc: str
    base: str | None
    properties: list[PropertyData]
    methods: list[FunctionData]
class ModuleData(TypedDict):
    """Top-level introspection result for one module."""
    module: str
    doc: str
    functions: list[FunctionData]
    variables: list[VariableData]
    structs: list[StructData]
def parse_docstring_types(docstring: str) -> tuple[dict[str, str], str | None]:
    """Parse RST-style :type: and :rtype: annotations from a docstring.
    Returns (param_types, return_type) where param_types maps param name to type string.
    All extracted type strings are passed through clean_type_str().
    """
    if not docstring:
        return {}, None
    param_types: dict[str, str] = {}
    return_type: str | None = None
    # Match :type param: ... up to the next RST directive (:arg, :type, :rtype, :return)
    # but NOT :class: or :func: which appear inside type annotations
    directive_lookahead = (
        r"(?=\n\s*:(?:arg|param|type|rtype|return|returns|raises)[\s:]|$)"
    )
    # Collect one cleaned type string per ":type <name>:" directive.
    for match in re.finditer(
        rf":type\s+(\w+):\s*(.+?){directive_lookahead}", docstring, re.DOTALL
    ):
        name = match.group(1)
        type_str = clean_type_str(match.group(2).strip())
        param_types[name] = type_str
    rtype_match = re.search(
        rf":rtype:\s*(.+?){directive_lookahead}", docstring, re.DOTALL
    )
    if rtype_match:
        return_type = clean_type_str(rtype_match.group(1).strip())
    # Also match standalone `:type: X` (without a param name) used in
    # property docstrings (e.g. `:type: bool`, `:type: :class:`Vector``).
    if return_type is None:
        bare_type_match = re.search(
            rf"(?<!\w):type:\s*(.+?){directive_lookahead}", docstring, re.DOTALL
        )
        if bare_type_match:
            return_type = clean_type_str(bare_type_match.group(1).strip())
    # Infer Literal types from :arg: descriptions when :type: is just "str".
    # Blender 5.0 and earlier list enum values as ``VALUE`` bullet items in :arg:
    # but only declare :type param: str.
    for name, type_str in param_types.items():
        if type_str != "str":
            continue
        # Find the :arg name: block
        arg_match = re.search(
            rf":arg\s+{re.escape(name)}:\s*(.+?){directive_lookahead}",
            docstring,
            re.DOTALL,
        )
        if not arg_match:
            continue
        arg_text = arg_match.group(1)
        # ``UPPER_CASE`` tokens in the :arg: prose are the enum values.
        values = re.findall(r"``([A-Z][A-Z0-9_]*)``", arg_text)
        # Require at least two values to avoid misreading a lone emphasized word.
        if len(values) >= 2:
            quoted = ", ".join(f'"{v}"' for v in values)
            param_types[name] = f"Literal[{quoted}]"
    return param_types, return_type
# Bare type names that appear unqualified in Blender docstrings, mapped to
# their fully-qualified module paths for the generated stubs.
UNQUALIFIED_TYPES: dict[str, str] = {
    "Stroke": "freestyle.types.Stroke",
    "ViewEdge": "freestyle.types.ViewEdge",
    "Interface0DIterator": "freestyle.types.Interface0DIterator",
    "UnaryFunction0D": "freestyle.types.UnaryFunction0D",
    "IntegrationType": "freestyle.types.IntegrationType",
    "ImBuf": "imbuf.types.ImBuf",
    "Buffer": "gpu.types.Buffer",
    "GPUShader": "gpu.types.GPUShader",
    "GPUShaderCreateInfo": "gpu.types.GPUShaderCreateInfo",
    "GPUStageInterfaceInfo": "gpu.types.GPUStageInterfaceInfo",
    "GPUBatch": "gpu.types.GPUBatch",
    "GPUTexture": "gpu.types.GPUTexture",
    "GPUFrameBuffer": "gpu.types.GPUFrameBuffer",
    "GPUOffScreen": "gpu.types.GPUOffScreen",
    "GPUVertBuf": "gpu.types.GPUVertBuf",
    "GPUVertFormat": "gpu.types.GPUVertFormat",
    "GPUIndexBuf": "gpu.types.GPUIndexBuf",
    "GPUUniformBuf": "gpu.types.GPUUniformBuf",
    "bpy_struct": "bpy.types.bpy_struct",
    "Context": "bpy.types.Context",
    "BlendData": "bpy.types.BlendData",
    "Mesh": "bpy.types.Mesh",
    "Object": "bpy.types.Object",
    "Depsgraph": "bpy.types.Depsgraph",
    "Scene": "bpy.types.Scene",
    "ViewLayer": "bpy.types.ViewLayer",
    "SpaceView3D": "bpy.types.SpaceView3D",
    "Region": "bpy.types.Region",
    "AdjacencyIterator": "freestyle.types.AdjacencyIterator",
    "ChainingIterator": "freestyle.types.ChainingIterator",
    "BMesh": "bmesh.types.BMesh",
    "BMLayerItem": "bmesh.types.BMLayerItem",
    "BMVert": "bmesh.types.BMVert",
    "BMEdge": "bmesh.types.BMEdge",
    "BMFace": "bmesh.types.BMFace",
    "BMLoop": "bmesh.types.BMLoop",
}
def _replace_commas_outside_brackets(s: str) -> str:
"""Replace top-level commas with union separators while preserving generics."""
result: list[str] = []
depth = 0
i = 0
while i < len(s):
if s[i] in "([":
depth += 1
result.append(s[i])
elif s[i] in ")]":
depth -= 1
result.append(s[i])
elif s[i] == "," and depth == 0:
result.append(" |")
else:
result.append(s[i])
i += 1
return "".join(result)
def _fix_multi_arg_list(match: re.Match[str]) -> str:
"""Normalize malformed list[T, U] style annotations from docstrings."""
inner = match.group(1)
parts: list[str] = []
depth = 0
current: list[str] = []
for ch in inner:
if ch in "([":
depth += 1
elif ch in ")]":
depth -= 1
if ch == "," and depth == 0:
parts.append("".join(current).strip())
current = []
else:
current.append(ch)
parts.append("".join(current).strip())
if len(parts) <= 1:
return match.group(0)
non_ellipsis = [p for p in parts if p != "..."]
if len(set(non_ellipsis)) == 1:
return f"list[{non_ellipsis[0]}]"
return f"tuple[{', '.join(non_ellipsis)}]"
def _split_union_outside_brackets(s: str) -> list[str]:
"""Split a union string on top-level pipes, preserving nested generics."""
parts: list[str] = []
current: list[str] = []
depth = 0
for ch in s:
if ch in "([":
depth += 1
current.append(ch)
elif ch in ")]":
depth -= 1
current.append(ch)
elif ch == "|" and depth == 0:
parts.append("".join(current))
current = []
else:
current.append(ch)
parts.append("".join(current))
return parts
def _is_valid_union_component(type_part: str) -> bool:
"""Return True if a union component looks like a valid type expression."""
if " " in type_part and "[" not in type_part:
return False
if re.match(r"^[a-z][a-z0-9_]+$", type_part) and "_" in type_part:
return False
if re.match(r"^[a-z]+$", type_part) and type_part not in (
"bool",
"int",
"float",
"str",
"bytes",
"object",
"type",
"None",
):
return False
return True
def _normalize_union_fallback(type_str: str) -> str:
    """Apply final union cleanup and prose fallback handling."""
    if "|" in type_str:
        components: list[str] = []
        for raw in _split_union_outside_brackets(type_str):
            part = raw.strip().rstrip(".,;:")
            if not part:
                continue
            # Invalid-looking members degrade to object rather than break stubs.
            components.append(part if _is_valid_union_component(part) else "object")
        # Deduplicate while keeping first-seen order.
        return " | ".join(dict.fromkeys(components))
    # A spaced, bracket-free string is prose rather than a type.
    if " " in type_str and "[" not in type_str:
        return "object"
    return type_str
# A regex replacement is either a template string or a match callback.
RegexReplacement = str | Callable[[re.Match[str]], str]
# One cleanup step: (compiled pattern, replacement), applied in order.
RegexRule = tuple[re.Pattern[str], RegexReplacement]
# Strip RST markup and "(readonly)"-style qualifiers from type text.
_CLEAN_TYPE_RST_RULES: tuple[RegexRule, ...] = (
    (re.compile(r",?\s*\(readonly\)"), ""),
    (re.compile(r",?\s*\(never None\)"), ""),
    (re.compile(r":class:`([^`]+)`"), r"\1"),
    # RST escape "\ [" -> plain "["
    (re.compile("\\\\\\s*\\["), "["),
    (re.compile(r"``([^`]+)``"), r"\1"),
    (re.compile(r"`\s*([^`]+?)\s*`"), r"\1"),
    (re.compile(r"\.?\s*(?:r?type|returns?):.*"), ""),
    (re.compile(r":(?!param|arg|type|return)(\w)"), r"\1"),
)
# Normalizations applied before prose stripping.
_CLEAN_TYPE_PRE_PROSE_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\btuple\(([^)]+)\)"), r"tuple[\1]"),
    (re.compile(r",\s+\w+\s*:"), ""),
)
# Drop trailing prose and size qualifiers from type descriptions.
_CLEAN_TYPE_PROSE_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\.\.\s+\w+::.*", re.DOTALL), ""),
    (re.compile(r"\s+(?!None\b|True\b|False\b)[A-Z][a-z]+\s+[a-z].*$"), ""),
    (re.compile(r"(\bNone)\s+\w.*$"), r"\1"),
    (re.compile(r"\s+of size \d+"), ""),
    (re.compile(r"\b\d+[dDxX]\d*(?:\s+or\s+\d+[dDxX]\d*)*\s+"), ""),
)
# Prose cleanup that must run after the collection-phrase normalization.
_CLEAN_TYPE_PROSE_POST_COLLECTION_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\b(float|int)\s+(triplet|pair|array)\b"), r"\1"),
    (
        re.compile(r"\b(?:one|two|three|four|five|six|seven|eight|nine|ten)\s+"),
        "",
    ),
    (re.compile(r"\btuple of (?:\d+ )?([\w.]+\w)\b"), r"tuple[\1, ...]"),
    (re.compile(r"\blist of ([\w.]+\w)\b"), r"list[\1]"),
)
# Optional dimension prefix such as "3 ", "2 or 3 ", "4 to 6 " in prose.
_DIM_PREFIX = r"(?:\d+(?:\s+(?:or|and|to)\s+(?:\d+|more|fewer))*\s+)?"
_RE_SEQUENCE_CONTAINING = re.compile(r"\b[Ss]equence of \w+s\s+containing\s+(\w+)s?\b")
_RE_PLURAL_CONTAINING = re.compile(r"\b\w+s\s+containing\s+(\w+)s?\b")
_RE_SEQUENCE_OF = re.compile(rf"\b[Ss]equence of {_DIM_PREFIX}(\w[\w.]*)\b")
_RE_ITERABLE_OF = re.compile(rf"\b[Ii]terable of {_DIM_PREFIX}(\w[\w.]*)\b")
_RE_COLLECTION_OF = re.compile(rf"\b[Cc]ollection of {_DIM_PREFIX}(\w[\w.]*)\b")
_RE_SEQUENCE_OF_TUPLE = re.compile(r"\b[Ss]equence of \(([^)]+)\)")
_RE_ITERABLE_OF_TUPLE = re.compile(r"\b[Ii]terable of \(([^)]+)\)")
# Map informal/plural/legacy words to real type names. Order matters:
# plural forms are rewritten before their singular counterparts.
_CLEAN_TYPE_ALIAS_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\bclass\b"), "type"),
    (re.compile(r"\bstrings\b"), "str"),
    (re.compile(r"\bfloats\b"), "float"),
    (re.compile(r"\bints\b"), "int"),
    (re.compile(r"\bbools\b"), "bool"),
    (re.compile(r"\bnumbers\b"), "float"),
    (re.compile(r"\bvectors\b"), "mathutils.Vector"),
    (re.compile(r"\bmatrices\b"), "mathutils.Matrix"),
    (re.compile(r"\btuples\b"), "tuple[object, ...]"),
    (re.compile(r"\bstring\b"), "str"),
    (re.compile(r"\bdouble\b"), "float"),
    (re.compile(r"\binteger\b"), "int"),
    (re.compile(r"\bboolean\b"), "bool"),
    (re.compile(r"\bnumber\b"), "float"),
    (re.compile(r"\buint\b"), "int"),
    (re.compile(r"\bNone[Tt]ype\b"), "None"),
    (re.compile(r"\bbuffer\b"), "object"),
    (re.compile(r"\b[Aa]ny\b"), "object"),
    (re.compile(r"\bidprop\.types?\.\w+\b"), "object"),
    (re.compile(r"\b(?:bpy\.types\.)?IDProperty\w*\b"), "object"),
    (re.compile(r"\b(?:bpy\.types\.)?bpy_prop\b(?!_)"), "object"),
    (re.compile(r"\b(int|float|bool|str)\s+sequence\b"), r"Sequence[\1]"),
    (re.compile(r"\s+or\s+"), " | "),
    (re.compile(r"\|[A-Z_]+\|"), "str"),
    (re.compile(r"\bcallable\b"), "Callable[..., object]"),
    (re.compile(r"\bfunction\b"), "Callable[..., object]"),
    (re.compile(r"\bgenerator\b"), "Generator"),
    (re.compile(r"\bsequence\b"), "Sequence"),
    # Old typing module names -> modern builtins
    (re.compile(r"\bDict\b"), "dict"),
    (re.compile(r"\bList\b"), "list"),
    (re.compile(r"\bTuple\b"), "tuple"),
    (re.compile(r"\bSet\b(?!tings)"), "set"),
    (re.compile(r"\bFrozenSet\b"), "frozenset"),
)
# Parameterize bare generics and fix bracket artifacts.
_CLEAN_TYPE_GENERIC_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\[\d+\]"), ""),
    (re.compile(r"\s*\|\s*\d+\b"), ""),
    (re.compile(r"(\w)\[\]"), r"\1"),
    (re.compile(r"\bCallable\b(?!\[)"), "Callable[..., object]"),
    # Callable[[...], X] -> Callable[..., X] (consume trailing comma)
    (re.compile(r"Callable\[\[[^\]]*\.\.\.[^\]]*\],?\s*"), "Callable[..., "),
    (re.compile(r"\bdict\b(?!\[)"), "dict[str, object]"),
    (re.compile(r"\blist\b(?!\[)"), "list[object]"),
    (re.compile(r"\btuple\b(?!\[)"), "tuple[object, ...]"),
    (re.compile(r"\bset\b(?!\[)"), "set[object]"),
    (re.compile(r"\bfrozenset\b(?!\[)"), "frozenset[object]"),
    (re.compile(r"\bGenerator\b(?!\[)"), "Generator[object, None, None]"),
    (re.compile(r"\bSequence\b(?!\[)"), "Sequence[object]"),
    (re.compile(r"\bIterator\b(?!\[)"), "Iterator[object]"),
    (re.compile(r"\bIterable\b(?!\[)"), "Iterable[object]"),
    (re.compile(r"\bSequence\[(\w+),\s*(\w+)\]"), r"Sequence[tuple[\1, \2]]"),
)
# Whitespace/possessive cleanup before the bracket-balance precheck.
_CLEAN_TYPE_POST_PRECHECK_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\s+"), " "),
    (re.compile(r"'s\b"), ""),
)
# Final tidy-ups: articles, stray pipes, trailing prose after brackets.
_CLEAN_TYPE_POST_FINAL_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"\b(a|an|the)\s+", re.IGNORECASE), ""),
    (re.compile(r"\|\s*\|"), "|"),
    (re.compile(r"\|\s*$"), ""),
    (re.compile(r"^\s*\|"), ""),
    (re.compile(r"(\])\s*:.*"), r"\1"),
    (re.compile(r"(\])\s+\w.*"), r"\1"),
    (re.compile(r"(\w)\s*:\s+\w.*"), r"\1"),
    (re.compile(r"\breal\b"), "float"),
)
# Names that appear in docstrings but have no importable definition;
# they (and any attribute path on them) are replaced with object.
_UNDEFINED_TYPE_NAMES = {
    "numpy",
    "bpy_app_translations",
    "BLFImBufContext",
    "AnimateablePropertyP",
    "ModuleType",
    "Undefined",
    "capsule",
    "_translations_type",
    "_PropertyDeferred",
}
# Docstring-only type names mapped to real replacements (currently empty).
_UNDEFINED_TYPE_MAP: dict[str, str] = {}
# Undefined names that may appear inside generic arguments.
_UNDEFINED_GENERIC_TYPES = {"BMVertSkin"}
_RE_LITERAL_VALUES = re.compile(r"\bLiteral\[([^\]]+)\]")
_RE_STR_IN_VALUES = re.compile(r"str(?:ing)?\s+in\s+\[([^\]]+)\]", re.IGNORECASE)
_RE_QUOTED_LITERAL_VALUE = re.compile(r"""['"]([^'"]+)['"]""")
# Informal lowercase mathutils names -> fully-qualified types.
# The lookbehinds avoid re-qualifying names that are already dotted.
_INFORMAL_TYPE_RULES: tuple[RegexRule, ...] = (
    (re.compile(r"(?<!\.)(?<!\w)\bvector\b", re.IGNORECASE), "mathutils.Vector"),
    (re.compile(r"(?<!\.)(?<!\w)\bmatrix\b", re.IGNORECASE), "mathutils.Matrix"),
    (
        re.compile(r"(?<!\.)(?<!\w)\bquaternion\b", re.IGNORECASE),
        "mathutils.Quaternion",
    ),
    (re.compile(r"(?<!\.)(?<!\w)\beuler\b", re.IGNORECASE), "mathutils.Euler"),
    (re.compile(r"(?<!\.)(?<!\w)\bcolor\b", re.IGNORECASE), "mathutils.Color"),
)
def _apply_regex_rules(text: str, rules: tuple[RegexRule, ...]) -> str:
    """Apply an ordered sequence of regex substitutions."""
    current = text
    for rule in rules:
        pattern, replacement = rule
        current = pattern.sub(replacement, current)
    return current
def _normalize_collection_prose(type_str: str) -> str:
    """Normalize prose-style collection descriptions to generic type syntax.

    e.g. "sequence of floats" -> "Sequence[float]"-style rewrites; each
    pattern captures the element type as group 1.
    """
    # Backreference templates are equivalent to the previous lambda callbacks.
    type_str = _RE_SEQUENCE_CONTAINING.sub(r"Sequence[Sequence[\1]]", type_str)
    type_str = _RE_PLURAL_CONTAINING.sub(r"Sequence[\1]", type_str)
    type_str = _RE_SEQUENCE_OF.sub(r"Sequence[\1]", type_str)
    type_str = _RE_ITERABLE_OF.sub(r"Iterable[\1]", type_str)
    type_str = _RE_COLLECTION_OF.sub(r"Collection[\1]", type_str)
    type_str = _RE_SEQUENCE_OF_TUPLE.sub(r"Sequence[tuple[\1]]", type_str)
    type_str = _RE_ITERABLE_OF_TUPLE.sub(r"Iterable[tuple[\1]]", type_str)
    return type_str
def _normalize_literal_values(type_str: str) -> str:
    """Quote bare Literal values to valid Python string literals."""
    def _requote(match: re.Match[str]) -> str:
        quoted_items: list[str] = []
        for raw in match.group(1).split(","):
            item = raw.strip()
            # Leave already-quoted items untouched.
            quoted_items.append(item if item.startswith("'") else f"'{item}'")
        return f"Literal[{', '.join(quoted_items)}]"
    return _RE_LITERAL_VALUES.sub(_requote, type_str)
def _extract_string_enum_literal(type_str: str) -> str | None:
    """Extract `str in [...]` docstring enums into a Literal[...] annotation."""
    str_in = _RE_STR_IN_VALUES.match(type_str)
    if str_in is None:
        return None
    names = _RE_QUOTED_LITERAL_VALUE.findall(str_in.group(1))
    if not names:
        return None
    return "Literal[" + ", ".join(f"'{name}'" for name in names) + "]"
def _qualify_informal_types(type_str: str) -> str:
    """Qualify informal mathutils type names to fully-qualified types."""
    # Inline the rule application rather than delegating to _apply_regex_rules.
    for pattern, replacement in _INFORMAL_TYPE_RULES:
        type_str = pattern.sub(replacement, type_str)
    return type_str
def clean_type_str(type_str: str) -> str:
    """Clean up RST type annotations to plain Python type strings.

    Runs the input through an ordered pipeline of regex rule tables
    (RST markup removal, prose stripping, alias mapping, generic
    parameterization) and falls back to "object" whenever the result
    still looks like prose or is structurally malformed.
    """
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_RST_RULES)
    type_str = type_str.rstrip(":.,")
    # "str in ['A', 'B']" enums short-circuit the whole pipeline.
    literal_from_str_in = _extract_string_enum_literal(type_str)
    if literal_from_str_in is not None:
        return _normalize_literal_values(literal_from_str_in)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_PRE_PROSE_RULES)
    # Normalize comma-separated types to unions (outside brackets only)
    # "int, float" -> "int | float" but not "tuple[int, float]"
    # Skipped for Callable, whose argument lists legitimately use commas.
    if "Callable" not in type_str:
        type_str = _replace_commas_outside_brackets(type_str)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_PROSE_RULES)
    type_str = _normalize_collection_prose(type_str)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_PROSE_POST_COLLECTION_RULES)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_ALIAS_RULES)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_GENERIC_RULES)
    # Apply from innermost out, then handle nested brackets
    # (loop until a full pass makes no further change).
    prev = ""
    while prev != type_str:
        prev = type_str
        type_str = re.sub(r"\blist\[([^\[\]]+)\]", _fix_multi_arg_list, type_str)
        type_str = re.sub(r"\blist\[(.+)\]", _fix_multi_arg_list, type_str)
    type_str = _normalize_literal_values(type_str)
    type_str = _qualify_informal_types(type_str)
    # Replace types not available in Python 3.11
    type_str = type_str.replace("collections.abc.Buffer", "bytes")
    for undef in _UNDEFINED_TYPE_NAMES:
        type_str = re.sub(rf"\b{re.escape(undef)}\b(\.\w+)*", "object", type_str)
    # Map known unqualified Python stdlib types
    type_str = re.sub(r"\b[Mm]odule\b", "types.ModuleType", type_str)
    # Fix GPU types wrongly referenced as bpy.types.GPU* (Blender docstring bug in 4.x)
    type_str = re.sub(r"\bbpy\.types\.(GPU\w+)\b", r"gpu.types.\1", type_str)
    for bare, qualified in UNQUALIFIED_TYPES.items():
        # Lookbehinds prevent re-qualifying names that are already dotted.
        type_str = re.sub(rf"(?<!\.)(?<!\w)\b{bare}\b", qualified, type_str)
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_POST_PRECHECK_RULES).strip()
    # Strip trailing punctuation that leaked from docstrings
    type_str = type_str.rstrip(".,;:")
    # If the type contains hyphens (prose like "per-vector weights"), it's not a type
    if "-" in type_str and not re.match(r"^[\w.\[\], |>()\"']+$", type_str):
        return "object"
    # If brackets are unbalanced, the type is malformed - fall back to object
    if type_str.count("[") != type_str.count("]") or type_str.count(
        "("
    ) != type_str.count(")"):
        return "object"
    type_str = _apply_regex_rules(type_str, _CLEAN_TYPE_POST_FINAL_RULES)
    # Final fallback: check each union component for prose (spaces without brackets)
    # Also strip trailing punctuation from each component
    type_str = _normalize_union_fallback(type_str)
    # Standalone invalid types (snake_case variable names, bare lowercase prose words)
    if re.match(r"^[a-z][a-z0-9_]+$", type_str) and "_" in type_str:
        type_str = "object"
    if re.match(r"^[a-z]+$", type_str) and type_str not in (
        "bool",
        "int",
        "float",
        "str",
        "bytes",
        "object",
        "type",
    ):
        type_str = "object"
    # Map types that exist in docstrings but not in the Blender Python API
    for undef_name, replacement in _UNDEFINED_TYPE_MAP.items():
        type_str = re.sub(rf"\b{re.escape(undef_name)}\b", replacement, type_str)
    # Replace undefined types in generic arguments with object
    for undef in _UNDEFINED_GENERIC_TYPES:
        type_str = re.sub(rf"(?:\w+\.)*{re.escape(undef)}", "object", type_str)
    # Final balance check - catch any remaining malformed types
    # (the rules above can themselves drop a bracket).
    if type_str.count("[") != type_str.count("]") or type_str.count(
        "("
    ) != type_str.count(")"):
        return "object"
    return type_str
def sanitize_default(value: str) -> str:
    """Sanitize a repr'd default value to be valid Python syntax.

    Anything that is not a safe literal (object reprs, mutable or callable
    defaults, dotted expressions, bare identifiers, pseudo-tuples like "(1)")
    is replaced with "...".
    """
    needs_placeholder = (
        # repr of an object, e.g. "<built-in ...>"
        "<" in value
        # callable/mutable defaults are not valid literal defaults in stubs
        or value in ("set()", "frozenset()", "dict()", "list()")
        or value.startswith(("{", "["))
        # dotted expressions (e.g. sys.float_info.min) that aren't plain floats
        or ("." in value and not value.replace(".", "", 1).lstrip("-").isdigit())
        # bare identifiers that aren't Python literals (e.g. "data" from "data=data")
        or (value.isidentifier() and value not in ("True", "False", "None"))
        # parenthesized single value like (1) is not a valid tuple literal
        or re.match(r"^\(\d+\)$", value) is not None
    )
    return "..." if needs_placeholder else value
# Types that are C-level descriptors, not valid as type annotations.
# These are the type names CPython/Blender report for slots, builtin
# methods, and similar C-implemented attributes.
C_INTERNAL_TYPES = {
    "getset_descriptor",
    "member_descriptor",
    "method_descriptor",
    "wrapper_descriptor",
    "builtin_function_or_method",
    "_tuplegetter",
    "classmethod_descriptor",
    "_translations_type",
}
def clean_docstring(docstring: str) -> str:
    """Extract the descriptive part of a docstring, removing RST directives and markup.

    Stops at the first field directive (:arg/:type/:rtype/:return(s)),
    drops ".. directive::" blocks together with their indented bodies,
    and trims trailing blank lines.
    """
    if not docstring:
        return ""
    kept: list[str] = []
    in_directive = False  # currently inside a ".. foo::" block body
    for raw in docstring.split("\n"):
        text = raw.strip()
        # Everything from the first type-annotation directive on is metadata.
        if text.startswith((":arg ", ":type ", ":rtype:", ":return:", ":returns:")):
            break
        # A ".. directive::" opens a block we drop wholesale.
        if text.startswith(".. "):
            in_directive = True
            continue
        if in_directive:
            # Blank or indented lines belong to the directive body;
            # a flush-left non-empty line ends the block and is kept.
            if not text or raw[0].isspace():
                continue
            in_directive = False
        kept.append(raw)
    # Drop trailing blank lines.
    while kept and not kept[-1].strip():
        kept.pop()
    return "\n".join(kept)
def param_kind_str(kind: int) -> str:
    """Convert inspect parameter kind to a string.

    Raises ValueError for any value that is not one of the five
    inspect.Parameter kinds.
    """
    kind_names = {
        inspect.Parameter.POSITIONAL_ONLY: "POSITIONAL_ONLY",
        inspect.Parameter.POSITIONAL_OR_KEYWORD: "POSITIONAL_OR_KEYWORD",
        inspect.Parameter.VAR_POSITIONAL: "VAR_POSITIONAL",
        inspect.Parameter.KEYWORD_ONLY: "KEYWORD_ONLY",
        inspect.Parameter.VAR_KEYWORD: "VAR_KEYWORD",
    }
    name = kind_names.get(kind)
    if name is None:
        msg = f"Unknown parameter kind: {kind}"
        raise ValueError(msg)
    return name
# bpy.props param names whose type is always set[str] (string option enums).
# NOTE(review): not referenced in this visible chunk — presumably consumed
# by the bpy.props handling later in the file; verify before removing.
PROP_SET_PARAMS = {"options", "override", "tags", "search_options"}
def refine_types_by_context(
func_name: str,
param_types: dict[str, str],
return_type: str | None,
) -> tuple[dict[str, str], str | None]:
"""Refine imprecise types using function name context.
For example, BoolVectorProperty's 'default' param with bare 'Sequence'
can be refined to 'Sequence[bool]' from the function name.
"""
is_property_func = func_name.endswith("Property")
element_type_map: dict[str, str] = {
"Bool": "bool",
"Float": "float",
"Int": "int",
}
for prefix, element_type in element_type_map.items():
if func_name.startswith(prefix) and "Vector" in func_name:
for pname, ptype in param_types.items():
if pname == "default" and ptype in ("Sequence", "Sequence[object]"):
param_types[pname] = f"Sequence[{element_type}]"
# bpy.props *Property functions: all set types contain string enum values
if is_property_func:
for pname, ptype in param_types.items():
if "set[object]" in ptype:
param_types[pname] = ptype.replace("set[object]", "set[str]")
if return_type in ("Generator", "Generator[object, None, None]"):
return_type = "Generator[str, None, None]"
return param_types, return_type
def parse_rst_function_sig(
    docstring: str,
) -> dict[str, tuple[str | None, str]]:
    """Parse the '.. function:: name(args)' RST directive for defaults and kinds.
    Returns {param_name: (default_value_or_None, kind_str)} in signature order.
    Returns an empty dict when no RST signature directive is present.
    """
    result: dict[str, tuple[str | None, str]] = {}
    # Find the function signature, handling nested parens in defaults like set()
    match = re.search(r"\.\.\s+(?:function|method|class)::\s+\w+\(", docstring)
    if not match:
        return result
    # Extract content between outermost parens, respecting nesting
    start = match.end()
    depth = 1  # the opening paren was consumed by the search pattern
    i = start
    while i < len(docstring) and depth > 0:
        if docstring[i] == "(":
            depth += 1
        elif docstring[i] == ")":
            depth -= 1
        i += 1
    # i now points one past the closing paren; slice excludes it.
    sig_str = docstring[start : i - 1]
    # Strip RST optional parameter brackets:
    # "data[, position]" -> "data, position"
    # "[rows]" -> "rows" (all-optional)
    # These indicate optional params in RST, not Python generics.
    # Process from innermost outward to handle nested brackets like "a[, b[, c]]"
    while "[," in sig_str:
        sig_str = re.sub(r"\[,([^\[\]]*)\]", r",\1", sig_str)
    # Handle remaining RST optional brackets: "[param]" or "[param=default]"
    # Only strip brackets that wrap param-like content (identifiers, not types)
    while re.search(r"\[(?!['\"])\w+[^\[\]]*\]", sig_str):
        sig_str = re.sub(r"\[(\w+[^\[\]]*)\]", r"\1", sig_str)
    # Split the signature on top-level commas, preserving nested
    # (), {}, [] groups inside default values.
    parts: list[str] = []
    current: list[str] = []
    depth = 0
    for ch in sig_str:
        if ch in "({[":
            depth += 1
            current.append(ch)
        elif ch in ")}]":
            depth -= 1
            current.append(ch)
        elif ch == "," and depth == 0:
            parts.append("".join(current))
            current = []
        else:
            current.append(ch)
    if current:
        parts.append("".join(current))
    # Parameters start positional-or-keyword; "*" or "*args" flips
    # subsequent params to keyword-only.
    kind = "POSITIONAL_OR_KEYWORD"
    for part in parts:
        part = part.strip()
        if not part:
            continue
        if part == "/":
            # Positional-only separator: mark all preceding params as POSITIONAL_ONLY
            for pname in result:
                result[pname] = (result[pname][0], "POSITIONAL_ONLY")
            continue
        if part == "*":
            kind = "KEYWORD_ONLY"
            continue
        if part.startswith("**"):
            param_name = part.lstrip("*").split("=")[0].strip()
            result[param_name] = (None, "VAR_KEYWORD")
            continue
        if part.startswith("*"):
            param_name = part.lstrip("*").split("=")[0].strip()
            result[param_name] = (None, "VAR_POSITIONAL")
            kind = "KEYWORD_ONLY"
            continue
        if "=" in part:
            param_name, default = part.split("=", 1)
            result[param_name.strip()] = (sanitize_default(default.strip()), kind)
        else:
            result[part.strip()] = (None, kind)
    return result
# Bare mathutils types that Blender's C code accepts interchangeably with
# Sequence[float] via mathutils_array_parse. When a param is typed as one
# of these in a docstring, widen it to also accept Sequence[float].
# Both qualified and unqualified spellings are listed because docstrings
# use either form.
_MATHUTILS_ARRAY_TYPES = {
    "mathutils.Vector",
    "mathutils.Euler",
    "mathutils.Quaternion",
    "mathutils.Color",
    "Vector",
    "Euler",
    "Quaternion",
    "Color",
}
def _widen_mathutils_params(params: list[ParamData]) -> None:
    """Widen bare mathutils type params to also accept Sequence[float].

    Mutates the given param list in place.
    """
    for entry in params:
        declared = entry.get("type")
        # None/empty types are never in the set, so no separate guard needed.
        if declared in _MATHUTILS_ARRAY_TYPES:
            entry["type"] = f"{declared} | Sequence[float]"
def _annotation_to_type_str(ann: object) -> str:
"""Convert a Python annotation object to a clean type string for stubs."""
s = ann if isinstance(ann, str) else str(ann)
# Clean up internal module references
s = s.replace("_bpy_types.", "bpy.types.")
s = s.replace("bpy_types.", "bpy.types.")
# typing.Union[X, Y] -> X | Y
s = re.sub(r"\bUnion\[([^\]]+)\]", lambda m: " | ".join(m.group(1).split(", ")), s)
# typing.Optional[X] -> X | None
s = re.sub(r"\bOptional\[([^\]]+)\]", r"\1 | None", s)
# <class 'int'> -> int
s = re.sub(r"<class '([^']+)'>", r"\1", s)
s = s.replace("typing.", "")
# NoneType -> None
s = re.sub(r"\bNoneType\b", "None", s)
# Old typing aliases -> modern builtins
s = re.sub(r"\bDict\b", "dict", s)
s = re.sub(r"\bList\b", "list", s)
s = re.sub(r"\bTuple\b", "tuple", s)
s = re.sub(r"\bSet\b(?!tings)", "set", s)
s = re.sub(r"\bFrozenSet\b", "frozenset", s)
# Bare module type -> types.ModuleType
s = re.sub(r"\bmodule\b", "types.ModuleType", s)
# Parameterize bare generics (e.g., bare set -> set[object])
s = re.sub(r"\bdict\b(?!\[)", "dict[str, object]", s)
s = re.sub(r"\blist\b(?!\[)", "list[object]", s)
s = re.sub(r"\bset\b(?!\[)", "set[object]", s)
s = re.sub(r"\bfrozenset\b(?!\[)", "frozenset[object]", s)
s = re.sub(r"\btuple\b(?!\[)", "tuple[object, ...]", s)
# Qualify bare mathutils types (avoid double-qualifying already-qualified ones)
for mt in ("Vector", "Matrix", "Euler", "Quaternion", "Color"):
s = re.sub(rf"(?<!\.)(?<!\w)\b{mt}\b", f"mathutils.{mt}", s)
return s
def introspect_callable(func: Callable[..., object], name: str) -> FunctionData | None:
    """Introspect a callable (function or builtin) and return its metadata.

    Types are gathered from the docstring (``parse_docstring_types``, then
    refined by ``refine_types_by_context``) and merged with whatever
    ``inspect.signature`` exposes (names, defaults, kinds, annotations).
    C callables with no inspectable signature fall back to the RST
    ``.. function::`` directive parsed from the docstring.
    """
    docstring = inspect.getdoc(func) or ""
    param_types, return_type = parse_docstring_types(docstring)
    param_types, return_type = refine_types_by_context(name, param_types, return_type)
    try:
        sig = inspect.signature(func)
    # fmt: off
    except (ValueError, TypeError):
    # fmt: on
        # C extension without signature — build params from docstring :type:
        # and extract defaults/kinds from RST ".. function::" directive
        rst_sig = parse_rst_function_sig(docstring)
        params: list[ParamData] = []
        if rst_sig:
            # RST signature has the authoritative param names and order.
            # Match :type: info by name first, then positionally for mismatches.
            # Positional fallback only fires when ALL :type: names are mismatched
            # (i.e. the docstring uses different names than the RST signature).
            rst_names = set(rst_sig.keys())
            any_name_match = bool(rst_names & set(param_types.keys()))
            type_values = list(param_types.values())
            positional_idx = 0
            for rst_name, (default, kind) in rst_sig.items():
                param_type = param_types.get(rst_name)
                if param_type is None and not any_name_match and type_values:
                    # Positional fallback: all :type: names differ from RST names
                    if positional_idx < len(type_values):
                        param_type = type_values[positional_idx]
                        positional_idx += 1
                # A default of None implies the type should also admit None.
                if (
                    default == "None"
                    and param_type
                    and not re.search(r"\| None\b", param_type)
                ):
                    param_type = param_type + " | None"
                params.append(
                    {
                        "name": rst_name,
                        "type": param_type,
                        "default": default,
                        "kind": kind,
                    }
                )
        else:
            # No RST signature — use :type: directives only
            for param_name, param_type in param_types.items():
                params.append(
                    {
                        "name": param_name,
                        "type": param_type,
                        "default": None,
                        "kind": "POSITIONAL_OR_KEYWORD",
                    }
                )
        _widen_mathutils_params(params)
        return {
            "name": name,
            "doc": clean_docstring(docstring),
            "params": params,
            "return_type": return_type,
            "is_classmethod": False,
        }
    # Build positional fallback for param name mismatches:
    # C functions often use generic names like "object" in __text_signature__
    # while docstrings use descriptive names like "string", "cls", etc.
    doc_param_list = list(param_types.items())
    sig_param_list = [(n, p) for n, p in sig.parameters.items() if n != "self"]
    params = []
    for i, (pname, param) in enumerate(sig_param_list):
        default: str | None = None
        if param.default is not inspect.Parameter.empty:
            default = sanitize_default(repr(param.default))
        type_str = param_types.get(pname)
        actual_name = pname
        # Positional fallback: use docstring name + type when sig name doesn't match
        if type_str is None and i < len(doc_param_list):
            doc_name, doc_type = doc_param_list[i]
            if doc_name not in sig.parameters:
                type_str = doc_type
                actual_name = doc_name
        # Fall back to signature annotations (Python functions with type hints)
        if type_str is None and param.annotation is not inspect.Parameter.empty:
            type_str = _annotation_to_type_str(param.annotation)
        # A default of None implies the type should also admit None.
        if default == "None" and type_str and not re.search(r"\| None\b", type_str):
            type_str = type_str + " | None"
        params.append(
            {
                "name": actual_name,
                "type": type_str,
                "default": default,
                "kind": param_kind_str(param.kind),
            }
        )
    # Fall back to signature return annotation
    if return_type is None and sig.return_annotation is not inspect.Signature.empty:
        return_type = _annotation_to_type_str(sig.return_annotation)
    _widen_mathutils_params(params)
    return {
        "name": name,
        "doc": clean_docstring(docstring),
        "params": params,
        "return_type": return_type,
        "is_classmethod": False,
    }
# Runtime type names that must be rewritten or qualified in stub output.
RUNTIME_TYPE_QUALIFICATIONS: dict[str, str] = dict(
    Context="bpy.types.Context",
    BlendData="bpy.types.BlendData",
    bpy_app_translations="object",
    dict="dict[str, object]",
    tuple="tuple[object, ...]",
    OrderedDict="collections.OrderedDict[str, object]",
    Callable="Callable[..., object]",
    ShaderWrapper="object",
)
def python_type_name(obj: object, var_name: str = "") -> str:
    """Get a reasonable type annotation string for a Python object.

    ``var_name`` is the name the value is bound to; when the type's name
    equals the variable name the result degrades to "object" to avoid a
    self-referential annotation.
    """
    type_name = type(obj).__name__
    if type_name in C_INTERNAL_TYPES:
        return "object"
    if type_name == var_name:
        return "object"
    if type_name in RUNTIME_TYPE_QUALIFICATIONS:
        return RUNTIME_TYPE_QUALIFICATIONS[type_name]
    if isinstance(obj, type):
        return f"type[{obj.__name__}]"
    # Parameterize containers by inspecting their runtime contents.
    # Cast before list() to avoid basedpyright inferring set[Unknown]/list[Unknown]
    # from isinstance narrowing of `object`.
    if type_name in ("set", "frozenset", "list"):
        from collections.abc import Iterable
        contents = list(cast(Iterable[object], obj))
        if contents:
            elem_type = type(contents[0]).__name__
            if elem_type in C_INTERNAL_TYPES:
                elem_type = "object"
            elif elem_type in RUNTIME_TYPE_QUALIFICATIONS:
                elem_type = RUNTIME_TYPE_QUALIFICATIONS[elem_type]
            elif isinstance(obj, (set, frozenset)):
                # NOTE(review): the sampled element type is discarded here —
                # every set/frozenset becomes set[str]/frozenset[str].
                # Presumably deliberate (Blender sets hold enum strings);
                # confirm for sets of non-strings.
                elem_type = "str"
            else:
                # NOTE(review): likewise, list elements not handled above fall
                # back to "object" even though a concrete element type was
                # sampled — confirm this conservatism is intentional.
                elem_type = "object"
            return f"{type_name}[{elem_type}]"
    # Empty containers and all other values: plain runtime type name.
    return type_name
def _parse_class_constructor(class_doc: str, cls: type) -> FunctionData | None:
    """Parse a ``.. class:: ClassName(params)`` RST directive into an __init__ method.

    C extension types expose constructor info in their class docstring rather
    than via an inspectable ``__init__``. Returns None if no constructor
    directive is found or if the constructor takes no parameters.
    """
    # Prefer a genuine, inspectable __init__: if the class declares one with
    # real (non-variadic, non-self) parameters, skip docstring parsing.
    own_init = cls.__dict__.get("__init__")
    if own_init is not None:
        variadic = (
            inspect.Parameter.VAR_POSITIONAL,
            inspect.Parameter.VAR_KEYWORD,
        )
        try:
            sig = inspect.signature(own_init)
        except (ValueError, TypeError):
            pass
        else:
            has_real_params = any(
                p.name != "self" and p.kind not in variadic
                for p in sig.parameters.values()
            )
            if has_real_params:
                return None
    # Without a ".. class:: Name(args)" directive there is nothing to parse.
    if re.search(r"\.\.\s+class::", class_doc) is None:
        return None
    rst_sig = parse_rst_function_sig(class_doc)
    if not rst_sig:
        return None
    doc_types, _ = parse_docstring_types(class_doc)
    params: list[ParamData] = []
    for pname, (pdefault, pkind) in rst_sig.items():
        ptype = doc_types.get(pname)
        # A default of None implies the annotation should also admit None.
        if pdefault == "None" and ptype and re.search(r"\| None\b", ptype) is None:
            ptype = ptype + " | None"
        params.append(
            {
                "name": pname,
                "type": ptype,
                "default": pdefault,
                "kind": pkind,
            }
        )
    if not params:
        return None
    _widen_mathutils_params(params)
    return {
        "name": "__init__",
        "doc": "",
        "params": params,
        "return_type": "None",
        "is_classmethod": False,
    }
# Dunders worth exposing in stubs — these affect how the type is used
# in type checking (subscript, iteration, arithmetic, comparison, etc.)
_USEFUL_DUNDERS = {
"__getitem__",
"__setitem__",
"__delitem__",
"__len__",
"__iter__",
"__contains__",
"__add__",
"__radd__",
"__iadd__",
"__sub__",
"__rsub__",
"__isub__",
"__mul__",
"__rmul__",
"__imul__",
"__matmul__",
"__rmatmul__",
"__imatmul__",
"__truediv__",
"__rtruediv__",
"__itruediv__",
"__neg__",
"__pos__",
"__invert__",
"__eq__",
"__ne__",
"__lt__",
"__le__",
"__gt__",
"__ge__",
"__enter__",
"__exit__",
}
_TYPING_REEXPORT_NAMES = {
"Callable",
"Collection",
"Generator",
"Iterable",
"Iterator",
"Mapping",
"MutableMapping",
"MutableSequence",
"MutableSet",
"Sequence",
"Set",
"FrozenSet",
"Dict",
"List",
"Tuple",
"Type",
"Optional",
"Union",
}
_SAFE_PROBE_MODULES = {"mathutils"}
_SKIP_HIDDEN_PROP_TYPES = frozenset(
("int", "float", "str", "bool", "NoneType", "list", "tuple", "dict", "set")
)
def _type_name(obj: object) -> str:
"""Return the type name of an object, normalizing NoneType to None."""
name = type(obj).__name__
return "None" if name == "NoneType" else name
def _fix_dunder_signatures_with_instance(
    instance: object, methods: list[FunctionData]
) -> None:
    """Fix dunder signatures using an existing instance (for non-constructible types)."""
    cls = type(instance)
    _fix_dunder_signatures(cls, methods, instance=instance)
def _fix_dunder_signatures(
    cls: type[object],
    methods: list[FunctionData],
    instance: object | None = None,
) -> None:
    """Fix dunder method signatures by runtime probing.

    C wrapper descriptors (``__getitem__``, ``__add__``, etc.) have no type
    info in their docstrings. We create a default instance of the class and
    call the dunders to discover actual return types and refine parameter types.

    Mutates ``methods`` in place. Two passes: statically-known protocol
    signatures first, then best-effort runtime probing (each probe is wrapped
    in ``except Exception`` so a failing dunder leaves its entry untouched).
    """
    # Fixed return types that don't need runtime probing
    _FIXED_RETURNS: dict[str, str] = {
        "__len__": "int",
        "__contains__": "bool",
        "__eq__": "bool",
        "__ne__": "bool",
        "__lt__": "bool",
        "__le__": "bool",
        "__gt__": "bool",
        "__ge__": "bool",
        "__delitem__": "None",
        "__setitem__": "None",
        "__exit__": "None",
    }
    # Pass 1: protocol-defined returns and parameter lists.
    for method in methods:
        fixed = _FIXED_RETURNS.get(method["name"])
        if fixed is not None:
            method["return_type"] = fixed
        if method["name"] == "__len__":
            method["params"] = []
        if method["name"] == "__enter__":
            method["return_type"] = "Self"
            method["params"] = []
        if method["name"] == "__exit__":
            method["params"] = [
                {
                    "name": "exc_type",
                    "type": "type[BaseException] | None",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                },
                {
                    "name": "exc_val",
                    "type": "BaseException | None",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                },
                {
                    "name": "exc_tb",
                    "type": "object",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                },
            ]
        if method["name"] == "__delitem__":
            method["params"] = [
                {
                    "name": "key",
                    "type": "int | slice",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ]
    # Pass 2 needs a live object; default-construct one if the caller did not
    # supply an instance, and give up quietly when that is not possible.
    if instance is None:
        try:
            instance = cls()
        except Exception:
            return
    for method in methods:
        name = method["name"]
        # __getitem__: probe with int index to discover element type
        if name == "__getitem__":
            try:
                getitem = getattr(instance, "__getitem__")
                result = getitem(0)
                rtype = _type_name(result)
                method["return_type"] = rtype
                method["params"] = [
                    {
                        "name": "key",
                        "type": "int | slice",
                        "default": None,
                        "kind": "POSITIONAL_OR_KEYWORD",
                    }
                ]
            except Exception:
                pass
            continue
        # __iter__: return Iterator[element_type] based on __getitem__
        if name == "__iter__":
            try:
                getitem = getattr(instance, "__getitem__")
                result = getitem(0)
                etype = _type_name(result)
                method["return_type"] = f"Iterator[{etype}]"
                method["params"] = []
            except Exception:
                pass
            continue
        # __setitem__: refine value type from __getitem__ return type
        if name == "__setitem__":
            try:
                getitem = getattr(instance, "__getitem__")
                result = getitem(0)
                vtype = _type_name(result)
                method["params"] = [
                    {
                        "name": "key",
                        "type": "int | slice",
                        "default": None,
                        "kind": "POSITIONAL_OR_KEYWORD",
                    },
                    {
                        "name": "value",
                        "type": f"{vtype} | Sequence[{vtype}] | {cls.__name__}",
                        "default": None,
                        "kind": "POSITIONAL_OR_KEYWORD",
                    },
                ]
            except Exception:
                pass
            continue
        # __neg__, __pos__, __invert__: unary ops return same type
        if name in ("__neg__", "__pos__", "__invert__"):
            try:
                op = getattr(instance, name)
                result = op()
                method["return_type"] = _type_name(result)
                method["params"] = []
            except Exception:
                pass
            continue
        # Binary arithmetic: probe with same-type operand to get return type.
        # The parameter type stays as object (could be Self, float, etc.)
        if name in (
            "__add__",
            "__radd__",
            "__iadd__",
            "__sub__",
            "__rsub__",
            "__isub__",
            "__mul__",
            "__rmul__",
            "__imul__",
            "__matmul__",
            "__rmatmul__",
            "__imatmul__",
            "__truediv__",
            "__rtruediv__",
            "__itruediv__",
        ):
            try:
                op = getattr(instance, name)
                result = op(instance)
                method["return_type"] = _type_name(result)
            except Exception:
                # If same-type fails (e.g. Vector / Vector), try with float
                try:
                    op = getattr(instance, name)
                    result = op(1.0)
                    method["return_type"] = _type_name(result)
                except Exception:
                    pass
            continue
def _is_getset_writable(cls: type[object], attr_name: str) -> bool:
"""Test if a C getset_descriptor property is writable.
We check the docstring for "(read-only)" or "(readonly)" hints.
If no hint is found, assume writable (most C getset_descriptors are).
"""
descriptor = cls.__dict__.get(attr_name)
if descriptor is None:
return False
doc = getattr(descriptor, "__doc__", "") or ""
if "read-only" in doc.lower() or "readonly" in doc.lower():
return False
return True
def _resolve_base_name(cls: type[object], module_name: str) -> str | None:
"""Resolve a class base name only if it is part of the public API."""
bases = [
b for b in cls.__mro__[1:] if b is not object and b.__module__ != "builtins"
]
if not bases:
return None
base_cls = bases[0]
parent_mod = importlib.import_module(base_cls.__module__)
public = getattr(parent_mod, "__all__", None)
is_public = public is None or base_cls.__name__ in public
if not is_public:
return None
if base_cls.__module__ == module_name:
return base_cls.__name__
return f"{base_cls.__module__}.{base_cls.__name__}"
def _iter_declared_class_members(
    cls: type[object],
) -> list[tuple[str, object, object]]:
    """List public members declared directly on the class (not inherited).

    Returns (name, resolved attribute, raw __dict__ entry) triples; the raw
    entry preserves descriptor wrappers such as classmethod/staticmethod.
    """
    members: list[tuple[str, object, object]] = []
    for name in sorted(dir(cls)):
        # Skip private names except the dunders we deliberately expose.
        if name.startswith("_") and name not in _USEFUL_DUNDERS:
            continue
        try:
            resolved = getattr(cls, name)
        except AttributeError:
            continue
        # Inherited members (present in dir() but not __dict__) are skipped.
        if name in cls.__dict__:
            members.append((name, resolved, cls.__dict__[name]))
    return members
def _append_callable_method(
    methods: list[FunctionData],
    obj: object,
    name: str,
    is_classmethod: bool = False,
) -> None:
    """Introspect ``obj`` and append its metadata to ``methods`` when possible."""
    if not callable(obj):
        return
    introspected = introspect_callable(obj, name)
    if not introspected:
        return
    if is_classmethod:
        introspected["is_classmethod"] = True
    methods.append(introspected)
def _property_data_from_member(
    cls: type[object],
    name: str,
    raw: object,
) -> PropertyData:
    """Build PropertyData from a property/getset descriptor class member."""
    doc = inspect.getdoc(raw) or ""
    _, doc_rtype = parse_docstring_types(doc)
    prop_type = doc_rtype if doc_rtype else "object"
    # Python properties expose writability via fset; C getset descriptors
    # only hint at it through their docstring.
    writable = (
        raw.fset is not None
        if isinstance(raw, property)
        else _is_getset_writable(cls, name)
    )
    data: PropertyData = {
        "name": name,
        "type": prop_type,
        "is_readonly": not writable,
        "description": doc,
    }
    # Writable mathutils properties accept plain float sequences on assignment.
    if writable and prop_type in _MATHUTILS_ARRAY_TYPES:
        data["setter_type"] = f"{prop_type} | Sequence[float]"
    return data
def _introspect_declared_class_members(
    cls: type[object],
) -> tuple[list[PropertyData], list[FunctionData]]:
    """Introspect properties and methods declared directly on the class."""
    properties: list[PropertyData] = []
    methods: list[FunctionData] = []
    for name, obj, raw in _iter_declared_class_members(cls):
        raw_kind = type(raw).__name__
        # Branch order matters: classmethod wrappers are themselves callable,
        # so they must be recognized before the generic callable branch.
        if isinstance(raw, classmethod) or raw_kind == "classmethod_descriptor":
            _append_callable_method(methods, obj, name, is_classmethod=True)
        elif isinstance(raw, staticmethod) or callable(obj):
            _append_callable_method(methods, obj, name)
        elif isinstance(raw, property) or raw_kind == "getset_descriptor":
            properties.append(_property_data_from_member(cls, name, raw))
        else:
            # Plain class attribute: record it as a read-only property.
            properties.append(
                {
                    "name": name,
                    "type": python_type_name(obj, name),
                    "is_readonly": True,
                    "description": "",
                }
            )
    return properties, methods
def _insert_constructor_from_doc(
    cls: type[object],
    class_doc: str,
    methods: list[FunctionData],
) -> None:
    """Prepend an ``__init__`` parsed from the class RST doc, when available."""
    parsed = _parse_class_constructor(class_doc, cls)
    if parsed:
        methods.insert(0, parsed)
def _refine_and_synthesize_dunders(
    cls: type[object], methods: list[FunctionData]
) -> None:
    """Refine dunder signatures and synthesize missing protocol methods.

    Probes the class to fix up C dunder signatures, then adds an ``__iter__``
    derived from ``__getitem__`` and a ``__buffer__`` stub when the runtime
    type supports them without declaring them.
    """
    probeable = [m for m in methods if m["name"] in _USEFUL_DUNDERS]
    if probeable:
        _fix_dunder_signatures(cls, probeable)
    present = {m["name"] for m in methods}
    if "__getitem__" in present and "__iter__" not in present:
        # Sequence-style iteration: element type mirrors __getitem__.
        getitem = next(m for m in methods if m["name"] == "__getitem__")
        elem = getitem["return_type"] or "object"
        methods.append(
            {
                "name": "__iter__",
                "doc": "",
                "params": [],
                "return_type": f"Iterator[{elem}]",
                "is_classmethod": False,
            }
        )
    if "__buffer__" not in present and hasattr(cls, "__buffer__"):
        flags_param: ParamData = {
            "name": "flags",
            "type": "int",
            "default": None,
            "kind": "POSITIONAL_ONLY",
        }
        methods.append(
            {
                "name": "__buffer__",
                "doc": "",
                "params": [flags_param],
                "return_type": "memoryview",
                "is_classmethod": False,
            }
        )
def _collect_public_module_names(module: ModuleType, module_name: str) -> list[str]:
    """Collect public module members, extending __all__ with local callables/types."""
    declared = getattr(module, "__all__", None)
    if declared is None:
        # No __all__: every non-underscore attribute counts as public.
        return [n for n in dir(module) if not n.startswith("_")]
    names = set(cast(list[str] | tuple[str, ...], declared))
    for attr in dir(module):
        if attr.startswith("_") or attr in names:
            continue
        value = getattr(module, attr, None)
        if value is None:
            continue
        defined_here = getattr(value, "__module__", None) == module_name
        if defined_here and (callable(value) or isinstance(value, type)):
            # Locally defined callable or class missing from __all__.
            names.add(attr)
        elif hasattr(value, "__origin__") and attr not in _TYPING_REEXPORT_NAMES:
            # Parameterized generic alias worth exporting.
            names.add(attr)
    return sorted(names)
def _is_type_alias_object(obj: object) -> bool:
"""Return whether an object should be emitted as a type alias variable."""
return hasattr(obj, "__origin__") or (
hasattr(obj, "__module__") and getattr(obj, "__module__", "") == "typing"
)
def _normalize_type_alias_repr(obj: object) -> str:
"""Normalize typing-based alias repr to modern built-in generic forms."""
type_repr = str(obj).replace("typing.", "")
type_repr = re.sub(r"\bTuple\b", "tuple", type_repr)
type_repr = re.sub(r"\bList\b", "list", type_repr)
type_repr = re.sub(r"\bDict\b", "dict", type_repr)
type_repr = re.sub(r"\bSet\b", "set", type_repr)
type_repr = re.sub(r"\bFrozenSet\b", "frozenset", type_repr)
return type_repr
def _classify_module_member(
    module_name: str,
    name: str,
    obj: object,
    functions: list[FunctionData],
    variables: list[VariableData],
    structs: list[StructData],
) -> None:
    """Classify and append a module member into functions/variables/structs."""
    if isinstance(obj, ModuleType):
        # Submodules are introspected separately.
        return
    if isinstance(obj, type):
        structs.append(introspect_class(obj, module_name))
    elif _is_type_alias_object(obj):
        variables.append(
            {
                "name": name,
                "type": "TypeAlias",
                "value": _normalize_type_alias_repr(obj),
            }
        )
    elif callable(obj):
        introspected = introspect_callable(obj, name)
        if introspected:
            functions.append(introspected)
    else:
        # Everything else becomes a plain module-level variable.
        variables.append(
            {
                "name": name,
                "type": python_type_name(obj, name),
                "value": repr(obj),
            }
        )
def _probe_hidden_property_structs(
    module: ModuleType,
    module_name: str,
    structs: list[StructData],
) -> None:
    """Discover hidden C types reachable through runtime property values.

    For modules in _SAFE_PROBE_MODULES, instantiate each exported struct,
    read the properties whose type could not be determined ("object"), and
    when a value's class is neither a builtin nor an already-known struct,
    introspect that class and append it to ``structs``. Mutates ``structs``
    and the probed property entries in place.
    """
    if module_name not in _SAFE_PROBE_MODULES:
        return
    import builtins as _builtins_mod
    known_struct_names = {s["name"] for s in structs}
    # Iterate over a snapshot: hidden structs are appended while looping.
    for struct in list(structs):
        cls = getattr(module, struct["name"], None)
        if cls is None or not isinstance(cls, type):
            continue
        try:
            instance = cls()
        except Exception:
            # Not default-constructible — cannot probe this struct.
            continue
        for prop in struct["properties"]:
            if prop["type"] != "object":
                continue
            try:
                val = getattr(instance, prop["name"])
            except Exception:
                continue
            val_cls = val.__class__
            val_name = val_cls.__name__
            # Builtins and plain value types never need a synthesized struct.
            if hasattr(_builtins_mod, val_name) or val_name in _SKIP_HIDDEN_PROP_TYPES:
                continue
            if val_name not in known_struct_names:
                hidden_struct = introspect_class(val_cls, module_name)
                dunder_methods = [
                    m for m in hidden_struct["methods"] if m["name"] in _USEFUL_DUNDERS
                ]
                if dunder_methods:
                    # Use the live value: hidden C types may not be constructible.
                    _fix_dunder_signatures_with_instance(val, dunder_methods)
                structs.append(hidden_struct)
                known_struct_names.add(val_name)
            # Retype the property with the concrete runtime class name.
            prop["type"] = val_name
def introspect_class(cls: type[object], module_name: str) -> StructData:
    """Introspect a class (C extension or Python) and return StructData."""
    doc = inspect.getdoc(cls) or ""
    properties, methods = _introspect_declared_class_members(cls)
    _insert_constructor_from_doc(cls, doc, methods)
    _refine_and_synthesize_dunders(cls, methods)
    # __eq__/__ne__ always shadow object's implementations, so the stub
    # generator must mark them as overrides.
    for method in methods:
        if method["name"] in ("__eq__", "__ne__"):
            method["is_override"] = True
    return {
        "name": cls.__name__,
        "doc": doc,
        "base": _resolve_base_name(cls, module_name),
        "properties": properties,
        "methods": methods,
    }
def infer_getter_return_types(functions: list[FunctionData]) -> None:
    """Infer return types for *_get functions from matching *_set parameters.

    Many Blender modules (e.g. gpu.state) follow a pattern where ``foo_set(value)``
    and ``foo_get()`` are paired. When the getter has no return type but the setter
    has a typed parameter, the getter's return type is inferred from it.
    """
    suffix_len = len("_set")  # same length as "_get"
    # Map "foo" -> parameter type of foo_set(value), for single-arg setters.
    setter_types: dict[str, str] = {}
    for func in functions:
        fname = func["name"]
        if not fname.endswith("_set") or len(func["params"]) != 1:
            continue
        ptype = func["params"][0].get("type")
        if ptype:
            setter_types[fname[:-suffix_len]] = ptype
    for func in functions:
        fname = func["name"]
        if not fname.endswith("_get") or func["return_type"] is not None:
            continue
        stem = fname[:-suffix_len]
        if stem in setter_types:
            func["return_type"] = setter_types[stem]
def _build_ops_fallback_module(submodule_names: list[str]) -> ModuleData:
    """Build a safe fallback bpy.ops module when no operator instances are found."""
    # Each submodule becomes an opaque placeholder variable.
    placeholders: list[VariableData] = [
        {"name": sub, "type": "object", "value": "..."} for sub in submodule_names
    ]
    return {
        "module": "bpy.ops",
        "doc": "Blender operator access.",
        "functions": [],
        "variables": placeholders,
        "structs": [],
    }
def _find_sample_operator(ops_mod: object, submodule_names: list[str]) -> object | None:
"""Find one callable bpy.ops operator instance for structural introspection."""
for sub_name in submodule_names:
try:
sub_mod = getattr(ops_mod, sub_name)
except Exception:
continue
for op_name in sorted(n for n in dir(sub_mod) if not n.startswith("_")):
try:
op_candidate: object = getattr(sub_mod, op_name)
except Exception:
continue
if callable(op_candidate):
return op_candidate
return None
def _apply_ops_method_return_fixups(op_struct: StructData, operator: object) -> None:
    """Fill missing operator wrapper return types by probing a live operator."""
    discovered: dict[str, str] = {}
    for method in op_struct["methods"]:
        if method["return_type"] is not None:
            continue
        candidate = getattr(operator, method["name"], None)
        if not callable(candidate):
            continue
        try:
            discovered[method["name"]] = _type_name(candidate())
        except Exception:
            # Probing is best-effort; some methods need args or Blender context.
            pass
    for method in op_struct["methods"]:
        if method["name"] in discovered:
            method["return_type"] = discovered[method["name"]]
def _apply_ops_property_and_method_fixups(op_struct: StructData) -> None:
    """Apply known manual type fixups for bpy.ops wrapper metadata."""
    # bl_options holds enum strings; get_rna_type yields an RNA struct.
    for prop in op_struct["properties"]:
        if prop["name"] == "bl_options":
            prop["type"] = "set[str]"
    for method in op_struct["methods"]:
        if method["name"] == "get_rna_type":
            method["return_type"] = "bpy.types.Struct"
_RAW_RNA_TYPE_MAP: dict[str, str] = {
"BOOLEAN": "bool",
"INT": "int",
"FLOAT": "float",
"STRING": "str",
"ENUM": "str",
}
_VECTOR_SUBTYPES = {
"TRANSLATION",
"DIRECTION",
"VELOCITY",
"ACCELERATION",
"XYZ",
"XYZ_LENGTH",
}
def _raw_rna_prop_to_type(prop: object) -> str:
"""Convert a raw RNA Property to a type string (for operator params)."""
ptype: str = getattr(prop, "type", "")
is_array: bool = getattr(prop, "is_array", False)
array_length: int = getattr(prop, "array_length", 0)
subtype: str = getattr(prop, "subtype", "NONE")
if ptype == "ENUM":
items = list(getattr(prop, "enum_items", []))
if items:
values = [str(getattr(i, "identifier", "")) for i in items]
if values:
quoted = ", ".join(f'"{v}"' for v in values)
return f"Literal[{quoted}]"
return "str"
if ptype == "POINTER":
fixed = getattr(prop, "fixed_type", None)
if fixed is not None:
return f"bpy.types.{getattr(fixed, 'identifier', 'object')} | None"
return "object | None"
if ptype == "COLLECTION":
return "object"
if ptype in ("FLOAT", "INT", "BOOLEAN") and is_array:
base = _RAW_RNA_TYPE_MAP.get(ptype, "object")
if ptype == "FLOAT" and subtype in _VECTOR_SUBTYPES:
return "mathutils.Vector | collections.abc.Sequence[float]"
if ptype == "FLOAT" and subtype == "EULER":
return "mathutils.Euler | collections.abc.Sequence[float]"
if (
ptype == "FLOAT"
and subtype in ("COLOR", "COLOR_GAMMA")
and array_length == 3
):
return "mathutils.Color | collections.abc.Sequence[float]"
if ptype == "FLOAT" and subtype == "MATRIX":
return "mathutils.Matrix | collections.abc.Sequence[float]"
return f"collections.abc.Sequence[{base}]"
return _RAW_RNA_TYPE_MAP.get(ptype, "object")
def _rna_prop_default(prop: object) -> str:
"""Extract the default value from an RNA property as a string for stubs."""
ptype: str = getattr(prop, "type", "")
is_array: bool = getattr(prop, "is_array", False)
try:
if ptype == "ENUM":
is_flag = getattr(prop, "is_enum_flag", False)
if is_flag:
# Enum flags are sets — use ellipsis since the type is Literal, not set
return "..."
val = str(getattr(prop, "default", ""))
# If the default is not in the enum items, use ellipsis
items = {
str(getattr(i, "identifier", ""))
for i in getattr(prop, "enum_items", [])
}
if val and val in items:
return repr(val)
return "..."
if is_array:
arr = getattr(prop, "default_array", None)
if arr is not None:
return repr(list(arr))
return "..."
val = getattr(prop, "default", None)
if val is None:
return "None"
return repr(val)
except Exception:
return "..."
def _introspect_operator(op: object) -> FunctionData | None:
    """Introspect a single bpy.ops operator and return its typed signature.

    Returns None when the operator exposes no RNA type or python id-name.
    """
    get_rna = getattr(op, "get_rna_type", None)
    if not callable(get_rna):
        return None
    try:
        rna = get_rna()
    except Exception:
        return None
    idname_fn = getattr(op, "idname_py", None)
    if idname_fn is None:
        return None
    try:
        py_name: str = idname_fn()
    except Exception:
        return None
    # "mesh.primitive_cube_add" -> "primitive_cube_add"
    func_name = py_name.rsplit(".", 1)[-1]
    doc: str = getattr(rna, "description", "") or ""
    # All operators accept an optional execution context as the first positional arg
    params: list[ParamData] = [
        {
            "name": "execution_context",
            "type": "Literal['INVOKE_DEFAULT', 'INVOKE_REGION_WIN', 'INVOKE_REGION_CHANNELS', 'INVOKE_REGION_PREVIEW', 'INVOKE_AREA', 'INVOKE_SCREEN', 'EXEC_DEFAULT', 'EXEC_REGION_WIN', 'EXEC_REGION_CHANNELS', 'EXEC_REGION_PREVIEW', 'EXEC_AREA', 'EXEC_SCREEN'] | None",
            "default": "None",
            "kind": "POSITIONAL_ONLY",
        },
    ]
    for rna_prop in getattr(rna, "properties", []):
        identifier: str = getattr(rna_prop, "identifier", "")
        if identifier == "rna_type":
            # Every RNA struct carries rna_type; it is not an operator argument.
            continue
        params.append(
            {
                "name": identifier,
                "type": _raw_rna_prop_to_type(rna_prop),
                "default": _rna_prop_default(rna_prop),
                "kind": "KEYWORD_ONLY",
            }
        )
    return {
        "name": func_name,
        "doc": doc,
        "params": params,
        "return_type": "set[str]",
        "is_classmethod": False,
    }
def _introspect_ops_submodule(
    ops_mod: object, sub_name: str, op_base_name: str
) -> tuple[StructData, list[StructData]]:
    """Introspect an ops submodule.

    Returns the module-level struct (_OpsModule_*) and a list of per-operator
    structs that inherit from the operator base class (e.g. BPyOpFunction).
    Each operator is a class with a typed __call__ so that both
    ``bpy.ops.mesh.subdivide(number_cuts=3)`` and
    ``bpy.ops.mesh.subdivide.poll()`` type-check correctly.
    """
    submodule = getattr(ops_mod, sub_name)
    op_structs: list[StructData] = []
    op_props: list[PropertyData] = []
    for op_name in sorted(dir(submodule)):
        if op_name.startswith("_"):
            continue
        operator = getattr(submodule, op_name, None)
        if operator is None:
            continue
        signature = _introspect_operator(operator)
        if signature is None:
            continue
        # One class per operator, deriving from the shared wrapper base so
        # methods like poll()/idname() are inherited.
        op_class_name = f"_Op_{sub_name}_{op_name}"
        op_structs.append(
            {
                "name": op_class_name,
                "doc": signature["doc"],
                "base": op_base_name,
                "properties": [],
                "methods": [
                    {
                        "name": "__call__",
                        "doc": signature["doc"],
                        "params": signature["params"],
                        "return_type": "set[str]",
                        "is_classmethod": False,
                    }
                ],
            }
        )
        op_props.append(
            {
                "name": op_name,
                "type": op_class_name,
                "is_readonly": False,
                "description": signature["doc"],
            }
        )
    module_struct: StructData = {
        "name": f"_OpsModule_{sub_name}",
        "doc": f"Operators in bpy.ops.{sub_name}.",
        "base": None,
        "properties": op_props,
        "methods": [],
    }
    return module_struct, op_structs
def introspect_ops_module() -> ModuleData:
    """Introspect bpy.ops, including the operator proxy classes.

    bpy.ops submodules (e.g. bpy.ops.mesh) are real Python module objects,
    but individual operators (e.g. bpy.ops.mesh.primitive_cube_add) are
    instances of _BPyOpsSubModOp — a C class with methods like poll(),
    idname(), get_rna_type(), and bl_options.

    We introspect _BPyOpsSubModOp from a live instance and fix up return
    types that C methods don't expose via docstrings. For the submodule
    level, we create a synthetic _OpsModule class with __getattr__ since
    the actual type is just Python's builtin module.
    """
    bpy = importlib.import_module("bpy")
    # Get a real operator instance to discover the proxy type
    ops_mod = getattr(bpy, "ops")
    submodule_names = sorted(n for n in dir(ops_mod) if not n.startswith("_"))
    sample_op = _find_sample_operator(ops_mod, submodule_names)
    # Some stripped-down or unusual Blender environments can expose bpy.ops
    # without any discoverable operator instances. Fall back to a safe module.
    if sample_op is None:
        return _build_ops_fallback_module(submodule_names)
    # Introspect _BPyOpsSubModOp (individual operator wrapper)
    op_cls = type(sample_op)
    op_struct = introspect_class(op_cls, "bpy.ops")
    _apply_ops_method_return_fixups(op_struct, sample_op)
    _apply_ops_property_and_method_fixups(op_struct)
    # Introspect each ops submodule with typed operator classes
    structs: list[StructData] = [op_struct]
    variables: list[VariableData] = []
    # Progress goes to stderr so stdout stays clean for the JSON payload.
    print(" Introspecting bpy.ops operators...", file=sys.stderr, flush=True)
    total_ops = 0
    for sub_name in submodule_names:
        module_struct, per_op_structs = _introspect_ops_submodule(
            ops_mod, sub_name, op_struct["name"]
        )
        # Add per-operator classes first (they're referenced by the module struct)
        structs.extend(per_op_structs)
        structs.append(module_struct)
        total_ops += len(per_op_structs)
        variables.append(
            {"name": sub_name, "type": module_struct["name"], "value": "..."}
        )
    print(
        f" Introspected {total_ops} operators "
        + f"in {len(submodule_names)} submodules",
        file=sys.stderr,
        flush=True,
    )
    return {
        "module": "bpy.ops",
        "doc": "Blender operator access.",
        "functions": [],
        "variables": variables,
        "structs": structs,
    }
def introspect_module(module_name: str) -> ModuleData:
    """Introspect a module and return its full metadata as a dict."""
    # bpy.types and bpy.ops are virtual RNA-backed modules with dedicated paths.
    if module_name == "bpy.types":
        return introspect_rna_types()
    if module_name == "bpy.ops":
        return introspect_ops_module()
    module = importlib.import_module(module_name)
    functions: list[FunctionData] = []
    variables: list[VariableData] = []
    structs: list[StructData] = []
    for member_name in _collect_public_module_names(module, module_name):
        member = getattr(module, member_name, None)
        if member is not None:
            _classify_module_member(
                module_name, member_name, member, functions, variables, structs
            )
    infer_getter_return_types(functions)
    _probe_hidden_property_structs(module, module_name, structs)
    return {
        "module": module_name,
        "doc": inspect.getdoc(module) or "",
        "functions": functions,
        "variables": variables,
        "structs": structs,
    }
def _try_import_or_attr(module_name: str) -> bool:
"""Try to import a module, falling back to attribute lookup on parent.
Returns True if the module is now accessible via importlib.
"""
try:
importlib.import_module(module_name)
return True
except ImportError:
pass
# Fallback: access the submodule via attribute lookup on the parent
# and register it in sys.modules so importlib works later.
# This is needed for C-level submodules in older Blender versions (< 4.1).
parts = module_name.split(".")
try:
parent = importlib.import_module(parts[0])
obj: object = parent
for part in parts[1:]:
obj = getattr(obj, part)
if isinstance(obj, ModuleType):
sys.modules[module_name] = obj
return True
except (ImportError, AttributeError):
pass
return False
def _discover_submodules_via_dir(mod: ModuleType, parent_name: str) -> list[str]:
"""Discover C-level submodules by inspecting dir() for ModuleType attributes.
pkgutil.walk_packages only works for filesystem-backed packages with __path__.
Many Blender modules (gpu.state, bpy.app.handlers, etc.) are C-level and
only discoverable via attribute access.
"""
found: list[str] = []
for attr_name in dir(mod):
if attr_name.startswith("_"):
continue
obj = getattr(mod, attr_name, None)
if isinstance(obj, ModuleType):
submodule_name = f"{parent_name}.{attr_name}"
# Verify the module actually belongs to this parent
# (filter out stray re-exports like 'sys', 'os', etc.)
obj_name = getattr(obj, "__name__", "")
if (
obj_name == submodule_name
or obj_name.startswith(parent_name + ".")
or obj_name == attr_name
):
found.append(submodule_name)
return found
def discover_modules() -> list[str]:
    """Discover all Blender Python modules and submodules."""
    ordered: list[str] = []
    seen: set[str] = set()

    def _add(name: str) -> bool:
        # Record a module name once, preserving discovery order.
        if name in seen:
            return False
        seen.add(name)
        ordered.append(name)
        return True

    for top_name in BLENDER_MODULES:
        try:
            top_mod = importlib.import_module(top_name)
        except ImportError:
            print(f"  Skipping {top_name} (import failed)", file=sys.stderr)
            continue
        _add(top_name)
        # Filesystem-backed packages: walk them with pkgutil.
        if hasattr(top_mod, "__path__"):
            for _importer, subname, _ispkg in pkgutil.walk_packages(
                top_mod.__path__, prefix=top_name + "."
            ):
                try:
                    importlib.import_module(subname)
                except ImportError:
                    print(f"  Skipping {subname} (import failed)", file=sys.stderr)
                else:
                    _add(subname)
        # C-level submodules: discover via dir() attribute inspection,
        # recursing one level for nested submodules (e.g. bpy.app.handlers).
        for subname in _discover_submodules_via_dir(top_mod, top_name):
            if not _try_import_or_attr(subname):
                continue
            if _add(subname):
                sub_mod = importlib.import_module(subname)
                for nested in _discover_submodules_via_dir(sub_mod, subname):
                    if _try_import_or_attr(nested):
                        _add(nested)
    # Hardcoded extras that can't be discovered via dir() either
    # (e.g. modules only accessible after explicit import in some versions).
    for extra in EXTRA_MODULES:
        if extra not in seen and _try_import_or_attr(extra):
            _add(extra)
    return ordered
# --- RNA introspection (bpy.types) ---
# Scalar RNA property types and their Python equivalents.
RNA_TYPE_MAP: dict[str, str] = {
    "boolean": "bool",
    "int": "int",
    "float": "float",
    "string": "str",
    "enum": "str",
}
# Float-array subtypes that correspond to mathutils.Vector.
_VECTOR_SUBTYPES = (
    "TRANSLATION",
    "DIRECTION",
    "VELOCITY",
    "ACCELERATION",
    "XYZ",
    "XYZ_LENGTH",
)
def rna_property_to_type(prop: object) -> str:
    """Map an RNA property to a PEP 484 type annotation string."""
    prop_type: str = getattr(prop, "type", "")
    fixed_type: object = getattr(prop, "fixed_type", None)
    array_length: int = getattr(prop, "array_length", 0)
    if prop_type == "pointer" and fixed_type is not None:
        return getattr(fixed_type, "identifier", "object")
    if prop_type == "collection" and fixed_type is not None:
        # Prefer the specific collection wrapper class (e.g. BlendDataImages)
        # exposed via srna over the generic bpy_prop_collection[T]; this keeps
        # collection-specific methods like new(), remove(), etc.
        srna: object = getattr(prop, "srna", None)
        if srna is not None:
            srna_id: str = getattr(srna, "identifier", "")
            if srna_id:
                return srna_id
        element_type = getattr(fixed_type, "identifier", "object")
        return f"bpy_prop_collection[{element_type}]"
    # Dynamic-length arrays report array_length=0 but is_array=True on the raw
    # RNA property. rna_info wraps properties in InfoPropertyRNA which stores
    # the raw prop as bl_prop; fall back to the prop itself when unwrapped.
    raw_prop: object = getattr(prop, "bl_prop", prop)
    is_array: bool = getattr(raw_prop, "is_array", False)
    if prop_type in ("float", "int", "boolean") and (array_length > 0 or is_array):
        base = RNA_TYPE_MAP.get(prop_type, prop_type)
        if array_length == 0 and is_array:
            # Dynamic-length array — a bpy_prop_array at runtime.
            return f"bpy_prop_array[{base}]"
        # Fixed-length float arrays with vector/matrix subtypes map onto the
        # corresponding mathutils wrapper types.
        subtype: str = getattr(prop, "subtype", "NONE")
        if prop_type == "float":
            if subtype in _VECTOR_SUBTYPES:
                return "mathutils.Vector"
            if subtype == "EULER":
                return "mathutils.Euler"
            if subtype == "QUATERNION":
                return "mathutils.Quaternion"
            if subtype in ("COLOR", "COLOR_GAMMA") and array_length == 3:
                return "mathutils.Color"
            if subtype == "MATRIX":
                return "mathutils.Matrix"
        return f"bpy_prop_array[{base}]"
    return RNA_TYPE_MAP.get(prop_type, prop_type)
def rna_function_to_data(func_info: object) -> FunctionData:
    """Convert an RNA function info object to FunctionData."""
    identifier: str = getattr(func_info, "identifier", "")
    description: str = getattr(func_info, "description", "")
    is_classmethod: bool = getattr(func_info, "is_classmethod", False)
    args_list: list[object] = getattr(func_info, "args", [])
    return_values: tuple[object, ...] = getattr(func_info, "return_values", ())
    params: list[ParamData] = []
    if is_classmethod:
        # RNA metadata does not include the implicit cls parameter.
        params.append(
            {
                "name": "cls",
                "type": None,
                "default": None,
                "kind": "POSITIONAL_OR_KEYWORD",
            }
        )
    for arg in args_list:
        annotation = rna_property_to_type(arg)
        default: str | None = None
        # Optional RNA arguments are modeled as `T | None = None`.
        if not getattr(arg, "is_required", False):
            annotation = f"{annotation} | None"
            default = "None"
        params.append(
            {
                "name": getattr(arg, "identifier", ""),
                "type": annotation,
                "default": default,
                "kind": "POSITIONAL_OR_KEYWORD",
            }
        )
    # Multiple RNA return values become a tuple annotation.
    return_type: str | None = None
    if len(return_values) == 1:
        return_type = rna_property_to_type(return_values[0])
    elif return_values:
        joined = ", ".join(rna_property_to_type(rv) for rv in return_values)
        return_type = f"tuple[{joined}]"
    return {
        "name": identifier,
        "doc": description,
        "params": params,
        "return_type": return_type,
        "is_classmethod": is_classmethod,
    }
def _import_rna_info() -> ModuleType:
"""Import the rna_info module, handling different Blender versions."""
try:
return importlib.import_module("_rna_info")
except ImportError:
return importlib.import_module("rna_info")
def _infer_type_from_runtime_value(value: object) -> str | None:
"""Infer a type string from a runtime bpy.context attribute value."""
type_name = type(value).__name__
# Single RNA objects — use the class name directly
if hasattr(type(value), "bl_rna"):
return type_name
if isinstance(value, list):
contents = cast(list[object], value)
if contents:
elem_type = type(contents[0])
if hasattr(elem_type, "bl_rna"):
return f"Sequence[{elem_type.__name__}]"
return None
if isinstance(value, str):
return "str"
if isinstance(value, bool):
return "bool"
if isinstance(value, int):
return "int"
if isinstance(value, float):
return "float"
return None
def introspect_screen_context_members(
    rna_property_names: set[str],
) -> list[PropertyData]:
    """Discover screen context members from bpy.context that aren't in RNA.

    These are dynamically injected by Blender based on the active editor/mode.
    All are typed as T | None since they're context-dependent.
    This function only runs inside Blender's Python environment.
    """
    bpy = importlib.import_module("bpy")
    ctx: object = getattr(bpy, "context")
    skip = {"bl_rna", "id_data", "rna_type"}

    def _final_type(type_str: str) -> str:
        # Sequence/collection members return empty sequences rather than
        # None; only singular object references get the `| None` suffix.
        if type_str.startswith(("Sequence[", "bpy_prop_collection[")):
            return type_str
        return f"{type_str} | None"

    # Fetch each context attribute once, tolerating per-attribute failures:
    # some context members can raise depending on the active mode/editor.
    extra_members: list[tuple[str, object]] = []
    for name in sorted(dir(ctx)):
        if name.startswith("_") or name in rna_property_names or name in skip:
            continue
        try:
            value = cast(object, getattr(ctx, name))
        except Exception:
            continue
        if callable(value) and not isinstance(value, (list, tuple)):
            continue
        extra_members.append((name, value))

    properties: list[PropertyData] = []
    for name, value in extra_members:
        # Tier 1: runtime inspection of non-None values.
        type_str = _infer_type_from_runtime_value(value) if value is not None else None
        # Tier 2: hardcoded override table.
        if type_str is None:
            type_str = SCREEN_CONTEXT_TYPE_OVERRIDES.get(name)
        # Tier 3: name-pattern heuristic.
        if type_str is None:
            type_str = infer_context_member_type(name)
        # Final fallback.
        if type_str is None:
            type_str = "object"
        properties.append(
            {
                "name": name,
                "type": _final_type(type_str),
                "is_readonly": True,
                "description": "",
            }
        )

    # Inject overrides that never appear in dir() (e.g. buttons context
    # members like meta_ball, mesh, armature that require active UI panels).
    discovered = {p["name"] for p in properties}
    for name, type_str in sorted(SCREEN_CONTEXT_TYPE_OVERRIDES.items()):
        if name in discovered or name in rna_property_names:
            continue
        properties.append(
            {
                "name": name,
                "type": _final_type(type_str),
                "is_readonly": True,
                "description": "",
            }
        )
    return properties
def _validate_context_prop_type(type_str: str, known_types: set[str]) -> str:
"""Replace type references that don't exist in this version with 'object'."""
import re as _re
def _replace_match(match: re.Match[str]) -> str:
name = match.group(1)
if name == "None":
return name
# "X[" is a generic usage (e.g. Sequence[...]) — keep it
end = match.end()
if end < len(type_str) and type_str[end] == "[":
return name
if name not in known_types:
return "object"
return name
return _re.sub(r"\b([A-Z]\w+)\b", _replace_match, type_str)
def _struct_identifier(struct_info: object) -> str:
"""Return RNA struct identifier safely as a string."""
return str(getattr(struct_info, "identifier", ""))
def _add_core_bpy_type_structs(
    bpy_types_module: ModuleType,
    generic_bases: dict[str, str],
    extra_dunders: dict[str, list[FunctionData]],
) -> list[StructData]:
    """Introspect core bpy.types C base classes and generic collection wrappers."""
    structs: list[StructData] = []
    bpy_struct_cls = getattr(bpy_types_module, "bpy_struct", None)
    if bpy_struct_cls is not None:
        struct_data = introspect_class(bpy_struct_cls, "bpy.types")
        # Add the RNA metadata accessors explicitly on the root base class.
        for rna_attr in ("bl_rna", "rna_type"):
            struct_data["properties"].append(
                {
                    "name": rna_attr,
                    "type": "Struct",
                    "is_readonly": True,
                    "description": "RNA type definition",
                }
            )
        structs.append(struct_data)
    for cls_name, base in generic_bases.items():
        cls = getattr(bpy_types_module, cls_name, None)
        if cls is None:
            continue
        struct = introspect_class(cls, "bpy.types")
        struct["base"] = base
        # Replace runtime-discovered dunders with the hand-written generic
        # versions, keeping any methods not covered by the replacements.
        replacements = extra_dunders[cls_name]
        replaced_names = {m["name"] for m in replacements}
        struct["methods"] = replacements + [
            m for m in struct["methods"] if m["name"] not in replaced_names
        ]
        structs.append(struct)
    return structs
def _collect_collection_element_types(
structs_dict: dict[object, object],
) -> dict[str, str]:
"""Build map of collection wrapper class name to element type name."""
collection_element_types: dict[str, str] = {}
for raw_struct_info in structs_dict.values():
for prop in getattr(raw_struct_info, "properties", []):
if getattr(prop, "type", "") != "collection":
continue
fixed_type = getattr(prop, "fixed_type", None)
if fixed_type is None:
continue
srna: object = getattr(prop, "srna", None)
if srna is None:
continue
srna_id = str(getattr(srna, "identifier", ""))
element_type = str(getattr(fixed_type, "identifier", ""))
if srna_id and element_type:
collection_element_types[srna_id] = element_type
return collection_element_types
def _rna_struct_to_data(
    struct_info: object,
    collection_element_types: dict[str, str],
) -> StructData:
    """Convert a single RNA struct info object to StructData."""
    sid = _struct_identifier(struct_info)
    is_collection_wrapper = sid in collection_element_types
    # Collection wrappers subclass the generic parametrized collection;
    # everything else follows the RNA base chain, rooted at bpy_struct.
    base_obj = getattr(struct_info, "base", None)
    if is_collection_wrapper:
        base_name = f"bpy_prop_collection[{collection_element_types[sid]}]"
    elif base_obj:
        base_name = str(getattr(base_obj, "identifier", "bpy_struct"))
    else:
        base_name = "bpy_struct"
    properties: list[PropertyData] = []
    for prop in getattr(struct_info, "properties", []):
        prop_type = rna_property_to_type(prop)
        is_readonly = bool(getattr(prop, "is_readonly", False))
        prop_data: PropertyData = {
            "name": str(getattr(prop, "identifier", "")),
            "type": prop_type,
            "is_readonly": is_readonly,
            "description": str(getattr(prop, "description", "") or ""),
        }
        # Writable mathutils properties also accept Sequence[float] for assignment
        if not is_readonly and prop_type in _MATHUTILS_ARRAY_TYPES:
            prop_data["setter_type"] = f"{prop_type} | Sequence[float]"
        properties.append(prop_data)
    methods: list[FunctionData] = []
    for func_info in getattr(struct_info, "functions", []):
        func_name = str(getattr(func_info, "identifier", ""))
        # Skip find()/get() on collection wrappers so they don't shadow the
        # generic base's versions.
        if is_collection_wrapper and func_name in ("find", "get"):
            continue
        methods.append(rna_function_to_data(func_info))
    return {
        "name": sid,
        "doc": str(getattr(struct_info, "description", "") or ""),
        "base": base_name,
        "properties": properties,
        "methods": methods,
    }
def _merge_screen_context_members(structs: list[StructData]) -> None:
    """Merge dynamic bpy.context members into the Context struct."""
    known_types = {s["name"] for s in structs}
    context_struct = next((s for s in structs if s["name"] == "Context"), None)
    if context_struct is None:
        return
    # Names already covered by RNA metadata must not be re-added.
    rna_names = {p["name"] for p in context_struct["properties"]}
    rna_names |= {m["name"] for m in context_struct["methods"]}
    for prop in introspect_screen_context_members(rna_names):
        validated = _validate_context_prop_type(prop["type"], known_types)
        # Fully qualify bare Sequence[...] references.
        prop["type"] = validated.replace("Sequence[", "collections.abc.Sequence[")
        context_struct["properties"].append(prop)
def _merge_non_rna_bpy_types(
    structs: list[StructData], bpy_types_module: ModuleType
) -> None:
    """Add C-level bpy.types classes not present in RNA metadata."""
    import builtins as _builtins

    known_names = {s["name"] for s in structs}
    for name in sorted(dir(bpy_types_module)):
        if name.startswith("_") or name in known_names:
            continue
        candidate = getattr(bpy_types_module, name, None)
        if not isinstance(candidate, type):
            continue
        # Accept classes declared in bpy.types, plus C types that report
        # "builtins" as their module — as long as the name doesn't belong to
        # an actual Python builtin (int, list, ...).
        declared_here = candidate.__module__ == "bpy.types"
        c_level = candidate.__module__ == "builtins" and not hasattr(_builtins, name)
        if declared_here or c_level:
            structs.append(introspect_class(candidate, "bpy.types"))
def _merge_missing_c_methods(
    structs: list[StructData], bpy_types_module: ModuleType
) -> None:
    """Add C-level methods missing from RNA metadata for each struct.

    For every struct with a matching class in bpy.types, scans the class
    __dict__ for C method/classmethod descriptors (and Python functions
    carrying RST-style docstrings) that RNA introspection did not report,
    and appends them to the struct's method list. Methods whose name also
    exists on an ancestor struct are tagged with is_override.
    """
    # Build parent method/property lookup for override detection
    struct_by_name = {s["name"]: s for s in structs}
    def _get_all_parent_names(struct: StructData) -> set[str]:
        # Recursively collect method/property names from the whole base chain.
        names: set[str] = set()
        base = struct.get("base")
        if base:
            # Strip generic params (e.g. "bpy_prop_collection[Object]" -> "bpy_prop_collection")
            base_name = base.split("[")[0]
            parent = struct_by_name.get(base_name)
            if parent:
                names |= {m["name"] for m in parent["methods"]}
                names |= {p["name"] for p in parent["properties"]}
                names |= _get_all_parent_names(parent)
        return names
    for struct in structs:
        cls = getattr(bpy_types_module, struct["name"], None)
        if cls is None:
            continue
        # Anything already known (RNA methods or properties) is skipped.
        existing = {m["name"] for m in struct["methods"]}
        existing |= {p["name"] for p in struct["properties"]}
        parent_names = _get_all_parent_names(struct)
        for attr_name in sorted(cls.__dict__):
            if attr_name.startswith("_") or attr_name in existing:
                continue
            if attr_name in ("bl_rna", "rna_type"):
                continue
            raw = cls.__dict__[attr_name]
            raw_type = type(raw).__name__
            # C classmethods appear either as a classmethod_descriptor or as
            # a `classmethod` wrapping a function without __code__ (C-level).
            is_c_classmethod = raw_type == "classmethod_descriptor"
            if not is_c_classmethod and raw_type == "classmethod":
                inner = getattr(raw, "__func__", None)
                is_c_classmethod = inner is not None and not hasattr(inner, "__code__")
            if is_c_classmethod:
                # Introspect the bound form so the signature excludes cls.
                bound = getattr(cls, attr_name)
                if callable(bound):
                    func_data = introspect_callable(bound, attr_name)
                    if func_data:
                        func_data["is_classmethod"] = True
                        if attr_name in parent_names:
                            func_data["is_override"] = True
                        struct["methods"].append(func_data)
            elif raw_type in ("method_descriptor", "builtin_function_or_method"):
                # Plain C instance methods / builtin functions.
                obj = getattr(cls, attr_name)
                if callable(obj):
                    func_data = introspect_callable(obj, attr_name)
                    if func_data:
                        if attr_name in parent_names:
                            func_data["is_override"] = True
                        struct["methods"].append(func_data)
            elif raw_type == "function":
                # Python functions with RST docstrings are API methods
                # (e.g. Context.copy, Object.evaluated_geometry)
                doc = inspect.getdoc(raw) or ""
                if ":rtype:" in doc or ":type " in doc:
                    func_data = introspect_callable(raw, attr_name)
                    if func_data:
                        if attr_name in parent_names:
                            func_data["is_override"] = True
                        struct["methods"].append(func_data)
def introspect_rna_types() -> ModuleData:
    """Introspect all RNA-defined types using rna_info.BuildRNAInfo().

    Combines several sources into one bpy.types module description:
    RNA struct metadata, the C-level base classes (bpy_struct plus the
    generic collection/array wrappers with hand-written dunders), dynamic
    screen-context members, non-RNA C classes, and hidden types reachable
    only through method return values (e.g. ContextTempOverride).
    """
    rna_info = _import_rna_info()
    info = rna_info.BuildRNAInfo()
    # Element 0 of the BuildRNAInfo() result is the struct-info mapping.
    rna_structs_dict = cast(dict[object, object], info[0])
    # Introspect the C-level base classes that aren't in RNA but are in bpy.types.
    # These provide fundamental methods like __getitem__, foreach_get, etc.
    _bpy_types = importlib.import_module("bpy.types")
    # Generic base classes need manual type parameter annotation since
    # introspection can't discover Python generics from C types.
    _GENERIC_BASES: dict[str, str] = {
        "bpy_prop_collection": "Generic[_T]",
        "bpy_prop_array": "Generic[_T]",
    }
    # Dunder methods for generic types can't be discovered from runtime since
    # they need generic type parameters (_T). Define them explicitly.
    _COLLECTION_DUNDERS: list[FunctionData] = [
        {
            "name": "__getitem__",
            "doc": "",
            "params": [
                {
                    "name": "key",
                    "type": "int | str",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ],
            "return_type": "_T",
            "is_classmethod": False,
        },
        {
            "name": "__iter__",
            "doc": "",
            "params": [],
            "return_type": "Iterator[_T]",
            "is_classmethod": False,
        },
        {
            "name": "__len__",
            "doc": "",
            "params": [],
            "return_type": "int",
            "is_classmethod": False,
        },
        {
            "name": "__contains__",
            "doc": "",
            "params": [
                {
                    "name": "key",
                    "type": "str",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ],
            "return_type": "bool",
            "is_classmethod": False,
        },
    ]
    _ARRAY_DUNDERS: list[FunctionData] = [
        {
            "name": "__getitem__",
            "doc": "",
            "params": [
                {
                    "name": "index",
                    "type": "int",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ],
            "return_type": "_T",
            "is_classmethod": False,
        },
        {
            "name": "__setitem__",
            "doc": "",
            "params": [
                {
                    "name": "index",
                    "type": "int",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                },
                {
                    "name": "value",
                    "type": "_T",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                },
            ],
            "return_type": "None",
            "is_classmethod": False,
        },
        {
            "name": "__delitem__",
            "doc": "",
            "params": [
                {
                    "name": "index",
                    "type": "int | slice",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ],
            "return_type": "None",
            "is_classmethod": False,
        },
        {
            "name": "__iter__",
            "doc": "",
            "params": [],
            "return_type": "Iterator[_T]",
            "is_classmethod": False,
        },
        {
            "name": "__len__",
            "doc": "",
            "params": [],
            "return_type": "int",
            "is_classmethod": False,
        },
        {
            "name": "__contains__",
            "doc": "",
            "params": [
                {
                    "name": "value",
                    "type": "_T",
                    "default": None,
                    "kind": "POSITIONAL_OR_KEYWORD",
                }
            ],
            "return_type": "bool",
            "is_classmethod": False,
        },
    ]
    _EXTRA_DUNDERS: dict[str, list[FunctionData]] = {
        "bpy_prop_collection": _COLLECTION_DUNDERS,
        "bpy_prop_array": _ARRAY_DUNDERS,
    }
    # Assemble: C bases first, then all RNA structs in deterministic order,
    # then the post-processing merge passes.
    structs = _add_core_bpy_type_structs(_bpy_types, _GENERIC_BASES, _EXTRA_DUNDERS)
    collection_element_types = _collect_collection_element_types(rna_structs_dict)
    for struct_info in sorted(rna_structs_dict.values(), key=_struct_identifier):
        structs.append(_rna_struct_to_data(struct_info, collection_element_types))
    _merge_screen_context_members(structs)
    _merge_non_rna_bpy_types(structs, _bpy_types)
    _merge_missing_c_methods(structs, _bpy_types)
    # Discover hidden C types reachable only via method return values
    # (e.g. ContextTempOverride from Context.temp_override()).
    known = {s["name"] for s in structs}
    bpy = importlib.import_module("bpy")
    ctx = getattr(bpy, "context")
    # Probe temp_override to discover ContextTempOverride.
    # Skip on Blender < 4.4 where this call hangs.
    bpy_app = importlib.import_module("bpy.app")
    blender_version: tuple[int, ...] = getattr(bpy_app, "version", (0, 0, 0))
    temp_override = ctx.__class__.__dict__.get("temp_override")
    if temp_override is not None and blender_version < (4, 4, 0):
        # On Blender < 4.4, probing temp_override() hangs, but the docstring
        # still references ContextTempOverride. Generate a minimal stub.
        if "ContextTempOverride" not in known:
            stub: StructData = {
                "name": "ContextTempOverride",
                "doc": "Context manager for context overrides.",
                "base": None,
                "properties": [],
                "methods": [
                    {
                        "name": "__enter__",
                        "doc": "",
                        "params": [],
                        "return_type": "Context",
                        "is_classmethod": False,
                    },
                    {
                        "name": "__exit__",
                        "doc": "",
                        "params": [
                            {
                                "name": "exc_type",
                                "type": "object",
                                "default": None,
                                "kind": "POSITIONAL_OR_KEYWORD",
                            },
                            {
                                "name": "exc_val",
                                "type": "object",
                                "default": None,
                                "kind": "POSITIONAL_OR_KEYWORD",
                            },
                            {
                                "name": "exc_tb",
                                "type": "object",
                                "default": None,
                                "kind": "POSITIONAL_OR_KEYWORD",
                            },
                        ],
                        "return_type": "None",
                        "is_classmethod": False,
                    },
                ],
            }
            structs.append(stub)
            known.add("ContextTempOverride")
    if (
        temp_override is not None
        and type(temp_override).__name__ == "method_descriptor"
        and blender_version >= (4, 4, 0)
    ):
        try:
            result = ctx.temp_override()
            result_cls = result.__class__
            if result_cls.__name__ not in known:
                hidden = introspect_class(result_cls, "bpy.types")
                # Fix logging_set if it was introspected without params
                # (older Blender versions lack the docstring)
                for method in hidden["methods"]:
                    if method["name"] == "logging_set" and not method["params"]:
                        method["params"] = [
                            {
                                "name": "enable",
                                "type": "bool",
                                "default": None,
                                "kind": "POSITIONAL_OR_KEYWORD",
                            },
                        ]
                structs.append(hidden)
                known.add(result_cls.__name__)
            # Leave the (empty) override context immediately; only the
            # instance's class was needed.
            exit_fn = getattr(result, "__exit__", None)
            if exit_fn is not None:
                exit_fn(None, None, None)
        except Exception:
            pass
    return {
        "module": "bpy.types",
        "doc": "Blender RNA type definitions.",
        "functions": [],
        "variables": [],
        "structs": structs,
    }
@dataclass
class IntrospectArgs:
    """Parsed command-line arguments for the introspection run."""
    # Output JSON file path; None means the JSON is printed to stdout.
    output: str | None = None
def main() -> None:
    """Entry point: discover, introspect, and dump all modules as JSON."""
    # Blender passes script arguments after a literal "--" separator;
    # anything before it belongs to Blender itself.
    argv = sys.argv
    argv = argv[argv.index("--") + 1 :] if "--" in argv else []
    parser = argparse.ArgumentParser(description="Introspect Blender Python modules")
    parser.add_argument(
        "--output", default=None, help="Output JSON file (default: stdout)"
    )
    args = IntrospectArgs(output=parser.parse_args(argv).output)
    print("Discovering modules...", file=sys.stderr)
    module_names = discover_modules()
    print(f"Found {len(module_names)} modules", file=sys.stderr)
    results: list[ModuleData] = []
    for module_name in module_names:
        print(f"  Introspecting {module_name}...", file=sys.stderr)
        results.append(introspect_module(module_name))
    payload = json.dumps(results, indent=2)
    if args.output:
        with open(args.output, "w") as f:
            f.write(payload)
        print(f"Written to {args.output}", file=sys.stderr)
    else:
        # Sentinel markers let the caller extract the JSON from mixed stdout.
        print("__INTROSPECT_JSON_START__")
        print(payload)
        print("__INTROSPECT_JSON_END__")
if __name__ == "__main__":
    main()