Joseph HENRY 6ea41f3b4d Add rna_type/bl_rna, GeometrySet, bpy.ops typing, publish task, and README updates
- Add rna_type and bl_rna as readonly properties on bpy_struct (fixes fake-bpy-module#419)
- Discover non-RNA C classes in bpy.types like GeometrySet (fixes fake-bpy-module#436)
- Introspect bpy.ops operator wrapper with poll(), idname(), get_rna_type(), bl_options
- Handle builtin name shadowing (e.g. bpy.ops.object) with builtins. qualification
- Add poe publish task for building and publishing to PyPI
- Update project URLs to Gitea, improve generated README, add disclaimer
- Fix f-string lint warning and type annotation for introspect_class

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 15:07:35 +01:00

433 lines
13 KiB
Python

"""Blender type stubs generator.
Orchestrates the introspection and stub generation pipeline.
"""
import argparse
import json
import shutil
import subprocess
import sys
import tomllib
from dataclasses import dataclass
from pathlib import Path
from typing import TypeAlias
from blender_downloader import get_blender_executable
from generate_stubs import write_stubs
from introspect import ModuleData
# Directory containing this script; anchors all relative resource paths.
SCRIPT_DIR = Path(__file__).parent
# Script executed inside Blender's embedded Python to dump API data as JSON.
INTROSPECT_SCRIPT = SCRIPT_DIR / "introspect.py"
# Root of per-version hand-written stub overrides (overrides/<major.minor>/).
OVERRIDES_DIR = SCRIPT_DIR / "overrides"
TomlValue: TypeAlias = str | int | bool | list["TomlValue"] | dict[str, "TomlValue"]
TomlDict: TypeAlias = dict[str, "TomlValue"]
def _serialize_toml_value(value: TomlValue) -> str:
"""Serialize a single TOML value."""
if isinstance(value, bool):
return "true" if value else "false"
if isinstance(value, str):
return f'"{value}"'
if isinstance(value, int):
return str(value)
if isinstance(value, list):
if value and isinstance(value[0], dict):
items: list[str] = []
for item in value:
if isinstance(item, dict):
pairs = ", ".join(
f"{k} = {_serialize_toml_value(v)}" for k, v in item.items()
)
items.append(f"{{ {pairs} }}")
joined = ",\n ".join(items)
return f"[\n {joined},\n]"
str_items = [_serialize_toml_value(v) for v in value]
if len(str_items) <= 3:
return f"[{', '.join(str_items)}]"
joined = ",\n ".join(str_items)
return f"[\n {joined},\n]"
return str(value)
def _serialize_toml(data: TomlDict, prefix: str = "") -> str:
    """Serialize a dict to TOML format."""
    scalar_lines: list[str] = []
    deferred_tables: list[tuple[str, TomlDict]] = []
    for key, value in data.items():
        full_key = key if not prefix else f"{prefix}.{key}"
        if not isinstance(value, dict):
            # Dotted keys must be quoted to survive as a single key.
            quoted_key = key if "." not in key else f'"{key}"'
            scalar_lines.append(f"{quoted_key} = {_serialize_toml_value(value)}")
            continue
        # A sub-dict with any scalar entries becomes its own [table] later;
        # one holding only nested dicts is flattened through recursion.
        if any(not isinstance(v, dict) for v in value.values()):
            deferred_tables.append((full_key, value))
        elif any(isinstance(v, dict) for v in value.values()):
            scalar_lines.append(_serialize_toml(value, full_key))
    parts: list[str] = []
    if scalar_lines:
        if prefix:
            parts.append(f"[{prefix}]")
        parts.extend(scalar_lines)
    for table_key, table_value in deferred_tables:
        parts.append("")
        parts.append(f"[{table_key}]")
        for k, v in table_value.items():
            if isinstance(v, dict):
                parts.append("")
                parts.append(_serialize_toml(v, f"{table_key}.{k}"))
            else:
                quoted_k = k if "." not in k else f'"{k}"'
                parts.append(f"{quoted_k} = {_serialize_toml_value(v)}")
    return "\n".join(parts)
def load_project_metadata() -> TomlDict:
    """Load [project] metadata from this project's pyproject.toml."""
    pyproject_path = SCRIPT_DIR / "pyproject.toml"
    with pyproject_path.open("rb") as f:
        parsed = tomllib.load(f)
    project_table: TomlDict = parsed["project"]
    return project_table
def build_generated_pyproject(
    blender_version: str,
    package_version: str,
    packages: list[str],
    python_version: str,
) -> str:
    """Build a pyproject.toml for the generated stubs, inheriting metadata from the project."""
    meta = load_project_metadata()
    raw_classifiers = meta.get("classifiers", [])
    classifiers: list[TomlValue] = []
    if isinstance(raw_classifiers, list):
        classifiers = [entry for entry in raw_classifiers if isinstance(entry, str)]
    # Mark the published package as stubs-only.
    classifiers.append("Typing :: Stubs Only")
    project: TomlDict = {
        "name": str(meta.get("name", "blender-python-stubs")),
        "version": package_version,
        "description": f"Type stubs for Blender {blender_version} Python API",
        "readme": "README.md",
        "requires-python": f">={python_version}",
        "license": str(meta.get("license", "GPL-2.0-or-later")),
        "keywords": meta.get("keywords", []),
        "authors": meta.get("authors", []),
        "classifiers": classifiers,
    }
    project_urls = meta.get("urls")
    if isinstance(project_urls, dict):
        project["urls"] = project_urls
    build_system: TomlDict = {
        "requires": ["hatchling"],
        "build-backend": "hatchling.build",
    }
    # Produces [tool.hatch.build.targets.wheel] packages = [...]
    sorted_packages: list[TomlValue] = [pkg for pkg in sorted(packages)]
    tool: TomlDict = {
        "hatch": {"build": {"targets": {"wheel": {"packages": sorted_packages}}}},
    }
    generated: TomlDict = {
        "build-system": build_system,
        "project": project,
        "tool": tool,
    }
    return _serialize_toml(generated)
# README for the generated stubs package; the {blender_version},
# {major_minor} and {next_minor} placeholders are filled in by
# generate_package_files() via str.format().
README_TEMPLATE = """\
# blender-python-stubs
Type stubs for Blender {blender_version} Python API.
Provides autocomplete, type checking, and inline documentation for `bpy`, `mathutils`, `bmesh`, `gpu`, `freestyle`, and all other Blender Python modules.
## Installation
```bash
pip install "blender-python-stubs>={major_minor},<{next_minor}"
```
## Features
- Full Blender API coverage (`bpy`, `mathutils`, `bmesh`, `gpu`, `gpu_extras`, `bpy_extras`, `freestyle`, `aud`, `blf`, `bl_math`, `imbuf`, `idprop`)
- Accurate collection types (`BlendDataObjects` instead of generic `bpy_prop_collection`)
- Readonly `@property` decorators for RNA attributes
- Typed context members (`bpy.context.active_object`, `selected_objects`, etc.)
- Constructor signatures for `mathutils`, `gpu`, and other C extension types
- Literal enum types instead of plain `str`
- Zero `typing.Any` usage
- 0 errors in basedpyright strict mode
## Usage
```python
import bpy
obj: bpy.types.Object = bpy.context.active_object
obj.location.x = 1.0
bpy.data.objects.new("Cube", bpy.data.meshes.new("Mesh"))
```
---
Generated by [blender-python-stubs](https://git.autourdeminuit.com/autour_de_minuit/blender-python-stubs).
"""
def get_blender_version(blender_path: str) -> tuple[str, str]:
    """Get the full and major.minor version strings from a Blender executable."""
    proc = subprocess.run(
        [blender_path, "--version"],
        capture_output=True,
        text=True,
    )
    # The first stdout line looks like "Blender 4.1.1 ..." — parse it.
    for raw_line in proc.stdout.splitlines():
        if not raw_line.startswith("Blender "):
            continue
        full_version = raw_line.split()[1]
        major, minor = full_version.split(".")[:2]
        return full_version, f"{major}.{minor}"
    print("Could not determine Blender version", file=sys.stderr)
    sys.exit(1)
def get_blender_python_version(blender_path: str) -> str:
    """Detect the Python version embedded in a Blender executable."""
    version_expr = (
        "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
    )
    proc = subprocess.run(
        [
            blender_path,
            "--background",
            "--factory-startup",
            "-noaudio",
            "--python-expr",
            version_expr,
        ],
        capture_output=True,
        text=True,
    )
    # Blender prints startup noise too; pick the line that looks like "3.11".
    for candidate in proc.stdout.splitlines():
        candidate = candidate.strip()
        if candidate and candidate[0].isdigit() and "." in candidate:
            return candidate
    print("Could not determine Blender Python version", file=sys.stderr)
    sys.exit(1)
def run_introspection(blender_path: str) -> list[ModuleData]:
    """Run the introspection script inside Blender and return the JSON result."""
    proc = subprocess.run(
        [
            blender_path,
            "--background",
            "--factory-startup",
            "-noaudio",
            "--python-exit-code",
            "1",
            "--python",
            str(INTROSPECT_SCRIPT),
        ],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        print("Blender introspection failed:", file=sys.stderr)
        print(proc.stderr, file=sys.stderr)
        sys.exit(1)
    stdout = proc.stdout
    # The introspection script brackets its JSON payload with these markers
    # so it can be separated from Blender's startup chatter.
    start_marker = "__INTROSPECT_JSON_START__"
    end_marker = "__INTROSPECT_JSON_END__"
    begin = stdout.find(start_marker)
    end = stdout.find(end_marker)
    if begin == -1 or end == -1:
        print("Could not find JSON markers in Blender output:", file=sys.stderr)
        print(stdout, file=sys.stderr)
        sys.exit(1)
    payload = stdout[begin + len(start_marker) : end].strip()
    result: list[ModuleData] = json.loads(payload)
    return result
def compute_next_minor(major_minor: str) -> str:
    """Compute the next minor version. 5.0 -> 5.1, 4.9 -> 4.10."""
    major, _, minor = major_minor.partition(".")
    return f"{major}.{int(minor) + 1}"
def generate_package_files(
    output_dir: Path,
    full_version: str,
    major_minor: str,
    top_level_packages: list[str],
    python_version: str,
) -> None:
    """Generate pyproject.toml and README.md for the publishable package."""
    # Stubs get a fourth version component so re-releases can bump it.
    stub_version = f"{full_version}.0"
    pyproject_text = build_generated_pyproject(
        major_minor, stub_version, top_level_packages, python_version
    )
    (output_dir / "pyproject.toml").write_text(pyproject_text + "\n")
    readme_text = README_TEMPLATE.format(
        blender_version=major_minor,
        major_minor=major_minor,
        next_minor=compute_next_minor(major_minor),
    )
    (output_dir / "README.md").write_text(readme_text)
    # Add py.typed marker to each top-level package so mypy recognizes the stubs
    for package in top_level_packages:
        (output_dir / package / "py.typed").touch()
def generate_for_version(blender_path: str) -> None:
    """Generate stubs for a single Blender executable."""
    full_version, major_minor = get_blender_version(blender_path)
    python_version = get_blender_python_version(blender_path)
    print(f" Blender {full_version} (Python {python_version})")
    output_dir = SCRIPT_DIR / "dist" / major_minor
    # Start from a clean slate for this version's output.
    if output_dir.exists():
        shutil.rmtree(output_dir)
    print(" Running introspection...")
    modules_data = run_introspection(blender_path)
    print(f" Introspected {len(modules_data)} modules")
    # Version-specific hand-written overrides, when present.
    overrides_path = OVERRIDES_DIR / major_minor
    overrides_str: str | None = None
    if overrides_path.exists():
        overrides_str = str(overrides_path)
        print(f" Using overrides from {overrides_path}/")
    print(" Generating stubs...")
    top_level_packages = write_stubs(
        modules_data, str(output_dir), overrides_str, python_version
    )
    print(" Generating package files...")
    generate_package_files(
        output_dir, full_version, major_minor, top_level_packages, python_version
    )
    # Store the Python version for later type checking
    (output_dir / ".python-version").write_text(python_version)
    print(f" Output: {output_dir}/")
@dataclass
class MainArgs:
    """Parsed command-line arguments for stub generation."""

    # Blender versions to generate stubs for, e.g. ["4.0", "4.1"].
    versions: list[str]
def typecheck_stubs(versions: list[str] | None = None) -> None:
    """Type-check generated stubs by running basedpyright scoped to each version directory.

    Args:
        versions: Blender versions to check (e.g. ["4.0"]). When empty or
            None, every generated version found under dist/ is checked.

    Exits with status 1 when dist/ is missing, no stubs exist, or any
    version fails the check.
    """
    dist_dir = SCRIPT_DIR / "dist"
    if not dist_dir.exists():
        print("No dist/ directory found. Run 'poe generate' first.")
        sys.exit(1)
    if not versions:
        versions = sorted(
            d.name
            for d in dist_dir.iterdir()
            if d.is_dir() and not d.name.startswith(".")
        )
    if not versions:
        print("No generated stubs found in dist/.")
        sys.exit(1)
    failed = False
    for version in versions:
        version_dir = dist_dir / version
        print(f"=== Checking stubs for Blender {version} ===")
        # Each version dir records the embedded Python it was generated with;
        # fall back to 3.11 for older outputs that predate the marker file.
        python_version_file = version_dir / ".python-version"
        python_version = (
            python_version_file.read_text().strip()
            if python_version_file.exists()
            else "3.11"
        )
        # Write a throwaway pyright config scoped to this version directory.
        config = version_dir / "pyrightconfig.json"
        config.write_text(
            json.dumps(
                {
                    "extraPaths": ["."],
                    "typeCheckingMode": "strict",
                    "pythonVersion": python_version,
                }
            )
        )
        try:
            result = subprocess.run(
                ["basedpyright", "--project", str(config)],
            )
        finally:
            # Always remove the temporary config, even if launching
            # basedpyright raises (e.g. the tool is not installed).
            config.unlink()
        if result.returncode != 0:
            failed = True
    if failed:
        sys.exit(1)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Blender type stubs generator")
    parser.add_argument(
        "--typecheck-stubs",
        action="store_true",
        help="Type-check generated stubs instead of generating",
    )
    parser.add_argument(
        "versions",
        nargs="*",
        help="Blender versions (e.g., 4.0 4.1)",
    )
    cli = parser.parse_args()
    if cli.typecheck_stubs:
        # Empty positional list means "check everything in dist/".
        typecheck_stubs(cli.versions or None)
    else:
        if not cli.versions:
            parser.error("versions are required for generation")
        run_args = MainArgs(versions=cli.versions)
        min_version = (4, 0)
        for version in run_args.versions:
            major_str, minor_str = version.split(".")
            if (int(major_str), int(minor_str)) < min_version:
                print(
                    f"Blender {version} is not supported"
                    f" (minimum: {min_version[0]}.{min_version[1]})"
                )
                sys.exit(1)
            print(f"=== Blender {version} ===")
            blender_path = get_blender_executable(version)
            generate_for_version(str(blender_path))
            print()
        print("Done.")