Compare commits

..

4 Commits

3 changed files with 358 additions and 77 deletions

403
dumper.py
View File

@ -1,52 +1,341 @@
from __future__ import annotations from __future__ import annotations
from abc import ABC, abstractmethod
from copy import copy from copy import copy
from dataclasses import dataclass
from os.path import abspath from os.path import abspath
from typing import Any
import bpy import bpy
from . import utils from . import utils
from . utils import BlenderProperty
def serialize_selected_nodes_from_node_tree(node_tree: bpy.types.NodeTree):
    """Build a serializable dict of the currently selected nodes of *node_tree*.

    Only nodes whose ``select`` flag is set are included; links are kept only
    when both endpoints are selected.

    Returns:
        dict with "nodes" and "links" lists of serialized data.
    """
    # Shared pointer map for the whole pass, so links can reference nodes
    pointers: dict = {}

    nodes_data = [
        Serializer.serialize(node, pointers)
        for node in node_tree.nodes
        if node.select
    ]
    links_data = [
        Serializer.serialize(link, pointers)
        for link in node_tree.links
        if link.from_node.select and link.to_node.select
    ]

    # Only selected nodes (and the links between them) make it into the payload
    return {
        "nodes": nodes_data,
        "links": links_data,
    }
def deserialize_nodes_into_node_tree(data: dict, node_tree: bpy.types.NodeTree):
    """Load serialized node data into *node_tree*.

    ``data`` is the dict produced by ``serialize_selected_nodes_from_node_tree``.
    """
    # Fresh pointer map for this pass; the Serializer fills it with
    # _kit_ptr -> recreated Blender object entries as it goes.
    pointer_map: dict = {}
    Serializer.deserialize(data, node_tree, pointer_map)


# TODO: Sub serialize function where the isinstance is set to the default number of things.

# TODO: Collection is not handled as a class anymore, handle it manually
class Serializer(ABC):
    """
    Base Serializer class.

    ``bl_pointers_ref`` is a mutable dict threaded through all
    serialize/deserialize calls, mapping Blender pointer IDs
    (``bpy_struct.as_pointer()``) to their corresponding objects so that
    cross-references between objects can be restored on deserialization.
    """

    # Whitelisted properties, applied after the blacklist
    prop_whitelist = None
    # Properties that are excluded from (de)serialization, in addition to any bl_* properties
    prop_blacklist = ("rna_type", "id_data", "depsgraph")

    # Lazily-built cache: {blender type: serializer subclass}, see get_serializer_map()
    serializer_map = {}

    @classmethod
    @abstractmethod
    def construct_bl_object(cls, data: dict):
        """Construct this serializer's specific Blender object from ``data``.

        Subclasses must implement this; the base implementation only reports
        the misuse and returns None.
        """
        print("DEBUG: construct_bl_object called on Base Serializer, shouldn't happen")
        return None

    # --- Serialization ---

    @classmethod
    @abstractmethod
    def serialize(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> dict:
        """Serialize ``obj`` into a plain dict; primitives are returned as-is."""
        # Early recursive return case
        # TODO: Ported as is, check the heuristics (are there more attribute types to add?)
        if isinstance(obj, (str, int, float, dict, list, type(None))):
            return obj

        # Returned data; tracks the pointer for re-assignments during deserialization
        data = {"_kit_ptr": obj.as_pointer()}
        bl_pointers_ref[obj.as_pointer()] = obj

        # Iterate over all *filtered* properties found in the object
        for bl_prop in cls.get_serialized_properties(obj):
            # Do not store default values nor read-only properties
            if (array := getattr(bl_prop.rep, "default_array", None)) and bl_prop.attr == array:
                continue
            if isinstance(bl_prop.attr, (str, int, float)) and bl_prop.attr == bl_prop.rep.default:
                continue
            if obj.is_property_readonly(bl_prop.rep.identifier):
                continue
            # Serialize each property
            data[bl_prop.rep.identifier] = cls.serialize_property(bl_prop, bl_pointers_ref)
        return data

    @classmethod
    def serialize_property(cls, bl_prop: BlenderProperty, bl_pointers_ref: dict) -> Any:
        """Serialize one property; special cases for arrays/collections/pointers."""
        # Property array case (flagged on BoolProperty, IntProperty and FloatProperty)
        if getattr(bl_prop.rep, "is_array", False):
            return list(bl_prop.attr)

        # Collection case
        if isinstance(bl_prop.attr, bpy.types.bpy_prop_collection):
            # BUGFIX: bl_pointers_ref was not forwarded to sub_serialize
            values = [cls.sub_serialize(item, bl_pointers_ref) for item in bl_prop.attr]
            # Sub-serializers may return None for items they skip (e.g. unavailable sockets)
            return [v for v in values if v is not None]

        # Pointer case: return the raw pointer ID if already tracked, otherwise recurse
        if bl_prop.rep.type == "POINTER" and bl_prop.attr:
            ptr = bl_prop.attr.as_pointer()
            if ptr in bl_pointers_ref:
                return ptr
            bl_pointers_ref[ptr] = bl_prop.attr
            return cls.sub_serialize(bl_prop.attr, bl_pointers_ref)

        # BUGFIX: plain values (strings, numbers, enums, null pointers, …)
        # previously fell through and were implicitly serialized as None
        return cls.sub_serialize(bl_prop.attr, bl_pointers_ref)

    @classmethod
    def sub_serialize(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> Any:
        """Resolve which Serializer class to use for ``obj`` and serialize it."""
        if not isinstance(obj, bpy.types.bpy_struct):
            # Primitive type, return directly
            return obj
        serializer = cls.get_serializer(obj)
        return serializer.serialize(obj, bl_pointers_ref)

    # --- Deserialization ---

    @classmethod
    @abstractmethod
    def deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct, bl_pointers_ref: dict):
        """
        Base deserialization method.

        Deserialize data into a specific Blender object, creating sub-objects
        as needed. Partial data may be provided, in which case fields not
        specified are left at their defaults.
        """
        if (kit_ptr := data.get("_kit_ptr", None)):
            bl_pointers_ref[kit_ptr] = target_obj

        data_to_deserialize = cls.get_data_to_deserialize(data, target_obj)
        for stored_key, stored_value in data_to_deserialize:
            # Skip internal bookkeeping keys and properties unknown to the target
            if stored_key.startswith("_kit") or stored_key not in target_obj.bl_rna.properties:
                continue
            target_bl_prop = BlenderProperty(rep=target_obj.bl_rna.properties[stored_key],
                                             attr=getattr(target_obj, stored_key))

            # Collection case.
            # Unlike serialization, there is no property-array case: arrays
            # are directly assignable below.
            if isinstance(target_bl_prop.attr, bpy.types.bpy_prop_collection):
                cls.deserialize_collection(stored_value, target_bl_prop.attr, bl_pointers_ref)
                continue

            value_to_set = stored_value
            # Pointer case: dereference the value through the bl_pointers_ref map
            if target_bl_prop.rep.type == "POINTER":
                value_to_set = cls.deserialize_pointer(stored_value, target_bl_prop.attr, bl_pointers_ref)

            # Skip setting the property if it's read-only
            if target_bl_prop.rep.is_readonly:
                continue

            # Assign the property
            setattr(target_obj, stored_key, value_to_set)

            # If supported, update the Blender property after setting it
            if hasattr(target_bl_prop.attr, "update"):
                target_bl_prop.attr.update()

    @classmethod
    def deserialize_collection(cls, stored_value: Any, bl_coll: bpy.types.bpy_prop_collection, bl_pointers_ref: dict):
        """Deserialize a list of stored items into the collection ``bl_coll``."""
        # Static collection case: no "new" factory, deserialize pairwise in place
        if not hasattr(bl_coll, "new"):
            # BUGFIX: previously called a non-existent ``sub_deserialize``
            for item, item_data in zip(bl_coll, stored_value):
                cls.get_serializer(item).deserialize(item_data, item, bl_pointers_ref)
            return

        # Dynamic collection: items must be created through the collection's "new"
        new_func = bl_coll.bl_rna.functions["new"]
        for value in stored_value:
            # Default parameters as {identifier: stored value or Blender default};
            # the last RNA parameter is "new"'s return value, not an input
            default_new_func_params = {
                k: value.get(k, utils.get_bl_default(v))
                for k, v in list(new_func.parameters.items())[:-1]
            }
            new_func_params = value.get("_kit_new_params", default_new_func_params)

            # Dereference pointer-typed parameters through the pointer map
            solved_all_pointers = True
            for param in new_func.parameters:
                if param.identifier not in new_func_params or param.type != "POINTER":
                    continue
                # BUGFIX: the stored pointer ID lives in the params dict,
                # not on the RNA parameter object itself
                pointer_id = new_func_params[param.identifier]
                if bl_object := bl_pointers_ref.get(pointer_id):
                    new_func_params[param.identifier] = bl_object
                else:
                    print(f"No pointer found for param {param.identifier} of new function of {bl_coll}")
                    solved_all_pointers = False

            # Bail out if we failed to solve all pointers: calling "new" with a
            # raw pointer ID instead of an object would raise a RuntimeError
            if not solved_all_pointers:
                continue

            # Create the collection item; its type is implied by the
            # collection, so no manual construction is needed
            collection_item = bl_coll.new(**new_func_params)
            # Recursively deserialize into the newly constructed item
            # BUGFIX: dispatch through the item's own serializer (the computed
            # deserializer was previously ignored in favor of ``cls``)
            cls.get_serializer(collection_item).deserialize(value, collection_item, bl_pointers_ref)

    @classmethod
    def deserialize_pointer(cls, stored_value: Any, target_bl_prop_attr: bpy.types.bpy_struct, bl_pointers_ref: dict):
        """Resolve a stored pointer value into a live Blender object.

        ``stored_value`` is either an int (reference to an object already
        tracked in ``bl_pointers_ref``) or a dict of serialized data.
        """
        if stored_value is None:
            return None

        # Reference to an already-deserialized object: dereference and return
        if isinstance(stored_value, int):
            if stored_value not in bl_pointers_ref:
                print("DEBUG: Pointer reference hasn't been loaded yet")
            return bl_pointers_ref[stored_value]

        # Serialized data: create the Blender object if it doesn't exist yet
        if target_bl_prop_attr is None:
            # BUGFIX: construct before resolving the serializer; the previous
            # order effectively called get_serializer(None)
            target_bl_prop_attr = cls.construct_bl_object(stored_value)
        deserializer = cls.get_serializer(target_bl_prop_attr)
        # Recursively deserialize into the target object and track its pointer
        deserializer.deserialize(stored_value, target_bl_prop_attr, bl_pointers_ref)
        bl_pointers_ref[stored_value["_kit_ptr"]] = target_bl_prop_attr
        return target_bl_prop_attr

    @classmethod
    def get_data_to_deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct = None):
        """Return ``data.items()`` sorted to follow the object's property order."""
        # BUGFIX: compare stored keys against property *identifiers*; the old
        # code looked a string key up in a list of BlenderProperty wrappers,
        # which never matched, so the ordering sort was a no-op
        prop_order = [prop.rep.identifier for prop in cls.get_serialized_properties(target_obj)]
        return sorted(
            data.items(),
            key=lambda item: prop_order.index(item[0]) if item[0] in prop_order else 0,
        )

    # --- Getters for sub-serializers ---

    @classmethod
    def get_serializer_map(cls) -> dict[type[bpy.types.bpy_struct], type[Serializer]]:
        """Get the serializer map, stored in a class variable for simple caching."""
        if not cls.serializer_map:
            for subclass in utils.all_subclasses(Serializer):
                # Every concrete serializer must declare the Blender type it handles
                assert hasattr(subclass, "bl_type")
                cls.serializer_map[subclass.bl_type] = subclass
        return cls.serializer_map

    @classmethod
    def get_serializer(cls, bl_object: bpy.types.bpy_struct) -> type[Serializer]:
        """Get the closest corresponding serializer for a given Blender object using its MRO."""
        serializer_map = cls.get_serializer_map()
        bl_type = type(bl_object.bl_rna.type_recast())
        for bl_parent in bl_type.mro():
            if bl_parent in serializer_map:
                return serializer_map[bl_parent]
        # Fallback to base Serializer if no matches are found
        return Serializer

    # --- Properties to (de)serialize ---

    @classmethod
    def get_serialized_properties(cls, obj: bpy.types.bpy_struct | Any):
        """List the object's properties that survive blacklist/whitelist filtering."""
        serialized_properties: list[BlenderProperty] = [
            BlenderProperty(rep=prop, attr=getattr(obj, prop.identifier))
            for prop in obj.bl_rna.properties
            if not prop.identifier.startswith("bl_")  # Exclude internal Blender properties
            and prop.identifier not in cls.prop_blacklist  # Additional blacklist filtering
        ]
        if cls.prop_whitelist:  # Additional whitelist, applied after the blacklist
            serialized_properties = [
                prop for prop in serialized_properties
                if prop.rep.identifier in cls.prop_whitelist
            ]
        return serialized_properties
# class NodeSocket(Serializer):
# bl_type = bpy.types.NodeSocket
# prop_blacklist = Serializer.prop_blacklist + (
# "node",
# "links",
# "display_shape",
# "link_limit",
# )
# @classmethod
# def serialize(cls, socket_obj: bpy.types.NodeSocket, _: dict) -> dict:
# if socket_obj.is_unavailable:
# return None
# return super().serialize(socket_obj)
class Dumper: class Dumper:
pointers = {}
includes = [] includes = []
excludes = ["rna_type", "bl_rna", "id_data", "depsgraph"] excludes = ["rna_type", "bl_rna", "id_data", "depsgraph"]
@ -67,7 +356,7 @@ class Dumper:
print(f"New not implemented for data {data}") print(f"New not implemented for data {data}")
@classmethod @classmethod
def load(cls, data, bl_object=None): def load(cls, data, bl_pointers_ref, bl_object=None):
if bl_object is None: if bl_object is None:
bl_object = cls.new(data) bl_object = cls.new(data)
@ -75,7 +364,7 @@ class Dumper:
return return
if bl_pointer := data.get("bl_pointer"): if bl_pointer := data.get("bl_pointer"):
cls.pointers[bl_pointer] = bl_object bl_pointers_ref[bl_pointer] = bl_object
props = cls.properties(bl_object) props = cls.properties(bl_object)
for key, value in sorted( for key, value in sorted(
@ -98,9 +387,9 @@ class Dumper:
elif prop.type == "POINTER": elif prop.type == "POINTER":
if isinstance(value, int): # It's a pointer if isinstance(value, int): # It's a pointer
if value not in cls.pointers: if value not in bl_pointers_ref:
print(bl_object, "not loaded yet", prop) print(bl_object, "not loaded yet", prop)
value = cls.pointers[value] value = bl_pointers_ref[value]
elif value is None: elif value is None:
utils.set_bl_attribute(bl_object, key, value) utils.set_bl_attribute(bl_object, key, value)
@ -114,7 +403,7 @@ class Dumper:
attr = dumper.new(value) attr = dumper.new(value)
dumper.load(value, attr) dumper.load(value, attr)
cls.pointers[value["bl_pointer"]] = attr bl_pointers_ref[value["bl_pointer"]] = attr
if hasattr(attr, "update"): if hasattr(attr, "update"):
attr.update() attr.update()
@ -135,12 +424,12 @@ class Dumper:
# return bl_object # return bl_object
@classmethod @classmethod
def dump(cls, bl_object): def dump(cls, bl_object, bl_pointers_ref):
if isinstance(bl_object, (str, int, float, dict, list, type(None))): if isinstance(bl_object, (str, int, float, dict, list, type(None))):
return bl_object return bl_object
data = {"bl_pointer": bl_object.as_pointer()} data = {"bl_pointer": bl_object.as_pointer()}
cls.pointers[bl_object.as_pointer()] = bl_object bl_pointers_ref[bl_object.as_pointer()] = bl_object
for prop in cls.properties(bl_object): for prop in cls.properties(bl_object):
if not hasattr(bl_object, prop.identifier): if not hasattr(bl_object, prop.identifier):
@ -163,10 +452,10 @@ class Dumper:
value = PropCollection.dump(value) value = PropCollection.dump(value)
elif prop.type == "POINTER" and value: elif prop.type == "POINTER" and value:
if value.as_pointer() in cls.pointers: if value.as_pointer() in bl_pointers_ref:
value = value.as_pointer() value = value.as_pointer()
else: else:
cls.pointers[value.as_pointer()] = value bl_pointers_ref[value.as_pointer()] = value
dumper = get_dumper(value) dumper = get_dumper(value)
value = dumper.dump(value) value = dumper.dump(value)
@ -204,7 +493,7 @@ class PropCollection(Dumper):
dumper = None dumper = None
if not hasattr(coll, "new"): # Static collection if not hasattr(coll, "new"): # Static collection
for item, value in zip(coll, values): for item, value in zip(coll, values): # TODO: That zip doesn't make sense, or does it?
dumper = dumper or get_dumper(item) dumper = dumper or get_dumper(item)
dumper.load(value, item) dumper.load(value, item)
@ -227,7 +516,7 @@ class PropCollection(Dumper):
continue continue
pointer_id = params[param.identifier] pointer_id = params[param.identifier]
if bl_object := cls.pointers.get(pointer_id): if bl_object := bl_pointers_ref.get(pointer_id):
params[param.identifier] = bl_object params[param.identifier] = bl_object
else: else:
print(f"No Pointer found for param {param.identifier} of {coll}") print(f"No Pointer found for param {param.identifier} of {coll}")
@ -277,7 +566,7 @@ class NodeSocket(Dumper):
if socket.is_unavailable: if socket.is_unavailable:
return None return None
# cls.pointers[socket.as_pointer()] = socket # bl_pointers_ref[socket.as_pointer()] = socket
data = super().dump(socket) data = super().dump(socket)
@ -316,7 +605,7 @@ class NodeTreeInterfaceSocket(Dumper):
@classmethod @classmethod
def dump(cls, socket): def dump(cls, socket):
# cls.pointers[socket.as_pointer()] = socket # bl_pointers_ref[socket.as_pointer()] = socket
data = super().dump(socket) data = super().dump(socket)
# data["_id"] = socket.as_pointer() # data["_id"] = socket.as_pointer()
@ -340,7 +629,7 @@ class NodeSockets(PropCollection):
node_sockets = [s for s in coll if not s.is_unavailable] node_sockets = [s for s in coll if not s.is_unavailable]
for socket, value in zip(node_sockets, values): for socket, value in zip(node_sockets, values):
cls.pointers[value["bl_pointer"]] = socket bl_pointers_ref[value["bl_pointer"]] = socket
Dumper.load(value, socket) Dumper.load(value, socket)
# for k, v in value.items(): # for k, v in value.items():
# if k not in socket.bl_rna.properties: # if k not in socket.bl_rna.properties:
@ -353,7 +642,7 @@ class NodeSockets(PropCollection):
if len(node_sockets) == len(inputs): # Match by index if len(node_sockets) == len(inputs): # Match by index
super().load({"inputs": inputs}, node) super().load({"inputs": inputs}, node)
for socket, value in zip(node_sockets, coll): for socket, value in zip(node_sockets, coll):
cls.pointers[value['_id']] = socket bl_pointers_ref[value['_id']] = socket
else: # Match by name else: # Match by name
print(f'Match Inputs by Name for node {node}') print(f'Match Inputs by Name for node {node}')
for socket in node_sockets: for socket in node_sockets:
@ -363,7 +652,7 @@ class NodeSockets(PropCollection):
value = inputs[index] value = inputs[index]
print(socket, value) print(socket, value)
cls.pointers[value['_id']] = socket bl_pointers_ref[value['_id']] = socket
Dumper.load(value, socket) Dumper.load(value, socket)
del inputs[index] del inputs[index]
@ -389,7 +678,7 @@ class Node(Dumper):
@classmethod @classmethod
def dump(cls, node=None): def dump(cls, node=None):
# cls.pointers[node.as_pointer()] = node # bl_pointers_ref[node.as_pointer()] = node
data = super().dump(node) data = super().dump(node)
# data["_id"] = node.as_pointer() # data["_id"] = node.as_pointer()
@ -409,7 +698,7 @@ class Node(Dumper):
def load(cls, data, node): def load(cls, data, node):
if node is None: if node is None:
return return
# cls.pointers[data['bl_pointer']] = node # bl_pointers_ref[data['bl_pointer']] = node
inputs = copy(data.pop("inputs", [])) inputs = copy(data.pop("inputs", []))
outputs = copy(data.pop("outputs", [])) outputs = copy(data.pop("outputs", []))
@ -465,8 +754,8 @@ class Nodes(PropCollection):
# Pair zone input and output # Pair zone input and output
for node_data in values: for node_data in values:
if paired_output_id := node_data.get("_pair_with_output", None): if paired_output_id := node_data.get("_pair_with_output", None):
node = cls.pointers[node_data["bl_pointer"]] node = bl_pointers_ref[node_data["bl_pointer"]]
node.pair_with_output(cls.pointers[paired_output_id]) node.pair_with_output(bl_pointers_ref[paired_output_id])
Dumper.load( Dumper.load(
{"inputs": node_data["inputs"], "outputs": node_data["outputs"]}, {"inputs": node_data["inputs"], "outputs": node_data["outputs"]},
@ -701,7 +990,7 @@ class CompositorNodeRLayers(Node):
view_layer_data[prop.identifier] view_layer_data[prop.identifier]
""" """
# cls.pointers[bl_object.as_pointer()] = bl_object # bl_pointers_ref[bl_object.as_pointer()] = bl_object
data["scene"] = { data["scene"] = {
"bl_pointer": node.scene.as_pointer(), "bl_pointer": node.scene.as_pointer(),
@ -743,21 +1032,3 @@ class ViewLayers(PropCollection):
view_layer = coll.new(value["name"]) view_layer = coll.new(value["name"])
Dumper.load(value, view_layer) Dumper.load(value, view_layer)
class DumperRegistry:
    """Singleton-like registry mapping Blender types to their Dumper subclasses.

    The map is built once, on first instantiation, and cached in a class
    attribute so subsequent instantiations are cheap.
    """

    # Class-level cache: {bl_type: Dumper subclass}; None until first build
    dumper_map = None

    def __init__(self):
        if self.dumper_map is None:
            self.construct_dumper_map()

    @classmethod
    def construct_dumper_map(cls):
        """Build the {bl_type: Dumper subclass} map from all Dumper subclasses."""
        cls.dumper_map = {}
        for subclass in utils.all_subclasses(Dumper):
            # Every concrete Dumper must declare the Blender type it handles
            assert hasattr(subclass, "bl_type")
            cls.dumper_map[subclass.bl_type] = subclass
        # BUGFIX: removed leftover debug print of the whole map

View File

@ -14,7 +14,7 @@ import bpy
from bpy.props import BoolProperty, EnumProperty from bpy.props import BoolProperty, EnumProperty
from bpy.types import Operator from bpy.types import Operator
from .dumper import dump_nodes, load_nodes from .dumper import serialize_selected_nodes_from_node_tree, deserialize_nodes_into_node_tree
from .node_utils import remap_node_group_duplicates from .node_utils import remap_node_group_duplicates
from .pack_nodes import combine_objects, extract_objects from .pack_nodes import combine_objects, extract_objects
from .formats import dump_nkit_format, parse_nkit_format from .formats import dump_nkit_format, parse_nkit_format
@ -28,18 +28,12 @@ class NODEKIT_OT_copy(Operator):
def execute(self, context): def execute(self, context):
ntree = context.space_data.edit_tree ntree = context.space_data.edit_tree
selected_nodes = [node for node in ntree.nodes if node.select] serialized_nodes_data = serialize_selected_nodes_from_node_tree(ntree)
ntree_data = { context.window_manager.clipboard = dump_nkit_format(serialized_nodes_data)
"nodes": dump_nodes(selected_nodes),
"links": dump_nodes(
[l for l in ntree.links if l.from_node.select and l.to_node.select]
),
}
context.window_manager.clipboard = dump_nkit_format(ntree_data) num_selected_nodes = len([n for n in ntree.nodes if n.select])
self.report({"INFO"}, f"Copied {num_selected_nodes} selected nodes to system clipboard")
self.report({"INFO"}, f"Copied {len(selected_nodes)} selected nodes to system clipboard")
return {"FINISHED"} return {"FINISHED"}
@ -55,7 +49,8 @@ class NODEKIT_OT_copy_tree(Operator):
context.window_manager.clipboard = dump_nkit_format(ntree_data) context.window_manager.clipboard = dump_nkit_format(ntree_data)
self.report({"INFO"}, f"Copied {len(ntree.nodes)} selected nodes to system clipboard") num_nodes = len(ntree.nodes)
self.report({"INFO"}, f"Copied {num_nodes} selected nodes to system clipboard")
return {"FINISHED"} return {"FINISHED"}
@ -67,7 +62,7 @@ class NODEKIT_OT_paste(Operator):
def execute(self, context): def execute(self, context):
ntree_data = parse_nkit_format(context.window_manager.clipboard) ntree_data = parse_nkit_format(context.window_manager.clipboard)
load_nodes(ntree_data, context.space_data.edit_tree) deserialize_nodes_into_node_tree(ntree_data, context.space_data.edit_tree)
self.report({"INFO"}, f"X node(s) pasted from system clipboard") # TODO: Ge the number of parsed nodes returned self.report({"INFO"}, f"X node(s) pasted from system clipboard") # TODO: Ge the number of parsed nodes returned
return {"FINISHED"} return {"FINISHED"}

View File

@ -1,6 +1,21 @@
from dataclasses import dataclass
from typing import Any
import bpy import bpy
@dataclass
class BlenderProperty:
    """
    Blender property abstraction, pairing a property's RNA definition (``rep``)
    with its captured value (``attr``), since a Blender property value isn't
    directly accessible from its ``bpy.types.Property`` representation alone.
    NOTE: Do not rely on ``attr`` being up to date — the captured value can go
    stale when the underlying Blender data changes.
    """
    # RNA definition of the property
    rep: bpy.types.Property
    # Value captured at construction time (e.g. via getattr(obj, rep.identifier))
    attr: Any
def all_subclasses(cls): def all_subclasses(cls):
return set(cls.__subclasses__()).union( return set(cls.__subclasses__()).union(
[s for c in cls.__subclasses__() for s in all_subclasses(c)] [s for c in cls.__subclasses__() for s in all_subclasses(c)]