1022 lines
31 KiB
Python
1022 lines
31 KiB
Python
from __future__ import annotations
|
|
|
|
from abc import ABC, abstractmethod
|
|
from copy import copy
|
|
from dataclasses import dataclass
|
|
from os.path import abspath
|
|
from typing import Any
|
|
|
|
import bpy
|
|
|
|
from . import utils
|
|
from . utils import BlenderProperty
|
|
|
|
|
|
def dump_nodes(nodes: list[bpy.types.Node]):
    """Serialize *nodes* into plain data (one dict per node)."""
    # Shared pointer table for this dump operation, used for relinking
    pointers_ref = {}
    return [Serializer.serialize(node, pointers_ref) for node in nodes]
|
|
|
|
|
|
def load_nodes(data, node_tree):
    """Deserialize previously dumped *data* into *node_tree*."""
    # Fresh pointer table for this load operation
    pointers_ref = {}
    Serializer.deserialize(data, node_tree, pointers_ref)
|
|
|
|
|
|
# TODO: Sub serialize function where the isinstance is set to the default number of things.
|
|
|
|
# TODO: Collection is not handled as a class anymore, handle it manually
|
|
|
|
class Serializer(ABC):
    """
    Base Serializer class, from which other Serializers are derived.

    `bl_pointers_ref` is a mutable dict threaded through the
    serialize/deserialize calls of one operation. It maps Blender object
    pointers (the ints returned by `as_pointer()`) to live objects so that
    POINTER properties can be relinked during deserialization.
    """

    # Whitelisted properties, applied after the blacklist
    prop_whitelist = None
    # Properties excluded from (de)serialization, in addition to any bl_* properties
    prop_blacklist = ("rna_type", "id_data", "depsgraph")

    # Cache mapping a Blender type to its serializer class (built lazily)
    serializer_map = {}

    # --- Getters for sub-serializers ---

    @classmethod
    def get_serializer_map(cls) -> dict[type[bpy.types.bpy_struct], type[Serializer]]:
        """Build (once) and return the {bl_type: serializer} map."""
        if not cls.serializer_map:
            # NOTE(review): the map is built from Dumper subclasses while the
            # port from Dumper to Serializer is in progress -- confirm this is
            # still intended once the port is complete.
            for subclass in utils.all_subclasses(Dumper):
                assert hasattr(subclass, "bl_type")
                cls.serializer_map[subclass.bl_type] = subclass

        return cls.serializer_map

    @classmethod
    def get_serializer(cls, bl_object: bpy.types.bpy_struct) -> type[Serializer]:
        """Get the closest corresponding serializer for a Blender object using its MRO."""
        serializer_map = cls.get_serializer_map()

        bl_type = type(bl_object.bl_rna.type_recast())

        for bl_parent in bl_type.mro():
            if bl_parent in serializer_map:
                return serializer_map[bl_parent]

        # Fallback to base Serializer if no matches are found
        return Serializer

    # --- Properties to (de)serialize ---

    @classmethod
    def get_serialized_properties(cls, obj: bpy.types.bpy_struct | Any):
        """Return the filtered [BlenderProperty(rep, attr)] list for *obj*.

        `rep` is the RNA property definition, `attr` its current value.
        """
        serialized_properties: list[BlenderProperty] = [
            BlenderProperty(rep=prop, attr=getattr(obj, prop.identifier))
            for prop in obj.bl_rna.properties
            if not prop.identifier.startswith("bl_")  # Exclude internal Blender properties
            and prop.identifier not in cls.prop_blacklist  # Additional blacklist filtering
        ]

        if cls.prop_whitelist:  # Additional whitelist, applied after the blacklist
            serialized_properties = [
                prop for prop in serialized_properties
                if prop.rep.identifier in cls.prop_whitelist
            ]

        return serialized_properties

    # --- Serialization ---

    @classmethod
    @abstractmethod
    def serialize(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> dict:
        """Serialize *obj* into a dict; primitives are returned unchanged.

        The object's pointer is stored under "_kit_ptr" and registered in
        *bl_pointers_ref* so other properties can reference it.
        """
        # Early return: primitives serialize as themselves
        # TODO: Ported as is, check the heuristics (more attribute types to add?)
        if isinstance(obj, (str, int, float, dict, list, type(None))):
            return obj

        # Returned data; tracks the pointer for later re-assignments during deserialization
        data = {"_kit_ptr": obj.as_pointer()}
        bl_pointers_ref[obj.as_pointer()] = obj

        # Iterate over all *filtered* properties found in the object
        for bl_prop in cls.get_serialized_properties(obj):
            # Do not store default values nor read-only properties
            if (array := getattr(bl_prop.rep, "default_array", None)) and bl_prop.attr == array:
                continue
            if isinstance(bl_prop.attr, (str, int, float)) and bl_prop.attr == bl_prop.rep.default:
                continue
            if obj.is_property_readonly(bl_prop.rep.identifier):
                continue

            # Serialize each property
            data[bl_prop.rep.identifier] = cls.serialize_property(bl_prop, bl_pointers_ref)

        return data

    @classmethod
    def serialize_property(cls, bl_prop: BlenderProperty, bl_pointers_ref: dict) -> Any:
        """Serialize one property; special cases for arrays/collections/pointers."""
        # Property array case (BoolProperty, IntProperty and FloatProperty):
        # items are always primitives, copy them into a plain list
        if getattr(bl_prop.rep, "is_array", False):
            return list(bl_prop.attr)

        # Collection case
        if isinstance(bl_prop.attr, bpy.types.bpy_prop_collection):
            collection = bl_prop.attr
            if not collection:
                return []

            # FIX: bl_pointers_ref was not forwarded, raising TypeError
            values = [cls.sub_serialize(sub_prop, bl_pointers_ref) for sub_prop in collection]

            # Drop None entries: serializers may skip items (e.g. unavailable sockets)
            return [v for v in values if v is not None]

        # Pointer case
        if bl_prop.rep.type == "POINTER" and bl_prop.attr:
            # Property points to another object; store its ptr/deref value in our pointer table
            ptr = bl_prop.attr.as_pointer()

            # Already serialized elsewhere: store only the reference
            if ptr in bl_pointers_ref:
                return ptr

            bl_pointers_ref[ptr] = bl_prop.attr

        return cls.sub_serialize(bl_prop.attr, bl_pointers_ref)

    @classmethod
    def sub_serialize(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> Any:
        """Resolve which serializer class to use and serialize *obj* with it."""
        if not isinstance(obj, bpy.types.bpy_struct):
            # Primitive type, return directly
            return obj

        serializer = cls.get_serializer(obj)

        return serializer.serialize(obj, bl_pointers_ref)

    @classmethod
    @abstractmethod
    def construct_bl_object(cls, data: dict):
        """Abstract hook for subclasses to construct their Blender object from *data*."""
        print("DEBUG: construct_bl_object called on Base Serializer, shouldn't happen")
        return None

    # --- Deserialization ---

    @classmethod
    @abstractmethod
    def deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct, bl_pointers_ref: dict):
        """Apply *data* onto *target_obj*, constructing it first when None."""
        if target_obj is None:
            target_obj = cls.construct_bl_object(data)

        if target_obj is None:
            print("DEBUG: No method to construct object")
            # Failed to construct object / no specialization to construct object
            return None

        # Register the object under its original pointer for later relinking
        if (kit_ptr := data.get("_kit_ptr", None)):
            bl_pointers_ref[kit_ptr] = target_obj

        data_to_deserialize = cls.get_data_to_deserialize(data, target_obj)

        for stored_key, stored_value in data_to_deserialize:
            # Skip internal keys and properties the target does not have
            if stored_key.startswith("_kit") or stored_key not in target_obj.bl_rna.properties:
                continue

            target_bl_prop = BlenderProperty(rep=target_obj.bl_rna.properties[stored_key],
                                             attr=getattr(target_obj, stored_key))

            # Skip the property if it's read-only
            if target_bl_prop.rep.is_readonly:
                continue

            # Unlike serialization, there's no property array case, as they are just directly assigned

            # Collection case
            if isinstance(target_bl_prop.attr, bpy.types.bpy_prop_collection):
                cls.deserialize_collection(stored_value, target_bl_prop.attr, bl_pointers_ref)
                continue

            value_to_set = stored_value

            # Pointer case
            if target_bl_prop.rep.type == "POINTER":
                value_to_set = cls.deserialize_pointer(stored_value, target_bl_prop.attr, bl_pointers_ref)

            # Assign the property
            setattr(target_obj, stored_key, value_to_set)

            # If supported, update the Blender property after setting it
            # NOTE(review): this calls update() on the value read *before*
            # assignment -- confirm it shouldn't act on the new value instead
            if hasattr(target_bl_prop.attr, "update"):
                target_bl_prop.attr.update()

    @classmethod
    def deserialize_collection(cls, stored_value: Any, bl_coll: bpy.types.bpy_prop_collection, bl_pointers_ref: dict):
        """Load *stored_value* into *bl_coll*, creating items via new() when possible."""
        # Static collection case: no new(), load in place
        if not hasattr(bl_coll, "new"):
            cls.sub_deserialize(stored_value, bl_coll, bl_pointers_ref)
            return

        # TODO: Code pasted as is, review later
        new_func = bl_coll.bl_rna.functions["new"]
        for i, value in enumerate(stored_value):
            # Items dumped with explicit "_new" params use them verbatim,
            # otherwise fall back to stored/default values for each parameter
            if value.get("_new"):
                params = value["_new"]
            else:
                params = {
                    k: value.get(k, utils.get_bl_default(v))
                    for k, v in new_func.parameters.items()[:-1]
                }

            # Replace arg pointer with bl object
            valid_pointers = True
            for param in bl_coll.bl_rna.functions["new"].parameters:
                if param.identifier not in params or param.type != "POINTER":
                    continue

                # TODO: It might be possible to abstract this into deserialize_pointer somehow
                pointer_id = params[param.identifier]
                if bl_object := bl_pointers_ref.get(pointer_id):
                    params[param.identifier] = bl_object
                else:
                    print(f"No Pointer found for param {param.identifier} of {bl_coll}")
                    valid_pointers = False

            if not valid_pointers:
                continue

            # TODO: Ugly bad :( -- fixed-size collections raise RuntimeError on
            # new(); fall back to the existing slot at the same index
            try:
                item = bl_coll.new(**params)
            except RuntimeError:
                try:
                    item = bl_coll[i]
                except IndexError:
                    break

    @classmethod
    def deserialize_pointer(cls, stored_value: Any, target_bl_prop_attr: bpy.types.bpy_struct, bl_pointers_ref: dict):
        """Resolve a serialized POINTER value to a live Blender object."""
        if stored_value is None:
            return None

        # Actual pointer: reference to an object (de)serialized elsewhere
        if isinstance(stored_value, int):
            if stored_value not in bl_pointers_ref:
                print("DEBUG: Pointer reference hasn't been loaded yet")
            # Obtain a reference to a previously dereferenced object
            return bl_pointers_ref[stored_value]

        # Inline dict: create/fill the object by passing it to the sub-serializer
        cls.sub_deserialize(stored_value, target_bl_prop_attr, bl_pointers_ref)

        bl_pointers_ref[stored_value["_kit_ptr"]] = target_bl_prop_attr

        return target_bl_prop_attr

    @classmethod
    def sub_deserialize(cls, data: dict, target_bl_obj: bpy.types.bpy_struct, bl_pointers_ref: dict):
        """Dispatch deserialization of *data* to the serializer of *target_bl_obj*."""
        # Get the deserializer for the corresponding target object;
        # introspects on its type, recreates the object even if it is None
        deserializer = cls.get_serializer(target_bl_obj)

        # FIX: arguments were passed as (data, bl_pointers_ref, target) which
        # swapped the pointer table and the target object
        return deserializer.deserialize(data, target_bl_obj, bl_pointers_ref)

    @classmethod
    def get_data_to_deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct = None):
        """Return data items sorted in the target's property-declaration order."""
        # FIX: compare stored keys against property *identifiers*; the previous
        # code tested a string key against BlenderProperty objects, so the sort
        # key was always 0 and no ordering ever happened
        prop_identifiers = [
            prop.rep.identifier for prop in cls.get_serialized_properties(target_obj)
        ]

        sorted_data = sorted(
            data.items(),
            key=lambda item: prop_identifiers.index(item[0]) if item[0] in prop_identifiers else 0,
        )

        return sorted_data
|
|
|
|
# class NodeSocket(Serializer):
|
|
# bl_type = bpy.types.NodeSocket
|
|
# prop_blacklist = Serializer.prop_blacklist + (
|
|
# "node",
|
|
# "links",
|
|
# "display_shape",
|
|
# "link_limit",
|
|
# )
|
|
|
|
# @classmethod
|
|
# def serialize(cls, socket_obj: bpy.types.NodeSocket, _: dict) -> dict:
|
|
# if socket_obj.is_unavailable:
|
|
# return None
|
|
|
|
# return super().serialize(socket_obj)
|
|
|
|
|
|
class Dumper:
    """Legacy serializer: dumps Blender objects to dicts and loads them back.

    NOTE(review): this hierarchy appears to be mid-refactor towards the new
    `Serializer` class. Several call sites below invoke `load`/`dump` with
    fewer arguments than the signatures declare -- flagged inline.
    """

    # Whitelist of property identifiers; when empty, all non-excluded props are used
    includes = []
    # Property identifiers never (de)serialized
    excludes = ["rna_type", "bl_rna", "id_data", "depsgraph"]

    @classmethod
    def properties(cls, bl_object):
        """Return the RNA properties of *bl_object* selected for (de)serialization."""
        # NOTE(review): the whitelist only takes effect when the blacklist is
        # empty, so subclasses setting `includes` must also clear `excludes`.
        if cls.includes and not cls.excludes:
            return [bl_object.bl_rna.properties[p] for p in cls.includes]
        else:
            return [
                p
                for p in bl_object.bl_rna.properties
                if not p.identifier.startswith("bl_")
                and p.identifier not in cls.excludes
            ]

    @classmethod
    def new(cls, data):
        """Fallback constructor; subclasses override to create their datablock."""
        print(f"New not implemented for data {data}")

    @classmethod
    def load(cls, data, bl_pointers_ref, bl_object=None):
        """Apply *data* onto *bl_object*, creating it via `cls.new` when None.

        *bl_pointers_ref* maps original `as_pointer()` ints to live objects.
        """
        if bl_object is None:
            bl_object = cls.new(data)

        if bl_object is None:
            return

        # Register this object under its original pointer for relinking
        if bl_pointer := data.get("bl_pointer"):
            bl_pointers_ref[bl_pointer] = bl_object

        props = cls.properties(bl_object)
        # NOTE(review): `props` holds RNA property objects while x[0] is a
        # string key, so `x[0] in props` never matches and the intended
        # declaration-order sort is a no-op -- confirm.
        for key, value in sorted(
            data.items(), key=lambda x: props.index(x[0]) if x[0] in props else 0
        ):
            # Skip private helper keys ("_new", ...) and unknown properties
            if key.startswith("_") or key not in bl_object.bl_rna.properties:
                continue

            prop = bl_object.bl_rna.properties[key]
            attr = getattr(bl_object, key)

            if prop.type == "COLLECTION":
                dumper = PropCollection
                if hasattr(attr, "bl_rna"):
                    bl_type = attr.bl_rna.type_recast()
                    # NOTE(review): `PropCollection or ...` always evaluates to
                    # PropCollection, so get_dumper(bl_type) is dead code here.
                    dumper = PropCollection or get_dumper(bl_type)

                dumper.load(value, attr)
                continue

            elif prop.type == "POINTER":
                if isinstance(value, int):  # It's a pointer
                    if value not in bl_pointers_ref:
                        print(bl_object, "not loaded yet", prop)
                    value = bl_pointers_ref[value]

                elif value is None:
                    utils.set_bl_attribute(bl_object, key, value)

                else:
                    bl_type = prop.fixed_type.bl_rna.type_recast()
                    dumper = get_dumper(bl_type)

                    # If the pointer exists register the pointer then load data
                    if attr is None:
                        attr = dumper.new(value)

                    # NOTE(review): called without bl_pointers_ref -- does not
                    # match this method's own signature; confirm.
                    dumper.load(value, attr)
                    bl_pointers_ref[value["bl_pointer"]] = attr

                    if hasattr(attr, "update"):
                        attr.update()

                    value = attr

                if not prop.is_readonly:
                    utils.set_bl_attribute(bl_object, key, value)

                # Some coll needs a manual update like curve mapping
                if hasattr(attr, "update"):
                    attr.update()

            elif not prop.is_readonly:
                utils.set_bl_attribute(bl_object, key, value)
                continue

        # return bl_object

    @classmethod
    def dump(cls, bl_object, bl_pointers_ref):
        """Dump *bl_object* into a dict keyed by property identifiers.

        Primitives pass through unchanged; the object's pointer is stored
        under "bl_pointer" and registered in *bl_pointers_ref*.
        """
        if isinstance(bl_object, (str, int, float, dict, list, type(None))):
            return bl_object

        data = {"bl_pointer": bl_object.as_pointer()}
        bl_pointers_ref[bl_object.as_pointer()] = bl_object

        for prop in cls.properties(bl_object):
            if not hasattr(bl_object, prop.identifier):
                print(f"{bl_object} has no attribute {prop.identifier}")
                continue

            value = getattr(bl_object, prop.identifier)

            # Not storing default value (whitelisted props are always stored)
            if prop.identifier not in cls.includes:
                if (array := getattr(prop, "default_array", None)) and value == array:
                    continue
                if isinstance(value, (str, int, float)) and value == prop.default:
                    continue

            if getattr(prop, "is_array", False):
                value = PropArray.dump(value)

            elif prop.type == "COLLECTION":
                value = PropCollection.dump(value)

            elif prop.type == "POINTER" and value:
                # Already dumped elsewhere: store only the pointer reference
                if value.as_pointer() in bl_pointers_ref:
                    value = value.as_pointer()
                else:
                    bl_pointers_ref[value.as_pointer()] = value
                    dumper = get_dumper(value)
                    # NOTE(review): called without bl_pointers_ref -- does not
                    # match this method's own signature; confirm.
                    value = dumper.dump(value)

            elif bl_object.is_property_readonly(prop.identifier):
                continue

            else:
                dumper = get_dumper(value)
                # NOTE(review): same missing bl_pointers_ref argument as above.
                value = dumper.dump(value)

            data[prop.identifier] = value

        return data
|
|
|
|
|
|
class PropCollection(Dumper):
    """Dumper for bpy_prop_collection properties (nodes, sockets, points...)."""

    bl_type = bpy.types.bpy_prop_collection

    @classmethod
    def dump(cls, coll):
        """Dump every element of *coll*; assumes a homogeneous collection."""
        if not len(coll):
            return []

        # One dumper chosen from the first item and reused for all elements
        dumper = get_dumper(coll[0])
        # NOTE(review): dump() called without bl_pointers_ref -- does not match
        # Dumper.dump's (bl_object, bl_pointers_ref) signature; confirm.
        values = [dumper.dump(e) for e in coll]

        # Value cannot be None
        return [v for v in values if v is not None]

    @classmethod
    def load(cls, values, coll):
        """Load *values* into *coll*, creating items via coll.new() when available."""
        if not values:
            return

        dumper = None

        if not hasattr(coll, "new"):  # Static collection
            for item, value in zip(coll, values):  # TODO: That zip doesn't make sense, or does it?
                dumper = dumper or get_dumper(item)
                dumper.load(value, item)

            return

        new_func = coll.bl_rna.functions["new"]
        for i, value in enumerate(values):
            # Items dumped with explicit "_new" params use them verbatim,
            # otherwise fall back to stored/default values per parameter
            if value.get("_new"):
                params = value["_new"]
            else:
                params = {
                    k: value.get(k, utils.get_bl_default(v))
                    for k, v in new_func.parameters.items()[:-1]
                }

            # Replace arg pointer with bl object
            valid_pointers = True
            for param in coll.bl_rna.functions["new"].parameters:
                if param.identifier not in params or param.type != "POINTER":
                    continue

                pointer_id = params[param.identifier]
                # NOTE(review): bl_pointers_ref is not defined in this scope --
                # this raises NameError whenever a pointer parameter is present.
                if bl_object := bl_pointers_ref.get(pointer_id):
                    params[param.identifier] = bl_object
                else:
                    print(f"No Pointer found for param {param.identifier} of {coll}")
                    valid_pointers = False

            if not valid_pointers:
                continue

            # Fixed-size collections raise RuntimeError on new(); fall back to
            # the existing slot at the same index
            try:
                item = coll.new(**params)
            except RuntimeError as e:
                try:
                    item = coll[i]
                except IndexError as e:
                    break

            dumper = get_dumper(item)
            dumper.load(value, item)  # (item, value)
|
|
|
|
|
|
class PropArray(Dumper):
    """Dumper for bpy_prop_array values, flattened into a single list."""

    bl_type = bpy.types.bpy_prop_array

    @classmethod
    def dump(cls, array):
        """Recursively flatten *array* into a flat list of numbers."""
        flat = []
        for element in array:
            if isinstance(element, (int, float)):
                flat.append(element)
            else:
                # Nested row (multi-dimensional array): flatten it recursively
                flat.extend(cls.dump(element))
        return flat
|
|
|
|
|
|
class NodeSocket(Dumper):
    """Dumper for node sockets; unavailable sockets are skipped entirely."""

    bl_type = bpy.types.NodeSocket
    excludes = Dumper.excludes + [
        "node",
        "links",
        "display_shape",
        "rna_type",
        "link_limit",
    ]

    @classmethod
    def dump(cls, socket):
        """Dump *socket*, or return None when it is unavailable."""
        if socket.is_unavailable:
            return None

        return super().dump(socket)
|
|
|
|
|
|
class NodeGeometryRepeatOutputItems(PropCollection):
    """Loader for repeat-zone output items; the collection is rebuilt from scratch."""

    bl_type = bpy.types.NodeGeometryRepeatOutputItems

    @classmethod
    def load(cls, values, coll):
        """Clear existing items first so loading does not duplicate them."""
        coll.clear()
        super().load(values, coll)
|
|
|
|
|
|
class NodeLink(Dumper):
    """Dumper for node links, reduced to the pointers of the two linked sockets."""

    bl_type = bpy.types.NodeLink

    @classmethod
    def dump(cls, link):
        """Dump *link* as the "_new" parameters used to recreate it on load."""
        new_params = {
            "input": link.from_socket.as_pointer(),
            "output": link.to_socket.as_pointer(),
        }
        return {"_new": new_params}
|
|
|
|
|
|
class NodeTreeInterfaceSocket(Dumper):
    """Dumper for node-tree interface items (sockets and panels)."""

    bl_type = bpy.types.NodeTreeInterfaceSocket
    excludes = Dumper.excludes + ["parent", "interface_items"]

    @classmethod
    def dump(cls, socket):
        """Dump *socket* plus the "_new" parameters needed to recreate it."""
        data = super().dump(socket)

        data["_new"] = {"name": data.get("name", "")}

        # Sockets (unlike panels) also need their in/out direction to be recreated
        if socket.item_type == "SOCKET":
            data["_new"]["in_out"] = socket.in_out

        # It's a real panel, not the interface root: remember the parent pointer
        if socket.parent.parent:
            data["parent"] = socket.parent.as_pointer()

        return data
|
|
|
|
|
|
class NodeSockets(PropCollection):
    """Loader for a node's socket collections, matched to values by index."""

    @classmethod
    def load(cls, values, coll):
        """Apply *values* to the available sockets of *coll*, in order."""
        # Unavailable sockets are not dumped, so only pair the available ones
        node_sockets = [s for s in coll if not s.is_unavailable]
        for socket, value in zip(node_sockets, values):
            # NOTE(review): bl_pointers_ref is not defined in this scope --
            # raises NameError; also Dumper.load's signature is
            # (data, bl_pointers_ref, bl_object=None), so *socket* lands in
            # the bl_pointers_ref slot. Confirm against the refactor.
            bl_pointers_ref[value["bl_pointer"]] = socket
            Dumper.load(value, socket)

    """
    # Match Inputs Pointers
    node_sockets = [s for s in coll if not s.is_unavailable]
    if len(node_sockets) == len(inputs): # Match by index
        super().load({"inputs": inputs}, node)
        for socket, value in zip(node_sockets, coll):
            bl_pointers_ref[value['_id']] = socket
    else: # Match by name
        print(f'Match Inputs by Name for node {node}')
        for socket in node_sockets:
            index = next((i for i, v in enumerate(inputs) if v['name'] == socket.name), None)
            if index is None:
                continue

            value = inputs[index]
            print(socket, value)
            bl_pointers_ref[value['_id']] = socket

            Dumper.load(value, socket)
            del inputs[index]
    """
|
|
|
|
|
|
class NodeInputs(NodeSockets):
    # Input socket collection: behavior comes entirely from NodeSockets
    bl_type = bpy.types.NodeInputs
|
|
|
|
|
|
class NodeOutputs(NodeSockets):
    # Output socket collection: behavior comes entirely from NodeSockets
    bl_type = bpy.types.NodeOutputs
|
|
|
|
|
|
class Node(Dumper):
    """Dumper for nodes; records the "_new" creation type and zone pairing."""

    bl_type = bpy.types.Node
    excludes = Dumper.excludes + [
        "dimensions",
        "height",
        "internal_links",
        "paired_output",
    ]

    @classmethod
    def dump(cls, node=None):
        """Dump *node* together with the parameters needed to recreate it."""
        data = super().dump(node)

        # Parameters forwarded to nodes.new() on load
        data["_new"] = {
            "type": node.bl_rna.identifier
        }  # 'node_tree': node.id_data.as_pointer()

        # Simulation/repeat zones: remember which output node this one pairs with
        if paired_output := getattr(node, "paired_output", None):
            data["_pair_with_output"] = paired_output.as_pointer()

        # if node.parent:
        #     data['location'] -= Vector()node.parent.location

        return data

    @classmethod
    def load(cls, data, node):
        """Load *data* onto *node*; sockets are applied after the other properties."""
        if node is None:
            return

        # Defer socket loading: setting node properties can rebuild the
        # socket layout, so sockets are applied in a second pass below.
        # NOTE(review): super().load here passes *node* where Dumper.load
        # expects bl_pointers_ref -- confirm against the refactor.
        inputs = copy(data.pop("inputs", []))
        outputs = copy(data.pop("outputs", []))

        super().load(data, node)

        # Restore the popped entries so *data* is left as the caller provided it
        data["inputs"] = inputs
        data["outputs"] = outputs

        # Loading input and outputs after the properties
        super().load({"inputs": inputs, "outputs": outputs}, node)

        # Frame parenting: stored locations are parent-relative
        if node.parent:
            node.location += node.parent.location

        # if node.type != 'FRAME':
        #     node.location.y -= 500
|
|
|
|
|
|
class CompositorNodeGlare(Node):
    # Glare node: always store "quality" (whitelisted props skip the
    # default-value check in Dumper.dump; note that `includes` does not
    # restrict the property list while `excludes` is non-empty)
    bl_type = bpy.types.CompositorNodeGlare

    includes = ["quality"]
|
|
|
|
|
|
class NodeTreeInterface(Dumper):
    """Dumper for the node-tree interface (its tree of sockets and panels)."""

    bl_type = bpy.types.NodeTreeInterface

    @classmethod
    def load(cls, data, interface):
        """Recreate interface items from *data*, then restore the active index."""
        print("Load Interface")

        for value in data.get("items_tree", []):
            item_type = value.get("item_type", "SOCKET")
            if item_type == "SOCKET":
                item = interface.new_socket(**value["_new"])
            elif item_type == "PANEL":
                # print(value['_new'])
                item = interface.new_panel(**value["_new"])
            # NOTE(review): an unknown item_type leaves `item` unbound and the
            # next call raises NameError -- confirm only these two types occur.

            # NOTE(review): NodeTreeInterfaceSocket inherits Dumper.load whose
            # signature is (data, bl_pointers_ref, bl_object=None); *item* is
            # passed in the bl_pointers_ref slot here -- confirm.
            NodeTreeInterfaceSocket.load(value, item)

        interface.active_index = data.get("active_index", 0)
|
|
|
|
|
|
class Nodes(PropCollection):
    """Loader for a node tree's nodes; pairs zone input/output nodes after load."""

    bl_type = bpy.types.Nodes

    @classmethod
    def load(cls, values, coll):
        """Load all nodes, then resolve simulation/repeat zone pairings."""
        super().load(values, coll)

        # Pair zone input and output
        for node_data in values:
            if paired_output_id := node_data.get("_pair_with_output", None):
                # NOTE(review): bl_pointers_ref is not defined in this scope --
                # raises NameError whenever a zone node is present; confirm.
                node = bl_pointers_ref[node_data["bl_pointer"]]
                node.pair_with_output(bl_pointers_ref[paired_output_id])

                # Pairing rebuilds the sockets, so re-apply their stored data
                Dumper.load(
                    {"inputs": node_data["inputs"], "outputs": node_data["outputs"]},
                    node,
                )
|
|
|
|
|
|
class NodeTree(Dumper):
    """Dumper for node trees; library-linked trees are stored as a link reference."""

    bl_type = bpy.types.NodeTree
    excludes = []
    includes = ["name", "interface", "nodes", "links"]

    @classmethod
    def new(cls, data):
        """Create the node tree described by *data*, or link it from a library."""
        # Library-linked tree: link it from its source .blend instead of creating it
        if link := data.get("_link"):
            with bpy.data.libraries.load(link["filepath"], link=True) as (
                data_from,
                data_to,
            ):
                setattr(data_to, link["data_type"], [link["name"]])
            # data_to attributes hold the linked datablocks after the with-block
            return getattr(data_to, link["data_type"])[0]

        return bpy.data.node_groups.new(**data["_new"])

    @classmethod
    def dump(cls, node_tree):
        """Dump *node_tree*; library data is reduced to a filepath reference."""
        if node_tree.library:
            data = {"bl_pointer": node_tree.as_pointer()}
            # Resolve the path relative to the library the tree came from
            filepath = abspath(
                bpy.path.abspath(
                    node_tree.library.filepath, library=node_tree.library.library
                )
            )
            data["_link"] = {
                "filepath": filepath,
                "data_type": "node_groups",
                "name": node_tree.name,
            }
        else:
            data = super().dump(node_tree)
            data["_new"] = {"type": node_tree.bl_rna.identifier, "name": node_tree.name}

        return data
|
|
|
|
|
|
class Points(PropCollection):
    """Loader for point collections (curve map points, color ramp elements)."""

    @classmethod
    def load(cls, values, coll):
        """Grow *coll* to len(values) via new(), then load each point in place."""
        new_func = coll.bl_rna.functions["new"]
        # NOTE(review): the "+ 1.1" offset applied to every default parameter
        # looks like leftover debugging (and would fail on non-numeric
        # defaults) -- confirm before relying on this.
        params = {k: utils.get_bl_default(v) + 1.1 for k, v in new_func.parameters.items()[:-1]}

        # Match the same number of elements in collection
        if len(values) > len(coll):
            for _ in range(len(values) - len(coll)):
                coll.new(**params)

        for i, value in enumerate(values):
            # NOTE(review): Dumper.load's signature is (data, bl_pointers_ref,
            # bl_object=None); coll[i] is passed in the bl_pointers_ref slot.
            Dumper.load(value, coll[i])
            # for k, v in value.items():
            #     setattr(coll[i], k, v)
|
|
|
|
|
|
class CurveMapPoints(Points):
    # Curve widget points: created/loaded through the generic Points loader
    bl_type = bpy.types.CurveMapPoints
|
|
|
|
|
|
class ColorRampElements(Points):
    # Color ramp stops: created/loaded through the generic Points loader
    bl_type = bpy.types.ColorRampElements
|
|
|
|
|
|
class CompositorNodeOutputFileLayerSlots(PropCollection):
    """Loader for File Output node layer slots; the collection is rebuilt."""

    bl_type = bpy.types.CompositorNodeOutputFileLayerSlots

    @classmethod
    def load(cls, values, coll):
        """Drop all existing slots before loading so none are duplicated."""
        coll.clear()
        super().load(values, coll)
|
|
|
|
|
|
class CompositorNodeOutputFileFileSlots(PropCollection):
    """Loader for File Output node file slots; the collection is rebuilt."""

    @classmethod
    def load(cls, values, coll):
        """Drop all existing slots before loading so none are duplicated."""
        coll.clear()
        super().load(values, coll)
|
|
|
|
|
|
class AOVs(PropCollection):
    """Loader for view-layer AOV collections, matched by name."""

    bl_type = bpy.types.AOVs

    @classmethod
    def load(cls, values, coll):
        """Reuse an AOV with the same name, adding a new one when absent."""
        for value in values:
            aov = coll.get(value["name"])

            if not aov:
                aov = coll.add()

            # NOTE(review): Dumper.load's signature is (data, bl_pointers_ref,
            # bl_object=None); *aov* is passed in the bl_pointers_ref slot.
            Dumper.load(value, aov)
|
|
|
|
|
|
class Image(Dumper):
    """Dumper for images; only name and filepath are serialized."""

    bl_type = bpy.types.Image

    excludes = []
    includes = ["name", "filepath"]

    @classmethod
    def new(cls, data):
        """Load the image from its stored filepath, reusing an existing datablock."""
        filepath = data["filepath"]
        return bpy.data.images.load(filepath, check_existing=True)
|
|
|
|
|
|
class Material(Dumper):
    """Dumper for materials, reusing an existing datablock by name when possible."""

    bl_type = bpy.types.Material

    excludes = Dumper.excludes + ["preview", "original"]

    @classmethod
    def new(cls, data):
        """Return the material named in *data*, creating it when it does not exist."""
        existing = bpy.data.materials.get(data.get("name", ""))
        if existing is not None:
            return existing

        return bpy.data.materials.new(data["name"])
|
|
|
|
|
|
class Object(Dumper):
    """Dumper for objects; resolved by name only, never created."""

    bl_type = bpy.types.Object
    excludes = []
    includes = ["name"]

    @classmethod
    def new(cls, data):
        """Look up an existing object by name; None when unnamed or missing."""
        name = data.get("name")
        if not name:
            return None
        return bpy.data.objects.get(name)
|
|
|
|
|
|
class Scene(Dumper):
    """Dumper for scenes, resolved (or created) by name."""

    bl_type = bpy.types.Scene
    excludes = []
    includes = ["name"]

    @classmethod
    def new(cls, data):
        """Return the scene named in *data*, creating it when it does not exist."""
        name = data.get("name", "")
        existing = bpy.data.scenes.get(name)
        if existing is not None:
            return existing

        return bpy.data.scenes.new(name=name)
|
|
|
|
|
|
class Collection(Dumper):
    """Dumper for collections; resolved by name only, never created."""

    bl_type = bpy.types.Collection
    includes = ["name"]
    excludes = []

    @classmethod
    def new(cls, data):
        """Look up an existing collection by name; None when unnamed or missing."""
        name = data.get("name")
        if not name:
            return None
        return bpy.data.collections.get(name)
|
|
|
|
|
|
class CompositorNodeRLayers(Node):
    """Dumper for the Render Layers node; also captures its scene and view layer."""

    bl_type = bpy.types.CompositorNodeRLayers

    excludes = Dumper.excludes + ["scene"]

    @classmethod
    def load(cls, data, node):
        """Load the node, resolving its scene and layer before the other props."""
        # The scene must exist before node.scene / node.layer can be assigned
        scene_data = data.pop("scene")
        layer = data.pop("layer")
        scene = Scene.new(scene_data)
        # NOTE(review): Scene inherits Dumper.load whose signature is
        # (data, bl_pointers_ref, bl_object=None); *scene* is passed in the
        # bl_pointers_ref slot here -- confirm against the refactor.
        Scene.load(scene_data, scene)

        node.scene = scene
        node.layer = layer

        super().load(data, node)

        # Reset the view_layer because it might have been created
        # with the scene attr in the dictionary and not be available yet

    @classmethod
    def dump(cls, node):
        """Dump the node plus a reduced description of its scene and view layer."""
        # Add scene and viewlayer passes
        data = super().dump(node)

        # NOTE(review): ViewLayer inherits Dumper.dump whose signature is
        # (bl_object, bl_pointers_ref); called with one argument here.
        view_layer = node.scene.view_layers[node.layer]
        view_layer_data = ViewLayer.dump(view_layer)

        """
        view_layer_data = {
            "name": view_layer.name}
        properties = {p.name: p for p in view_layer.bl_rna.properties}
        for prop in view_layer.bl_rna:
            if prop.identifier.startswith('use_pass'):
                view_layer_data[prop.identifier]
        """

        # Reduced scene description: enough to re-resolve render engine + layer
        data["scene"] = {
            "bl_pointer": node.scene.as_pointer(),
            "name": node.scene.name,
            "render": {
                "bl_pointer": node.scene.render.as_pointer(),
                "engine": node.scene.render.engine,
            },
            "view_layers": [view_layer_data],
        }

        return data
|
|
|
|
|
|
class ViewLayer(Dumper):
    """Dumper for view layers; nested settings groups are excluded."""

    bl_type = bpy.types.ViewLayer
    excludes = Dumper.excludes + [
        "freestyle_settings",
        "eevee",
        "cycles",
        "active_layer_collection",
        "active_aov",
        "active_lightgroup_index",
        "active_lightgroup",
    ]
    # includes = ['name']
|
|
|
|
|
|
class ViewLayers(PropCollection):
    """Loader for a scene's view layers, matched (or created) by name."""

    bl_type = bpy.types.ViewLayers

    @classmethod
    def load(cls, values, coll):
        """Reuse a view layer with the same name, creating it when absent."""
        for value in values:
            view_layer = coll.get(value["name"])

            if view_layer is None:
                view_layer = coll.new(value["name"])

            # NOTE(review): Dumper.load's signature is (data, bl_pointers_ref,
            # bl_object=None); *view_layer* is passed in the bl_pointers_ref slot.
            Dumper.load(value, view_layer)