Compare commits

..

21 Commits

Author SHA1 Message Date
510de4253d Swap serialize/sub-serialize, always solve which Serializer class to use 2025-05-27 12:10:07 +02:00
cf5095202b Clean-up previous implementation from dumper.py 2025-05-22 11:01:53 +02:00
dc253425f8 Reorganize Serializer class layout 2025-05-20 16:41:53 +02:00
7af137867b Progress on deserialization 2025-05-20 16:27:41 +02:00
8f4d7d5684 Improve node serialization interface functions and logic 2025-05-20 16:09:48 +02:00
2f75f7492d Initial Serializer class refactor, very WIP, partial serialization working 2025-05-19 12:45:00 +02:00
90aa72a767 Cleanup: Dumper comments 2025-03-24 16:27:56 +01:00
fb1caf9174 Cleanup: Move Blender utility functions from dumper.py to utils.py 2025-03-24 16:11:59 +01:00
fc34669af7 Cleanup: Remove unused geonode tree lookup if load_nodes node_tree was None 2025-03-24 16:07:31 +01:00
0ab4ffc098 Cleanup: Move files out of core/ 2025-03-24 15:57:29 +01:00
876511c435 Cleanup: Remove old Blender abstraction class files 2025-03-24 15:49:27 +01:00
cbf1ea64e6 Cleanup: Better operator names and description, moved format logic to its own file 2025-03-24 15:48:52 +01:00
94627debc6 MRO based Dumper resolution system 2025-03-20 16:59:45 +01:00
e4ca202608 Debugging code cleanup 2025-03-20 15:30:40 +01:00
e65d6d8a75 Added a magic token to ensure only plugin data can be parsed from the clipboard 2025-03-20 15:25:39 +01:00
18f75eed25 Initial Dumper static functions cleanup 2025-03-20 15:12:18 +01:00
b251a3b122 Add Preferences class, empty for now 2025-03-20 11:49:26 +01:00
698ace38fd Further Operators / UI Improvements 2025-03-20 11:34:21 +01:00
4e029c59a2 Copy/Paste operators cleanup 2025-03-20 01:42:42 +01:00
9940a9c8ca ruff format 2025-03-19 17:40:17 +01:00
30ae14f9bb Cleanup init and remove old unused files 2025-03-19 17:36:52 +01:00
16 changed files with 739 additions and 1405 deletions

View File

@ -1,33 +1,25 @@
bl_info = {
"name": "Node Kit",
"author": "Florentin Luce",
"author": "Florentin Luce, Christophe Seux, Jonas Holzman",
"version": (0, 1),
"blender": (4, 0, 2),
"category": "Node"}
"blender": (4, 3, 2),
"location": "Node Editor -> Node Kit",
"description": "Collection of node-related tools",
"doc_url": "https://git.autourdeminuit.com/autour_de_minuit/node_kit",
"category": "Node",
}
import sys
import importlib
from pathlib import Path
from . import ui, operators
from . import ui, operators, preferences
modules = (
ui,
operators,
preferences
)
if "bpy" in locals():
import importlib
for mod in modules:
importlib.reload(mod)
def register():
    """Register every add-on submodule with Blender."""
    print('Register Node kit')
    for module in modules:
        module.register()

View File

View File

View File

@ -1,212 +0,0 @@
import bpy
from mathutils import Color, Vector
from .sockets import Input, Output
class Node:
    """Blender Node abstraction."""

    def __init__(self, bl_node, parent):
        self.bl_node = bl_node
        self.tree = parent
        self.id = hex(id(self.bl_node))
        self.data = {}
        self.parameters = []
        self._parent = None
        self._scene = None

        # Mirror every writable RNA property onto this abstraction so it can
        # be dumped and restored generically.
        for prop in self.bl_node.bl_rna.properties:
            if prop.is_readonly:
                continue
            prop_id = prop.identifier
            setattr(self, prop_id, getattr(self.bl_node, prop_id))
            self.parameters.append(prop_id)

        self.inputs = [Input(ipt, self.tree) for ipt in self.bl_node.inputs]
        self.outputs = [Output(opt, self.tree) for opt in self.bl_node.outputs]

    @property
    def parent(self):
        """Get the parent Node by matching the parent of our blender node
        against the blender nodes of the other Nodes in the tree.

        Returns:
            Node: Node parent, or None when the blender node has no parent.
        """
        if self._parent:
            return self._parent
        # the blender node doesn't have a parent
        if not self.bl_node.parent:
            self._parent = None
            return self._parent
        for node in self.tree.nodes:
            if node.bl_node == self.bl_node.parent:
                self._parent = node
                break  # bug fix: stop at the first match
        return self._parent

    @parent.setter
    def parent(self, value):
        """Set the Node parent, using the python object, its id or the blender node.

        Args:
            value (Node|str|bpy.types.Node): Node, id or blender node to set as parent.
        """
        # Node object case
        if isinstance(value, Node):
            self._parent = value
        # Node id case
        elif isinstance(value, str) and value.startswith('0x'):
            for node in self.tree.nodes:
                if node.id == value:
                    self._parent = node
                    break
            else:
                # bug fix: without the break above this for/else always ran,
                # printing the error even when the parent had been found
                print('Cannot find parent')
        # blender node case
        elif isinstance(value, bpy.types.Node):
            for node in self.tree.nodes:
                if node.bl_node == value:
                    self._parent = node
                    break
        if self._parent:
            self.bl_node.parent = self._parent.bl_node

    @classmethod
    def from_blender_node(cls, bl_node, tree):
        """Instantiate the right abstraction subclass from the blender node idname.

        Args:
            bl_node (bpy.types.Node): Blender Node to create the abstraction from.
            tree (NodeTree): Node tree object the node belongs to.

        Returns:
            Node: Node abstraction according to the blender node type.
        """
        if bl_node.bl_idname == 'CompositorNodeRLayers':
            return RenderLayersNode(bl_node, tree)
        elif bl_node.bl_idname == 'CompositorNodeValToRGB':
            return ColorRampNode(bl_node, tree)
        else:
            return cls(bl_node, tree)

    @classmethod
    def from_dict(cls, data, tree):
        """Create a node from its dict representation.

        Args:
            data (dict): dict node representation.
            tree (Tree): blender node tree abstraction.

        Returns:
            Node: Created abstract node.
        """
        new_bl_node = tree.bl_node_tree.nodes.new(type=data['bl_idname'])
        node = cls.from_blender_node(new_bl_node, tree)
        node.id = data['id']
        for p in node.parameters:
            setattr(node, p, data[p])
            # set attribute on the blender node only if correct type is retrieved
            if p not in ('parent', 'scene'):
                setattr(node.bl_node, p, getattr(node, p))

        node.inputs = [Input.from_dict(ipt_data, node) for ipt_data in data['inputs'].values()]
        node.outputs = [Output.from_dict(opt_data, node) for opt_data in data['outputs'].values()]
        return node

    def dump(self):
        """Export current Node to its dict representation.

        Returns:
            dict: Node dict representation.
        """
        for prop_id in self.parameters:
            if not hasattr(self, prop_id):
                continue
            attr_value = getattr(self, prop_id)
            # replace non-serializable values; None passes through unchanged
            if isinstance(attr_value, Node):
                attr_value = attr_value.id
            elif isinstance(attr_value, (Color, Vector)):
                attr_value = list(attr_value)
            self.data[prop_id] = attr_value

        self.data['id'] = self.id
        self.data['inputs'] = {ipt.id: ipt.dump() for ipt in self.inputs}
        self.data['outputs'] = {opt.id: opt.dump() for opt in self.outputs}
        return self.data
class RenderLayersNode(Node):
    """Blender Render Layers Node abstraction"""

    @property
    def scene(self):
        """Name of the scene used by the node.

        Returns:
            str: scene name, or None when no scene is set.
        """
        if not self._scene:
            return None
        return self._scene.name

    @scene.setter
    def scene(self, value):
        """Set the blender scene from a bpy Scene object or its name.

        Args:
            value (str|bpy.types.Scene): scene name or scene object to set the node.
        """
        if isinstance(value, bpy.types.Scene):
            self._scene = value
        elif isinstance(value, str):
            self._scene = bpy.data.scenes[value]
        if self._scene:
            self.bl_node.scene = self._scene
class Link:
    """Blender Link abstraction."""

    def __init__(self, bl_link, parent):
        self.bl_link = bl_link
        self.tree = parent
        self.data = {}
        self.id = hex(id(bl_link))
        # mirror the blender sockets on both ends of the link
        self.input = bl_link.to_socket
        self.output = bl_link.from_socket

    def dump(self):
        """Return the link dict representation (only its id)."""
        self.data['id'] = self.id
        return self.data

View File

@ -1,69 +0,0 @@
import json
from pathlib import Path
from .node import Node, Link
class NodeTree:
    """Blender node tree abstraction."""

    def __init__(self, bl_node_tree):
        self.bl_node_tree = bl_node_tree
        self.data = {}
        self.links = [Link(lnk, parent=self) for lnk in self.bl_node_tree.links]
        self.nodes = [Node.from_blender_node(n, self) for n in self.bl_node_tree.nodes]

    def dump(self, select_only=False):
        """Convert all blender nodes and links inside the tree into a dictionary.

        Args:
            select_only (bool, optional): True to convert only selected nodes.
                Defaults to False.

        Returns:
            dict: Nodes and links as dict.
        """
        self.data['nodes'] = {n.id: n.dump() for n in self.nodes
                              if not select_only or n.select}
        self.data['links'] = [lnk.id for lnk in self.links]
        return self.data

    def load(self, data):
        """From a Tree dict representation, create new nodes with their attributes.

        A connection dict is built by comparing link ids from the inputs and
        outputs of each node, then used to link the nodes between each other.

        Args:
            data (dict): Tree dict representation to generate nodes and links from.
        """
        connections = {}
        self.data = data
        for node_id, node_data in self.data['nodes'].items():
            new_node = Node.from_dict(node_data, self)
            self.nodes.append(new_node)
            new_node.bl_node.select = True
            for ipt in new_node.inputs:
                if ipt.is_linked:
                    connections.setdefault(ipt.link, {})['input'] = ipt.bl_input
            for opt in new_node.outputs:
                if opt.is_linked:
                    for link in opt.link:
                        connections.setdefault(link, {})['output'] = opt.bl_output

        for link_id in self.data['links']:
            # Bug fix: a dump made with select_only=True stores every link id
            # but only the selected nodes, so skip links whose two endpoints
            # were not both loaded instead of raising a KeyError.
            connection = connections.get(link_id)
            if not connection or 'input' not in connection or 'output' not in connection:
                continue
            self.bl_node_tree.links.new(connection['input'], connection['output'])

View File

@ -1,110 +0,0 @@
class Socket:
    """Blender node socket abstraction, base class for Input and Output."""

    def __init__(self, bl_socket, tree):
        self.tree = tree
        self.bl_socket = bl_socket
        self.data = {}
        self.id = hex(id(bl_socket))
        self.identifier = bl_socket.identifier
        self.is_linked = bl_socket.is_linked
        # sockets without a default_value (e.g. shader/virtual) keep None
        self._value = None
        if hasattr(bl_socket, 'default_value'):
            self._value = bl_socket.default_value

    @property
    def value(self):
        """Socket default value; vector-like bpy values are converted to a plain list."""
        # Bug fix: guard None before iterating, a socket without default_value
        # would otherwise raise TypeError here.
        if self._value is not None and not isinstance(self._value, (str, int, float, bool)):
            self._value = [v for v in self._value]
        return self._value

    @value.setter
    def value(self, v):
        # keep the blender socket and the cached value in sync
        # (the original had a meaningless `return` here: setters return nothing)
        self.bl_socket.default_value = v
        self._value = v

    def to_dict(self):
        """Return the socket dict representation."""
        self.data['id'] = self.id
        self.data['value'] = self.value
        self.data['identifier'] = self.identifier
        self.data['is_linked'] = self.is_linked
        self.data['link'] = self.get_link()
        return self.data
class Input(Socket):
    """Blender input socket abstraction."""

    def __init__(self, bl_input, tree):
        super().__init__(bl_input, tree)
        self.bl_input = bl_input

    @classmethod
    def from_dict(cls, data, node):
        """Rebuild an Input from its dict representation, matched by identifier."""
        for bl_ipt in node.bl_node.inputs:
            if bl_ipt.identifier == data['identifier']:
                new_ipt = cls(bl_ipt, node.tree)
                for key, val in data.items():
                    setattr(new_ipt, key, val)
                return new_ipt

    def get_link(self):
        """Return the id of the tree link connected to this input, if any."""
        if not self.is_linked:
            return None
        for ipt_link in self.bl_input.links:
            for tree_link in self.tree.links:
                if ipt_link == tree_link.bl_link:
                    return tree_link.id
class Output(Socket):
    """Blender output socket abstraction."""

    def __init__(self, bl_output, tree):
        super().__init__(bl_output, tree)
        self.bl_output = bl_output

    @classmethod
    def from_dict(cls, data, node):
        """Rebuild an Output from its dict representation, matched by identifier."""
        for bl_opt in node.bl_node.outputs:
            if bl_opt.identifier == data['identifier']:
                new_opt = cls(bl_opt, node.tree)
                for key, val in data.items():
                    setattr(new_opt, key, val)
                return new_opt

    def get_link(self):
        """Return the list of connected tree link ids, or None when unlinked."""
        if not self.is_linked:
            return None
        links = []
        for opt_link in self.bl_output.links:
            links.extend(tree_link.id for tree_link in self.tree.links
                         if opt_link == tree_link.bl_link)
        return links

View File

@ -1,790 +0,0 @@
import bpy
import mathutils
from pprint import pprint
import json
import itertools
from copy import copy
from os.path import abspath
def get_default(prop):
    """Get the default value of a bl property (list form for array props)."""
    if getattr(prop, 'is_array', False):
        return list(prop.default_array)
    if hasattr(prop, 'default'):
        return prop.default
def get_dumper(bl_object, fallback=None):
    """Find the right dumper type by checking inheritance."""
    matched = next((dp for dp in dumpers if isinstance(bl_object, dp.bl_type)), None)
    return matched or fallback or Dumper
def get_bl_object(data):
    """Find the bl object for loading data into it depending on the type and the context."""
    new_params = data.get('_new', {})
    if new_params.get('type') == 'GeometryNodeTree':
        return bpy.context.object.modifiers.active.node_group
def dump(ob):
    """Generic recursive dump: convert any object (or sequence) into plain data."""
    Dumper.pointers.clear()
    if isinstance(ob, (list, tuple)):
        data = [get_dumper(item).dump(item) for item in ob]
    else:
        data = get_dumper(ob).dump(ob)
    # reset shared pointer map so the next dump starts clean
    Dumper.pointers.clear()
    return data
def load(data, bl_object=None):
    """Generic load to create/populate an object from a dict."""
    Dumper.pointers.clear()
    if bl_object is None:
        bl_object = get_bl_object(data)
    get_dumper(bl_object).load(data, bl_object)
    # reset shared pointer map so the next load starts clean
    Dumper.pointers.clear()
def set_attribute(bl_object, attr, value):
    """Best-effort setattr: Blender raises on some props, so failures are only logged."""
    try:
        setattr(bl_object, attr, value)
    except Exception as error:
        print(error)
class Dumper:
    """Generic Blender object (de)serializer driven by RNA introspection.

    Subclasses set ``bl_type`` and override ``new``/``load``/``dump`` for
    specific Blender types; ``get_dumper`` picks the first matching entry of
    the module-level ``dumpers`` list.
    """

    # as_pointer() -> bl_object map shared across one whole dump()/load() run
    pointers = {}
    # property whitelist (only honored when excludes is empty, see properties())
    includes = []
    # property identifiers never (de)serialized
    excludes = ["rna_type", "bl_rna", 'id_data', 'depsgraph']

    @classmethod
    def properties(cls, bl_object):
        """Return the RNA properties of bl_object to (de)serialize."""
        if cls.includes and not cls.excludes:
            return [bl_object.bl_rna.properties[p] for p in cls.includes]
        else:
            return [ p for p in bl_object.bl_rna.properties if not
                p.identifier.startswith('bl_') and p.identifier not in cls.excludes]

    @classmethod
    def new(cls, data):
        """Create the Blender object described by data; subclasses must override."""
        print(f'New not implemented for data {data}')

    @classmethod
    def load(cls, data, bl_object=None):
        """Load a dict produced by dump() into bl_object, creating it if needed."""
        if bl_object is None:
            bl_object = cls.new(data)
        if bl_object is None:
            return
        #pprint(data)
        # register the target so later pointer references can be resolved
        if bl_pointer := data.get('bl_pointer'):
            cls.pointers[bl_pointer] = bl_object

        props = cls.properties(bl_object)
        # NOTE(review): props holds RNA property objects while x[0] is a string
        # identifier, so `x[0] in props` is presumably always False and this
        # sort is a no-op — TODO confirm the intended ordering.
        for key, value in sorted(data.items(), key=lambda x: props.index(x[0]) if x[0] in props else 0):
            if key.startswith('_') or key not in bl_object.bl_rna.properties:
                continue

            prop = bl_object.bl_rna.properties[key]
            attr = getattr(bl_object, key)

            if prop.type == 'COLLECTION':
                # delegate to the collection dumper (or a more specific one)
                dumper = PropCollection
                if hasattr(attr, 'bl_rna'):
                    bl_type = attr.bl_rna.type_recast()
                    dumper = get_dumper(bl_type, fallback=PropCollection)
                dumper.load(value, attr)
                continue

            elif prop.type == 'POINTER':
                # if key == 'node_tree':
                #     print('--------------')
                #     print(bl_object, value)
                #     print(cls.pointers)
                if isinstance(value, int):  # It's a pointer
                    if value not in cls.pointers:
                        print(bl_object, "not loaded yet", prop)
                    value = cls.pointers[value]

                elif value is None:
                    set_attribute(bl_object, key, value)
                else:
                    # nested dict: create/load the pointed-to object first
                    bl_type = prop.fixed_type.bl_rna.type_recast()
                    dumper = get_dumper(bl_type)
                    # If the pointer exist register the pointer then load data
                    #print('-----', value)
                    #pointer =
                    if attr is None:
                        attr = dumper.new(value)
                    dumper.load(value, attr)
                    #attr = getattr(bl_object, key)
                    #if not attr:
                    cls.pointers[value['bl_pointer']] = attr

                    if hasattr(attr, 'update'):
                        attr.update()
                    value = attr
                if not prop.is_readonly:
                    set_attribute(bl_object, key, value)

                # Some coll needs a manual update like curve mapping
                if hasattr(attr, 'update'):
                    attr.update()

            elif not prop.is_readonly:
                #print(key, value)
                set_attribute(bl_object, key, value)
                continue
        #return bl_object

    @classmethod
    def dump(cls, bl_object):
        """Convert bl_object into a plain dict keyed by RNA property identifiers."""
        # primitives pass through untouched (recursion terminator)
        if isinstance(bl_object, (str, int, float, dict, list, type(None))):
            return bl_object

        #print('Dumping object', bl_object)
        data = {"bl_pointer": bl_object.as_pointer()}
        cls.pointers[bl_object.as_pointer()] = bl_object

        for prop in cls.properties(bl_object):
            if not hasattr(bl_object, prop.identifier):
                print(f'{bl_object} has no attribute {prop.identifier}')
                continue

            #print(prop.identifier)
            value = getattr(bl_object, prop.identifier)

            # Not storing default value
            if prop.identifier not in cls.includes:
                if (array := getattr(prop, 'default_array', None)) and value == array:
                    continue
                if isinstance(value, (str, int, float)) and value == prop.default:
                    continue

            if getattr(prop, "is_array", False):
                value = PropArray.dump(value)
            elif prop.type == 'COLLECTION':
                value = PropCollection.dump(value)
            elif prop.type == 'POINTER' and value:
                #if prop.identifier == 'image':
                #    print(bl_object, cls.pointers)
                # already-seen objects are stored as a bare pointer id
                if value.as_pointer() in cls.pointers:
                    value = value.as_pointer()
                else:
                    # print('Register Pointer', value.as_pointer(), value)
                    cls.pointers[value.as_pointer()] = value
                    # print(cls.pointers)
                    # print()
                    dumper = get_dumper(value)
                    value = dumper.dump(value)
            elif bl_object.is_property_readonly(prop.identifier):
                continue
            else:
                dumper = get_dumper(value)
                value = dumper.dump(value)

            data[prop.identifier] = value

        return data
class PropCollection(Dumper):
    """Dumper for bpy_prop_collection: dumps to a list, loads via coll.new()."""
    bl_type = bpy.types.bpy_prop_collection

    @classmethod
    def dump(cls, coll):
        """Dump every element; the dumper type is resolved from the first item."""
        if not len(coll):
            return []

        dumper = get_dumper(coll[0])
        values = [dumper.dump(e) for e in coll]

        # Value cannot be None
        return [v for v in values if v is not None]

    @classmethod
    def load(cls, values, coll):
        """Load dumped items back, creating entries with coll.new() when possible."""
        if not values:
            return

        dumper = None
        if not hasattr(coll, 'new'):  # Static collection: load in place, by index
            for item, value in zip(coll, values):
                dumper = dumper or get_dumper(item)
                dumper.load(value, item)
            return

        new_func = coll.bl_rna.functions['new']
        for i, value in enumerate(values):
            if value.get('_new'):
                params = value['_new']
            else:
                # NOTE(review): the [:-1] slice presumably drops the RNA
                # function's return parameter — confirm against the API docs.
                params = {k: value.get(k, get_default(v)) for k, v in new_func.parameters.items()[:-1]}

            # Replace arg pointer with bl object
            valid_pointers = True
            for param in coll.bl_rna.functions['new'].parameters:
                if param.identifier not in params or param.type != 'POINTER':
                    continue

                pointer_id = params[param.identifier]
                if bl_object := cls.pointers.get(pointer_id):
                    params[param.identifier] = bl_object
                else:
                    print(f'No Pointer found for param {param.identifier} of {coll}')
                    valid_pointers = False

            if not valid_pointers:
                continue

            #print(param.identifier, cls.pointers[pointer_id])
            try:
                item = coll.new(**params)
            except RuntimeError as e:
                #print(e, coll.data)
                #print()
                # some collections pre-populate their items: fall back to index
                try:
                    item = coll[i]
                except IndexError as e:
                    #print(e, coll.data)
                    break

            dumper = get_dumper(item)
            dumper.load(value, item)#(item, value)
class PropArray(Dumper):
    """Dumper for bpy_prop_array: flattens (possibly nested) numeric arrays."""
    bl_type = bpy.types.bpy_prop_array

    @classmethod
    def dump(cls, array):
        flat = []
        for element in array:
            if isinstance(element, (int, float)):
                flat.append(element)
            else:
                # nested array (e.g. matrix rows): flatten recursively
                flat.extend(cls.dump(element))
        return flat
class NodeSocket(Dumper):
    """Dumper for node sockets; unavailable sockets dump to None."""
    bl_type = bpy.types.NodeSocket
    excludes = Dumper.excludes + ["node", "links", "display_shape", "rna_type", "link_limit"]

    @classmethod
    def dump(cls, socket):
        if socket.is_unavailable:
            return None
        return super().dump(socket)
class NodeGeometryRepeatOutputItems(PropCollection):
    """Repeat-zone output items collection."""
    bl_type = bpy.types.NodeGeometryRepeatOutputItems

    @classmethod
    def load(cls, values, coll):
        # start from an empty collection so loaded items don't mix with defaults
        coll.clear()
        super().load(values, coll)
class NodeLink(Dumper):
    """Dumper for node links: stored purely as pointers to the two end sockets."""
    bl_type = bpy.types.NodeLink

    @classmethod
    def dump(cls, link):
        new_params = {
            "input": link.from_socket.as_pointer(),
            "output": link.to_socket.as_pointer(),
        }
        return {"_new": new_params}
class NodeTreeInterfaceSocket(Dumper):
    """Dumper for tree interface items (group sockets and panels)."""
    bl_type = bpy.types.NodeTreeInterfaceSocket
    excludes = Dumper.excludes + ["parent", "interface_items"]

    @classmethod
    def dump(cls, socket):
        data = super().dump(socket)
        data['_new'] = {"name": data.get('name', '')}
        if socket.item_type == 'SOCKET':
            data['_new']["in_out"] = socket.in_out
        if socket.parent.parent:
            # it's a real panel, not the interface root
            data['parent'] = socket.parent.as_pointer()
        return data
class NodeSockets(PropCollection):
    """Node input/output socket collections: loaded positionally."""

    @classmethod
    def load(cls, values, coll):
        """Zip dumped socket data onto the node's available (non-unavailable) sockets."""
        #return
        node_sockets = [s for s in coll if not s.is_unavailable]
        for socket, value in zip(node_sockets, values):
            cls.pointers[value['bl_pointer']] = socket
            Dumper.load(value, socket)
            # for k, v in value.items():
            #     if k not in socket.bl_rna.properties:
            #         continue
            #     setattr(socket, k, v)

    """
    # Match Inputs Pointers
    node_sockets = [s for s in coll if not s.is_unavailable]
    if len(node_sockets) == len(inputs): # Match by index
        super().load({"inputs": inputs}, node)
        for socket, value in zip(node_sockets, coll):
            cls.pointers[value['_id']] = socket
    else: # Match by name
        print(f'Match Inputs by Name for node {node}')
        for socket in node_sockets:
            index = next((i for i, v in enumerate(inputs) if v['name'] == socket.name), None)
            if index is None:
                continue
            value = inputs[index]
            print(socket, value)
            cls.pointers[value['_id']] = socket
            Dumper.load(value, socket)
            del inputs[index]
    """
class NodeInputs(NodeSockets):
    # socket matching/loading behavior comes from NodeSockets
    bl_type = bpy.types.NodeInputs


class NodeOutputs(NodeSockets):
    # socket matching/loading behavior comes from NodeSockets
    bl_type = bpy.types.NodeOutputs
class Node(Dumper):
    """Dumper for nodes; sockets are loaded after the other properties."""
    bl_type = bpy.types.Node
    excludes = Dumper.excludes + ["dimensions", "height", "internal_links", "paired_output"]

    @classmethod
    def dump(cls, node=None):
        """Dump the node plus the _new/_pair_with_output recreation metadata."""
        #cls.pointers[node.as_pointer()] = node
        data = super().dump(node)
        #data["_id"] = node.as_pointer()
        data["_new"] = {"type": node.bl_rna.identifier}  # 'node_tree': node.id_data.as_pointer()
        if paired_output := getattr(node, "paired_output", None):
            data["_pair_with_output"] = paired_output.as_pointer()

        #if node.parent:
        #    data['location'] -= Vector()node.parent.location
        return data

    @classmethod
    def load(cls, data, node):
        """Load node properties first, then its inputs/outputs, then fix location."""
        if node is None:
            return
        #cls.pointers[data['bl_pointer']] = node
        inputs = copy(data.pop('inputs', []))
        outputs = copy(data.pop('outputs', []))
        super().load(data, node)
        data['inputs'] = inputs
        data['outputs'] = outputs

        # Loading input and outputs after the properties
        super().load({"inputs": inputs, "outputs": outputs}, node)

        # NOTE(review): location appears to be stored relative to the parent
        # frame, hence the compensation — confirm.
        if node.parent:
            node.location += node.parent.location

        #if node.type != 'FRAME':
        #    node.location.y -= 500
class CompositorNodeGlare(Node):
    """Glare node dumper."""
    bl_type = bpy.types.CompositorNodeGlare
    # NOTE(review): force-including "quality" presumably ensures it is dumped
    # even when left at its default value — confirm.
    includes = ["quality"]
class NodeTreeInterface(Dumper):
    """Dumper for the node tree interface (group sockets and panels)."""
    bl_type = bpy.types.NodeTreeInterface

    @classmethod
    def load(cls, data, interface):
        """Recreate interface sockets/panels from items_tree, then restore active index."""
        print('Load Interface')
        for value in data.get('items_tree', []):
            item_type = value.get('item_type', 'SOCKET')
            if item_type == 'SOCKET':
                item = interface.new_socket(**value['_new'])
            elif item_type == 'PANEL':
                #print(value['_new'])
                item = interface.new_panel(**value['_new'])

            NodeTreeInterfaceSocket.load(value, item)

        interface.active_index = data.get('active_index', 0)
class Nodes(PropCollection):
    """Dumper for a tree's node collection; handles zone input/output pairing."""
    bl_type = bpy.types.Nodes

    @classmethod
    def load(cls, values, coll):
        super().load(values, coll)

        # Pair zone input and output
        for node_data in values:
            if paired_output_id := node_data.get('_pair_with_output', None):
                node = cls.pointers[node_data['bl_pointer']]
                node.pair_with_output(cls.pointers[paired_output_id])

                #print(node, node_data['outputs'])
                # NOTE(review): sockets are re-loaded after pairing — presumably
                # because pairing changes the available sockets; confirm.
                Dumper.load({"inputs": node_data['inputs'], "outputs": node_data['outputs']}, node)
class NodeTree(Dumper):
    """Dumper for node trees; library-linked trees are stored as a _link reference."""
    bl_type = bpy.types.NodeTree
    excludes = []
    includes = ["name", "interface", "nodes", "links"]

    @classmethod
    def new(cls, data):
        """Create a node group, or link it from a library when _link is present."""
        if link := data.get('_link'):
            with bpy.data.libraries.load(link['filepath'], link=True) as (data_from, data_to):
                setattr(data_to, link['data_type'], [link['name']])
            return getattr(data_to, link['data_type'])[0]
        return bpy.data.node_groups.new(**data["_new"])

    @classmethod
    def dump(cls, node_tree):
        """Dump the tree, or only a _link reference when it comes from a library."""
        if node_tree.library:
            data = {'bl_pointer': node_tree.as_pointer()}
            filepath = abspath(bpy.path.abspath(node_tree.library.filepath, library=node_tree.library.library))
            data["_link"] = {"filepath": filepath, "data_type": 'node_groups', 'name': node_tree.name}
        else:
            data = super().dump(node_tree)
            data["_new"] = {"type": node_tree.bl_rna.identifier, 'name': node_tree.name}
        return data
class Points(PropCollection):
    """Base dumper for point collections (curve maps, color ramps)."""

    @classmethod
    def load(cls, values, coll):
        new_func = coll.bl_rna.functions['new']
        # NOTE(review): the +1.1 offset looks like a workaround to create new
        # points at distinct positions — confirm why this value is needed.
        params = {k: get_default(v)+1.1 for k, v in new_func.parameters.items()[:-1]}

        # Match the same number of elements in collection
        if len(values) > len(coll):
            for _ in range(len(values) - len(coll)):
                coll.new(**params)

        for i, value in enumerate(values):
            Dumper.load(value, coll[i])
            #for k, v in value.items():
            #    setattr(coll[i], k, v)
class CurveMapPoints(Points):
    # points of a curve mapping (e.g. curve nodes)
    bl_type = bpy.types.CurveMapPoints


class ColorRampElements(Points):
    # color stops of a color ramp
    bl_type = bpy.types.ColorRampElements
class CompositorNodeOutputFileLayerSlots(PropCollection):
    """File Output node layer slots: collection is cleared before loading."""
    bl_type = bpy.types.CompositorNodeOutputFileLayerSlots

    @classmethod
    def load(cls, values, coll):
        coll.clear()
        super().load(values, coll)


class CompositorNodeOutputFileFileSlots(PropCollection):
    """File Output node file slots: collection is cleared before loading."""
    # Bug fix: bl_type was missing, so this class inherited PropCollection's
    # generic bpy_prop_collection type and could never be matched specifically.
    # NOTE(review): both slot classes are listed AFTER PropCollection in the
    # `dumpers` table and are therefore still shadowed by it — confirm and
    # reorder the table if the clear-before-load behavior is wanted.
    bl_type = bpy.types.CompositorNodeOutputFileFileSlots

    @classmethod
    def load(cls, values, coll):
        coll.clear()
        super().load(values, coll)
class AOVs(PropCollection):
    """AOV collection: entries are matched by name and created when missing."""
    bl_type = bpy.types.AOVs

    @classmethod
    def load(cls, values, coll):
        for value in values:
            aov = coll.get(value['name'])
            if not aov:
                aov = coll.add()
            Dumper.load(value, aov)
class Image(Dumper):
    """Dumper for images: stored by name/filepath only, reloaded on demand."""
    bl_type = bpy.types.Image
    excludes = []
    includes = ['name', 'filepath']

    @classmethod
    def new(cls, data):
        # check_existing avoids duplicating an already-loaded image datablock
        return bpy.data.images.load(data['filepath'], check_existing=True)
class Material(Dumper):
    """Dumper for materials, reused by name when they already exist."""
    bl_type = bpy.types.Material
    excludes = Dumper.excludes + ['preview', "original"]

    @classmethod
    def new(cls, data):
        existing = bpy.data.materials.get(data.get('name', ''))
        if existing is not None:
            return existing
        return bpy.data.materials.new(data['name'])
class Object(Dumper):
    """Dumper for objects: resolved by name only, never created."""
    bl_type = bpy.types.Object
    excludes = []
    includes = ['name']

    @classmethod
    def new(cls, data):
        name = data.get('name')
        if name:
            return bpy.data.objects.get(name)
class Scene(Dumper):
    """Dumper for scenes: matched by name, created when missing."""
    bl_type = bpy.types.Scene
    excludes = []
    includes = ['name']

    @classmethod
    def new(cls, data):
        """Return the named scene if it exists, otherwise create it."""
        if scene := bpy.data.scenes.get(data.get('name', '')):
            return scene
        return bpy.data.scenes.new(name=data.get('name', ''))

    # NOTE(review): dead code below — a string literal, not a docstring
    """
    @classmethod
    def dump(cls, scene):
        view_layer = scene.view_layers[node.layer]
        view_layer_data = ViewLayer.dump(view_layer)

        return {
            'bl_pointer': scene.as_pointer(),
            'name': scene.name,
            'render' : {'bl_pointer': scene.render.as_pointer(), "engine": scene.render.engine},
            'view_layers': [view_layer_data]
        }
    """
class Collection(Dumper):
    """Dumper for collections: resolved by name only, never created."""
    bl_type = bpy.types.Collection
    includes = ['name']
    excludes = []

    @classmethod
    def new(cls, data):
        if name := data.get('name'):
            return bpy.data.collections.get(name)

    # @classmethod
    # def dump(cls, data):
    #     data = super().dump(data)
    #     data['render'] = {"engine": scene.render.engine}
    #     return data
class CompositorNodeRLayers(Node):
    """Dumper for the Render Layers node: also dumps its scene and view layer."""
    bl_type = bpy.types.CompositorNodeRLayers
    excludes = Dumper.excludes + ['scene']

    @classmethod
    def load(cls, data, node):
        """Restore the scene/layer first, then the regular node properties."""
        #print('load CompositorNodeRLayers')
        scene_data = data.pop('scene')
        #print(scene_data)
        layer = data.pop('layer')
        scene = Scene.new(scene_data)
        Scene.load(scene_data, scene)

        node.scene = scene
        node.layer = layer
        super().load(data, node)

        # Reset the view_layer because it might have been created
        # with the scene attr in the dictionary and not be available yet
        #print(bpy.)

    @classmethod
    def dump(cls, node):
        """Dump the node plus its scene and view-layer pass settings."""
        # Add scene and viewlayer passes
        data = super().dump(node)
        #if
        view_layer = node.scene.view_layers[node.layer]
        view_layer_data = ViewLayer.dump(view_layer)

        '''
        view_layer_data = {
            "name": view_layer.name}
        properties = {p.name: p for p in view_layer.bl_rna.properties}
        for prop in view_layer.bl_rna:
            if prop.identifier.startswith('use_pass'):
                view_layer_data[prop.identifier]
        '''

        #cls.pointers[bl_object.as_pointer()] = bl_object

        data['scene'] = {
            'bl_pointer': node.scene.as_pointer(),
            'name': node.scene.name,
            'render' : {'bl_pointer': node.scene.render.as_pointer(), "engine": node.scene.render.engine},
            'view_layers': [view_layer_data]
        }

        return data
class ViewLayer(Dumper):
    """Dumper for view layers; engine settings and active-* state are excluded."""
    bl_type = bpy.types.ViewLayer
    excludes = Dumper.excludes + ['freestyle_settings', 'eevee', 'cycles', 'active_layer_collection',
                                  'active_aov', 'active_lightgroup_index', 'active_lightgroup']
    #includes = ['name']
class ViewLayers(PropCollection):
    """View layer collection: entries matched by name, created when missing."""
    bl_type = bpy.types.ViewLayers

    @classmethod
    def load(cls, values, coll):
        #print('LOAD VIEWLAYERS', values)
        for value in values:
            view_layer = coll.get(value['name'])
            if view_layer is None:
                view_layer = coll.new(value['name'])

            Dumper.load(value, view_layer)
# Dumper lookup table used by get_dumper(): the FIRST entry whose bl_type
# matches via isinstance wins, so more specific types must come before their
# bases (e.g. CompositorNodeRLayers before Node, Nodes before PropCollection).
# NOTE(review): the CompositorNodeOutputFile*Slots entries are listed after
# PropCollection and presumably can never be reached — confirm the ordering.
dumpers = [
    CompositorNodeRLayers,
    CompositorNodeGlare,
    Node,
    NodeSocket,
    NodeTree,
    NodeLink,
    NodeTreeInterface,
    NodeTreeInterfaceSocket,
    NodeGeometryRepeatOutputItems,
    Image,
    Material,
    Object,
    Scene,
    Collection,
    ViewLayer,
    CurveMapPoints,
    ColorRampElements,
    NodeInputs,
    NodeOutputs,
    Nodes,
    ViewLayers,
    PropCollection,
    AOVs,
    PropArray,
    CompositorNodeOutputFileLayerSlots,
    CompositorNodeOutputFileFileSlots,
]

349
dumper.py Normal file
View File

@ -0,0 +1,349 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from copy import copy
from dataclasses import dataclass
from os.path import abspath
from typing import Any
import bpy
from . import utils
from . utils import BlenderProperty
def serialize_selected_nodes_from_node_tree(node_tree: bpy.types.NodeTree):
    """Serialize the selected nodes from a node tree"""
    selected_nodes = [node for node in node_tree.nodes if node.select]
    # only keep links whose two endpoints are both selected
    selected_links = [link for link in node_tree.links
                      if link.from_node.select and link.to_node.select]

    bl_pointers = {}
    # Data format corresponds to the bpy.types.NodeTree properties that we want to (de)serialize
    return {
        "nodes": [Serializer.serialize(node, bl_pointers) for node in selected_nodes],
        "links": [Serializer.serialize(link, bl_pointers) for link in selected_links],
    }
def deserialize_nodes_into_node_tree(data: dict, node_tree: bpy.types.NodeTree):
    """Deserialize node data into a specific node tree"""
    # fresh pointer map for this deserialization run
    Serializer.deserialize(data, node_tree, bl_pointers_ref={})
# TODO: Sub serialize function where the isinstance is set to the default number of things.
# TODO: Collection is not handled as a class anymore, handle it manually
class Serializer(ABC):
"""
Base Serializer class.
`bl_pointers_ref` corresponds to a mutable dict passed through serialize/deserialize
functions, containing a map of Blender pointers IDs and their corresponding objects.
"""
# Whitelisted properties, applied after the blacklist
prop_whitelist = None
# Properties that are excluded from (de)serialization, in addition to any bl_* properties
prop_blacklist = ("rna_type", "id_data", "depsgraph")
serializer_map = {}
@classmethod
@abstractmethod
def construct_bl_object(cls, data: dict):
    """Abstract method to construct a Serializer's specific Blender Object"""
    # Fallback body: subclasses must override; reaching this indicates a bug
    print("DEBUG: construct_bl_object called on Base Serializer, shouldn't happen")
    return None
# --- Serialization ---
@classmethod
def serialize(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> Any:
    """Serialize obj with the most specific Serializer subclass for its type.

    Primitive (non bpy_struct) values are returned unchanged.
    """
    if not isinstance(obj, bpy.types.bpy_struct):
        # Primitive type, return directly
        return obj
    # Bug fix: this note was a stranded string literal mid-function (a no-op
    # statement), now a proper comment: resolve which Serializer class to use
    serializer = cls.get_serializer(obj)
    return serializer.serialize_obj(obj, bl_pointers_ref)
@classmethod
@abstractmethod
def serialize_obj(cls, obj: bpy.types.bpy_struct | Any, bl_pointers_ref: dict) -> dict:
    """Base serialization method, overridden by subclasses"""
    # Early recursive return case
    # TODO: Ported as is, check the heuristics, (if there are more or attributes type to add)
    if isinstance(obj, (str, int, float, dict, list, type(None))):
        return obj

    # Returned data, tracks the pointer for later re-assignments during deserialization
    data = {"_kit_ptr": obj.as_pointer()}
    bl_pointers_ref[obj.as_pointer()] = obj

    # Iterate over all *filtered* properties found in the object
    for bl_prop in cls.get_serialized_properties(obj):
        # Do not store default values nor read-only properties
        if (array := getattr(bl_prop.rep, "default_array", None)) and bl_prop.attr == array:
            continue
        if isinstance(bl_prop.attr, (str, int, float)) and bl_prop.attr == bl_prop.rep.default:
            continue
        if obj.is_property_readonly(bl_prop.rep.identifier):
            continue
        # (removed leftover debug print of the property type)
        # Serialize each property
        data[bl_prop.rep.identifier] = cls.serialize_property(bl_prop, bl_pointers_ref)

    return data
@classmethod
def serialize_property(cls, bl_prop: BlenderProperty, bl_pointers_ref: dict) -> Any:
    """Serialize Blender property, special cases for arrays/collections/pointers"""
    # Property array case
    # Contained in BoolProperty, IntProperty and FloatProperty
    if getattr(bl_prop.rep, "is_array", False):
        prop_array = []
        for item in bl_prop.attr:
            assert isinstance(item, (bool, int, float))  # TODO: For development, replace by list comprehension later
            prop_array.append(item)
        return prop_array

    # Collection case
    if isinstance(bl_prop.attr, bpy.types.bpy_prop_collection):
        collection = bl_prop.attr
        if not collection:
            return []
        # Bug fix: bl_pointers_ref was not forwarded to the recursive call,
        # which raised a TypeError (serialize takes it as a required argument)
        values = [cls.serialize(sub_prop, bl_pointers_ref) for sub_prop in collection]
        # TODO: Check why the original code has a None check
        return [v for v in values if v is not None]

    # Pointer case
    if bl_prop.rep.type == "POINTER" and bl_prop.attr:
        # Property points to another object, stores it ptr/deref value in our pointer table
        ptr = bl_prop.attr.as_pointer()
        if ptr in bl_pointers_ref:
            return ptr
        bl_pointers_ref[ptr] = bl_prop.attr
        # Bug fix: serialize the pointed-to object (attr), not the RNA
        # property definition (rep)
        return cls.serialize(bl_prop.attr, bl_pointers_ref)

    # Bug fix: plain values (str/int/float/bool/enum) previously fell through
    # and were implicitly serialized as None
    return bl_prop.attr
# --- Deserialization ---
@classmethod
@abstractmethod
def deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct, bl_pointers_ref: dict):
    """
    Base deserialization method.
    Deserialize data into a specific Blender object, creating sub-objects as needed.
    Partial data may be provided, in which case, fields not specified will be left to default.
    """
    # NOTE(review): decorated @abstractmethod yet carries the shared
    # implementation -- subclasses appear expected to call super(); confirm.
    # Record the serialized pointer id so later POINTER properties can
    # resolve back to this freshly populated object.
    if (kit_ptr := data.get("_kit_ptr", None)):
        bl_pointers_ref[kit_ptr] = target_obj
    # Items come back ordered against the target's serialized properties.
    data_to_deserialize = cls.get_data_to_deserialize(data, target_obj)
    for stored_key, stored_value in data_to_deserialize:
        # Skip bookkeeping keys ("_kit*") and keys unknown to this RNA type.
        if stored_key.startswith("_kit") or stored_key not in target_obj.bl_rna.properties:
            continue
        target_bl_prop = BlenderProperty(rep=target_obj.bl_rna.properties[stored_key],
                                         attr=getattr(target_obj, stored_key))
        # Collection case
        # Unlike serialization, there's no property array case, as they are just directly assigned
        if isinstance(target_bl_prop.attr, bpy.types.bpy_prop_collection):
            cls.deserialize_collection(stored_value, target_bl_prop.attr, bl_pointers_ref)
            continue
        value_to_set = stored_value
        # Pointer case
        # Dereference the value if its already present in the pointers_ref map
        if target_bl_prop.rep.type == "POINTER":
            value_to_set = cls.deserialize_pointer(stored_value, target_bl_prop.attr, bl_pointers_ref)
        # Skip setting the property if it's read-only
        # (the pointer branch above still ran, registering sub-objects).
        if target_bl_prop.rep.is_readonly:
            continue
        # Assign the property
        setattr(target_obj, stored_key, value_to_set)
        # If supported, update the Blender property after setting it
        if hasattr(target_bl_prop.attr, "update"):
            target_bl_prop.attr.update()
@classmethod
def deserialize_collection(cls, stored_value: Any, bl_coll: bpy.types.bpy_prop_collection, bl_pointers_ref: dict):
    """Deserialize a list of serialized items into a Blender collection.

    Collections without a ``new()`` RNA function are static (fixed-size)
    and are delegated to ``sub_deserialize``.  Dynamic collections get one
    ``new()`` call per stored item, with POINTER parameters resolved
    through ``bl_pointers_ref`` first.

    :param stored_value: list of serialized item dicts.
    :param bl_coll: target collection to fill.
    :param bl_pointers_ref: shared pointer id -> object table.
    """
    # Static collection case
    if not hasattr(bl_coll, "new"):
        cls.sub_deserialize(stored_value, bl_coll, bl_pointers_ref)
        return
    # We need to call the collection "new" function, parse and construct its parameters
    new_func = bl_coll.bl_rna.functions["new"]
    for value in stored_value:
        # Default parameters: stored value when present, else the RNA
        # default.  The last RNA parameter is the function's return
        # value, so it is excluded from the call arguments.
        default_new_func_params = {
            k: value.get(k, utils.get_bl_default(v))
            for k, v in new_func.parameters.items()[:-1]
        }
        new_func_params = value.get("_kit_new_params", default_new_func_params)
        # Resolve POINTER parameters against already-deserialized objects.
        solved_all_pointers = True
        for param in new_func.parameters:
            if param.identifier not in new_func_params or param.type != "POINTER":
                continue
            # BUGFIX: fetch the stored pointer id from the parameter dict
            # (previously indexed the RNA param object itself).
            pointer_id = new_func_params[param.identifier]
            if bl_object := bl_pointers_ref.get(pointer_id):
                new_func_params[param.identifier] = bl_object
            else:
                print(f"No pointer found for param {param.identifier} of new function of {bl_coll}")
                solved_all_pointers = False
        # Skip this item when a pointer is unresolved: calling new() with
        # a raw int where an object is expected would raise.
        if not solved_all_pointers:
            continue
        # Creates a collection item, type from the collection type, no need to manually construct
        collection_item = bl_coll.new(**new_func_params)
        # Recursively deserialize into the newly constructed item through
        # the serializer class matching its actual type (was unused before).
        deserializer = cls.get_serializer(collection_item)
        deserializer.deserialize(value, collection_item, bl_pointers_ref)
# TODO: The target_bl_prop_attr terminology is unclear
@classmethod
def deserialize_pointer(cls, stored_value: Any, target_bl_prop_attr: bpy.types.bpy_struct, bl_pointers_ref: dict):
    """Resolve a serialized POINTER property value to a Blender object.

    :param stored_value: ``None``, a bare pointer id (int) referencing an
        already-deserialized object, or a serialized dict to expand.
    :param target_bl_prop_attr: current value of the target property; may
        be ``None``, in which case the object is constructed from data.
    :param bl_pointers_ref: shared pointer id -> object table.
    :return: the resolved/deserialized Blender object, or ``None``.
    """
    if stored_value is None:
        return None
    # Actual existing pointer, dereference and return
    if isinstance(stored_value, int):
        if stored_value not in bl_pointers_ref:
            print("DEBUG: Pointer reference hasn't been loaded yet")
        # Obtain a reference to a previously dereferenced object
        return bl_pointers_ref[stored_value]
    # Serialized dict: create the Blender object if it doesn't exist yet.
    # BUGFIX: construct *before* resolving the serializer -- the previous
    # order called get_serializer(None) when the property was empty.
    if target_bl_prop_attr is None:
        target_bl_prop_attr = cls.construct_bl_object(stored_value)
    deserializer = cls.get_serializer(target_bl_prop_attr)
    # Recursively deserialize into the target object
    deserializer.deserialize(stored_value, target_bl_prop_attr, bl_pointers_ref)
    bl_pointers_ref[stored_value["_kit_ptr"]] = target_bl_prop_attr
    return target_bl_prop_attr
@classmethod
def get_data_to_deserialize(cls, data: dict, target_obj: bpy.types.bpy_struct=None):
    """Return ``data`` items ordered to match the target's property order.

    Keys are ranked by their position in ``get_serialized_properties``;
    keys not found there sort first (rank 0), as before.

    :param data: serialized key/value mapping for one object.
    :param target_obj: Blender object whose RNA drives the ordering.
    :return: list of (key, value) tuples, dependency-ordered.
    """
    # BUGFIX: compare against property *identifiers* (str) -- the old code
    # tested string keys against BlenderProperty wrappers, never matching,
    # so the intended ordering was silently lost.  A rank dict also avoids
    # the O(n^2) list.index() per sort key.
    rank = {
        bl_prop.rep.identifier: index
        for index, bl_prop in enumerate(cls.get_serialized_properties(target_obj))
    }
    return sorted(data.items(), key=lambda item: rank.get(item[0], 0))
# --- Getters for sub-serializers ---
@classmethod
def get_serializer_map(cls) -> dict[type[bpy.types.bpy_struct], type[Serializer]]:
    """Lazily build and return the bl_type -> Serializer subclass mapping.

    The mapping is cached in a class variable on first access.
    """
    if cls.serializer_map:
        return cls.serializer_map
    for serializer_cls in utils.all_subclasses(Serializer):
        assert hasattr(serializer_cls, "bl_type")
        cls.serializer_map[serializer_cls.bl_type] = serializer_cls
    return cls.serializer_map
@classmethod
def get_serializer(cls, bl_object: bpy.types.bpy_struct) -> type[Serializer]:
    """Resolve the most specific serializer for *bl_object* via its MRO."""
    serializer_map = cls.get_serializer_map()
    recast_type = type(bl_object.bl_rna.type_recast())
    # Walk the MRO from most to least specific; the first hit wins,
    # falling back to the base Serializer when nothing matches.
    candidates = (
        serializer_map[ancestor]
        for ancestor in recast_type.mro()
        if ancestor in serializer_map
    )
    return next(candidates, Serializer)
# --- Properties to (de)serialize ---
@classmethod
def get_serialized_properties(cls, obj: bpy.types.bpy_struct | Any):
    """Collect the (property, value) pairs of *obj* eligible for serialization.

    Internal ``bl_*`` properties and blacklisted identifiers are dropped;
    when a whitelist is defined, only whitelisted identifiers survive.
    """
    eligible: list[BlenderProperty] = []
    for prop in obj.bl_rna.properties:
        identifier = prop.identifier
        # Exclude internal Blender properties
        if identifier.startswith("bl_"):
            continue
        # Additional blacklist filtering
        if identifier in cls.prop_blacklist:
            continue
        # Optional whitelist, applied after the blacklist
        if cls.prop_whitelist and identifier not in cls.prop_whitelist:
            continue
        eligible.append(BlenderProperty(rep=prop, attr=getattr(obj, identifier)))
    return eligible
# class NodeSocket(Serializer):
# bl_type = bpy.types.NodeSocket
# prop_blacklist = Serializer.prop_blacklist + (
# "node",
# "links",
# "display_shape",
# "link_limit",
# )
# @classmethod
# def serialize(cls, socket_obj: bpy.types.NodeSocket, _: dict) -> dict:
# if socket_obj.is_unavailable:
# return None
# return super().serialize(socket_obj)
class NodeSerializer(Serializer):
    # Serializer specialization for bpy.types.Node; picked up by the
    # serializer map through this bl_type attribute.
    bl_type = bpy.types.Node

    @classmethod
    def construct_bl_object(cls, data: dict):
        # Placeholder override: currently defers entirely to the base class.
        return super().construct_bl_object(data)

    @classmethod
    def serialize(cls, obj, bl_pointers_ref):
        # Placeholder override: currently defers entirely to the base class.
        return super().serialize(obj, bl_pointers_ref)

View File

@ -1,12 +0,0 @@
import plateform
from pathlib import Path
from os.path import expandvars
def get_cache_dir()
if plateform.system() == 'Linux':
return Path(expandvars('$HOME/.cache/blender'))
elif plateform.system() == 'Darwin':
return Path('/Library/Caches/Blender')
elif plateform.system() == 'Windows':
return Path(expandvars('%USERPROFILE%\AppData\Local\Blender Foundation\Blender'))

17
formats.py Normal file
View File

@ -0,0 +1,17 @@
import json
from typing import Any
format_token = "#FMT:NODE_KIT#"
def dump_nkit_format(data: str) -> str:
return format_token + json.dumps(data)
def parse_nkit_format(data: str) -> str | None:
if data.startswith(format_token):
print(data[len(format_token):])
return json.loads(data[len(format_token):])
return None

View File

@ -1,18 +1,18 @@
import bpy
import re
def clean_name(name):
if re.match(r'(.*)\.\d{3}$', name):
if re.match(r"(.*)\.\d{3}$", name):
return name[:-4]
return name
def is_node_groups_duplicate(node_groups):
node_group_types = sorted([n.type for n in node_groups[0].nodes])
return all( sorted([n.type for n in ng.nodes]) ==
node_group_types for ng in node_groups[1:])
return all(
sorted([n.type for n in ng.nodes]) == node_group_types for ng in node_groups[1:]
)
def remap_node_group_duplicates(nodes=None, force=False):
@ -41,7 +41,7 @@ def remap_node_group_duplicates(nodes=None, force=False):
continue
if not force:
node_groups.sort(key=lambda x : x.name, reverse=True)
node_groups.sort(key=lambda x: x.name, reverse=True)
print(node_groups)
@ -50,11 +50,13 @@ def remap_node_group_duplicates(nodes=None, force=False):
if not is_duplicate and not force:
failed.append((node_group.name, node_groups[0].name))
print(f'Cannot merge Nodegroup {node_group.name} with {node_groups[0].name} they are different')
print(
f"Cannot merge Nodegroup {node_group.name} with {node_groups[0].name} they are different"
)
continue
merged.append((node_group.name, node_groups[0].name))
print(f'Merge Nodegroup {node_group.name} into {node_groups[0].name}')
print(f"Merge Nodegroup {node_group.name} into {node_groups[0].name}")
node_group.user_remap(node_groups[0])
bpy.data.node_groups.remove(node_group)

View File

@ -1,5 +1,5 @@
"""
This module contains all addons operators
Node Kit Operators
:author: Autour de Minuit
:maintainers: Florentin LUCE
@ -14,104 +14,133 @@ import bpy
from bpy.props import BoolProperty, EnumProperty
from bpy.types import Operator
#from node_kit.core.node_tree import NodeTree
from . core.dumper import dump, load
from .core.node_utils import remap_node_group_duplicates
from .core.pack_nodes import combine_objects, extract_objects
from .dumper import serialize_selected_nodes_from_node_tree, deserialize_nodes_into_node_tree
from .node_utils import remap_node_group_duplicates
from .pack_nodes import combine_objects, extract_objects
from .formats import dump_nkit_format, parse_nkit_format
class NODEKIT_OT_copy(Operator):
bl_idname = 'node_kit.copy_node_tree'
bl_label = 'Copy nodes'
bl_options = {'REGISTER', 'UNDO'}
select_only: BoolProperty(default=True)
bl_idname = "node_kit.copy_nodes"
bl_label = "Copy Nodes"
bl_description = "Copy nodes to system clipboard"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
ntree = context.space_data.edit_tree
if self.select_only:
ntree_data = {
"nodes" : dump([n for n in ntree.nodes if n.select]) ,#[dump(n) for n in ntree.nodes if n.select],
"links" : dump([l for l in ntree.links if l.from_node.select and l.to_node.select])
}
else:
ntree_data = dump(ntree)
serialized_nodes_data = serialize_selected_nodes_from_node_tree(ntree)
pprint(ntree_data)
context.window_manager.clipboard = dump_nkit_format(serialized_nodes_data)
context.window_manager.clipboard = json.dumps(ntree_data)
num_selected_nodes = len([n for n in ntree.nodes if n.select])
self.report({"INFO"}, f"Copied {num_selected_nodes} selected nodes to system clipboard")
return {"FINISHED"}
return {'FINISHED'}
class NODEKIT_OT_copy_tree(Operator):
bl_idname = "node_kit.copy_node_tree"
bl_label = "Copy Node Tree"
bl_description = "Copy node tree to system clipboard"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
ntree = context.space_data.edit_tree
ntree_data = dict(ntree)
context.window_manager.clipboard = dump_nkit_format(ntree_data)
num_nodes = len(ntree.nodes)
self.report({"INFO"}, f"Copied {num_nodes} selected nodes to system clipboard")
return {"FINISHED"}
class NODEKIT_OT_paste(Operator):
bl_idname = 'node_kit.paste_node_tree'
bl_label = 'Paste nodes'
bl_idname = "node_kit.paste_nodes"
bl_label = "Paste Nodes"
bl_description = "Paste nodes from system clipboard"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
ntree_data = parse_nkit_format(context.window_manager.clipboard)
deserialize_nodes_into_node_tree(ntree_data, context.space_data.edit_tree)
ntree_data = json.loads(context.window_manager.clipboard)
load(ntree_data, context.space_data.edit_tree)
return {'FINISHED'}
self.report({"INFO"}, f"X node(s) pasted from system clipboard") # TODO: Get the number of parsed nodes returned
return {"FINISHED"}
class NODEKIT_OT_remap_node_group_duplicates(Operator):
bl_idname = 'node_kit.remap_node_group_duplicates'
bl_label = 'Clean nodes'
bl_idname = "node_kit.remap_node_group_duplicates"
bl_label = "Clean Node Groups Duplicates"
bl_description = "Remap Node Groups duplicates to the latest imported version"
bl_options = {"REGISTER", "UNDO"}
selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'CURRENT')], default="CURRENT", name='All Nodes')
force : BoolProperty(default=False, description='Remap nodes even if there are different', name='Force')
selection: EnumProperty(
items=[(s, s.title(), "") for s in ("ALL", "SELECTED", "CURRENT")],
default="CURRENT",
name="All Nodes",
)
force: BoolProperty(
default=False,
description="Remap nodes even if there are different",
name="Force",
)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def execute(self, context):
nodes = None
if self.selection == 'SELECTED':
nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select]
elif self.selection == 'ACTIVE':
if self.selection == "SELECTED":
nodes = [
n.node_tree
for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select
]
elif self.selection == "ACTIVE":
active_node = context.space_data.edit_tree
nodes = [active_node]
merged, failed = remap_node_group_duplicates(nodes=nodes, force=self.force)
if failed and not merged:
self.report({"ERROR"}, 'No duplicates remapped, Node Group are differents')
self.report({"ERROR"}, "No duplicates remapped, Node Group are differents")
return {"CANCELLED"}
self.report({"INFO"}, f'{len(merged)} Node Groups Remapped, {len(failed)} Node Groups failed')
self.report(
{"INFO"},
f"{len(merged)} Node Groups Remapped, {len(failed)} Node Groups failed",
)
return {'FINISHED'}
return {"FINISHED"}
def draw(self, context):
layout = self.layout
layout.prop(self, "selection", expand=True)
layout.prop(self, "force")
if self.force and self.selection == 'CURRENT':
if self.force and self.selection == "CURRENT":
ntree = context.space_data.edit_tree
layout.label(text=f'Remap node {ntree.name} to others')
elif self.force and self.selection == 'SELECTED':
layout.label(text='Selected nodes will override others')
elif self.selection == 'SELECTED':
layout.label(text='Remap last .*** nodes')
layout.label(text='Ex: Node.001 will override Node')
elif self.selection in ('CURRENT', 'ALL'):
layout.label(text='Remap last .*** nodes')
layout.label(text='Ex: Node.001 will override Node')
layout.label(text=f"Remap node {ntree.name} to others")
elif self.force and self.selection == "SELECTED":
layout.label(text="Selected nodes will override others")
elif self.selection == "SELECTED":
layout.label(text="Remap last .*** nodes")
layout.label(text="Ex: Node.001 will override Node")
elif self.selection in ("CURRENT", "ALL"):
layout.label(text="Remap last .*** nodes")
layout.label(text="Ex: Node.001 will override Node")
class NODEKIT_OT_update_nodes(Operator):
bl_idname = 'node_kit.update_nodes'
bl_label = 'Update node'
bl_idname = "node_kit.update_nodes"
bl_label = "Update Nodes from Library"
bl_description = "Overrides node group using the latest version from Asset Library"
bl_options = {"REGISTER", "UNDO"}
selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'ACTIVE')], default="ACTIVE", name='All Nodes')
selection: EnumProperty(
items=[(s, s.title(), "") for s in ("ALL", "SELECTED", "ACTIVE")],
default="ACTIVE",
name="All Nodes",
)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
@ -123,61 +152,75 @@ class NODEKIT_OT_update_nodes(Operator):
ntree_name = ntree.name
new_ntree = None
if self.selection == 'SELECTED':
nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select]
elif self.selection == 'ACTIVE':
if self.selection == "SELECTED":
nodes = [
n.node_tree
for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select
]
elif self.selection == "ACTIVE":
active_node = context.space_data.edit_tree
nodes = [active_node]
else:
nodes = list(bpy.data.node_groups)
node_names = set(n.name for n in nodes)
#new_node_groups = []
# new_node_groups = []
#print("node_names", node_names)
# print("node_names", node_names)
for asset_library in asset_libraries:
library_path = Path(asset_library.path)
blend_files = [fp for fp in library_path.glob("**/*.blend") if fp.is_file()]
node_groups = list(bpy.data.node_groups)# Storing original node_geoup to compare with imported
node_groups = list(
bpy.data.node_groups
) # Storing original node_geoup to compare with imported
link = (asset_library.import_method == 'LINK')
link = asset_library.import_method == "LINK"
for blend_file in blend_files:
print(blend_file)
with bpy.data.libraries.load(str(blend_file), assets_only=True, link=link) as (data_from, data_to):
with bpy.data.libraries.load(
str(blend_file), assets_only=True, link=link
) as (data_from, data_to):
print(data_from.node_groups)
import_node_groups = [n for n in data_from.node_groups if n in node_names]
import_node_groups = [
n for n in data_from.node_groups if n in node_names
]
print("import_node_groups", import_node_groups)
data_to.node_groups = import_node_groups
#print(data_from.node_groups)
#print("data_to.node_groups", data_to.node_groups)
node_names -= set(import_node_groups) # Store already updated nodes
# print(data_from.node_groups)
# print("data_to.node_groups", data_to.node_groups)
node_names -= set(import_node_groups) # Store already updated nodes
#new_ntree = data_to.node_groups[0]
# new_ntree = data_to.node_groups[0]
new_node_groups = [n for n in bpy.data.node_groups if n not in node_groups]
#break
# break
#if new_ntree:
# break
# if new_ntree:
# break
new_node_groups = list(set(new_node_groups))
#print(new_node_groups)
# print(new_node_groups)
# if new_node_groups:
for new_node_group in new_node_groups:
new_node_group_name = new_node_group.library_weak_reference.id_name[2:]
local_node_group = next((n for n in bpy.data.node_groups if n.name == new_node_group_name and n != new_node_group), None)
local_node_group = next(
(
n
for n in bpy.data.node_groups
if n.name == new_node_group_name and n != new_node_group
),
None,
)
if not local_node_group:
print(f'No local node_group {new_node_group_name}')
print(f"No local node_group {new_node_group_name}")
continue
print(f'Merge node {local_node_group.name} into {new_node_group.name}')
print(f"Merge node {local_node_group.name} into {new_node_group.name}")
local_node_group.user_remap(new_node_group)
new_node_group.interface_update(context)
@ -186,8 +229,8 @@ class NODEKIT_OT_update_nodes(Operator):
new_node_group.name = new_node_group_name
new_node_group.asset_clear()
#self.report({"INFO"}, f"Node updated from {blend_file}")
return {'FINISHED'}
# self.report({"INFO"}, f"Node updated from {blend_file}")
return {"FINISHED"}
# else:
# self.report({"ERROR"}, f'No Node Group named "{ntree_name}" in the library')
@ -199,32 +242,35 @@ class NODEKIT_OT_update_nodes(Operator):
class NODEKIT_OT_pack_nodes(Operator):
bl_idname = 'node_kit.pack_nodes'
bl_label = 'Update node'
bl_idname = "node_kit.pack_nodes"
bl_label = "Pack Modifiers as Nodes"
bl_description = "Pack Geometry Nodes modifiers stack as a single node tree"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
combine_objects(context.selected_objects)
return {'FINISHED'}
return {"FINISHED"}
class NODEKIT_OT_unpack_nodes(Operator):
bl_idname = 'node_kit.unpack_nodes'
bl_label = 'Update node'
bl_idname = "node_kit.unpack_nodes"
bl_label = "Unpack Nodes as Modifiers"
bl_description = "Unpack node tree as Geometry Nodes modifiers"
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
extract_objects(context.active_object)
return {'FINISHED'}
return {"FINISHED"}
classes = (
NODEKIT_OT_copy,
NODEKIT_OT_copy_tree,
NODEKIT_OT_paste,
NODEKIT_OT_remap_node_group_duplicates,
NODEKIT_OT_update_nodes,
NODEKIT_OT_pack_nodes,
NODEKIT_OT_unpack_nodes
NODEKIT_OT_unpack_nodes,
)

View File

@ -1,16 +1,17 @@
import bpy
def set_params(src, tgt, mod_to_node=True, org_modifier=None):
# mod to node: est-ce qu'on copie les valeurs d'un modifier a une node, ou l'inverse
if mod_to_node: # syntax for node and modifier are slightly different
if mod_to_node: # syntax for node and modifier are slightly different
tree = src.node_group.interface.items_tree
else:
tree = src.node_tree.interface.items_tree
for param in tree:
if param.socket_type == 'NodeSocketGeometry':
if param.socket_type == "NodeSocketGeometry":
continue
if param.in_out == 'OUTPUT':
if param.in_out == "OUTPUT":
continue
# seulement en extract mode, src est une node donc on check si des parametres sont dans le modifier
@ -26,19 +27,23 @@ def set_params(src, tgt, mod_to_node=True, org_modifier=None):
else:
tgt[identifier] = src.inputs[identifier].default_value
def set_group_inputs(target, objects, group):
mod = target.modifiers[0]
node_dct = {} # used for cleanup
node_dct = {} # used for cleanup
for key, inp in get_node_inputs(objects).items():
# add the socket to the node group / modifier pannel
sock = group.interface.new_socket(inp["label"],in_out="INPUT",socket_type=inp["socket"])
sock = group.interface.new_socket(
inp["label"], in_out="INPUT", socket_type=inp["socket"]
)
mod[sock.identifier] = inp["data"]
# inspect all nodes and add a group input node when that socket is used
for node in parse_nodes(objects):
for param in node.node_tree.interface.items_tree:
nkey = get_input_socket_key(node, param)
if not nkey: continue
if not nkey:
continue
if nkey == key:
input_node = add_input_node(group, node, param.identifier, sock)
@ -53,12 +58,13 @@ def set_group_inputs(target, objects, group):
node_dct[node].append(input_node)
# on refait la meme chose pour les object info nodes car leur syntaxe est un peu differente
for node in parse_nodes(objects, type = "OBJECT_INFO"):
for node in parse_nodes(objects, type="OBJECT_INFO"):
nkey = get_input_socket_key(node, param)
if not nkey: continue
if not nkey:
continue
if nkey == key:
input_node = add_input_node(group, node, 'Object', sock)
node.inputs['Object'].default_value = None
input_node = add_input_node(group, node, "Object", sock)
node.inputs["Object"].default_value = None
# add to dict for cleanup
if not node in node_dct.keys():
@ -72,6 +78,7 @@ def set_group_inputs(target, objects, group):
input_node.location[1] += 50 * offset
hide_sockets(input_node)
def get_node_link_value(node, param_name, org_mod):
if not org_mod:
return
@ -82,20 +89,25 @@ def get_node_link_value(node, param_name, org_mod):
return org_mod[socket_id]
def get_geo_socket(node, input=True):
if node.type != "GROUP":
return('Geometry')
return "Geometry"
for param in node.node_tree.interface.items_tree:
if param.socket_type != 'NodeSocketGeometry':
if param.socket_type != "NodeSocketGeometry":
continue
if input and param.in_out == 'INPUT' : return param.identifier
if not input and param.in_out == 'OUTPUT' : return param.identifier
if input and param.in_out == "INPUT":
return param.identifier
if not input and param.in_out == "OUTPUT":
return param.identifier
return None
def get_input_socket_key(node, param):
if node.type == "GROUP":
if param.in_out != 'INPUT':
if param.in_out != "INPUT":
return False
if not param.socket_type in ['NodeSocketObject','NodeSocketCollection']:
if not param.socket_type in ["NodeSocketObject", "NodeSocketCollection"]:
return False
tgt = node.inputs[param.identifier].default_value
@ -104,11 +116,12 @@ def get_input_socket_key(node, param):
return f"{param.socket_type[10:][:3]} {tgt.name}"
if node.type == "OBJECT_INFO":
tgt = node.inputs['Object'].default_value
tgt = node.inputs["Object"].default_value
if not tgt:
return False
return f"Object {tgt.name}"
def get_node_inputs(combined_nodes):
# inputs["Col COL.name"] = {name = COL.name, data = COL, socket = "COLLECTION"}
# inputs["Obj OBJ.name"] = {name = OBJ.name, data = OBJ, socket = "OBJECT"}
@ -119,17 +132,28 @@ def get_node_inputs(combined_nodes):
if not key:
continue
tgt = node.inputs[param.identifier].default_value
inputs[key] = {'name': tgt.name, 'data': tgt, 'label': param.name , 'socket': param.socket_type}
inputs[key] = {
"name": tgt.name,
"data": tgt,
"label": param.name,
"socket": param.socket_type,
}
for node in parse_nodes(combined_nodes, type = "OBJECT_INFO"):
for node in parse_nodes(combined_nodes, type="OBJECT_INFO"):
key = get_input_socket_key(node, None)
if not key:
continue
tgt = node.inputs['Object'].default_value
inputs[key] = {'name': tgt.name, 'data': tgt, 'label': 'Source OB' , 'socket': "NodeSocketObject"}
tgt = node.inputs["Object"].default_value
inputs[key] = {
"name": tgt.name,
"data": tgt,
"label": "Source OB",
"socket": "NodeSocketObject",
}
return inputs
def get_node_bounds(objects, mode=0, x=0, y=0):
min_x = min_y = 10000000
max_x = max_y = 0
@ -137,13 +161,14 @@ def get_node_bounds(objects, mode=0, x=0, y=0):
for ob in objects:
for node in ob:
co = node.location
min_x = min(co[0],min_x)
max_x = max(co[0],max_x)
min_x = min(co[0], min_x)
max_x = max(co[0], max_x)
min_y = min(co[1],min_y)
max_y = max(co[1],max_y)
min_y = min(co[1], min_y)
max_y = max(co[1], max_y)
if mode == 0:
return([max_x+x, (min_y+max_y)/2 ])
return [max_x + x, (min_y + max_y) / 2]
def get_collection(name):
scn = bpy.context.scene
@ -152,23 +177,29 @@ def get_collection(name):
# look for existing
for c in bpy.data.collections:
if c.name == name: col = c
if c.name == name:
col = c
# create if needed
if not col: col = bpy.data.collections.new(name)
if not col:
col = bpy.data.collections.new(name)
# link to scene if needed
for c in scn.collection.children_recursive:
if c.name == col.name: link = True
if c.name == col.name:
link = True
if not link:
scn.collection.children.link(col)
return col
def get_mod_frames(grp):
frames = []
for node in grp.nodes:
if node.type == "FRAME": frames.append(node)
return(frames)
if node.type == "FRAME":
frames.append(node)
return frames
def get_frame_childrens(frame):
childrens = []
@ -183,13 +214,16 @@ def get_frame_childrens(frame):
childrens = [locs[x] for x in entries]
return childrens
def parse_nodes(combined_nodes, type = "GROUP"):
def parse_nodes(combined_nodes, type="GROUP"):
nodes = []
for frame in combined_nodes:
for node in frame:
if node.type == type: nodes.append(node)
if node.type == type:
nodes.append(node)
return nodes
def copy_source_ob(ob, col):
# est-ce que l'objet a des data ? si oui on cree une copie ,
# si non on renvois None
@ -210,7 +244,8 @@ def copy_source_ob(ob, col):
col.objects.link(new_ob)
return new_ob
def hide_sockets(node,collapse = True):
def hide_sockets(node, collapse=True):
for socket in node.outputs:
if not socket.links:
socket.hide = True
@ -220,14 +255,15 @@ def hide_sockets(node,collapse = True):
if collapse:
node.hide = True
def add_input_node(group, node, param_id, socket):
group_input_node = group.nodes.new('NodeGroupInput')
group_input_node = group.nodes.new("NodeGroupInput")
group_input_node.location = node.location
group_input_node.location[1] += 70
group_input_node.label = socket.name
group.links.new(group_input_node.outputs[socket.identifier],
node.inputs[param_id])
return(group_input_node)
group.links.new(group_input_node.outputs[socket.identifier], node.inputs[param_id])
return group_input_node
def add_material_node(ob, group, nodes):
if not ob.material_slots:
@ -235,54 +271,59 @@ def add_material_node(ob, group, nodes):
if not ob.material_slots[0].material:
return nodes
last_node = nodes[-1:][0]
node = group.nodes.new('GeometryNodeSetMaterial')
node.inputs['Material'].default_value = ob.material_slots[0].material
node = group.nodes.new("GeometryNodeSetMaterial")
node.inputs["Material"].default_value = ob.material_slots[0].material
node.location = last_node.location
node.location[0] += 300
nodes.append(node)
return nodes
def join_nodes(group, nodes):
prev = None
for i , node in enumerate(nodes):
for i, node in enumerate(nodes):
if not prev:
prev = node
continue
geo_in = get_geo_socket(node)
geo_out = get_geo_socket(prev, input = False)
geo_out = get_geo_socket(prev, input=False)
if not geo_in or not geo_out:
continue
group.links.new(prev.outputs[geo_out], node.inputs[geo_in])
prev = node
def frame_nodes(group, nodes, ob):
nd = group.nodes.new('NodeFrame')
nd = group.nodes.new("NodeFrame")
# frame = nodes.new(type='NodeFrame')
for n in nodes:
n.parent = nd
nd.label = ob.name
def combine_ob(ob, group, y=0, col=None):
nodes = []
# object info node
nd = group.nodes.new('GeometryNodeObjectInfo')
nd = group.nodes.new("GeometryNodeObjectInfo")
nd.location[0] -= 300
nd.location[1] = y * 800
nd.transform_space = "RELATIVE"
nd.inputs['Object'].default_value = copy_source_ob(ob, col) # si l'objet contient des data on crée une copie
nd.inputs["Object"].default_value = copy_source_ob(
ob, col
) # si l'objet contient des data on crée une copie
nodes.append(nd)
# ob modifiers
for x,md in enumerate(ob.modifiers):
if md.type != "NODES" :
for x, md in enumerate(ob.modifiers):
if md.type != "NODES":
print(abordage)
if md.node_group == group:
continue
nd = group.nodes.new('GeometryNodeGroup')
nd = group.nodes.new("GeometryNodeGroup")
nd.label = md.name
nd.width = 230
nd.location[0] = x * 300
@ -296,6 +337,7 @@ def combine_ob(ob, group, y=0, col=None):
frame_nodes(group, nodes, ob)
return nodes
def gen_target_ob(group, col=None):
ob = gen_empty_ob(group.name, col=col)
mod = ob.modifiers.new(group.name, "NODES")
@ -303,20 +345,22 @@ def gen_target_ob(group, col=None):
ob.show_name = True
bpy.context.view_layer.objects.active = ob
return(ob)
return ob
def gen_empty_ob(name, col=None):
scn = bpy.context.scene
ob = bpy.data.objects.new(name, object_data=bpy.data.meshes.new(name))
ob.data.materials.append(None)
ob.material_slots[0].link = 'OBJECT'
ob.material_slots[0].link = "OBJECT"
if not col:
scn.collection.objects.link(ob)
else:
col.objects.link(ob)
return(ob)
return ob
def assign_modifiers(ob, frame, org_modifier):
for node in get_frame_childrens(frame):
@ -328,11 +372,14 @@ def assign_modifiers(ob, frame, org_modifier):
set_params(node, mod, mod_to_node=False, org_modifier=org_modifier)
mod.node_group.interface_update(bpy.context)
def join_branches(objects, group):
# join all trees and add an output node
join = group.nodes.new('GeometryNodeJoinGeometry')
out = group.nodes.new('NodeGroupOutput')
out_sock = group.interface.new_socket("Geometry",in_out="OUTPUT",socket_type="NodeSocketGeometry")
join = group.nodes.new("GeometryNodeJoinGeometry")
out = group.nodes.new("NodeGroupOutput")
out_sock = group.interface.new_socket(
"Geometry", in_out="OUTPUT", socket_type="NodeSocketGeometry"
)
loc = get_node_bounds(objects, x=500)
join.location = loc
@ -341,45 +388,52 @@ def join_branches(objects, group):
for ob in objects:
node = ob[-1:][0]
group.links.new(node.outputs[get_geo_socket(node, input=False)],
join.inputs[get_geo_socket(join)])
group.links.new(
node.outputs[get_geo_socket(node, input=False)],
join.inputs[get_geo_socket(join)],
)
group.links.new(
join.outputs[get_geo_socket(join, input=False)], out.inputs[out_sock.identifier]
)
group.links.new(join.outputs[get_geo_socket(join, input=False)],
out.inputs[out_sock.identifier])
def gen_extracted_ob(name, frame, col, mod):
ob = None
for node in get_frame_childrens(frame):
if node.type != "OBJECT_INFO":
continue
target = get_node_link_value(node, 'Object', mod)
target = get_node_link_value(node, "Object", mod)
if target:
ob = target.copy()
ob.data = ob.data.copy()
col.objects.link(ob)
if not ob: ob = gen_empty_ob(name , col = col)
if not ob:
ob = gen_empty_ob(name, col=col)
# assign material
for node in get_frame_childrens(frame):
if node.type != "SET_MATERIAL":
continue
ob.material_slots[0].material = node.inputs['Material'].default_value
ob.material_slots[0].material = node.inputs["Material"].default_value
return ob
def combine_objects(objs):
name = f"NODEGROUP_combined"
col = get_collection(name)
group = bpy.data.node_groups.new(name=name, type='GeometryNodeTree')
group = bpy.data.node_groups.new(name=name, type="GeometryNodeTree")
objects = []
for y , ob in enumerate(objs):
for y, ob in enumerate(objs):
objects.append(combine_ob(ob, group, y=y, col=col))
target = gen_target_ob(group, col = col)
target = gen_target_ob(group, col=col)
set_group_inputs(target, objects, group)
join_branches(objects, group)
def extract_objects(object):
mod = object.modifiers[0]
grp = mod.node_group
@ -390,8 +444,9 @@ def extract_objects(object):
ob = gen_extracted_ob(name, frame, col, mod)
assign_modifiers(ob, frame, mod)
# combine_objects(bpy.context.selected_objects)
# extract_objects(bpy.context.active_object)
"""
TODO: extract copier les transform de l'objet original ...
OK ! combine: si un objet a un materiel on cree un node set material en fin de liste

22
preferences.py Normal file
View File

@ -0,0 +1,22 @@
import bpy
from bpy.types import AddonPreferences
from bpy.props import BoolProperty
class NodeKitPreferences(AddonPreferences):
    """Node Kit add-on preferences (placeholder — no options exposed yet)."""

    bl_idname = __package__
# Classes registered by this module.
classes = (NodeKitPreferences,)
def register():
    """Register this module's classes with Blender."""
    for klass in classes:
        bpy.utils.register_class(klass)
def unregister():
    """Unregister this module's classes in reverse registration order."""
    for klass in reversed(classes):
        bpy.utils.unregister_class(klass)

37
ui.py
View File

@ -1,5 +1,5 @@
"""
This module contains blender UI elements
Node Kit UI elements and menus.
:author: Autour de Minuit
:maintainers: Florentin LUCE
@ -15,22 +15,29 @@ class NODEKIT_MT_node_kit(bpy.types.Menu):
def draw(self, context):
layout = self.layout
layout.operator('node_kit.copy_node_tree', text='Copy Nodes', icon='COPYDOWN')
layout.operator('node_kit.paste_node_tree', text='Paste Nodes', icon='PASTEDOWN')
layout.separator()
layout.operator('node_kit.remap_node_group_duplicates', text='Remap Node Groups Duplicates', icon='NODE_INSERT_OFF')
layout.operator('node_kit.update_nodes', text='Update Nodes', icon='IMPORT')
layout.separator()
layout.operator('node_kit.pack_nodes', text='Pack Nodes', icon='PACKAGE')
layout.operator('node_kit.unpack_nodes', text='UnPack Nodes', icon='UGLYPACKAGE')
layout.operator("node_kit.copy_nodes", icon="COPYDOWN")
layout.operator("node_kit.paste_nodes", icon="PASTEDOWN")
classes = (
NODEKIT_MT_node_kit,
)
layout.separator()
layout.operator("node_kit.copy_node_tree", icon="NODETREE")
layout.separator()
layout.operator("node_kit.remap_node_group_duplicates",icon="NODE_INSERT_OFF")
layout.operator("node_kit.update_nodes", icon="IMPORT")
layout.separator()
layout.operator("node_kit.pack_nodes", icon="PACKAGE")
layout.operator("node_kit.unpack_nodes", icon="UGLYPACKAGE")
classes = (NODEKIT_MT_node_kit,)
def draw_menu(self, context):
    """Append the Node Kit menu to the node editor's header menus.

    NOTE(review): the diff rendering showed both the old single-quoted and
    new double-quoted call; only one menu entry is intended.
    """
    self.layout.menu("NODEKIT_MT_node_kit")
def register():
@ -41,7 +48,7 @@ def register():
def unregister():
    """Unregister UI classes, then detach the menu draw callback.

    NOTE(review): per the diff, the ``remove(draw_menu)`` call was moved
    after the class unregistration loop; the duplicate line is dropped.
    """
    for c in reversed(classes):
        bpy.utils.unregister_class(c)

    bpy.types.NODE_MT_editor_menus.remove(draw_menu)

37
utils.py Normal file
View File

@ -0,0 +1,37 @@
from dataclasses import dataclass
from typing import Any
import bpy
@dataclass
class BlenderProperty:
    """
    Blender Property abstraction, used since a Blender property value isn't
    directly accessible from its Property object representation.

    NOTE: Do not rely on value being up-to-date, data will get stale
    """

    # RNA property definition (metadata; does not carry the live value).
    rep: bpy.types.Property
    # Captured value of the property at the time this wrapper was built.
    attr: Any
def all_subclasses(cls):
    """Return the set of every direct and indirect subclass of *cls*."""
    found = set()
    for sub in cls.__subclasses__():
        found.add(sub)
        found.update(all_subclasses(sub))
    return found
def get_bl_default(prop: bpy.types.Property):
    """Get the default value of a Blender property.

    Array properties yield a plain-list copy of ``default_array``; scalar
    properties yield ``default``; anything else yields ``None``.
    """
    if getattr(prop, "is_array", False):
        return list(prop.default_array)
    if hasattr(prop, "default"):
        return prop.default
    return None
def set_bl_attribute(bl_object, attr, value):
    """Best-effort ``setattr``: failures are printed rather than raised.

    Useful for bulk assignment where some attributes may reject the value
    (e.g. read-only properties) without aborting the whole operation.
    """
    try:
        setattr(bl_object, attr, value)
    except Exception as exc:
        print(exc)