Compare commits
No commits in common. "refactor" and "master" have entirely different histories.
__init__.py
@@ -1,25 +1,33 @@
 bl_info = {
     "name": "Node Kit",
-    "author": "Florentin Luce, Christophe Seux, Jonas Holzman",
+    "author": "Florentin Luce",
     "version": (0, 1),
-    "blender": (4, 3, 2),
-    "location": "Node Editor -> Node Kit",
-    "description": "Collection of node-related tools",
-    "doc_url": "https://git.autourdeminuit.com/autour_de_minuit/node_kit",
-    "category": "Node",
-}
+    "blender": (4, 0, 2),
+    "category": "Node"}


-from . import ui, operators, preferences
+import sys
+import importlib
+from pathlib import Path
+
+from . import ui, operators


 modules = (
     ui,
     operators,
-    preferences
 )


+if "bpy" in locals():
+    import importlib
+
+    for mod in modules:
+        importlib.reload(mod)
+
+
 def register():
+    print('Register Node kit')
     for mod in modules:
         mod.register()
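For readers unfamiliar with it, the `if "bpy" in locals()` guard added on the new side is the standard Blender add-on reload idiom: on the first registration `bpy` is not yet in the module namespace, but on a re-registration (e.g. after "Reload Scripts") it is, and the already-imported submodules must be reloaded for code edits to take effect. The canonical form of the pattern, as a sketch and not part of either branch, looks like this:

# Canonical Blender add-on reload idiom (sketch; module names taken from the file above).
if "bpy" in locals():
    import importlib
    importlib.reload(ui)
    importlib.reload(operators)
else:
    from . import ui, operators

import bpy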
@@ -0,0 +1,212 @@
import bpy
from mathutils import Color, Vector

from .sockets import Input, Output


class Node:
    """Blender Node abstraction."""

    def __init__(self, bl_node, parent):

        self.bl_node = bl_node
        self.tree = parent
        self.id = hex(id(self.bl_node))

        self.data = {}
        self.parameters = []

        self._parent = None
        self._scene = None

        for prop in self.bl_node.bl_rna.properties:
            if prop.is_readonly:
                continue

            prop_id = prop.identifier

            setattr(self, prop_id, getattr(self.bl_node, prop_id))
            self.parameters.append(prop_id)

        self.inputs = [Input(ipt, self.tree) for ipt in self.bl_node.inputs]
        self.outputs = [Output(opt, self.tree) for opt in self.bl_node.outputs]

    @property
    def parent(self):
        """Get the Node from all the other nodes in the tree by checking that the
        parent of its blender node is the same as the blender node we are comparing.

        Returns:
            Node: Node parent.
        """

        if self._parent:
            return self._parent

        # if blender node doesn't have a parent
        if not self.bl_node.parent:
            self._parent = None
            return self._parent

        for node in self.tree.nodes:
            if node.bl_node == self.bl_node.parent:
                self._parent = node
                return self._parent

    @parent.setter
    def parent(self, value):
        """Set the Node parent, using the python object, its id or the blender node.

        Args:
            value (Node|str|bpy.types.Node): Node, id or blender node to set as parent.
        """

        # Node object case
        if isinstance(value, Node):
            self._parent = value

        # Node id case
        elif isinstance(value, str) and value.startswith('0x'):
            for node in self.tree.nodes:
                if node.id == value:
                    self._parent = node
            else:
                print('Cannot find parent')

        # blender node case
        elif isinstance(value, bpy.types.Node):
            for node in self.tree.nodes:
                if node.bl_node == value:
                    self._parent = node

        if self._parent:
            self.bl_node.parent = self._parent.bl_node

    @classmethod
    def from_blender_node(cls, bl_node, tree):
        """Instantiate an abstraction class based on the blender node idname.

        Args:
            bl_node (bpy.types.Node): Blender Node to create the abstraction from.
            tree (NodeTree): Node tree object the node belongs to.

        Returns:
            Node: Node abstraction according to the blender node type.
        """
        if bl_node.bl_idname == 'CompositorNodeRLayers':
            return RenderLayersNode(bl_node, tree)

        elif bl_node.bl_idname == 'CompositorNodeValToRGB':
            return ColorRampNode(bl_node, tree)

        else:
            return cls(bl_node, tree)

    @classmethod
    def from_dict(cls, data, tree):
        """Create all nodes from their dict representation.

        Args:
            data (dict): dict nodes representation.
            tree (Tree): blender node tree abstraction.

        Returns:
            Node: Created abstract node.
        """

        new_bl_node = tree.bl_node_tree.nodes.new(type=data['bl_idname'])
        node = cls.from_blender_node(new_bl_node, tree)

        node.id = data['id']
        for p in node.parameters:
            setattr(node, p, data[p])

            # set the attribute on the blender node only if the correct type is retrieved
            if p not in ('parent', 'scene'):
                setattr(node.bl_node, p, getattr(node, p))

        node.inputs = [Input.from_dict(ipt_data, node) for ipt_data in data['inputs'].values()]
        node.outputs = [Output.from_dict(opt_data, node) for opt_data in data['outputs'].values()]
        return node

    def dump(self):
        """Export the current Node to its dict representation.

        Returns:
            dict: Node dict representation.
        """

        for prop_id in self.parameters:

            if not hasattr(self, prop_id):
                continue

            attr_value = getattr(self, prop_id)
            if attr_value is None:
                attr_value = None

            elif isinstance(attr_value, Node):
                attr_value = attr_value.id

            elif isinstance(attr_value, (Color, Vector)):
                attr_value = list(attr_value)

            self.data[prop_id] = attr_value

        self.data['id'] = self.id
        self.data['inputs'] = {ipt.id: ipt.dump() for ipt in self.inputs}
        self.data['outputs'] = {opt.id: opt.dump() for opt in self.outputs}

        return self.data


class RenderLayersNode(Node):
    """Blender Render Layers Node abstraction."""

    @property
    def scene(self):
        """Get the name of the scene used by the node.

        Returns:
            str: scene name.
        """
        if self._scene:
            return self._scene.name

    @scene.setter
    def scene(self, value):
        """Set the blender scene using the bpy Scene object or its name.

        Args:
            value (str|bpy.types.Scene): scene name or scene object to set on the node.
        """
        if isinstance(value, str):
            self._scene = bpy.data.scenes[value]

        elif isinstance(value, bpy.types.Scene):
            self._scene = value

        if self._scene:
            self.bl_node.scene = self._scene


class Link:
    """Blender Link abstraction."""

    def __init__(self, bl_link, parent):

        self.bl_link = bl_link
        self.tree = parent
        self.id = hex(id(self.bl_link))

        self.input = self.bl_link.to_socket
        self.output = self.bl_link.from_socket

        self.data = {}

    def dump(self):

        self.data['id'] = self.id

        return self.data
@@ -0,0 +1,69 @@
import json
from pathlib import Path

from .node import Node, Link


class NodeTree:
    """Blender node tree abstraction."""

    def __init__(self, bl_node_tree):

        self.bl_node_tree = bl_node_tree

        self.data = {}

        self.links = [Link(lnk, parent=self) for lnk in self.bl_node_tree.links]
        self.nodes = []
        for n in self.bl_node_tree.nodes:
            self.nodes.append(Node.from_blender_node(n, self))

    def dump(self, select_only=False):
        """Convert all blender nodes and links inside the tree into a dictionary.

        Args:
            select_only (bool, optional): True to convert only selected nodes.
                Defaults to False.

        Returns:
            dict: Nodes and links as dict.
        """
        self.data['nodes'] = {n.id: n.dump() for n in self.nodes if not select_only or (select_only and n.select)}
        self.data['links'] = [l.id for l in self.links]

        return self.data

    def load(self, data):
        """From a Tree dict representation, create new nodes with their attributes.
        Then build a connection dict by comparing the link ids found on the inputs and outputs of each node.
        Use this dict to link the nodes to each other.

        Args:
            data (dict): Tree dict representation to generate nodes and links from.
        """

        connections = {}

        self.data = data

        for node_id, node_data in self.data['nodes'].items():

            new_node = Node.from_dict(node_data, self)
            self.nodes.append(new_node)

            new_node.bl_node.select = True

            for ipt in new_node.inputs:
                if ipt.is_linked:
                    connections.setdefault(ipt.link, {})['input'] = ipt.bl_input

            for opt in new_node.outputs:
                if opt.is_linked:
                    for link in opt.link:
                        connections.setdefault(link, {})['output'] = opt.bl_output

        for link_id in self.data['links']:
            ipt = connections[link_id]['input']
            opt = connections[link_id]['output']

            self.bl_node_tree.links.new(ipt, opt)
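For context, a minimal usage sketch of the NodeTree abstraction above. The target scene name, the use of the compositor tree, and the JSON round-trip are assumptions for illustration, not part of the commit, and not every dumped value is guaranteed to be JSON-serializable:

# Hypothetical round-trip using the NodeTree abstraction above (assumed context).
import json
import bpy

tree = NodeTree(bpy.context.scene.node_tree)            # wrap the compositor tree (assumed)
data = tree.dump(select_only=True)                      # selected nodes + links as plain dicts
text = json.dumps(data)                                 # e.g. for a clipboard or a .json file

other = NodeTree(bpy.data.scenes['Target'].node_tree)   # 'Target' scene is an assumption
other.load(json.loads(text))                            # recreates the nodes and relinks sockets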
@@ -0,0 +1,110 @@


class Socket:

    def __init__(self, bl_socket, tree):

        self.tree = tree
        self.bl_socket = bl_socket
        self.data = {}

        self.id = hex(id(bl_socket))
        self.identifier = bl_socket.identifier
        self.is_linked = bl_socket.is_linked

        self._value = None

        if hasattr(bl_socket, 'default_value'):
            self._value = bl_socket.default_value

    @property
    def value(self):

        if not isinstance(self._value, (str, int, float, bool)):
            self._value = [v for v in self._value]

        return self._value

    @value.setter
    def value(self, v):
        self.bl_socket.default_value = v
        self._value = v
        return self._value

    def to_dict(self):
        self.data['id'] = self.id
        self.data['value'] = self.value
        self.data['identifier'] = self.identifier
        self.data['is_linked'] = self.is_linked
        self.data['link'] = self.get_link()
        return self.data


class Input(Socket):

    def __init__(self, bl_input, tree):
        super().__init__(bl_input, tree)

        self.bl_input = bl_input

    @classmethod
    def from_dict(cls, data, node):

        for bl_ipt in node.bl_node.inputs:

            if bl_ipt.identifier != data['identifier']:
                continue

            new_ipt = cls(bl_ipt, node.tree)

            for k, v in data.items():
                setattr(new_ipt, k, v)

            return new_ipt

    def get_link(self):

        if not self.is_linked:
            return None

        for ipt_link in self.bl_input.links:
            for tree_link in self.tree.links:
                if ipt_link == tree_link.bl_link:
                    return tree_link.id


class Output(Socket):

    def __init__(self, bl_output, tree):
        super().__init__(bl_output, tree)

        self.bl_output = bl_output

    @classmethod
    def from_dict(cls, data, node):

        for bl_opt in node.bl_node.outputs:

            if bl_opt.identifier != data['identifier']:
                continue

            new_opt = cls(bl_opt, node.tree)

            for k, v in data.items():
                setattr(new_opt, k, v)

            return new_opt

    def get_link(self):

        links = []

        if not self.is_linked:
            return None

        for opt_link in self.bl_output.links:
            for tree_link in self.tree.links:
                if opt_link == tree_link.bl_link:
                    links.append(tree_link.id)

        return links
@@ -1,70 +1,89 @@
-from __future__ import annotations
+import bpy
+import mathutils
+from pprint import pprint
+import json
+import itertools
 from copy import copy
 from os.path import abspath

-import bpy
-
-from . import utils
-
-
-def get_dumper(bl_object: bpy.types.bpy_struct) -> type[Dumper]:
-    """Get the closest corresponding dumper for a given Blender object using its MRO"""
-    for cls in bl_object.__class__.mro():
-        dumper_map = DumperRegistry().dumper_map
-        if cls in dumper_map:
-            return dumper_map[cls]
-
-    # Fallback to base Dumper if no matches are found
-    return Dumper
-
-
-def dump_nodes(nodes: list[bpy.types.Node]):
-    """Generic recursive dump, convert nodes into a dict"""
-    Dumper.pointers.clear()  # TODO: Bad global
-
-    data = [dump_node(node) for node in nodes]
+
+def get_default(prop):
+    """Get the default value of a bl property"""
+
+    if getattr(prop, 'is_array', False):
+        return list(prop.default_array)
+    elif hasattr(prop, 'default'):
+        return prop.default
+
+
+def get_dumper(bl_object, fallback=None):
+    """Find the right dumper type by checking inheritance"""
+    for dp in dumpers:
+        if isinstance(bl_object, dp.bl_type):
+            return dp
+
+    return fallback or Dumper
+
+
+def get_bl_object(data):
+    """Find the bl object for loading data into it depending on the type and the context"""
+    if data.get('_new', {}).get('type') == 'GeometryNodeTree':
+        return bpy.context.object.modifiers.active.node_group
+
+
+def dump(ob):
+    """Generic Recursive Dump, convert any object into a dict"""
+    Dumper.pointers.clear()
+
+    if isinstance(ob, (list, tuple)):
+        data = [get_dumper(o).dump(o) for o in ob]
+    else:
+        data = get_dumper(ob).dump(ob)

     Dumper.pointers.clear()

     return data


-def dump_node(node: bpy.types.Node):
-    dumper = get_dumper(node)
-    return dumper.dump(node)
-
-
-def load_nodes(data, node_tree):
-    """Load/Dump nodes into a specific node tree"""
-    Dumper.pointers.clear()
-
-    dumper = get_dumper(node_tree)
-    dumper.load(data, node_tree)
+def load(data, bl_object=None):
+    """Generic Load to create an object from a dict"""

     Dumper.pointers.clear()
+    #print(Dumper.pointers)
+
+    if bl_object is None:
+        bl_object = get_bl_object(data)
+
+    dumper = get_dumper(bl_object)
+    dumper.load(data, bl_object)
+
+    Dumper.pointers.clear()
+
+
+def set_attribute(bl_object, attr, value):
+    try:
+        setattr(bl_object, attr, value)
+    except Exception as e:
+        print(e)


 class Dumper:
     pointers = {}
     includes = []
-    excludes = ["rna_type", "bl_rna", "id_data", "depsgraph"]
+    excludes = ["rna_type", "bl_rna", 'id_data', 'depsgraph']

     @classmethod
     def properties(cls, bl_object):
         if cls.includes and not cls.excludes:
             return [bl_object.bl_rna.properties[p] for p in cls.includes]
         else:
-            return [
-                p
-                for p in bl_object.bl_rna.properties
-                if not p.identifier.startswith("bl_")
-                and p.identifier not in cls.excludes
-            ]
+            return [ p for p in bl_object.bl_rna.properties if not
+                p.identifier.startswith('bl_') and p.identifier not in cls.excludes]

     @classmethod
     def new(cls, data):
-        print(f"New not implemented for data {data}")
+        print(f'New not implemented for data {data}')

     @classmethod
     def load(cls, data, bl_object=None):
@@ -74,84 +93,98 @@ class Dumper:
         if bl_object is None:
             return

-        if bl_pointer := data.get("bl_pointer"):
+        #pprint(data)
+        if bl_pointer := data.get('bl_pointer'):
             cls.pointers[bl_pointer] = bl_object

         props = cls.properties(bl_object)
-        for key, value in sorted(
-            data.items(), key=lambda x: props.index(x[0]) if x[0] in props else 0
-        ):
-            if key.startswith("_") or key not in bl_object.bl_rna.properties:
+        for key, value in sorted(data.items(), key=lambda x: props.index(x[0]) if x[0] in props else 0):
+            if key.startswith('_') or key not in bl_object.bl_rna.properties:
                 continue

             prop = bl_object.bl_rna.properties[key]
             attr = getattr(bl_object, key)

-            if prop.type == "COLLECTION":
+            if prop.type == 'COLLECTION':
                 dumper = PropCollection
-                if hasattr(attr, "bl_rna"):
+                if hasattr(attr, 'bl_rna'):
                     bl_type = attr.bl_rna.type_recast()
-                    dumper = PropCollection or get_dumper(bl_type)
+                    dumper = get_dumper(bl_type, fallback=PropCollection)

                 dumper.load(value, attr)
                 continue

-            elif prop.type == "POINTER":
-                if isinstance(value, int):  # It's a pointer
+            elif prop.type == 'POINTER':
+                # if key == 'node_tree':
+                #     print('--------------')
+                #     print(bl_object, value)
+                #     print(cls.pointers)
+
+                if isinstance(value, int): # It's a pointer
                     if value not in cls.pointers:
                         print(bl_object, "not loaded yet", prop)
                     value = cls.pointers[value]

                 elif value is None:
-                    utils.set_bl_attribute(bl_object, key, value)
+                    set_attribute(bl_object, key, value)

                 else:
                     bl_type = prop.fixed_type.bl_rna.type_recast()
                     dumper = get_dumper(bl_type)

                     # If the pointer exist register the pointer then load data
+                    #print('-----', value)
+                    #pointer =
                     if attr is None:
                         attr = dumper.new(value)

                     dumper.load(value, attr)
-                    cls.pointers[value["bl_pointer"]] = attr
+                    #attr = getattr(bl_object, key)
+                    #if not attr:
+                    cls.pointers[value['bl_pointer']] = attr

-                    if hasattr(attr, "update"):
+                    if hasattr(attr, 'update'):
                         attr.update()

                     value = attr

                 if not prop.is_readonly:
-                    utils.set_bl_attribute(bl_object, key, value)
+                    set_attribute(bl_object, key, value)

                 # Some coll needs a manual update like curve mapping
-                if hasattr(attr, "update"):
+                if hasattr(attr, 'update'):
                     attr.update()

             elif not prop.is_readonly:
-                utils.set_bl_attribute(bl_object, key, value)
+                #print(key, value)
+                set_attribute(bl_object, key, value)
                 continue

-        # return bl_object
+        #return bl_object

     @classmethod
     def dump(cls, bl_object):
         if isinstance(bl_object, (str, int, float, dict, list, type(None))):
             return bl_object

+        #print('Dumping object', bl_object)
+
         data = {"bl_pointer": bl_object.as_pointer()}
         cls.pointers[bl_object.as_pointer()] = bl_object

         for prop in cls.properties(bl_object):
             if not hasattr(bl_object, prop.identifier):
-                print(f"{bl_object} has no attribute {prop.identifier}")
+                print(f'{bl_object} has no attribute {prop.identifier}')
                 continue

+            #print(prop.identifier)
+
             value = getattr(bl_object, prop.identifier)

             # Not storing default value
             if prop.identifier not in cls.includes:
-                if (array := getattr(prop, "default_array", None)) and value == array:
+                if (array := getattr(prop, 'default_array', None)) and value == array:
                     continue
                 if isinstance(value, (str, int, float)) and value == prop.default:
                     continue
@@ -159,14 +192,19 @@ class Dumper:
             if getattr(prop, "is_array", False):
                 value = PropArray.dump(value)

-            elif prop.type == "COLLECTION":
+            elif prop.type == 'COLLECTION':
                 value = PropCollection.dump(value)

-            elif prop.type == "POINTER" and value:
+            elif prop.type == 'POINTER' and value:
+                #if prop.identifier == 'image':
+                #    print(bl_object, cls.pointers)
                 if value.as_pointer() in cls.pointers:
                     value = value.as_pointer()
                 else:
+                    # print('Register Pointer', value.as_pointer(), value)
                     cls.pointers[value.as_pointer()] = value
+                    # print(cls.pointers)
+                    # print()
                     dumper = get_dumper(value)
                     value = dumper.dump(value)

@@ -196,6 +234,7 @@ class PropCollection(Dumper):
         # Value cannot be None
         return [v for v in values if v is not None]

+
     @classmethod
     def load(cls, values, coll):
         if not values:
@@ -203,49 +242,53 @@ class PropCollection(Dumper):

         dumper = None

-        if not hasattr(coll, "new"):  # Static collection
+        if not hasattr(coll, 'new'): # Static collection
             for item, value in zip(coll, values):
                 dumper = dumper or get_dumper(item)
                 dumper.load(value, item)

             return

-        new_func = coll.bl_rna.functions["new"]
+        new_func = coll.bl_rna.functions['new']
         for i, value in enumerate(values):
-            if value.get("_new"):
-                params = value["_new"]
+            if value.get('_new'):
+                params = value['_new']
             else:
-                params = {
-                    k: value.get(k, utils.get_bl_default(v))
-                    for k, v in new_func.parameters.items()[:-1]
-                }
+                params = {k: value.get(k, get_default(v)) for k, v in new_func.parameters.items()[:-1]}

             # Replace arg pointer with bl object
             valid_pointers = True
-            for param in coll.bl_rna.functions["new"].parameters:
-                if param.identifier not in params or param.type != "POINTER":
+            for param in coll.bl_rna.functions['new'].parameters:
+                if param.identifier not in params or param.type != 'POINTER':
                     continue

                 pointer_id = params[param.identifier]
                 if bl_object := cls.pointers.get(pointer_id):
                     params[param.identifier] = bl_object
                 else:
-                    print(f"No Pointer found for param {param.identifier} of {coll}")
+                    print(f'No Pointer found for param {param.identifier} of {coll}')
                     valid_pointers = False

             if not valid_pointers:
                 continue

+            #print(param.identifier, cls.pointers[pointer_id])
+
             try:
                 item = coll.new(**params)
             except RuntimeError as e:
+                #print(e, coll.data)
+                #print()
                 try:
                     item = coll[i]
                 except IndexError as e:
+                    #print(e, coll.data)
                     break

             dumper = get_dumper(item)
-            dumper.load(value, item)  # (item, value)
+            dumper.load(value, item)#(item, value)


 class PropArray(Dumper):
@@ -264,25 +307,19 @@ class PropArray(Dumper):

 class NodeSocket(Dumper):
     bl_type = bpy.types.NodeSocket
-    excludes = Dumper.excludes + [
-        "node",
-        "links",
-        "display_shape",
-        "rna_type",
-        "link_limit",
-    ]
+    excludes = Dumper.excludes + ["node", "links", "display_shape", "rna_type", "link_limit"]

     @classmethod
     def dump(cls, socket):
         if socket.is_unavailable:
             return None

-        # cls.pointers[socket.as_pointer()] = socket
+        #cls.pointers[socket.as_pointer()] = socket

         data = super().dump(socket)

-        # data["_id"] = socket.as_pointer()
-        # data.pop('name', '')
+        #data["_id"] = socket.as_pointer()
+        #data.pop('name', '')

         return data

@@ -302,12 +339,11 @@ class NodeLink(Dumper):

     @classmethod
     def dump(cls, link):
-        return {
-            "_new": {
-                "input": link.from_socket.as_pointer(),
-                "output": link.to_socket.as_pointer(),
-            }
-        }
+        return {"_new": {
+            "input": link.from_socket.as_pointer(),
+            "output": link.to_socket.as_pointer()
+            }
+        }


 class NodeTreeInterfaceSocket(Dumper):
@@ -316,31 +352,34 @@ class NodeTreeInterfaceSocket(Dumper):

     @classmethod
     def dump(cls, socket):
-        # cls.pointers[socket.as_pointer()] = socket
+        #cls.pointers[socket.as_pointer()] = socket

         data = super().dump(socket)
-        # data["_id"] = socket.as_pointer()
+        #data["_id"] = socket.as_pointer()

-        data["_new"] = {"name": data.get("name", "")}
+        data['_new'] = {"name": data.get('name', '')}

-        if socket.item_type == "SOCKET":
-            data["_new"]["in_out"] = socket.in_out
+        if socket.item_type == 'SOCKET':
+            data['_new']["in_out"] = socket.in_out

         # It's a real panel not the interface root
         if socket.parent.parent:
-            data["parent"] = socket.parent.as_pointer()
+            data['parent'] = socket.parent.as_pointer()

         return data


 class NodeSockets(PropCollection):

     @classmethod
     def load(cls, values, coll):
-        # return
+        #return

         node_sockets = [s for s in coll if not s.is_unavailable]
         for socket, value in zip(node_sockets, values):
-            cls.pointers[value["bl_pointer"]] = socket
+            cls.pointers[value['bl_pointer']] = socket
             Dumper.load(value, socket)
             # for k, v in value.items():
             #     if k not in socket.bl_rna.properties:
@@ -380,27 +419,20 @@ class NodeOutputs(NodeSockets):

 class Node(Dumper):
     bl_type = bpy.types.Node
-    excludes = Dumper.excludes + [
-        "dimensions",
-        "height",
-        "internal_links",
-        "paired_output",
-    ]
+    excludes = Dumper.excludes + ["dimensions", "height", "internal_links", "paired_output"]

     @classmethod
     def dump(cls, node=None):
-        # cls.pointers[node.as_pointer()] = node
+        #cls.pointers[node.as_pointer()] = node

         data = super().dump(node)
-        # data["_id"] = node.as_pointer()
-        data["_new"] = {
-            "type": node.bl_rna.identifier
-        }  # 'node_tree': node.id_data.as_pointer()
+        #data["_id"] = node.as_pointer()
+        data["_new"] = {"type": node.bl_rna.identifier} # 'node_tree': node.id_data.as_pointer()

         if paired_output := getattr(node, "paired_output", None):
             data["_pair_with_output"] = paired_output.as_pointer()

-        # if node.parent:
+        #if node.parent:
         #     data['location'] -= Vector()node.parent.location

         return data
@@ -409,15 +441,15 @@ class Node(Dumper):
     def load(cls, data, node):
         if node is None:
             return
-        # cls.pointers[data['bl_pointer']] = node
+        #cls.pointers[data['bl_pointer']] = node

-        inputs = copy(data.pop("inputs", []))
-        outputs = copy(data.pop("outputs", []))
+        inputs = copy(data.pop('inputs', []))
+        outputs = copy(data.pop('outputs', []))

         super().load(data, node)

-        data["inputs"] = inputs
-        data["outputs"] = outputs
+        data['inputs'] = inputs
+        data['outputs'] = outputs

         # Loading input and outputs after the properties
         super().load({"inputs": inputs, "outputs": outputs}, node)
@@ -425,7 +457,7 @@ class Node(Dumper):
         if node.parent:
             node.location += node.parent.location

-        # if node.type != 'FRAME':
+        #if node.type != 'FRAME':
         #     node.location.y -= 500

@@ -440,19 +472,20 @@ class NodeTreeInterface(Dumper):

     @classmethod
     def load(cls, data, interface):
-        print("Load Interface")
-
-        for value in data.get("items_tree", []):
-            item_type = value.get("item_type", "SOCKET")
-            if item_type == "SOCKET":
-                item = interface.new_socket(**value["_new"])
-            elif item_type == "PANEL":
-                # print(value['_new'])
-                item = interface.new_panel(**value["_new"])
+        print('Load Interface')
+        for value in data.get('items_tree', []):
+            item_type = value.get('item_type', 'SOCKET')
+            if item_type == 'SOCKET':
+                item = interface.new_socket(**value['_new'])
+            elif item_type == 'PANEL':
+                #print(value['_new'])
+                item = interface.new_panel(**value['_new'])

             NodeTreeInterfaceSocket.load(value, item)

-        interface.active_index = data.get("active_index", 0)
+        interface.active_index = data.get('active_index', 0)


 class Nodes(PropCollection):
@@ -464,14 +497,13 @@ class Nodes(PropCollection):

         # Pair zone input and output
         for node_data in values:
-            if paired_output_id := node_data.get("_pair_with_output", None):
-                node = cls.pointers[node_data["bl_pointer"]]
+            if paired_output_id := node_data.get('_pair_with_output', None):
+                node = cls.pointers[node_data['bl_pointer']]
                 node.pair_with_output(cls.pointers[paired_output_id])

-                Dumper.load(
-                    {"inputs": node_data["inputs"], "outputs": node_data["outputs"]},
-                    node,
-                )
+                #print(node, node_data['outputs'])
+
+                Dumper.load({"inputs": node_data['inputs'], "outputs": node_data['outputs']}, node)


 class NodeTree(Dumper):
@@ -481,42 +513,32 @@ class NodeTree(Dumper):

     @classmethod
     def new(cls, data):
-        if link := data.get("_link"):
-            with bpy.data.libraries.load(link["filepath"], link=True) as (
-                data_from,
-                data_to,
-            ):
-                setattr(data_to, link["data_type"], [link["name"]])
-            return getattr(data_to, link["data_type"])[0]
+        if link := data.get('_link'):
+            with bpy.data.libraries.load(link['filepath'], link=True) as (data_from, data_to):
+                setattr(data_to, link['data_type'], [link['name']])
+            return getattr(data_to, link['data_type'])[0]

         return bpy.data.node_groups.new(**data["_new"])

     @classmethod
     def dump(cls, node_tree):
         if node_tree.library:
-            data = {"bl_pointer": node_tree.as_pointer()}
-            filepath = abspath(
-                bpy.path.abspath(
-                    node_tree.library.filepath, library=node_tree.library.library
-                )
-            )
-            data["_link"] = {
-                "filepath": filepath,
-                "data_type": "node_groups",
-                "name": node_tree.name,
-            }
+            data = {'bl_pointer': node_tree.as_pointer()}
+            filepath = abspath(bpy.path.abspath(node_tree.library.filepath, library=node_tree.library.library))
+            data["_link"] = {"filepath": filepath, "data_type": 'node_groups', 'name': node_tree.name}
         else:
             data = super().dump(node_tree)
-            data["_new"] = {"type": node_tree.bl_rna.identifier, "name": node_tree.name}
+            data["_new"] = {"type": node_tree.bl_rna.identifier, 'name': node_tree.name}

         return data


 class Points(PropCollection):

     @classmethod
     def load(cls, values, coll):
-        new_func = coll.bl_rna.functions["new"]
-        params = {k: utils.get_bl_default(v) + 1.1 for k, v in new_func.parameters.items()[:-1]}
+        new_func = coll.bl_rna.functions['new']
+        params = {k: get_default(v)+1.1 for k, v in new_func.parameters.items()[:-1]}

         # Match the same number of elements in collection
         if len(values) > len(coll):
@@ -525,8 +547,8 @@ class Points(PropCollection):

         for i, value in enumerate(values):
             Dumper.load(value, coll[i])
-            # for k, v in value.items():
-            #     setattr(coll[i], k, v)
+            #for k, v in value.items():
+            #setattr(coll[i], k, v)


 class CurveMapPoints(Points):
@@ -561,7 +583,7 @@ class AOVs(PropCollection):
     @classmethod
     def load(cls, values, coll):
         for value in values:
-            aov = coll.get(value["name"])
+            aov = coll.get(value['name'])

             if not aov:
                 aov = coll.add()
@@ -573,7 +595,7 @@ class Image(Dumper):
     bl_type = bpy.types.Image

     excludes = []
-    includes = ["name", "filepath"]
+    includes = ['name', 'filepath']

     @classmethod
     def new(cls, data):
@@ -583,20 +605,20 @@ class Image(Dumper):
         # if image is None:
         #     image = bpy.data.images.load(data['filepath'])

-        return bpy.data.images.load(data["filepath"], check_existing=True)
+        return bpy.data.images.load(data['filepath'], check_existing=True)


 class Material(Dumper):
     bl_type = bpy.types.Material

-    excludes = Dumper.excludes + ["preview", "original"]
+    excludes = Dumper.excludes + ['preview', "original"]

     @classmethod
     def new(cls, data):
-        material = bpy.data.materials.get(data.get("name", ""))
+        material = bpy.data.materials.get(data.get('name', ''))

         if material is None:
-            material = bpy.data.materials.new(data["name"])
+            material = bpy.data.materials.new(data['name'])

         return material

@@ -604,25 +626,26 @@ class Material(Dumper):
 class Object(Dumper):
     bl_type = bpy.types.Object
     excludes = []
-    includes = ["name"]
+    includes = ['name']

     @classmethod
     def new(cls, data):
-        if name := data.get("name"):
+        if name := data.get('name'):
             return bpy.data.objects.get(name)


 class Scene(Dumper):
     bl_type = bpy.types.Scene
     excludes = []
-    includes = ["name"]
+    includes = ['name']

     @classmethod
     def new(cls, data):
-        if scene := bpy.data.scenes.get(data.get("name", "")):
+        if scene := bpy.data.scenes.get(data.get('name', '')):
             return scene

-        return bpy.data.scenes.new(name=data.get("name", ""))
+        return bpy.data.scenes.new(name=data.get('name', ''))

     """
     @classmethod
@@ -638,15 +661,14 @@ class Scene(Dumper):
        }
     """


 class Collection(Dumper):
     bl_type = bpy.types.Collection
-    includes = ["name"]
+    includes = ['name']
     excludes = []

     @classmethod
     def new(cls, data):
-        if name := data.get("name"):
+        if name := data.get('name'):
             return bpy.data.collections.get(name)

     # @classmethod
@@ -660,15 +682,16 @@ class Collection(Dumper):
 class CompositorNodeRLayers(Node):
     bl_type = bpy.types.CompositorNodeRLayers

-    excludes = Dumper.excludes + ["scene"]
+    excludes = Dumper.excludes + ['scene']

     @classmethod
     def load(cls, data, node):
-        # print('load CompositorNodeRLayers')
-        scene_data = data.pop("scene")
-        # print(scene_data)
-        layer = data.pop("layer")
+        #print('load CompositorNodeRLayers')
+        scene_data = data.pop('scene')
+        #print(scene_data)
+        layer = data.pop('layer')
         scene = Scene.new(scene_data)
         Scene.load(scene_data, scene)

@@ -680,37 +703,36 @@ class CompositorNodeRLayers(Node):
         # Resetter the view_layer because it might have been created
         # with the scene attr in the dictionnary and nor available yet

-        # print(bpy.)
+        #print(bpy.)


     @classmethod
     def dump(cls, node):
         # Add scene and viewlayer passes
         data = super().dump(node)

-        # if
+        #if
         view_layer = node.scene.view_layers[node.layer]
         view_layer_data = ViewLayer.dump(view_layer)

-        """
+        '''
         view_layer_data = {
             "name": view_layer.name}
         properties = {p.name: p for p in view_layer.bl_rna.properties}
         for prop in view_layer.bl_rna:
             if prop.identifier.startswith('use_pass'):
                 view_layer_data[prop.identifier]
-        """
+        '''

-        # cls.pointers[bl_object.as_pointer()] = bl_object
+        #cls.pointers[bl_object.as_pointer()] = bl_object

-        data["scene"] = {
-            "bl_pointer": node.scene.as_pointer(),
-            "name": node.scene.name,
-            "render": {
-                "bl_pointer": node.scene.render.as_pointer(),
-                "engine": node.scene.render.engine,
-            },
-            "view_layers": [view_layer_data],
-        }
+        data['scene'] = {
+            'bl_pointer': node.scene.as_pointer(),
+            'name': node.scene.name,
+            'render' : {'bl_pointer': node.scene.render.as_pointer(), "engine": node.scene.render.engine},
+            'view_layers': [view_layer_data]
+        }

         return data
@@ -718,16 +740,9 @@ class CompositorNodeRLayers(Node):

 class ViewLayer(Dumper):
     bl_type = bpy.types.ViewLayer
-    excludes = Dumper.excludes + [
-        "freestyle_settings",
-        "eevee",
-        "cycles",
-        "active_layer_collection",
-        "active_aov",
-        "active_lightgroup_index",
-        "active_lightgroup",
-    ]
-    # includes = ['name']
+    excludes = Dumper.excludes + ['freestyle_settings', 'eevee', 'cycles', 'active_layer_collection',
+        'active_aov', 'active_lightgroup_index', 'active_lightgroup']
+    #includes = ['name']


 class ViewLayers(PropCollection):
@@ -735,29 +750,41 @@ class ViewLayers(PropCollection):

     @classmethod
     def load(cls, values, coll):
-        # print('LOAD VIEWLAYERS', values)
+        #print('LOAD VIEWLAYERS', values)
         for value in values:
-            view_layer = coll.get(value["name"])
+            view_layer = coll.get(value['name'])

             if view_layer is None:
-                view_layer = coll.new(value["name"])
+                view_layer = coll.new(value['name'])

             Dumper.load(value, view_layer)


-class DumperRegistry:
-    """Singleton-like class that holds a map of all parsers, constructed on first instantiation"""
-    dumper_map = None
-
-    def __init__(self):
-        if self.dumper_map is None:
-            self.construct_dumper_map()
-
-    @classmethod
-    def construct_dumper_map(cls):
-        cls.dumper_map = {}
-
-        for subclass in utils.all_subclasses(Dumper):
-            assert hasattr(subclass, "bl_type")
-            cls.dumper_map[subclass.bl_type] = subclass
-        print(cls.dumper_map)
+dumpers = [
+    CompositorNodeRLayers,
+    CompositorNodeGlare,
+    Node,
+    NodeSocket,
+    NodeTree,
+    NodeLink,
+    NodeTreeInterface,
+    NodeTreeInterfaceSocket,
+    NodeGeometryRepeatOutputItems,
+    Image,
+    Material,
+    Object,
+    Scene,
+    Collection,
+    ViewLayer,
+    CurveMapPoints,
+    ColorRampElements,
+    NodeInputs,
+    NodeOutputs,
+    Nodes,
+    ViewLayers,
+    PropCollection,
+    AOVs,
+    PropArray,
+    CompositorNodeOutputFileLayerSlots,
+    CompositorNodeOutputFileFileSlots,
+]
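For orientation, a minimal sketch of how the generic entry points on the new side (dump()/load()) are meant to be used. The node-group names are assumptions, and serializability of every dumped value is not guaranteed here:

# Hypothetical round-trip through the generic dumper (assumed names).
import json
import bpy

source = bpy.data.node_groups['MyNodeTree']          # existing group, name is an assumption
data = dump(source)                                  # bl object -> plain dict (pointers as ints)
text = json.dumps(data)                              # e.g. for a clipboard or a .json file

target = bpy.data.node_groups.new('MyNodeTree.copy', source.bl_rna.identifier)
load(data, target)                                   # dict -> attributes, nodes, links, interface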
@@ -1,18 +1,18 @@

 import bpy
 import re


 def clean_name(name):
-    if re.match(r"(.*)\.\d{3}$", name):
+    if re.match(r'(.*)\.\d{3}$', name):
         return name[:-4]
     return name


 def is_node_groups_duplicate(node_groups):
     node_group_types = sorted([n.type for n in node_groups[0].nodes])
-    return all(
-        sorted([n.type for n in ng.nodes]) == node_group_types for ng in node_groups[1:]
-    )
+    return all( sorted([n.type for n in ng.nodes]) ==
+        node_group_types for ng in node_groups[1:])


 def remap_node_group_duplicates(nodes=None, force=False):
@@ -41,7 +41,7 @@ def remap_node_group_duplicates(nodes=None, force=False):
             continue

         if not force:
-            node_groups.sort(key=lambda x: x.name, reverse=True)
+            node_groups.sort(key=lambda x : x.name, reverse=True)

         print(node_groups)

@@ -50,13 +50,11 @@ def remap_node_group_duplicates(nodes=None, force=False):

         if not is_duplicate and not force:
             failed.append((node_group.name, node_groups[0].name))
-            print(
-                f"Cannot merge Nodegroup {node_group.name} with {node_groups[0].name} they are different"
-            )
+            print(f'Cannot merge Nodegroup {node_group.name} with {node_groups[0].name} they are different')
             continue

         merged.append((node_group.name, node_groups[0].name))
-        print(f"Merge Nodegroup {node_group.name} into {node_groups[0].name}")
+        print(f'Merge Nodegroup {node_group.name} into {node_groups[0].name}')

         node_group.user_remap(node_groups[0])
         bpy.data.node_groups.remove(node_group)
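As a usage note (an assumption based on the signature above, not documented in the commit), the deduplication helper can be run from Blender's Python console over all node groups, or with force enabled to merge by cleaned name even when the group contents are not exact type-for-type duplicates:

# Hypothetical calls to remap_node_group_duplicates() (sketch).
remap_node_group_duplicates()             # scan every node group, only merge exact duplicates
remap_node_group_duplicates(force=True)   # merge by clean name even if the contents differ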
@ -1,17 +1,16 @@
|
||||||
import bpy
|
import bpy
|
||||||
|
|
||||||
|
|
||||||
def set_params(src, tgt, mod_to_node=True, org_modifier=None):
|
def set_params(src, tgt, mod_to_node=True, org_modifier=None):
|
||||||
# mod to node: est-ce qu'on copie les valeurs d'un modifier a une node, ou l'inverse
|
# mod to node: est-ce qu'on copie les valeurs d'un modifier a une node, ou l'inverse
|
||||||
if mod_to_node: # syntax for node and modifier are slightly different
|
if mod_to_node: # syntax for node and modifier are slightly different
|
||||||
tree = src.node_group.interface.items_tree
|
tree = src.node_group.interface.items_tree
|
||||||
else:
|
else:
|
||||||
tree = src.node_tree.interface.items_tree
|
tree = src.node_tree.interface.items_tree
|
||||||
|
|
||||||
for param in tree:
|
for param in tree:
|
||||||
if param.socket_type == "NodeSocketGeometry":
|
if param.socket_type == 'NodeSocketGeometry':
|
||||||
continue
|
continue
|
||||||
if param.in_out == "OUTPUT":
|
if param.in_out == 'OUTPUT':
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# seulement en extract mode, src est une node donc on check si des parametres sont dans le modifier
|
# seulement en extract mode, src est une node donc on check si des parametres sont dans le modifier
|
||||||
|
@ -27,23 +26,19 @@ def set_params(src, tgt, mod_to_node=True, org_modifier=None):
|
||||||
else:
|
else:
|
||||||
tgt[identifier] = src.inputs[identifier].default_value
|
tgt[identifier] = src.inputs[identifier].default_value
|
||||||
|
|
||||||
|
|
||||||
def set_group_inputs(target, objects, group):
|
def set_group_inputs(target, objects, group):
|
||||||
mod = target.modifiers[0]
|
mod = target.modifiers[0]
|
||||||
node_dct = {} # used for cleanup
|
node_dct = {} # used for cleanup
|
||||||
for key, inp in get_node_inputs(objects).items():
|
for key, inp in get_node_inputs(objects).items():
|
||||||
# add the socket to the node group / modifier pannel
|
# add the socket to the node group / modifier pannel
|
||||||
sock = group.interface.new_socket(
|
sock = group.interface.new_socket(inp["label"],in_out="INPUT",socket_type=inp["socket"])
|
||||||
inp["label"], in_out="INPUT", socket_type=inp["socket"]
|
|
||||||
)
|
|
||||||
mod[sock.identifier] = inp["data"]
|
mod[sock.identifier] = inp["data"]
|
||||||
|
|
||||||
# inspect all nodes and add a group input node when that socket is used
|
# inspect all nodes and add a group input node when that socket is used
|
||||||
for node in parse_nodes(objects):
|
for node in parse_nodes(objects):
|
||||||
for param in node.node_tree.interface.items_tree:
|
for param in node.node_tree.interface.items_tree:
|
||||||
nkey = get_input_socket_key(node, param)
|
nkey = get_input_socket_key(node, param)
|
||||||
if not nkey:
|
if not nkey: continue
|
||||||
continue
|
|
||||||
if nkey == key:
|
if nkey == key:
|
||||||
input_node = add_input_node(group, node, param.identifier, sock)
|
input_node = add_input_node(group, node, param.identifier, sock)
|
||||||
|
|
||||||
|
@ -58,13 +53,12 @@ def set_group_inputs(target, objects, group):
|
||||||
node_dct[node].append(input_node)
|
node_dct[node].append(input_node)
|
||||||
|
|
||||||
# on refait la meme chose pour les object info nodes car leur syntaxe est un peu differente
|
# on refait la meme chose pour les object info nodes car leur syntaxe est un peu differente
|
||||||
-    for node in parse_nodes(objects, type="OBJECT_INFO"):
+    for node in parse_nodes(objects, type = "OBJECT_INFO"):
         nkey = get_input_socket_key(node, param)
-        if not nkey:
-            continue
+        if not nkey: continue
         if nkey == key:
-            input_node = add_input_node(group, node, "Object", sock)
-            node.inputs["Object"].default_value = None
+            input_node = add_input_node(group, node, 'Object', sock)
+            node.inputs['Object'].default_value = None

             # add to dict for cleanup
             if not node in node_dct.keys():
@@ -78,7 +72,6 @@ def set_group_inputs(target, objects, group):
             input_node.location[1] += 50 * offset
             hide_sockets(input_node)


 def get_node_link_value(node, param_name, org_mod):
     if not org_mod:
         return
@@ -89,25 +82,20 @@ def get_node_link_value(node, param_name, org_mod):

     return org_mod[socket_id]


 def get_geo_socket(node, input=True):
     if node.type != "GROUP":
-        return "Geometry"
+        return('Geometry')
     for param in node.node_tree.interface.items_tree:
-        if param.socket_type != "NodeSocketGeometry":
+        if param.socket_type != 'NodeSocketGeometry':
             continue
-        if input and param.in_out == "INPUT":
-            return param.identifier
-        if not input and param.in_out == "OUTPUT":
-            return param.identifier
+        if input and param.in_out == 'INPUT' : return param.identifier
+        if not input and param.in_out == 'OUTPUT' : return param.identifier
     return None


 def get_input_socket_key(node, param):
     if node.type == "GROUP":
-        if param.in_out != "INPUT":
+        if param.in_out != 'INPUT':
             return False
-        if not param.socket_type in ["NodeSocketObject", "NodeSocketCollection"]:
+        if not param.socket_type in ['NodeSocketObject','NodeSocketCollection']:
             return False
         tgt = node.inputs[param.identifier].default_value

@@ -116,12 +104,11 @@ def get_input_socket_key(node, param):
         return f"{param.socket_type[10:][:3]} {tgt.name}"

     if node.type == "OBJECT_INFO":
-        tgt = node.inputs["Object"].default_value
+        tgt = node.inputs['Object'].default_value
         if not tgt:
             return False
         return f"Object {tgt.name}"


 def get_node_inputs(combined_nodes):
     # inputs["Col COL.name"] = {name = COL.name, data = COL, socket = "COLLECTION"}
     # inputs["Obj OBJ.name"] = {name = OBJ.name, data = OBJ, socket = "OBJECT"}
@@ -132,28 +119,17 @@ def get_node_inputs(combined_nodes):
             if not key:
                 continue
             tgt = node.inputs[param.identifier].default_value
-            inputs[key] = {
-                "name": tgt.name,
-                "data": tgt,
-                "label": param.name,
-                "socket": param.socket_type,
-            }
+            inputs[key] = {'name': tgt.name, 'data': tgt, 'label': param.name , 'socket': param.socket_type}

-    for node in parse_nodes(combined_nodes, type="OBJECT_INFO"):
+    for node in parse_nodes(combined_nodes, type = "OBJECT_INFO"):
         key = get_input_socket_key(node, None)
         if not key:
             continue
-        tgt = node.inputs["Object"].default_value
-        inputs[key] = {
-            "name": tgt.name,
-            "data": tgt,
-            "label": "Source OB",
-            "socket": "NodeSocketObject",
-        }
+        tgt = node.inputs['Object'].default_value
+        inputs[key] = {'name': tgt.name, 'data': tgt, 'label': 'Source OB' , 'socket': "NodeSocketObject"}

     return inputs


 def get_node_bounds(objects, mode=0, x=0, y=0):
     min_x = min_y = 10000000
     max_x = max_y = 0
@@ -161,14 +137,13 @@ def get_node_bounds(objects, mode=0, x=0, y=0):
     for ob in objects:
         for node in ob:
             co = node.location
-            min_x = min(co[0], min_x)
-            max_x = max(co[0], max_x)
+            min_x = min(co[0],min_x)
+            max_x = max(co[0],max_x)

-            min_y = min(co[1], min_y)
-            max_y = max(co[1], max_y)
+            min_y = min(co[1],min_y)
+            max_y = max(co[1],max_y)
     if mode == 0:
-        return [max_x + x, (min_y + max_y) / 2]
+        return([max_x+x, (min_y+max_y)/2 ])


 def get_collection(name):
     scn = bpy.context.scene
@@ -177,29 +152,23 @@ def get_collection(name):

     # look for existing
     for c in bpy.data.collections:
-        if c.name == name:
-            col = c
+        if c.name == name: col = c

     # create if needed
-    if not col:
-        col = bpy.data.collections.new(name)
+    if not col: col = bpy.data.collections.new(name)

     # link to scene if needed
     for c in scn.collection.children_recursive:
-        if c.name == col.name:
-            link = True
+        if c.name == col.name: link = True
     if not link:
         scn.collection.children.link(col)
     return col


 def get_mod_frames(grp):
     frames = []
     for node in grp.nodes:
-        if node.type == "FRAME":
-            frames.append(node)
-    return frames
+        if node.type == "FRAME": frames.append(node)
+    return(frames)


 def get_frame_childrens(frame):
     childrens = []
@@ -214,16 +183,13 @@ def get_frame_childrens(frame):
     childrens = [locs[x] for x in entries]
     return childrens

-def parse_nodes(combined_nodes, type="GROUP"):
+def parse_nodes(combined_nodes, type = "GROUP"):
     nodes = []
     for frame in combined_nodes:
         for node in frame:
-            if node.type == type:
-                nodes.append(node)
+            if node.type == type: nodes.append(node)
     return nodes


 def copy_source_ob(ob, col):
     # does the object have data? if so, create a copy,
     # otherwise return None
@@ -244,8 +210,7 @@ def copy_source_ob(ob, col):
     col.objects.link(new_ob)
     return new_ob

-def hide_sockets(node, collapse=True):
+def hide_sockets(node,collapse = True):
     for socket in node.outputs:
         if not socket.links:
             socket.hide = True
@@ -255,15 +220,14 @@ def hide_sockets(node, collapse=True):
     if collapse:
         node.hide = True


 def add_input_node(group, node, param_id, socket):
-    group_input_node = group.nodes.new("NodeGroupInput")
+    group_input_node = group.nodes.new('NodeGroupInput')
     group_input_node.location = node.location
     group_input_node.location[1] += 70
     group_input_node.label = socket.name
-    group.links.new(group_input_node.outputs[socket.identifier], node.inputs[param_id])
-    return group_input_node
+    group.links.new(group_input_node.outputs[socket.identifier],
+                    node.inputs[param_id])
+    return(group_input_node)

 def add_material_node(ob, group, nodes):
     if not ob.material_slots:
@@ -271,59 +235,54 @@ def add_material_node(ob, group, nodes):
     if not ob.material_slots[0].material:
         return nodes
     last_node = nodes[-1:][0]
-    node = group.nodes.new("GeometryNodeSetMaterial")
-    node.inputs["Material"].default_value = ob.material_slots[0].material
+    node = group.nodes.new('GeometryNodeSetMaterial')
+    node.inputs['Material'].default_value = ob.material_slots[0].material
     node.location = last_node.location
     node.location[0] += 300
     nodes.append(node)
     return nodes


 def join_nodes(group, nodes):
     prev = None
-    for i, node in enumerate(nodes):
+    for i , node in enumerate(nodes):
         if not prev:
             prev = node
             continue
         geo_in = get_geo_socket(node)
-        geo_out = get_geo_socket(prev, input=False)
+        geo_out = get_geo_socket(prev, input = False)

         if not geo_in or not geo_out:
             continue
         group.links.new(prev.outputs[geo_out], node.inputs[geo_in])
         prev = node


 def frame_nodes(group, nodes, ob):
-    nd = group.nodes.new("NodeFrame")
+    nd = group.nodes.new('NodeFrame')
     # frame = nodes.new(type='NodeFrame')
     for n in nodes:
         n.parent = nd

     nd.label = ob.name


 def combine_ob(ob, group, y=0, col=None):
     nodes = []

     # object info node
-    nd = group.nodes.new("GeometryNodeObjectInfo")
+    nd = group.nodes.new('GeometryNodeObjectInfo')
     nd.location[0] -= 300
     nd.location[1] = y * 800
     nd.transform_space = "RELATIVE"
nd.inputs["Object"].default_value = copy_source_ob(
|
nd.inputs['Object'].default_value = copy_source_ob(ob, col) # si l'objet contient des data on crée une copie
|
||||||
ob, col
|
|
||||||
) # si l'objet contient des data on crée une copie
|
|
||||||
     nodes.append(nd)

     # ob modifiers
-    for x, md in enumerate(ob.modifiers):
-        if md.type != "NODES":
+    for x,md in enumerate(ob.modifiers):
+        if md.type != "NODES" :
             print(abordage)
         if md.node_group == group:
             continue

-        nd = group.nodes.new("GeometryNodeGroup")
+        nd = group.nodes.new('GeometryNodeGroup')
         nd.label = md.name
         nd.width = 230
         nd.location[0] = x * 300
@@ -337,7 +296,6 @@ def combine_ob(ob, group, y=0, col=None):
     frame_nodes(group, nodes, ob)
     return nodes


 def gen_target_ob(group, col=None):
     ob = gen_empty_ob(group.name, col=col)
     mod = ob.modifiers.new(group.name, "NODES")
@@ -345,22 +303,20 @@ def gen_target_ob(group, col=None):

     ob.show_name = True
     bpy.context.view_layer.objects.active = ob
-    return ob
+    return(ob)


 def gen_empty_ob(name, col=None):
     scn = bpy.context.scene
     ob = bpy.data.objects.new(name, object_data=bpy.data.meshes.new(name))

     ob.data.materials.append(None)
-    ob.material_slots[0].link = "OBJECT"
+    ob.material_slots[0].link = 'OBJECT'

     if not col:
         scn.collection.objects.link(ob)
     else:
         col.objects.link(ob)
-    return ob
+    return(ob)


 def assign_modifiers(ob, frame, org_modifier):
     for node in get_frame_childrens(frame):
@@ -372,14 +328,11 @@ def assign_modifiers(ob, frame, org_modifier):
         set_params(node, mod, mod_to_node=False, org_modifier=org_modifier)
         mod.node_group.interface_update(bpy.context)


 def join_branches(objects, group):
     # join all trees and add an output node
-    join = group.nodes.new("GeometryNodeJoinGeometry")
-    out = group.nodes.new("NodeGroupOutput")
-    out_sock = group.interface.new_socket(
-        "Geometry", in_out="OUTPUT", socket_type="NodeSocketGeometry"
-    )
+    join = group.nodes.new('GeometryNodeJoinGeometry')
+    out = group.nodes.new('NodeGroupOutput')
+    out_sock = group.interface.new_socket("Geometry",in_out="OUTPUT",socket_type="NodeSocketGeometry")

     loc = get_node_bounds(objects, x=500)
     join.location = loc
@@ -388,52 +341,45 @@ def join_branches(objects, group):

     for ob in objects:
         node = ob[-1:][0]
-        group.links.new(
-            node.outputs[get_geo_socket(node, input=False)],
-            join.inputs[get_geo_socket(join)],
-        )
+        group.links.new(node.outputs[get_geo_socket(node, input=False)],
+                        join.inputs[get_geo_socket(join)])

-    group.links.new(
-        join.outputs[get_geo_socket(join, input=False)], out.inputs[out_sock.identifier]
-    )
+    group.links.new(join.outputs[get_geo_socket(join, input=False)],
+                    out.inputs[out_sock.identifier])

 def gen_extracted_ob(name, frame, col, mod):
     ob = None
     for node in get_frame_childrens(frame):
         if node.type != "OBJECT_INFO":
             continue
-        target = get_node_link_value(node, "Object", mod)
+        target = get_node_link_value(node, 'Object', mod)

         if target:
             ob = target.copy()
             ob.data = ob.data.copy()
             col.objects.link(ob)
-    if not ob:
-        ob = gen_empty_ob(name, col=col)
+    if not ob: ob = gen_empty_ob(name , col = col)

     # assign material
     for node in get_frame_childrens(frame):
         if node.type != "SET_MATERIAL":
             continue
-        ob.material_slots[0].material = node.inputs["Material"].default_value
+        ob.material_slots[0].material = node.inputs['Material'].default_value
     return ob


 def combine_objects(objs):
     name = f"NODEGROUP_combined"
     col = get_collection(name)
-    group = bpy.data.node_groups.new(name=name, type="GeometryNodeTree")
+    group = bpy.data.node_groups.new(name=name, type='GeometryNodeTree')

     objects = []
-    for y, ob in enumerate(objs):
+    for y , ob in enumerate(objs):
         objects.append(combine_ob(ob, group, y=y, col=col))

-    target = gen_target_ob(group, col=col)
+    target = gen_target_ob(group, col = col)
     set_group_inputs(target, objects, group)
     join_branches(objects, group)


 def extract_objects(object):
     mod = object.modifiers[0]
     grp = mod.node_group
@@ -444,9 +390,8 @@ def extract_objects(object):
     ob = gen_extracted_ob(name, frame, col, mod)
     assign_modifiers(ob, frame, mod)

-# combine_objects(bpy.context.selected_objects)
-# extract_objects(bpy.context.active_object)
+#combine_objects(bpy.context.selected_objects)
+#extract_objects(bpy.context.active_object)
"""
|
"""
|
||||||
TODO: extract copier les transform de l'objet original ...
|
TODO: extract copier les transform de l'objet original ...
|
||||||
OK ! combine: si un objet a un materiel on cree un node set material en fin de liste
|
OK ! combine: si un objet a un materiel on cree un node set material en fin de liste
|
|
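For reference, a minimal usage sketch of the two entry points above, following the commented-out calls at the end of the file. It assumes a Blender session where the selected objects carry Geometry Nodes modifier stacks, and that the module is importable as node_kit.pack_nodes as the refactor-side imports suggest:

    import bpy
    from node_kit.pack_nodes import combine_objects, extract_objects

    # Pack the modifier stacks of all selected objects into one combined
    # Geometry Nodes group on a freshly generated target object.
    combine_objects(bpy.context.selected_objects)

    # Reverse direction: rebuild one object per frame of the combined group
    # and reassign the packed modifiers to it.
    extract_objects(bpy.context.active_object)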
@@ -0,0 +1,12 @@
+import platform
+from pathlib import Path
+from os.path import expandvars
+
+
+def get_cache_dir():
+    if platform.system() == 'Linux':
+        return Path(expandvars('$HOME/.cache/blender'))
+    elif platform.system() == 'Darwin':
+        return Path('/Library/Caches/Blender')
+    elif platform.system() == 'Windows':
+        return Path(expandvars(r'%USERPROFILE%\AppData\Local\Blender Foundation\Blender'))
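A short sketch of how the helper above might be used to place add-on data under Blender's per-platform cache directory (the node_kit subfolder is illustrative, not part of the source):

    cache_root = get_cache_dir()                 # e.g. ~/.cache/blender on Linux
    node_kit_cache = cache_root / "node_kit"     # hypothetical subfolder for this add-on
    node_kit_cache.mkdir(parents=True, exist_ok=True)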
formats.py (17 changed lines)
@@ -1,17 +0,0 @@
-import json
-
-
-format_token = "#FMT:NODE_KIT#"
-
-
-def dump_nkit_format(data: str) -> str:
-    return format_token + json.dumps(data)
-
-
-def parse_nkit_format(data: str) -> str | None:
-    if data.startswith(format_token):
-        print(data[len(format_token):])
-        return json.loads(data[len(format_token):])
-
-    return None
-
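The two helpers above define the token-prefixed JSON clipboard format used by the copy/paste operators: dump_nkit_format() prepends "#FMT:NODE_KIT#" to the serialized tree data so that parse_nkit_format() can tell Node Kit content apart from arbitrary clipboard text. A round-trip sketch (the payload values are illustrative):

    payload = {"nodes": [], "links": []}            # whatever dump_nodes() produced
    text = dump_nkit_format(payload)                # '#FMT:NODE_KIT#{"nodes": [], "links": []}'
    assert parse_nkit_format(text) == payload       # token recognised, JSON decoded
    assert parse_nkit_format("plain text") is None  # foreign clipboard content is ignored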
operators.py (223 changed lines)
@@ -1,5 +1,5 @@
 """
-Node Kit Operators
+This module contains all addons operators

 :author: Autour de Minuit
 :maintainers: Florentin LUCE
@@ -14,138 +14,104 @@ import bpy
 from bpy.props import BoolProperty, EnumProperty
 from bpy.types import Operator

-from .dumper import dump_nodes, load_nodes
-from .node_utils import remap_node_group_duplicates
-from .pack_nodes import combine_objects, extract_objects
-from .formats import dump_nkit_format, parse_nkit_format
+#from node_kit.core.node_tree import NodeTree
+from . core.dumper import dump, load
+from .core.node_utils import remap_node_group_duplicates
+from .core.pack_nodes import combine_objects, extract_objects


 class NODEKIT_OT_copy(Operator):
-    bl_idname = "node_kit.copy_nodes"
-    bl_label = "Copy Nodes"
-    bl_description = "Copy nodes to system clipboard"
-    bl_options = {"REGISTER", "UNDO"}
+    bl_idname = 'node_kit.copy_node_tree'
+    bl_label = 'Copy nodes'
+    bl_options = {'REGISTER', 'UNDO'}
+
+    select_only: BoolProperty(default=True)

     def execute(self, context):

         ntree = context.space_data.edit_tree
-        selected_nodes = [node for node in ntree.nodes if node.select]
-
-        ntree_data = {
-            "nodes": dump_nodes(selected_nodes),
-            "links": dump_nodes(
-                [l for l in ntree.links if l.from_node.select and l.to_node.select]
-            ),
-        }
+        if self.select_only:
+            ntree_data = {
+                "nodes" : dump([n for n in ntree.nodes if n.select]) ,#[dump(n) for n in ntree.nodes if n.select],
+                "links" : dump([l for l in ntree.links if l.from_node.select and l.to_node.select])
+            }
+        else:
+            ntree_data = dump(ntree)
+
+        pprint(ntree_data)

-        context.window_manager.clipboard = dump_nkit_format(ntree_data)
+        context.window_manager.clipboard = json.dumps(ntree_data)

-        self.report({"INFO"}, f"Copied {len(selected_nodes)} selected nodes to system clipboard")
-        return {"FINISHED"}
-
-
-class NODEKIT_OT_copy_tree(Operator):
-    bl_idname = "node_kit.copy_node_tree"
-    bl_label = "Copy Node Tree"
-    bl_description = "Copy node tree to system clipboard"
-    bl_options = {"REGISTER", "UNDO"}
-
-    def execute(self, context):
-        ntree = context.space_data.edit_tree
-        ntree_data = dict(ntree)
-
-        context.window_manager.clipboard = dump_nkit_format(ntree_data)
-
-        self.report({"INFO"}, f"Copied {len(ntree.nodes)} selected nodes to system clipboard")
-        return {"FINISHED"}
+        return {'FINISHED'}


 class NODEKIT_OT_paste(Operator):
-    bl_idname = "node_kit.paste_nodes"
-    bl_label = "Paste Nodes"
-    bl_description = "Paste nodes from system clipboard"
-    bl_options = {"REGISTER", "UNDO"}
+    bl_idname = 'node_kit.paste_node_tree'
+    bl_label = 'Paste nodes'

     def execute(self, context):
-        ntree_data = parse_nkit_format(context.window_manager.clipboard)
-        load_nodes(ntree_data, context.space_data.edit_tree)
-
-        self.report({"INFO"}, f"X node(s) pasted from system clipboard")  # TODO: Get the number of parsed nodes returned
-        return {"FINISHED"}
+        ntree_data = json.loads(context.window_manager.clipboard)
+        load(ntree_data, context.space_data.edit_tree)
+
+        return {'FINISHED'}


 class NODEKIT_OT_remap_node_group_duplicates(Operator):
-    bl_idname = "node_kit.remap_node_group_duplicates"
-    bl_label = "Clean Node Groups Duplicates"
-    bl_description = "Remap Node Groups duplicates to the latest imported version"
+    bl_idname = 'node_kit.remap_node_group_duplicates'
+    bl_label = 'Clean nodes'
     bl_options = {"REGISTER", "UNDO"}

-    selection: EnumProperty(
-        items=[(s, s.title(), "") for s in ("ALL", "SELECTED", "CURRENT")],
-        default="CURRENT",
-        name="All Nodes",
-    )
-    force: BoolProperty(
-        default=False,
-        description="Remap nodes even if there are different",
-        name="Force",
-    )
+    selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'CURRENT')], default="CURRENT", name='All Nodes')
+    force : BoolProperty(default=False, description='Remap nodes even if there are different', name='Force')

     def invoke(self, context, event):
         return context.window_manager.invoke_props_dialog(self)

     def execute(self, context):
         nodes = None
-        if self.selection == "SELECTED":
-            nodes = [
-                n.node_tree
-                for n in context.space_data.edit_tree.nodes
-                if n.type == "GROUP" and n.select
-            ]
-        elif self.selection == "ACTIVE":
+        if self.selection == 'SELECTED':
+            nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
+                      if n.type == "GROUP" and n.select]
+        elif self.selection == 'ACTIVE':
             active_node = context.space_data.edit_tree
             nodes = [active_node]

         merged, failed = remap_node_group_duplicates(nodes=nodes, force=self.force)

         if failed and not merged:
-            self.report({"ERROR"}, "No duplicates remapped, Node Group are differents")
+            self.report({"ERROR"}, 'No duplicates remapped, Node Group are differents')
             return {"CANCELLED"}

-        self.report(
-            {"INFO"},
-            f"{len(merged)} Node Groups Remapped, {len(failed)} Node Groups failed",
-        )
+        self.report({"INFO"}, f'{len(merged)} Node Groups Remapped, {len(failed)} Node Groups failed')

-        return {"FINISHED"}
+        return {'FINISHED'}

     def draw(self, context):

         layout = self.layout
         layout.prop(self, "selection", expand=True)
         layout.prop(self, "force")
-        if self.force and self.selection == "CURRENT":
+        if self.force and self.selection == 'CURRENT':
             ntree = context.space_data.edit_tree
-            layout.label(text=f"Remap node {ntree.name} to others")
-        elif self.force and self.selection == "SELECTED":
-            layout.label(text="Selected nodes will override others")
-        elif self.selection == "SELECTED":
-            layout.label(text="Remap last .*** nodes")
-            layout.label(text="Ex: Node.001 will override Node")
-        elif self.selection in ("CURRENT", "ALL"):
-            layout.label(text="Remap last .*** nodes")
-            layout.label(text="Ex: Node.001 will override Node")
+            layout.label(text=f'Remap node {ntree.name} to others')
+        elif self.force and self.selection == 'SELECTED':
+            layout.label(text='Selected nodes will override others')
+        elif self.selection == 'SELECTED':
+            layout.label(text='Remap last .*** nodes')
+            layout.label(text='Ex: Node.001 will override Node')
+        elif self.selection in ('CURRENT', 'ALL'):
+            layout.label(text='Remap last .*** nodes')
+            layout.label(text='Ex: Node.001 will override Node')


 class NODEKIT_OT_update_nodes(Operator):
-    bl_idname = "node_kit.update_nodes"
-    bl_label = "Update Nodes from Library"
-    bl_description = "Overrides node group using the latest version from Asset Library"
+    bl_idname = 'node_kit.update_nodes'
+    bl_label = 'Update node'
     bl_options = {"REGISTER", "UNDO"}

-    selection: EnumProperty(
-        items=[(s, s.title(), "") for s in ("ALL", "SELECTED", "ACTIVE")],
-        default="ACTIVE",
-        name="All Nodes",
-    )
+    selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'ACTIVE')], default="ACTIVE", name='All Nodes')

     def invoke(self, context, event):
         return context.window_manager.invoke_props_dialog(self)
@@ -157,75 +123,61 @@ class NODEKIT_OT_update_nodes(Operator):
         ntree_name = ntree.name
         new_ntree = None

-        if self.selection == "SELECTED":
-            nodes = [
-                n.node_tree
-                for n in context.space_data.edit_tree.nodes
-                if n.type == "GROUP" and n.select
-            ]
-        elif self.selection == "ACTIVE":
+        if self.selection == 'SELECTED':
+            nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
+                      if n.type == "GROUP" and n.select]
+        elif self.selection == 'ACTIVE':
             active_node = context.space_data.edit_tree
             nodes = [active_node]
         else:
             nodes = list(bpy.data.node_groups)

         node_names = set(n.name for n in nodes)
-        # new_node_groups = []
-
-        # print("node_names", node_names)
+        #new_node_groups = []
+
+        #print("node_names", node_names)

         for asset_library in asset_libraries:
             library_path = Path(asset_library.path)
             blend_files = [fp for fp in library_path.glob("**/*.blend") if fp.is_file()]

-            node_groups = list(
-                bpy.data.node_groups
-            )  # Storing original node_geoup to compare with imported
+            node_groups = list(bpy.data.node_groups)# Storing original node_geoup to compare with imported

-            link = asset_library.import_method == "LINK"
+            link = (asset_library.import_method == 'LINK')
             for blend_file in blend_files:
                 print(blend_file)
-                with bpy.data.libraries.load(
-                    str(blend_file), assets_only=True, link=link
-                ) as (data_from, data_to):
+                with bpy.data.libraries.load(str(blend_file), assets_only=True, link=link) as (data_from, data_to):
                     print(data_from.node_groups)
-                    import_node_groups = [
-                        n for n in data_from.node_groups if n in node_names
-                    ]
+                    import_node_groups = [n for n in data_from.node_groups if n in node_names]
                     print("import_node_groups", import_node_groups)
                     data_to.node_groups = import_node_groups

-                    # print(data_from.node_groups)
-                    # print("data_to.node_groups", data_to.node_groups)
+                    #print(data_from.node_groups)
+                    #print("data_to.node_groups", data_to.node_groups)
                     node_names -= set(import_node_groups)  # Store already updated nodes

-                # new_ntree = data_to.node_groups[0]
+                #new_ntree = data_to.node_groups[0]
                 new_node_groups = [n for n in bpy.data.node_groups if n not in node_groups]

-                # break
+                #break

-            # if new_ntree:
+            #if new_ntree:
             #     break
         new_node_groups = list(set(new_node_groups))
-        # print(new_node_groups)
+        #print(new_node_groups)

         # if new_node_groups:
         for new_node_group in new_node_groups:
             new_node_group_name = new_node_group.library_weak_reference.id_name[2:]
-            local_node_group = next(
-                (
-                    n
-                    for n in bpy.data.node_groups
-                    if n.name == new_node_group_name and n != new_node_group
-                ),
-                None,
-            )
+            local_node_group = next((n for n in bpy.data.node_groups if n.name == new_node_group_name and n != new_node_group), None)

             if not local_node_group:
-                print(f"No local node_group {new_node_group_name}")
+                print(f'No local node_group {new_node_group_name}')
                 continue

-            print(f"Merge node {local_node_group.name} into {new_node_group.name}")
+            print(f'Merge node {local_node_group.name} into {new_node_group.name}')

             local_node_group.user_remap(new_node_group)
             new_node_group.interface_update(context)
@@ -234,8 +186,8 @@ class NODEKIT_OT_update_nodes(Operator):
             new_node_group.name = new_node_group_name
             new_node_group.asset_clear()

-            # self.report({"INFO"}, f"Node updated from {blend_file}")
-            return {"FINISHED"}
+            #self.report({"INFO"}, f"Node updated from {blend_file}")
+            return {'FINISHED'}

         # else:
         #     self.report({"ERROR"}, f'No Node Group named "{ntree_name}" in the library')
@@ -247,35 +199,32 @@ class NODEKIT_OT_update_nodes(Operator):


 class NODEKIT_OT_pack_nodes(Operator):
-    bl_idname = "node_kit.pack_nodes"
-    bl_label = "Pack Modifiers as Nodes"
-    bl_description = "Pack Geometry Nodes modifiers stack as a single node tree"
+    bl_idname = 'node_kit.pack_nodes'
+    bl_label = 'Update node'
     bl_options = {"REGISTER", "UNDO"}

     def execute(self, context):
         combine_objects(context.selected_objects)
-        return {"FINISHED"}
+        return {'FINISHED'}


 class NODEKIT_OT_unpack_nodes(Operator):
-    bl_idname = "node_kit.unpack_nodes"
-    bl_label = "Unpack Nodes as Modifiers"
-    bl_description = "Unpack node tree as Geometry Nodes modifiers"
+    bl_idname = 'node_kit.unpack_nodes'
+    bl_label = 'Update node'
    bl_options = {"REGISTER", "UNDO"}

     def execute(self, context):
         extract_objects(context.active_object)
-        return {"FINISHED"}
+        return {'FINISHED'}


 classes = (
     NODEKIT_OT_copy,
-    NODEKIT_OT_copy_tree,
     NODEKIT_OT_paste,
     NODEKIT_OT_remap_node_group_duplicates,
     NODEKIT_OT_update_nodes,
     NODEKIT_OT_pack_nodes,
-    NODEKIT_OT_unpack_nodes,
+    NODEKIT_OT_unpack_nodes
 )

@@ -1,22 +0,0 @@
-import bpy
-from bpy.types import AddonPreferences
-from bpy.props import BoolProperty
-
-
-class NodeKitPreferences(AddonPreferences):
-    bl_idname = __package__
-
-classes = (
-    NodeKitPreferences,
-)
-
-
-def register():
-    for c in classes:
-        bpy.utils.register_class(c)
-
-
-def unregister():
-    for c in reversed(classes):
-        bpy.utils.unregister_class(c)
-
ui.py (33 changed lines)
@@ -1,5 +1,5 @@
 """
-Node Kit UI elements and menus.
+This module contains blender UI elements

 :author: Autour de Minuit
 :maintainers: Florentin LUCE
@@ -15,29 +15,22 @@ class NODEKIT_MT_node_kit(bpy.types.Menu):
     def draw(self, context):
         layout = self.layout

-        layout.operator("node_kit.copy_nodes", icon="COPYDOWN")
-        layout.operator("node_kit.paste_nodes", icon="PASTEDOWN")
+        layout.operator('node_kit.copy_node_tree', text='Copy Nodes', icon='COPYDOWN')
+        layout.operator('node_kit.paste_node_tree', text='Paste Nodes', icon='PASTEDOWN')

         layout.separator()
+        layout.operator('node_kit.remap_node_group_duplicates', text='Remap Node Groups Duplicates', icon='NODE_INSERT_OFF')
+        layout.operator('node_kit.update_nodes', text='Update Nodes', icon='IMPORT')

-        layout.operator("node_kit.copy_node_tree", icon="NODETREE")
-
-        layout.separator()
-
-        layout.operator("node_kit.remap_node_group_duplicates",icon="NODE_INSERT_OFF")
-        layout.operator("node_kit.update_nodes", icon="IMPORT")
-
         layout.separator()
+        layout.operator('node_kit.pack_nodes', text='Pack Nodes', icon='PACKAGE')
+        layout.operator('node_kit.unpack_nodes', text='UnPack Nodes', icon='UGLYPACKAGE')

-        layout.operator("node_kit.pack_nodes", icon="PACKAGE")
-        layout.operator("node_kit.unpack_nodes", icon="UGLYPACKAGE")

-classes = (NODEKIT_MT_node_kit,)
+classes = (
+    NODEKIT_MT_node_kit,
+)


 def draw_menu(self, context):
-    self.layout.menu("NODEKIT_MT_node_kit")
+    self.layout.menu('NODEKIT_MT_node_kit')


 def register():
@@ -48,7 +41,7 @@ def register():


 def unregister():
-    bpy.types.NODE_MT_editor_menus.remove(draw_menu)

     for c in reversed(classes):
         bpy.utils.unregister_class(c)

+    bpy.types.NODE_MT_editor_menus.remove(draw_menu)

utils.py (22 changed lines)
@@ -1,22 +0,0 @@
-import bpy
-
-
-def all_subclasses(cls):
-    return set(cls.__subclasses__()).union(
-        [s for c in cls.__subclasses__() for s in all_subclasses(c)]
-    )
-
-
-def get_bl_default(prop: bpy.types.Property):
-    """Get the default value of a Blender property"""
-    if getattr(prop, "is_array", False):
-        return list(prop.default_array)
-    elif hasattr(prop, "default"):
-        return prop.default
-
-
-def set_bl_attribute(bl_object, attr, value):
-    try:
-        setattr(bl_object, attr, value)
-    except Exception as e:
-        print(e)
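A brief sketch of how these helpers are typically combined when introspecting RNA properties; the property and object names below are examples, not taken from the add-on:

    import bpy

    # Every direct and indirect subclass of a base node type.
    node_classes = all_subclasses(bpy.types.NodeInternal)

    # Declared default of an RNA property; array properties come back as lists.
    label_prop = bpy.types.Node.bl_rna.properties["label"]
    default_label = get_bl_default(label_prop)   # "" for a string property

    # Assign without raising on read-only attributes (errors are only printed).
    set_bl_attribute(bpy.context.active_object, "name", "Packed")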