Commit global

master
florentin.luce 2024-11-06 11:26:28 +01:00
parent f92b68f62a
commit ac0ad04c72
11 changed files with 1484 additions and 24 deletions

View File

@@ -10,11 +10,8 @@ import sys
 import importlib
 from pathlib import Path
-# Ensure the name of the module in python import
-module_name = Path(__file__).parent.name
-sys.modules.update({'node_kit': importlib.import_module(module_name)})
-from node_kit import ui, operators
+from . import ui, operators
 modules = (
     ui,
@@ -30,7 +27,7 @@ if "bpy" in locals():
 def register():
-    print('Register Noke kit')
+    print('Register Node kit')
     for mod in modules:
         mod.register()

0
bl_utils.py Normal file
View File

View File

@@ -130,7 +130,7 @@ class Node:
         node.outputs = [Output.from_dict(opt_data, node) for opt_data in data['outputs'].values()]
         return node
 
-    def to_dict(self):
+    def dump(self):
         """Export current Node to its dict representation.
 
         Returns:
@@ -155,8 +155,8 @@ class Node:
             self.data[prop_id] = attr_value
 
         self.data['id'] = self.id
-        self.data['inputs'] = {ipt.id: ipt.to_dict() for ipt in self.inputs}
-        self.data['outputs'] = {opt.id: opt.to_dict() for opt in self.outputs}
+        self.data['inputs'] = {ipt.id: ipt.dump() for ipt in self.inputs}
+        self.data['outputs'] = {opt.id: opt.dump() for opt in self.outputs}
 
         return self.data
@@ -205,7 +205,7 @@ class Link:
         self.data = {}
 
-    def to_dict(self):
+    def dump(self):
         self.data['id'] = self.id

View File

@@ -18,7 +18,7 @@ class NodeTree:
         for n in self.bl_node_tree.nodes:
             self.nodes.append(Node.from_blender_node(n, self))
 
-    def to_dict(self, select_only=False):
+    def dump(self, select_only=False):
         """Convert all blender nodes and links inside the tree into a dictionary.
 
         Args:
@@ -28,12 +28,12 @@ class NodeTree:
         Returns:
             dict: Nodes and links as dict.
         """
-        self.data['nodes'] = {n.id: n.to_dict() for n in self.nodes if not select_only or (select_only and n.select)}
+        self.data['nodes'] = {n.id: n.dump() for n in self.nodes if not select_only or (select_only and n.select)}
         self.data['links'] = [l.id for l in self.links]
 
         return self.data
 
-    def ingest_dict(self, data):
+    def load(self, data):
         """From a Tree dict representation, create new nodes with their attributes.
 
         Then create a connection dict by comparing the link ids of each node's inputs and outputs.
         Use this dict to link nodes to each other.

View File

@@ -12,7 +12,10 @@ class Socket:
         self.identifier = bl_socket.identifier
         self.is_linked = bl_socket.is_linked
-        self._value = bl_socket.default_value
+        self._value = None
+        if hasattr(bl_socket, 'default_value'):
+            self._value = bl_socket.default_value
 
     @property
     def value(self):

788
core/dumper.py Normal file
View File

@@ -0,0 +1,788 @@
import bpy
import mathutils
from pprint import pprint
import json
import itertools
from copy import copy
from os.path import abspath
def get_default(prop):
"""Get the default value of a bl property"""
if getattr(prop, 'is_array', False):
return list(prop.default_array)
elif hasattr(prop, 'default'):
return prop.default
def get_dumper(bl_object, fallback=None):
"""Find the right dumper type by checking inheritance"""
for dp in dumpers:
if isinstance(bl_object, dp.bl_type):
return dp
return fallback or Dumper
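# Note: get_dumper() matches with isinstance() in the order of the `dumpers` list defined at
# the bottom of this module, so more specific classes must come before their bases
# (e.g. CompositorNodeRLayers before Node); Dumper itself is only the final fallback.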
def get_bl_object(data):
"""Find the bl object for loading data into it depending on the type and the context"""
if data.get('_new', {}).get('type') == 'GeometryNodeTree':
return bpy.context.object.modifiers.active.node_group
def dump(ob):
"""Generic Recursive Dump, convert any object into a dict"""
Dumper.pointers.clear()
if isinstance(ob, (list, tuple)):
data = [get_dumper(o).dump(o) for o in ob]
else:
data = get_dumper(ob).dump(ob)
Dumper.pointers.clear()
return data
def load(data, bl_object=None):
"""Generic Load to create an object from a dict"""
Dumper.pointers.clear()
#print(Dumper.pointers)
if bl_object is None:
bl_object = get_bl_object(data)
dumper = get_dumper(bl_object)
dumper.load(data, bl_object)
Dumper.pointers.clear()
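# Example usage (editor's sketch, not part of the original commit): round-trip a node tree
# through plain Python data, e.g. for the clipboard. Assumes a valid node tree is at hand.
#
#     data = dump(bpy.context.scene.node_tree)              # tree -> nested dicts/lists
#     text = json.dumps(data)                               # dicts -> JSON string
#     load(json.loads(text), bpy.context.scene.node_tree)   # JSON -> recreate nodes and links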
def set_attribute(bl_object, attr, value):
try:
setattr(bl_object, attr, value)
except Exception as e:
print(e)
class Dumper:
pointers = {}
includes = []
excludes = ["rna_type", "bl_rna", 'id_data', 'depsgraph']
@classmethod
def properties(cls, bl_object):
if cls.includes and not cls.excludes:
return [bl_object.bl_rna.properties[p] for p in cls.includes]
else:
return [ p for p in bl_object.bl_rna.properties if not
p.identifier.startswith('bl_') and p.identifier not in cls.excludes]
@classmethod
def new(cls, data):
print(f'New not implemented for data {data}')
@classmethod
def load(cls, data, bl_object=None):
if bl_object is None:
bl_object = cls.new(data)
if bl_object is None:
return
#pprint(data)
if bl_pointer := data.get('bl_pointer'):
cls.pointers[bl_pointer] = bl_object
props = cls.properties(bl_object)
prop_ids = [p.identifier for p in props]
# Load properties in their RNA declaration order
for key, value in sorted(data.items(), key=lambda x: prop_ids.index(x[0]) if x[0] in prop_ids else 0):
if key.startswith('_') or key not in bl_object.bl_rna.properties:
continue
prop = bl_object.bl_rna.properties[key]
attr = getattr(bl_object, key)
if prop.type == 'COLLECTION':
dumper = PropCollection
if hasattr(attr, 'bl_rna'):
bl_type = attr.bl_rna.type_recast()
dumper = get_dumper(bl_type, fallback=PropCollection)
dumper.load(value, attr)
continue
elif prop.type == 'POINTER':
# if key == 'node_tree':
# print('--------------')
# print(bl_object, value)
# print(cls.pointers)
if isinstance(value, int): # It's a pointer
value = cls.pointers[value]
elif value is None:
set_attribute(bl_object, key, value)
else:
bl_type = prop.fixed_type.bl_rna.type_recast()
dumper = get_dumper(bl_type)
# If the pointer exists, register it, then load the data
#print('-----', value)
#pointer =
if attr is None:
attr = dumper.new(value)
dumper.load(value, attr)
#attr = getattr(bl_object, key)
#if not attr:
cls.pointers[value['bl_pointer']] = attr
if hasattr(attr, 'update'):
attr.update()
value = attr
if not prop.is_readonly:
set_attribute(bl_object, key, value)
# Some coll needs a manual update like curve mapping
if hasattr(attr, 'update'):
attr.update()
elif not prop.is_readonly:
#print(key, value)
set_attribute(bl_object, key, value)
continue
#return bl_object
@classmethod
def dump(cls, bl_object):
if isinstance(bl_object, (str, int, float, dict, list, type(None))):
return bl_object
#print('Dumping object', bl_object)
data = {"bl_pointer": bl_object.as_pointer()}
cls.pointers[bl_object.as_pointer()] = bl_object
for prop in cls.properties(bl_object):
if not hasattr(bl_object, prop.identifier):
print(f'{bl_object} has no attribute {prop.identifier}')
continue
#print(prop.identifier)
value = getattr(bl_object, prop.identifier)
# Not storing default value
if prop.identifier not in cls.includes:
if (array := getattr(prop, 'default_array', None)) and value == array:
continue
if isinstance(value, (str, int, float)) and value == prop.default:
continue
if getattr(prop, "is_array", False):
value = PropArray.dump(value)
elif prop.type == 'COLLECTION':
value = PropCollection.dump(value)
elif prop.type == 'POINTER' and value:
#if prop.identifier == 'image':
# print(bl_object, cls.pointers)
if value.as_pointer() in cls.pointers:
value = value.as_pointer()
else:
# print('Register Pointer', value.as_pointer(), value)
cls.pointers[value.as_pointer()] = value
# print(cls.pointers)
# print()
dumper = get_dumper(value)
value = dumper.dump(value)
elif bl_object.is_property_readonly(prop.identifier):
continue
else:
dumper = get_dumper(value)
value = dumper.dump(value)
data[prop.identifier] = value
return data
class PropCollection(Dumper):
bl_type = bpy.types.bpy_prop_collection
@classmethod
def dump(cls, coll):
if not len(coll):
return []
dumper = get_dumper(coll[0])
values = [dumper.dump(e) for e in coll]
# Value cannot be None
return [v for v in values if v is not None]
@classmethod
def load(cls, values, coll):
if not values:
return
dumper = None
if not hasattr(coll, 'new'): # Static collection
for item, value in zip(coll, values):
dumper = dumper or get_dumper(item)
dumper.load(value, item)
return
new_func = coll.bl_rna.functions['new']
for i, value in enumerate(values):
if value.get('_new'):
params = value['_new']
else:
params = {k: value.get(k, get_default(v)) for k, v in new_func.parameters.items()[:-1]}
# Replace arg pointer with bl object
valid_pointers = True
for param in coll.bl_rna.functions['new'].parameters:
if param.identifier not in params or param.type != 'POINTER':
continue
pointer_id = params[param.identifier]
if bl_object := cls.pointers.get(pointer_id):
params[param.identifier] = bl_object
else:
print(f'No Pointer found for param {param.identifier} of {coll}')
valid_pointers = False
if not valid_pointers:
continue
#print(param.identifier, cls.pointers[pointer_id])
try:
item = coll.new(**params)
except RuntimeError as e:
#print(e, coll.data)
#print()
try:
item = coll[i]
except IndexError as e:
#print(e, coll.data)
break
dumper = get_dumper(item)
dumper.load(value, item)#(item, value)
class PropArray(Dumper):
bl_type = bpy.types.bpy_prop_array
@classmethod
def dump(cls, array):
flat_array = []
for item in array:
if isinstance(item, (int, float)):
flat_array.append(item)
else:
flat_array.extend(cls.dump(item))
return flat_array
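# Editor's note: nested array properties (e.g. a 4x4 matrix or a color) are flattened into a
# single flat list of numbers, e.g. [[1, 0], [0, 1]] -> [1, 0, 0, 1].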
class NodeSocket(Dumper):
bl_type = bpy.types.NodeSocket
excludes = Dumper.excludes + ["node", "links", "display_shape", "rna_type", "link_limit"]
@classmethod
def dump(cls, socket):
if socket.is_unavailable:
return None
#cls.pointers[socket.as_pointer()] = socket
data = super().dump(socket)
#data["_id"] = socket.as_pointer()
#data.pop('name', '')
return data
class NodeGeometryRepeatOutputItems(PropCollection):
bl_type = bpy.types.NodeGeometryRepeatOutputItems
@classmethod
def load(cls, values, coll):
coll.clear()
super().load(values, coll)
class NodeLink(Dumper):
bl_type = bpy.types.NodeLink
@classmethod
def dump(cls, link):
return {"_new": {
"input": link.from_socket.as_pointer(),
"output": link.to_socket.as_pointer()
}
}
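# Editor's note: the "_new" entry stores the as_pointer() ids of both sockets; at load time
# PropCollection.load() replaces these ids with the already-created sockets registered in
# Dumper.pointers before calling links.new(input=..., output=...).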
class NodeTreeInterfaceSocket(Dumper):
bl_type = bpy.types.NodeTreeInterfaceSocket
excludes = Dumper.excludes + ["parent", "interface_items"]
@classmethod
def dump(cls, socket):
#cls.pointers[socket.as_pointer()] = socket
data = super().dump(socket)
#data["_id"] = socket.as_pointer()
data['_new'] = {"name": data.get('name', '')}
if socket.item_type == 'SOCKET':
data['_new']["in_out"] = socket.in_out
# It's a real panel not the interface root
if socket.parent.parent:
data['parent'] = socket.parent.as_pointer()
return data
class NodeSockets(PropCollection):
@classmethod
def load(cls, values, coll):
#return
node_sockets = [s for s in coll if not s.is_unavailable]
for socket, value in zip(node_sockets, values):
cls.pointers[value['bl_pointer']] = socket
Dumper.load(value, socket)
# for k, v in value.items():
# if k not in socket.bl_rna.properties:
# continue
# setattr(socket, k, v)
"""
# Match Inputs Pointers
node_sockets = [s for s in coll if not s.is_unavailable]
if len(node_sockets) == len(inputs): # Match by index
super().load({"inputs": inputs}, node)
for socket, value in zip(node_sockets, coll):
cls.pointers[value['_id']] = socket
else: # Match by name
print(f'Match Inputs by Name for node {node}')
for socket in node_sockets:
index = next((i for i, v in enumerate(inputs) if v['name'] == socket.name), None)
if index is None:
continue
value = inputs[index]
print(socket, value)
cls.pointers[value['_id']] = socket
Dumper.load(value, socket)
del inputs[index]
"""
class NodeInputs(NodeSockets):
bl_type = bpy.types.NodeInputs
class NodeOutputs(NodeSockets):
bl_type = bpy.types.NodeOutputs
class Node(Dumper):
bl_type = bpy.types.Node
excludes = Dumper.excludes + ["dimensions", "height", "internal_links", "paired_output"]
@classmethod
def dump(cls, node=None):
#cls.pointers[node.as_pointer()] = node
data = super().dump(node)
#data["_id"] = node.as_pointer()
data["_new"] = {"type": node.bl_rna.identifier} # 'node_tree': node.id_data.as_pointer()
if paired_output := getattr(node, "paired_output", None):
data["_pair_with_output"] = paired_output.as_pointer()
#if node.parent:
# data['location'] -= Vector()node.parent.location
return data
@classmethod
def load(cls, data, node):
if node is None:
return
#cls.pointers[data['bl_pointer']] = node
inputs = copy(data.pop('inputs', []))
outputs = copy(data.pop('outputs', []))
super().load(data, node)
data['inputs'] = inputs
data['outputs'] = outputs
# Loading input and outputs after the properties
super().load({"inputs": inputs, "outputs": outputs}, node)
if node.parent:
node.location += node.parent.location
#if node.type != 'FRAME':
# node.location.y -= 500
class CompositorNodeGlare(Node):
bl_type = bpy.types.CompositorNodeGlare
includes = ["quality"]
class NodeTreeInterface(Dumper):
bl_type = bpy.types.NodeTreeInterface
@classmethod
def load(cls, data, interface):
print('Load Interface')
for value in data.get('items_tree', []):
item_type = value.get('item_type', 'SOCKET')
if item_type == 'SOCKET':
item = interface.new_socket(**value['_new'])
elif item_type == 'PANEL':
#print(value['_new'])
item = interface.new_panel(**value['_new'])
NodeTreeInterfaceSocket.load(value, item)
interface.active_index = data.get('active_index', 0)
class Nodes(PropCollection):
bl_type = bpy.types.Nodes
@classmethod
def load(cls, values, coll):
super().load(values, coll)
# Pair zone input and output
for node_data in values:
if paired_output_id := node_data.get('_pair_with_output', None):
node = cls.pointers[node_data['bl_pointer']]
node.pair_with_output(cls.pointers[paired_output_id])
#print(node, node_data['outputs'])
Dumper.load({"inputs": node_data['inputs'], "outputs": node_data['outputs']}, node)
class NodeTree(Dumper):
bl_type = bpy.types.NodeTree
excludes = []
includes = ["name", "interface", "nodes", "links"]
@classmethod
def new(cls, data):
if link := data.get('_link'):
with bpy.data.libraries.load(link['filepath'], link=True) as (data_from, data_to):
setattr(data_to, link['data_type'], [link['name']])
return getattr(data_to, link['data_type'])[0]
return bpy.data.node_groups.new(**data["_new"])
@classmethod
def dump(cls, node_tree):
if node_tree.library:
data = {'bl_pointer': node_tree.as_pointer()}
filepath = abspath(bpy.path.abspath(node_tree.library.filepath, library=node_tree.library.library))
data["_link"] = {"filepath": filepath, "data_type": 'node_groups', 'name': node_tree.name}
else:
data = super().dump(node_tree)
data["_new"] = {"type": node_tree.bl_rna.identifier, 'name': node_tree.name}
return data
class Points(PropCollection):
@classmethod
def load(cls, values, coll):
new_func = coll.bl_rna.functions['new']
params = {k: get_default(v)+1.1 for k, v in new_func.parameters.items()[:-1]}
# Match the same number of elements in collection
if len(values) > len(coll):
for _ in range(len(values) - len(coll)):
coll.new(**params)
for i, value in enumerate(values):
Dumper.load(value, coll[i])
#for k, v in value.items():
#setattr(coll[i], k, v)
class CurveMapPoints(Points):
bl_type = bpy.types.CurveMapPoints
class ColorRampElements(Points):
bl_type = bpy.types.ColorRampElements
class CompositorNodeOutputFileLayerSlots(PropCollection):
bl_type = bpy.types.CompositorNodeOutputFileLayerSlots
@classmethod
def load(cls, values, coll):
coll.clear()
super().load(values, coll)
class CompositorNodeOutputFileFileSlots(PropCollection):
@classmethod
def load(cls, values, coll):
coll.clear()
super().load(values, coll)
class AOVs(PropCollection):
bl_type = bpy.types.AOVs
@classmethod
def load(cls, values, coll):
for value in values:
aov = coll.get(value['name'])
if not aov:
aov = coll.add()
Dumper.load(value, aov)
class Image(Dumper):
bl_type = bpy.types.Image
excludes = []
includes = ['name', 'filepath']
@classmethod
def new(cls, data):
# image = next(( img for img in bpy.data.images if not img.library
# and img.filepath == data['filepath']), None)
# if image is None:
# image = bpy.data.images.load(data['filepath'])
return bpy.data.images.load(data['filepath'], check_existing=True)
class Material(Dumper):
bl_type = bpy.types.Material
excludes = Dumper.excludes + ['preview', "original"]
@classmethod
def new(cls, data):
material = bpy.data.materials.get(data.get('name', ''))
if material is None:
material = bpy.data.materials.new(data['name'])
return material
class Object(Dumper):
bl_type = bpy.types.Object
excludes = []
includes = ['name']
@classmethod
def new(cls, data):
if name := data.get('name'):
return bpy.data.objects.get(name)
class Scene(Dumper):
bl_type = bpy.types.Scene
excludes = []
includes = ['name']
@classmethod
def new(cls, data):
if scene := bpy.data.scenes.get(data.get('name', '')):
return scene
return bpy.data.scenes.new(name=data.get('name', ''))
"""
@classmethod
def dump(cls, scene):
view_layer = scene.view_layers[node.layer]
view_layer_data = ViewLayer.dump(view_layer)
return {
'bl_pointer': scene.as_pointer(),
'name': scene.name,
'render' : {'bl_pointer': scene.render.as_pointer(), "engine": scene.render.engine},
'view_layers': [view_layer_data]
}
"""
class Collection(Dumper):
bl_type = bpy.types.Collection
includes = ['name']
excludes = []
@classmethod
def new(cls, data):
if name := data.get('name'):
return bpy.data.collections.get(name)
# @classmethod
# def dump(cls, data):
# data = super().dump(data)
# data['render'] = {"engine": scene.render.engine}
# return data
class CompositorNodeRLayers(Node):
bl_type = bpy.types.CompositorNodeRLayers
excludes = Dumper.excludes + ['scene']
@classmethod
def load(cls, data, node):
#print('load CompositorNodeRLayers')
scene_data = data.pop('scene')
#print(scene_data)
layer = data.pop('layer')
scene = Scene.new(scene_data)
Scene.load(scene_data, scene)
node.scene = scene
node.layer = layer
super().load(data, node)
# Reset the view_layer because it might have been created
# with the scene attr in the dictionary and not be available yet
@classmethod
def dump(cls, node):
# Add scene and viewlayer passes
data = super().dump(node)
#if
view_layer = node.scene.view_layers[node.layer]
view_layer_data = ViewLayer.dump(view_layer)
'''
view_layer_data = {
"name": view_layer.name}
properties = {p.name: p for p in view_layer.bl_rna.properties}
for prop in view_layer.bl_rna:
if prop.identifier.startswith('use_pass'):
view_layer_data[prop.identifier]
'''
#cls.pointers[bl_object.as_pointer()] = bl_object
data['scene'] = {
'bl_pointer': node.scene.as_pointer(),
'name': node.scene.name,
'render' : {'bl_pointer': node.scene.render.as_pointer(), "engine": node.scene.render.engine},
'view_layers': [view_layer_data]
}
return data
class ViewLayer(Dumper):
bl_type = bpy.types.ViewLayer
excludes = Dumper.excludes + ['freestyle_settings', 'eevee', 'cycles', 'active_layer_collection',
'active_aov', 'active_lightgroup_index', 'layer_collection', 'lightgroups', 'material_override',
'objects', 'use']
#includes = ['name']
class ViewLayers(PropCollection):
bl_type = bpy.types.ViewLayers
@classmethod
def load(cls, values, coll):
#print('LOAD VIEWLAYERS', values)
for value in values:
view_layer = coll.get(value['name'])
if view_layer is None:
view_layer = coll.new(value['name'])
Dumper.load(value, view_layer)
dumpers = [
CompositorNodeRLayers,
CompositorNodeGlare,
Node,
NodeSocket,
NodeTree,
NodeLink,
NodeTreeInterface,
NodeTreeInterfaceSocket,
NodeGeometryRepeatOutputItems,
Image,
Material,
Object,
Scene,
Collection,
ViewLayer,
CurveMapPoints,
ColorRampElements,
NodeInputs,
NodeOutputs,
Nodes,
ViewLayers,
PropCollection,
AOVs,
PropArray,
CompositorNodeOutputFileLayerSlots,
CompositorNodeOutputFileFileSlots,
]

68
core/node_utils.py Normal file
View File

@@ -0,0 +1,68 @@
import bpy
import re
def clean_name(name):
if re.match(r'(.*)\.\d{3}$', name):
return name[:-4]
return name
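# e.g. clean_name('Smooth.001') -> 'Smooth', clean_name('Smooth') -> 'Smooth'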
def is_node_groups_duplicate(node_groups):
node_group_types = sorted([n.type for n in node_groups[0].nodes])
return all( sorted([n.type for n in ng.nodes]) ==
node_group_types for ng in node_groups[1:])
def remap_node_group_duplicates(nodes=None, force=False):
if nodes is None:
nodes = list(bpy.data.node_groups)
nodes = [n for n in nodes if not n.library]
failed = []
merged = []
# Group by name
groups = {}
for node in nodes:
groups.setdefault(clean_name(node.name), []).append(node)
for node in bpy.data.node_groups:
name = clean_name(node.name)
if name in groups and node not in groups[name]:
groups[name].append(node)
print("\nMerge Duplicate NodeGroup...")
for node_groups in groups.values():
if len(node_groups) == 1:
continue
if not force:
node_groups.sort(key=lambda x : x.name, reverse=True)
print(node_groups)
for node_group in node_groups[1:]:
is_duplicate = is_node_groups_duplicate((node_group, node_groups[0]))
if not is_duplicate and not force:
failed.append((node_group.name, node_groups[0].name))
print(f'Cannot merge Nodegroup {node_group.name} with {node_groups[0].name} they are different')
continue
merged.append((node_group.name, node_groups[0].name))
print(f'Merge Nodegroup {node_group.name} into {node_groups[0].name}')
node_group.user_remap(node_groups[0])
bpy.data.node_groups.remove(node_group)
node_groups.remove(node_group)
# Rename groups if it has no duplicate left
for node_groups in groups.values():
if len(node_groups) == 1 and not node_groups[0].library:
node_groups[0].name = clean_name(node_groups[0].name)
return merged, failed
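# Example usage (editor's sketch, not part of the original commit): merge every duplicated
# node group in the file; the highest-numbered copy wins and is renamed to the clean name.
#
#     merged, failed = remap_node_group_duplicates()
#     for old_name, kept_name in merged:
#         print(f'{old_name} merged into {kept_name}')
#     if failed:
#         print(f'{len(failed)} node group(s) could not be merged automatically')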

405
core/pack_nodes.py Normal file
View File

@@ -0,0 +1,405 @@
import bpy
def set_params(src, tgt, mod_to_node=True, org_modifier=None):
# mod_to_node: whether we copy values from a modifier to a node, or the other way around
if mod_to_node: # syntax for nodes and modifiers is slightly different
tree = src.node_group.interface.items_tree
else:
tree = src.node_tree.interface.items_tree
for param in tree:
if param.socket_type == 'NodeSocketGeometry':
continue
if param.in_out == 'OUTPUT':
continue
# only in extract mode: src is a node, so check whether some parameters are stored in the modifier
node_link_value = get_node_link_value(src, param.name, org_modifier)
identifier = tree.get(param.name).identifier
if mod_to_node:
tgt.inputs[identifier].default_value = src[identifier]
else:
if node_link_value:
tgt[identifier] = node_link_value
else:
tgt[identifier] = src.inputs[identifier].default_value
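# Editor's note: set_params() is used in both directions: combine_ob() calls
# set_params(md, nd) to copy modifier values onto the packed GeometryNodeGroup node, and
# assign_modifiers() calls set_params(node, mod, mod_to_node=False, org_modifier=...) to push
# the values back onto a freshly created modifier when extracting.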
def set_group_inputs(target, objects, group):
mod = target.modifiers[0]
node_dct = {} # used for cleanup
for key, inp in get_node_inputs(objects).items():
# add the socket to the node group / modifier panel
sock = group.interface.new_socket(inp["label"],in_out="INPUT",socket_type=inp["socket"])
mod[sock.identifier] = inp["data"]
# inspect all nodes and add a group input node when that socket is used
for node in parse_nodes(objects):
for param in node.node_tree.interface.items_tree:
nkey = get_input_socket_key(node, param)
if not nkey: continue
if nkey == key:
input_node = add_input_node(group, node, param.identifier, sock)
# clear the default parameters that are connected
# (so we keep no trace of the old params / collections / objects)
node.inputs[param.identifier].default_value = None
# add to dict for cleanup
if not node in node_dct.keys():
node_dct[node] = [input_node]
else:
node_dct[node].append(input_node)
# do the same thing for Object Info nodes, since their syntax is slightly different
for node in parse_nodes(objects, type = "OBJECT_INFO"):
nkey = get_input_socket_key(node, param)
if not nkey: continue
if nkey == key:
input_node = add_input_node(group, node, 'Object', sock)
node.inputs['Object'].default_value = None
# add to dict for cleanup
if not node in node_dct.keys():
node_dct[node] = [input_node]
else:
node_dct[node].append(input_node)
# cleanup tree
for input_nodes in node_dct.values():
for offset, input_node in enumerate(input_nodes):
input_node.location[1] += 50 * offset
hide_sockets(input_node)
def get_node_link_value(node, param_name, org_mod):
if not org_mod:
return
# is the param connected to another node?
if not node.inputs[param_name].links:
return
socket_id = node.inputs[param_name].links[0].from_socket.identifier
return org_mod[socket_id]
def get_geo_socket(node, input=True):
if node.type != "GROUP":
return('Geometry')
for param in node.node_tree.interface.items_tree:
if param.socket_type != 'NodeSocketGeometry':
continue
if input and param.in_out == 'INPUT' : return param.identifier
if not input and param.in_out == 'OUTPUT' : return param.identifier
return None
def get_input_socket_key(node, param):
if node.type == "GROUP":
if param.in_out != 'INPUT':
return False
if not param.socket_type in ['NodeSocketObject','NodeSocketCollection']:
return False
tgt = node.inputs[param.identifier].default_value
if not tgt:
return False
return f"{param.socket_type[10:][:3]} {tgt.name}"
if node.type == "OBJECT_INFO":
tgt = node.inputs['Object'].default_value
if not tgt:
return False
return f"Object {tgt.name}"
def get_node_inputs(combined_nodes):
# inputs["Col COL.name"] = {name = COL.name, data = COL, socket = "COLLECTION"}
# inputs["Obj OBJ.name"] = {name = OBJ.name, data = OBJ, socket = "OBJECT"}
inputs = {}
for node in parse_nodes(combined_nodes):
for param in node.node_tree.interface.items_tree:
key = get_input_socket_key(node, param)
if not key:
continue
tgt = node.inputs[param.identifier].default_value
inputs[key] = {'name': tgt.name, 'data': tgt, 'label': param.name , 'socket': param.socket_type}
for node in parse_nodes(combined_nodes, type = "OBJECT_INFO"):
key = get_input_socket_key(node, None)
if not key:
continue
tgt = node.inputs['Object'].default_value
inputs[key] = {'name': tgt.name, 'data': tgt, 'label': 'Source OB' , 'socket': "NodeSocketObject"}
return inputs
def get_node_bounds(objects, mode=0, x=0, y=0):
min_x = min_y = 10000000
max_x = max_y = 0
for ob in objects:
for node in ob:
co = node.location
min_x = min(co[0],min_x)
max_x = max(co[0],max_x)
min_y = min(co[1],min_y)
max_y = max(co[1],max_y)
if mode == 0:
return([max_x+x, (min_y+max_y)/2 ])
def get_collection(name):
scn = bpy.context.scene
col = None
link = False
# look for existing
for c in bpy.data.collections:
if c.name == name: col = c
# create if needed
if not col: col = bpy.data.collections.new(name)
# link to scene if needed
for c in scn.collection.children_recursive:
if c.name == col.name: link = True
if not link:
scn.collection.children.link(col)
return col
def get_mod_frames(grp):
frames = []
for node in grp.nodes:
if node.type == "FRAME": frames.append(node)
return(frames)
def get_frame_childrens(frame):
childrens = []
locs = {}
for node in frame.id_data.nodes:
if node.parent == frame:
locs[node.location[0]] = node
# sort nodes by their x location (admittedly not the cleanest way to write it...)
entries = sorted(locs.keys())
childrens = [locs[x] for x in entries]
return childrens
def parse_nodes(combined_nodes, type = "GROUP"):
nodes = []
for frame in combined_nodes:
for node in frame:
if node.type == type: nodes.append(node)
return nodes
def copy_source_ob(ob, col):
# does the object have data? If so, create a copy,
# otherwise return None
new_ob = None
if ob.type == "MESH" and len(ob.data.vertices) > 0:
new_ob = ob.copy()
new_ob.data = ob.data.copy()
if ob.type == "CURVE" and len(ob.data.splines) > 0:
new_ob = ob.copy()
new_ob.data = ob.data.copy()
if new_ob:
for mod in new_ob.modifiers:
new_ob.modifiers.remove(mod)
if new_ob and col:
col.objects.link(new_ob)
return new_ob
def hide_sockets(node,collapse = True):
for socket in node.outputs:
if not socket.links:
socket.hide = True
for socket in node.inputs:
if not socket.links:
socket.hide = True
if collapse:
node.hide = True
def add_input_node(group, node, param_id, socket):
group_input_node = group.nodes.new('NodeGroupInput')
group_input_node.location = node.location
group_input_node.location[1] += 70
group_input_node.label = socket.name
group.links.new(group_input_node.outputs[socket.identifier],
node.inputs[param_id])
return(group_input_node)
def add_material_node(ob, group, nodes):
if not ob.material_slots:
return nodes
if not ob.material_slots[0].material:
return nodes
last_node = nodes[-1:][0]
node = group.nodes.new('GeometryNodeSetMaterial')
node.inputs['Material'].default_value = ob.material_slots[0].material
node.location = last_node.location
node.location[0] += 300
nodes.append(node)
return nodes
def join_nodes(group, nodes):
prev = None
for i , node in enumerate(nodes):
if not prev:
prev = node
continue
geo_in = get_geo_socket(node)
geo_out = get_geo_socket(prev, input = False)
if not geo_in or not geo_out:
continue
group.links.new(prev.outputs[geo_out], node.inputs[geo_in])
prev = node
def frame_nodes(group, nodes, ob):
nd = group.nodes.new('NodeFrame')
# frame = nodes.new(type='NodeFrame')
for n in nodes:
n.parent = nd
nd.label = ob.name
def combine_ob(ob, group, y=0, col=None):
nodes = []
# object info node
nd = group.nodes.new('GeometryNodeObjectInfo')
nd.location[0] -= 300
nd.location[1] = y * 800
nd.transform_space = "RELATIVE"
nd.inputs['Object'].default_value = copy_source_ob(ob, col) # if the object contains data, a copy is created
nodes.append(nd)
# ob modifiers
for x,md in enumerate(ob.modifiers):
if md.type != "NODES" :
print(abordage)
if md.node_group == group:
continue
nd = group.nodes.new('GeometryNodeGroup')
nd.label = md.name
nd.width = 230
nd.location[0] = x * 300
nd.location[1] = y * 800
nd.node_tree = md.node_group
set_params(md, nd)
nodes.append(nd)
nodes = add_material_node(ob, group, nodes)
join_nodes(group, nodes)
frame_nodes(group, nodes, ob)
return nodes
def gen_target_ob(group, col=None):
ob = gen_empty_ob(group.name, col=col)
mod = ob.modifiers.new(group.name, "NODES")
mod.node_group = group
ob.show_name = True
bpy.context.view_layer.objects.active = ob
return(ob)
def gen_empty_ob(name, col=None):
scn = bpy.context.scene
ob = bpy.data.objects.new(name, object_data=bpy.data.meshes.new(name))
ob.data.materials.append(None)
ob.material_slots[0].link = 'OBJECT'
if not col:
scn.collection.objects.link(ob)
else:
col.objects.link(ob)
return(ob)
def assign_modifiers(ob, frame, org_modifier):
for node in get_frame_childrens(frame):
if node.type != "GROUP":
continue
mod = ob.modifiers.new(node.label, "NODES")
mod.node_group = node.node_tree
mod.show_expanded = False
set_params(node, mod, mod_to_node=False, org_modifier=org_modifier)
mod.node_group.interface_update(bpy.context)
def join_branches(objects, group):
# join all trees and add an output node
join = group.nodes.new('GeometryNodeJoinGeometry')
out = group.nodes.new('NodeGroupOutput')
out_sock = group.interface.new_socket("Geometry",in_out="OUTPUT",socket_type="NodeSocketGeometry")
loc = get_node_bounds(objects, x=500)
join.location = loc
out.location = loc
out.location[0] += 700
for ob in objects:
node = ob[-1:][0]
group.links.new(node.outputs[get_geo_socket(node, input=False)],
join.inputs[get_geo_socket(join)])
group.links.new(join.outputs[get_geo_socket(join, input=False)],
out.inputs[out_sock.identifier])
def gen_extracted_ob(name, frame, col, mod):
ob = None
for node in get_frame_childrens(frame):
if node.type != "OBJECT_INFO":
continue
target = get_node_link_value(node, 'Object', mod)
if target:
ob = target.copy()
ob.data = ob.data.copy()
col.objects.link(ob)
if not ob: ob = gen_empty_ob(name , col = col)
# assign material
for node in get_frame_childrens(frame):
if node.type != "SET_MATERIAL":
continue
ob.material_slots[0].material = node.inputs['Material'].default_value
return ob
def combine_objects(objs):
name = f"NODEGROUP_combined"
col = get_collection(name)
group = bpy.data.node_groups.new(name=name, type='GeometryNodeTree')
objects = []
for y , ob in enumerate(objs):
objects.append(combine_ob(ob, group, y=y, col=col))
target = gen_target_ob(group, col = col)
set_group_inputs(target, objects, group)
join_branches(objects, group)
def extract_objects(object):
mod = object.modifiers[0]
grp = mod.node_group
col = get_collection(grp.name)
for frame in get_mod_frames(grp):
name = f"{object.name} {frame.label}"
ob = gen_extracted_ob(name, frame, col, mod)
assign_modifiers(ob, frame, mod)
#combine_objects(bpy.context.selected_objects)
#extract_objects(bpy.context.active_object)
"""
TODO: extract: copy the transforms of the original object ...
OK ! combine: if an object has a material, create a Set Material node at the end of the chain
OK ! extract: if a Set Material node is found, assign its material
OK ! extract: if a socket is connected, get the value from the connection rather than from the socket
OK ! combine: clear all default parameters that are connected (so we keep no trace of the old params / collections / objects)
OK ! combine: put all created/copied objects into a collection
OK ! combine: if a source object has mesh/curve data, copy it, remove the modifiers, and assign it to the source Object Info node
OK ! combine: if an Object Info node is not empty, expose its content in the interface
OK ! extract: if an Object Info node is not empty, duplicate its content instead of creating an empty mesh
"""

12
file_utils.py Normal file
View File

@@ -0,0 +1,12 @@
import platform
from pathlib import Path
from os.path import expandvars
def get_cache_dir():
if platform.system() == 'Linux':
return Path(expandvars('$HOME/.cache/blender'))
elif platform.system() == 'Darwin':
return Path('/Library/Caches/Blender')
elif platform.system() == 'Windows':
return Path(expandvars(r'%USERPROFILE%\AppData\Local\Blender Foundation\Blender'))
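# Example usage (editor's sketch; the 'node_kit' subfolder is hypothetical):
#
#     cache_dir = get_cache_dir() / 'node_kit'
#     cache_dir.mkdir(parents=True, exist_ok=True)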

View File

@@ -7,41 +7,224 @@ This module contains all addons operators
 """
 import json
+from pprint import pprint
+from pathlib import Path
 
 import bpy
+from bpy.props import BoolProperty, EnumProperty
+from bpy.types import Operator
 
-from node_kit.core.node_tree import NodeTree
+#from node_kit.core.node_tree import NodeTree
+from . core.dumper import dump, load
+from .core.node_utils import remap_node_group_duplicates
+from .core.pack_nodes import combine_objects, extract_objects
 
 
-class NODEKIT_OT_copy(bpy.types.Operator):
+class NODEKIT_OT_copy(Operator):
     bl_idname = 'node_kit.copy_node_tree'
     bl_label = 'Copy nodes'
     bl_options = {'REGISTER', 'UNDO'}
 
-    select_only: bpy.props.BoolProperty(default=False)
+    select_only: BoolProperty(default=True)
 
     def execute(self, context):
-        tree = NodeTree(context.space_data.node_tree)
-        context.window_manager.clipboard = json.dumps(tree.to_dict(select_only=self.select_only))
+        ntree = context.space_data.edit_tree
+
+        if self.select_only:
+            ntree_data = {
+                "nodes": dump([n for n in ntree.nodes if n.select]),  # [dump(n) for n in ntree.nodes if n.select],
+                "links": dump([l for l in ntree.links if l.from_node.select and l.to_node.select])
+            }
+        else:
+            ntree_data = dump(ntree)
+
+        pprint(ntree_data)
+        context.window_manager.clipboard = json.dumps(ntree_data)
 
         return {'FINISHED'}
 
 
-class NODEKIT_OT_paste(bpy.types.Operator):
+class NODEKIT_OT_paste(Operator):
     bl_idname = 'node_kit.paste_node_tree'
     bl_label = 'Paste nodes'
 
     def execute(self, context):
-        tree = NodeTree(context.space_data.node_tree)
-        tree.ingest_dict(json.loads(context.window_manager.clipboard))
+        ntree_data = json.loads(context.window_manager.clipboard)
+        load(ntree_data, context.space_data.edit_tree)
 
         return {'FINISHED'}
class NODEKIT_OT_remap_node_group_duplicates(Operator):
bl_idname = 'node_kit.remap_node_group_duplicates'
bl_label = 'Clean nodes'
bl_options = {"REGISTER", "UNDO"}
selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'CURRENT')], default="CURRENT", name='All Nodes')
force : BoolProperty(default=False, description='Remap nodes even if they are different', name='Force')
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def execute(self, context):
nodes = None
if self.selection == 'SELECTED':
nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select]
elif self.selection == 'CURRENT':
active_node = context.space_data.edit_tree
nodes = [active_node]
merged, failed = remap_node_group_duplicates(nodes=nodes, force=self.force)
if failed and not merged:
self.report({"ERROR"}, 'No duplicates remapped, Node Group are differents')
return {"CANCELLED"}
self.report({"INFO"}, f'{len(merged)} Node Groups Remapped, {len(failed)} Node Groups failed')
return {'FINISHED'}
def draw(self, context):
layout = self.layout
layout.prop(self, "selection", expand=True)
layout.prop(self, "force")
if self.force and self.selection == 'CURRENT':
ntree = context.space_data.edit_tree
layout.label(text=f'Remap node {ntree.name} to others')
elif self.force and self.selection == 'SELECTED':
layout.label(text='Selected nodes will override others')
elif self.selection == 'SELECTED':
layout.label(text='Remap last .*** nodes')
layout.label(text='Ex: Node.001 will override Node')
elif self.selection in ('CURRENT', 'ALL'):
layout.label(text='Remap last .*** nodes')
layout.label(text='Ex: Node.001 will override Node')
class NODEKIT_OT_update_nodes(Operator):
bl_idname = 'node_kit.update_nodes'
bl_label = 'Update node'
bl_options = {"REGISTER", "UNDO"}
selection : EnumProperty(items=[(s, s.title(), '') for s in ('ALL', 'SELECTED', 'ACTIVE')], default="ACTIVE", name='All Nodes')
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
def execute(self, context):
asset_libraries = context.preferences.filepaths.asset_libraries
ntree = context.space_data.edit_tree
ntree_name = ntree.name
new_ntree = None
if self.selection == 'SELECTED':
nodes = [ n.node_tree for n in context.space_data.edit_tree.nodes
if n.type == "GROUP" and n.select]
elif self.selection == 'ACTIVE':
active_node = context.space_data.edit_tree
nodes = [active_node]
else:
nodes = list(bpy.data.node_groups)
node_names = set(n.name for n in nodes)
#new_node_groups = []
#print("node_names", node_names)
for asset_library in asset_libraries:
library_path = Path(asset_library.path)
blend_files = [fp for fp in library_path.glob("**/*.blend") if fp.is_file()]
node_groups = list(bpy.data.node_groups) # Store the original node_groups to compare with the imported ones
link = (asset_library.import_method == 'LINK')
for blend_file in blend_files:
print(blend_file)
with bpy.data.libraries.load(str(blend_file), assets_only=True, link=link) as (data_from, data_to):
print(data_from.node_groups)
import_node_groups = [n for n in data_from.node_groups if n in node_names]
print("import_node_groups", import_node_groups)
data_to.node_groups = import_node_groups
#print(data_from.node_groups)
#print("data_to.node_groups", data_to.node_groups)
node_names -= set(import_node_groups) # Store already updated nodes
#new_ntree = data_to.node_groups[0]
new_node_groups = [n for n in bpy.data.node_groups if n not in node_groups]
#break
#if new_ntree:
# break
new_node_groups = list(set(new_node_groups))
#print(new_node_groups)
# if new_node_groups:
for new_node_group in new_node_groups:
new_node_group_name = new_node_group.library_weak_reference.id_name[2:]
local_node_group = next((n for n in bpy.data.node_groups if n.name == new_node_group_name and n != new_node_group), None)
if not local_node_group:
print(f'No local node_group {new_node_group_name}')
continue
print(f'Merge node {local_node_group.name} into {new_node_group.name}')
local_node_group.user_remap(new_node_group)
new_node_group.interface_update(context)
bpy.data.node_groups.remove(local_node_group)
new_node_group.name = new_node_group_name
new_node_group.asset_clear()
#self.report({"INFO"}, f"Node updated from {blend_file}")
return {'FINISHED'}
# else:
# self.report({"ERROR"}, f'No Node Group named "{ntree_name}" in the library')
# return {'CANCELLED'}
def draw(self, context):
layout = self.layout
layout.prop(self, "selection", expand=True)
class NODEKIT_OT_pack_nodes(Operator):
bl_idname = 'node_kit.pack_nodes'
bl_label = 'Pack nodes'
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
combine_objects(context.selected_objects)
return {'FINISHED'}
class NODEKIT_OT_unpack_nodes(Operator):
bl_idname = 'node_kit.unpack_nodes'
bl_label = 'Unpack nodes'
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
extract_objects(context.active_object)
return {'FINISHED'}
 classes = (
     NODEKIT_OT_copy,
     NODEKIT_OT_paste,
+    NODEKIT_OT_remap_node_group_duplicates,
+    NODEKIT_OT_update_nodes,
+    NODEKIT_OT_pack_nodes,
+    NODEKIT_OT_unpack_nodes
 )

10
ui.py
View File

@@ -15,10 +15,14 @@ class NODEKIT_MT_node_kit(bpy.types.Menu):
     def draw(self, context):
         layout = self.layout
 
-        layout.operator('node_kit.copy_node_tree', text='Copy node tree', icon='COPYDOWN')
-        layout.operator('node_kit.paste_node_tree', text='Paste node tree', icon='PASTEDOWN')
+        layout.operator('node_kit.copy_node_tree', text='Copy Nodes', icon='COPYDOWN')
+        layout.operator('node_kit.paste_node_tree', text='Paste Nodes', icon='PASTEDOWN')
         layout.separator()
+        layout.operator('node_kit.remap_node_group_duplicates', text='Remap Node Group Duplicates', icon='NODE_INSERT_OFF')
+        layout.operator('node_kit.update_nodes', text='Update Nodes', icon='IMPORT')
+        layout.separator()
+        layout.operator('node_kit.pack_nodes', text='Pack Nodes', icon='PACKAGE')
+        layout.operator('node_kit.unpack_nodes', text='Unpack Nodes', icon='UGLYPACKAGE')
 
 
 classes = (
     NODEKIT_MT_node_kit,