draft files
commit
e60c449524
@@ -0,0 +1,39 @@
info = {
    'icon': 'X',
    'description': 'Full Clear : nodes, render_layers, nodegroups',
    }

import bpy
C = bpy.context


def clear():
    render = bpy.data.scenes.get('Render')
    if not render:
        print('SKIP, no Render scene')
        return

    # # clear passes
    # for i in range(len(render.view_layers))[::-1]:
    #     vl = render.view_layers[i]
    #     if not ' / ' in vl.name:
    #         render.view_layers.remove(vl)

    # clear all framed nodes (nodes parented to a frame)
    if render.use_nodes:
        for i in range(len(render.node_tree.nodes))[::-1]:
            if not render.node_tree.nodes[i].parent:
                continue
            render.node_tree.nodes.remove(render.node_tree.nodes[i])

    # clear all view_layers
    for vl in reversed(render.view_layers):
        if ' / ' in vl.name:
            render.view_layers.remove(vl)

    # clear all "NG_" nodegroups
    for ng in reversed(bpy.data.node_groups):
        if ng.name.startswith('NG_'):
            bpy.data.node_groups.remove(ng)


clear()

@@ -0,0 +1,683 @@
info = {
    'icon': 'SHADERFX',
    'description': 'create GP render nodes',
    }

import fnmatch
import glob
import os
import re
from math import degrees, radians
from os import listdir
from os.path import basename, dirname, exists, isdir, isfile, join, splitext
from pathlib import Path
from mathutils import Vector
import bpy

from collections import defaultdict
C = bpy.context
D = bpy.data


def real_location(n):
    if not n.parent:
        return n.location
    return n.location + real_location(n.parent)


def get_frame_transform(f, node_tree):
    '''only works with one level of nesting (not recursive)'''
    if f.type != 'FRAME':
        return
    # return real_location(f), f.dimensions

    childs = [n for n in node_tree.nodes if n.parent == f]
    # real_locs = [f.location + n.location for n in childs]

    xs = [n.location.x for n in childs] + [n.location.x + n.dimensions.x for n in childs]
    ys = [n.location.y for n in childs] + [n.location.y - n.dimensions.y for n in childs]
    xs.sort()  # x val : ascending
    ys.sort()  # y val : ascending

    loc = Vector((min(xs), max(ys)))
    dim = Vector((max(xs) - min(xs) + 60, max(ys) - min(ys) + 60))

    return loc, dim


def bbox(f, frames):
    xs = []
    ys = []
    for n in frames[f]:  # nodes of passed frame
        # Better as Vectors ?
        if n.type == 'FRAME':
            if n not in frames.keys():
                # print(f'frame {n.name} not in frame list')
                continue
            all_xs, all_ys = bbox(n, frames)  # frames[n]
            xs += all_xs
            ys += all_ys

        else:
            loc = real_location(n)
            xs += [loc.x, loc.x + n.dimensions.x]  # + (n.dimensions.x/get_dpi_factor())
            ys += [loc.y, loc.y - n.dimensions.y]  # - (n.dimensions.y/get_dpi_factor())

    # margin ~= 30
    # return xs and ys
    return [min(xs)-30, max(xs)+30], [min(ys)-30, max(ys)+30]


def get_frames_bbox(node_tree):
    '''Return a dict with all frames
    ex: {frame_node: (location, dimension), ...}
    '''

    # create a dict of each frame with its direct child nodes
    frames = defaultdict(list)
    frames_bbox = {}
    for n in node_tree.nodes:
        if not n.parent:
            continue
        # also contains frames
        frames[n.parent].append(n)

    # Dict for bbox coords
    for f, nodes in frames.items():
        if f.parent:
            continue

        xs, ys = bbox(f, frames)
        # xs, ys = bbox(nodes, frames)

        ## returning: list of corner coords
        # coords = [
        #     Vector((xs[0], ys[1])),
        #     Vector((xs[1], ys[1])),
        #     Vector((xs[1], ys[0])),
        #     Vector((xs[0], ys[0])),
        # ]
        # frames_bbox[f] = coords

        ## returning: (loc vector, dimensions vector)
        frames_bbox[f] = Vector((xs[0], ys[1])), Vector((xs[1] - xs[0], ys[1] - ys[0]))

    return frames_bbox

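# Illustrative note (added, not part of the original draft): get_frames_bbox() returns
# something like {frame_node: (Vector((min_x, max_y)), Vector((width, height)))},
# i.e. the top-left corner and the dimensions of each top-level frame, which is
# what rearrange_frames() further down relies on to stack the frames vertically.
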
def create_node(type, tree=None, **kargs):
    tree = tree or bpy.context.scene.node_tree

    node = tree.nodes.new(type)
    for k, v in kargs.items():
        setattr(node, k, v)

    return node

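# Illustrative usage of the helper above (node type and values are made up examples):
# blur = create_node('CompositorNodeBlur', location=(200, 0), label='Soft blur', hide=True)
# Any extra keyword argument is simply set as an attribute on the new node.
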
def new_aa_node(tree):
    '''create AA node'''
    aa = create_node('CompositorNodeAntiAliasing', tree)  # type = ANTIALIASING
    aa.threshold = 0.5
    aa.contrast_limit = 0.5
    aa.corner_rounding = 0.25
    aa.hide = True
    return aa


def get_render_scene():
    render = bpy.data.scenes.get('Render')
    if not render:
        render = bpy.data.scenes.new('Render')
        render.use_nodes = True
    return render

def set_settings(scene=None):
    if not scene:
        scene = bpy.context.scene
    # specify scene settings for this kind of render
    scene.eevee.taa_render_samples = 1
    scene.grease_pencil_settings.antialias_threshold = 0


def get_view_layer(name, scene=None):
    '''get viewlayer by name
    return existing/created viewlayer
    '''
    if not scene:
        # scene = bpy.context.scene
        scene = get_render_scene()
    ### pass double letter prefix as suffix
    ## pass_name = re.sub(r'^([A-Z]{2})(_)(.*)', r'\3\2\1', 'name')
    ## pass_name = f'{name}_{passe}'
    pass_vl = scene.view_layers.get(name)
    if not pass_vl:
        pass_vl = scene.view_layers.new(name)
    return pass_vl

def add_rlayer(layer_name, scene=None, location=None, color=None, node_name=None, width=400):
    '''create a render layer node
    if node_name is not specified, use passed layer name
    '''

    if not node_name:
        node_name = layer_name  # 'RL_' +

    if not scene:
        scene = bpy.context.scene

    nodes = scene.node_tree.nodes

    comp = nodes.get(node_name)
    if comp:
        if comp.layer == layer_name:
            return comp
        else:
            # TODO : delete rlayer with bad VL name !
            pass

    comp = nodes.new('CompositorNodeRLayers')
    comp.name = node_name
    comp.scene = scene
    comp.layer = layer_name
    comp.label = layer_name
    if location:
        comp.location = location
    if color:
        comp.color = color

    if width:
        comp.width = width
    comp.show_preview = False
    return comp


def clear_nodegroup(name, full_clear=False):
    '''remove numbered duplicates of a nodegroup (name.001, name.002, ...)
    also remove the base one if full_clear is True
    '''
    for ng in reversed(bpy.data.node_groups):
        pattern = name + r'\.\d{3}'  # matches numbered duplicates like "<name>.001"
        if re.search(pattern, ng.name):
            bpy.data.node_groups.remove(ng)

        elif full_clear and ng.name == name:
            # if full clear
            bpy.data.node_groups.remove(ng)

def rearrange_frames(node_tree):
    print('> re-arrange node_tree')
    frame_d = get_frames_bbox(node_tree)  # dict : {frame_node: (loc vector, dimensions vector), ...}
    if not frame_d:
        return
    ## order the dict by frame.y location (top frame first)
    frame_d = {key: value for key, value in sorted(frame_d.items(), key=lambda pair: pair[1][0].y - pair[1][1].y, reverse=True)}

    frames = [[f, v[0], v[1].y] for f, v in frame_d.items()]  # [frame_node, real_loc, real dimensions]
    # frames.sort(key=lambda n: n.location.y - n.dimensions.y, reverse=True)
    # top = frames[0].location.y
    top = frames[0][1].y  # upper node location.y
    offset = 0
    for f in frames:
        # n.location.y = top - offset
        f[0].location.y = (f[1].y - f[0].location.y) + top - offset
        offset += f[2] + 50  # gap  # f[0].dimensions.y

def reorder_inputs(ng):
    rl_nodes = [s.links[0].from_node for s in ng.inputs if s.is_linked and s.links and s.links[0].from_node.type == 'R_LAYERS']
    rl_nodes.sort(key=lambda x: x.location.y, reverse=True)
    names = [n.layer for n in rl_nodes]
    inputs_names = [s.name for s in ng.inputs]
    filtered_names = [n for n in names if n in inputs_names]

    for dest, name in enumerate(filtered_names):
        ## rebuild list at each iteration so indices stay valid
        inputs_names = [s.name for s in ng.inputs]
        src = inputs_names.index(name)
        # reorder on node_tree not directly on node!
        ng.node_tree.inputs.move(src, dest)


def reorder_outputs(ng):
    ordered_out_name = [nis.name for nis in ng.inputs if nis.name in [o.name for o in ng.outputs]]
    for s_name in ordered_out_name:
        all_outnames = [o.name for o in ng.outputs]
        # reorder on nodetree, not on node !
        ng.node_tree.outputs.move(all_outnames.index(s_name), ordered_out_name.index(s_name))


def clear_disconnected(fo):
    for inp in reversed(fo.inputs):
        if not inp.is_linked:
            print(f'Deleting unlinked fileout slot: {inp.name}')
            fo.inputs.remove(inp)


def reorder_fileout(fo, ng=None):
    if not ng:  # get connected nodegroup
        for s in fo.inputs:
            if s.is_linked and s.links and s.links[0].from_node.type == 'GROUP':
                ng = s.links[0].from_node
                break
    if not ng:
        print(f'No nodegroup to refer to for fileout {fo.name}')
        return
    ordered = [o.links[0].to_socket.name for o in ng.outputs if o.is_linked and o.links[0].to_node == fo]
    for s_name in ordered:
        all_outnames = [s.name for s in fo.inputs]  # same as [fs.path for fs in fo.file_slots]
        fo.inputs.move(all_outnames.index(s_name), ordered.index(s_name))


def connect_to_group_output(n):
    for o in n.outputs:
        if o.is_linked:
            if o.links[0].to_node.type == 'GROUP_OUTPUT':
                return o.links[0].to_socket
            val = connect_to_group_output(o.links[0].to_node)
            if val:
                return val
    return False


def connect_to_group_input(n):
    for i in n.inputs:
        if i.is_linked:
            if i.links[0].from_node.type == 'GROUP_INPUT':
                return i.links[0].from_socket
            val = connect_to_group_input(i.links[0].from_node)
            if val:
                return val
    return False

def connect_render_layer(rlayer, ng=None, out=None, frame=None):
    scene = get_render_scene()
    nodes = scene.node_tree.nodes
    links = scene.node_tree.links

    vl_name = rlayer.layer
    if not vl_name or vl_name == 'View Layer':
        print(f'Bad layer for node {rlayer.name}')

    if not ' / ' in vl_name:
        print(f'no slash (" / ") separator in vl_name {vl_name}, should be "obj.name / layer_name"')
        return

    obname, lname = vl_name.split(' / ')
    lname = bpy.path.clean_name(lname)

    if not frame:
        if rlayer.parent:
            frame = rlayer.parent
        else:
            print(f'render_layer has no parent frame: {rlayer.name}')
            frame = None

    ng_name = f'NG_{obname}'  # only object name

    # get/set nodegroup from vlayer name

    ## clear nodegroup duplication (.00?)
    clear_nodegroup(ng_name, full_clear=False)

    if not ng:
        ng = nodes.get(ng_name)

    if not ng:
        ngroup = bpy.data.node_groups.get(ng_name)
        if not ngroup:
            # delete and recreate ?
            print(f'create nodegroup {ng_name}')
            ngroup = bpy.data.node_groups.new(ng_name, 'CompositorNodeTree')

        ng = create_node('CompositorNodeGroup', tree=scene.node_tree, location=(rlayer.location[0] + 600, rlayer.location[1]), width=400)
        if frame:
            ng.parent = frame
        ng.node_tree = ngroup
        ng.name = ngroup.name

        ng_in = create_node('NodeGroupInput', tree=ngroup, location=(-600, 0))
        ng_out = create_node('NodeGroupOutput', tree=ngroup, location=(600, 0))

    else:
        print(f'found group node {ng.name}')
        ngroup = ng.node_tree
        ng_in = ngroup.nodes.get('Group Input')
        ng_out = ngroup.nodes.get('Group Output')

    # Connect rlayer to nodegroup
    if not rlayer.outputs['Image'].is_linked:
        sockin = ng.inputs.get(vl_name)
        if not sockin:
            print('creating socket', vl_name)
            sockin = ng.inputs.new('NodeSocketColor', vl_name)
            sockin = ng.inputs[-1]

        links.new(rlayer.outputs['Image'], sockin)

    ## get nodes from frame
    rl_nodes = [n for n in nodes if n.type == 'R_LAYERS' and n.layer != 'View Layer' and n.parent == frame]

    # auto clean : remove inputs that exist but are not linked (name not present in rlayers of current frame)
    for s in reversed(ng.inputs):
        if not s.is_linked:  # and not any(x.layer == s.name for x in rl_nodes)
            print(f'removing grp unlinked input {s.name}')
            ng.inputs.remove(s)

    ## get nodes from linked NG inputs ??? maybe clearer...
    # rl_nodes = [s.links[0].from_node for s in ng.inputs if s.links and s.links[0].from_node and s.links[0].from_node.type == 'R_LAYERS']

    ## reorder
    reorder_inputs(ng)

    # CREATE NG outsocket (individual, not taking merge into account)

    connected = False

    if ng_in.outputs[vl_name].is_linked:
        # check if connected to the other side
        socket = connect_to_group_output(ng_in.outputs[vl_name].links[0].to_node)  # if ng_in.outputs[vl_name].links[0].to_node.type == 'ALPHAOVER':
        if socket:
            connected = True
            groupout = ng.outputs.get(socket.name)

    if not connected:
        print('need to connect')
        # add AA and connect
        aa = new_aa_node(ngroup)
        groupout = ng.outputs.get(vl_name)
        if not groupout:
            print('create group out-socket')
            ng.outputs.new('NodeSocketColor', vl_name)  # assigning directly doesn't link well
            groupout = ng.outputs[-1]

        print('ng_out.inputs.get(vl_name): ', ng_out.inputs.get(vl_name))
        # ng_in.outputs[vl_name]
        ngroup.links.new(ng_in.outputs[vl_name], aa.inputs[0])  # node_tree
        ngroup.links.new(aa.outputs[0], ng_out.inputs[vl_name])  # node_tree

    # clean outputs
    for o in reversed(ngroup.outputs):
        if not o.name in [i.name for i in ngroup.inputs]:
            print(f'removing group output {o.name} (name does not exist in group inputs)')
            ngroup.outputs.remove(o)

    # reorder outputs to match inputs
    reorder_outputs(ng)

    # Clear : delete orphan nodes that are not connected from ng_in
    for n in reversed(ngroup.nodes):
        if n.type in ('GROUP_INPUT', 'GROUP_OUTPUT'):
            continue
        if not connect_to_group_input(n) and not connect_to_group_output(n):  # is disconnected from both sides
            ngroup.nodes.remove(n)
    # TODO clear nodes that are disconnected from input side ?

    if groupout.links and groupout.links[0].to_node.type == 'OUTPUT_FILE':
        # if already connected to an outfile just skip, the user might have customised the name
        return

    slot_name = f'{lname}/{lname}_'
    out_name = f'OUT_{obname}'  # or get output from frame
    if not out:
        out = nodes.get(out_name)
    if not out:
        out = create_node('CompositorNodeOutputFile', tree=scene.node_tree, location=(ng.location[0]+600, ng.location[1]+50), width=600)  # color = (0.2,0.3,0.5)
        out.name = out_name
        out.parent = frame
        out.base_path = f'//render/{bpy.path.clean_name(obname)}'

    out_input = out.inputs.get(slot_name)
    if not out_input:
        out.file_slots.new(slot_name)
        out_input = out.inputs[-1]  # assigning directly above doesn't link afterwards
        print(f'new fileoutput entry: {out_input}')

    # link to FileOut
    links.new(groupout, out_input)

    # clean fileout
    clear_disconnected(out)
    reorder_fileout(out, ng=ng)

    return ng, out

def get_set_viewlayer_from_gp(ob, l, scene=None):
    if not scene:
        # scene = bpy.context.scene
        scene = get_render_scene()  # create if necessary
    nodes = scene.node_tree.nodes

    in_rds = scene.collection.all_objects.get(ob.name)
    if not in_rds:
        scene.collection.objects.link(ob)

    # create viewlayer
    vl_name = f'{ob.name} / {l.info}'
    vl = get_view_layer(vl_name, scene=scene)
    vl_name = vl.name
    # assign layer to this vl
    l.viewlayer_render = vl_name

    # check if already exists
    rlayer_list = [n for n in nodes if n.type == 'R_LAYERS' and n.layer == vl_name]

    # get frame objects and their contents
    # dict like : {objname : [layer_nodeA, layer_nodeB,...]}
    frame_dic = {f.label: [n for n in nodes if n.type == 'R_LAYERS' and n.parent and n.parent.name == f.name and n.layer != 'View Layer']
                 for f in nodes if f.type == 'FRAME'}

    # debug print
    for k, v in frame_dic.items():
        print('-', k)
        for n in v:
            print('---', n.layer)

    if rlayer_list:  # rlayer exists
        print(f'{len(rlayer_list)} nodes using {vl_name}')

        # consider only the ones within the object frame
        framed_rl = [n for n in rlayer_list if n.parent and n.parent.label == ob.name]
        if framed_rl:
            if len(framed_rl) > 1:
                print(f'! More than one node using {vl_name} in a frame ({len(framed_rl)}) !')

            # sort top to bottom and take upper node
            framed_rl.sort(key=lambda x: x.location.y, reverse=True)
            cp = framed_rl[0]
            cp.select = True  # select so the user sees that it existed
            return vl, cp

    # Returned above if it existed and was OK

    if not ob.name in frame_dic.keys():  # and len(frame_dic[ob.name])
        print(f'\n{ob.name} -> {l.info} (first generation)')
        # frame does not exist: add the RL and frame at the very bottom of all render_layers
        # check position of frame type ? all types ?
        all_frames = [n for n in nodes if n.type == 'FRAME']
        # all_rl_x = [n.location.x for n in nodes if n.type == 'R_LAYERS' and n.layer != 'View Layer']
        if all_frames:
            # all_frames.sort(key=lambda x: x.location.y, reverse=True)
            # loc.y - dim.y
            y_loc = min(get_frame_transform(f, scene.node_tree)[0].y - get_frame_transform(f, scene.node_tree)[1].y for f in all_frames)
            loc = (0, y_loc)
        else:
            loc = (0, 0)

        print('loc: ', loc)
        # create frame at new rl position
        frame = nodes.new('NodeFrame')
        frame.label = ob.name
        frame.label_size = 50
        frame.location = (loc[0], loc[1] + 20)

        cp = add_rlayer(vl_name, scene=scene, location=loc)
        cp.parent = frame

        connect_render_layer(cp, frame=frame)

        """ # Create omega-node group
        ngroup = bpy.data.node_groups.new('NG_' + vl_name, 'CompositorNodeTree')
        ng = create_node('CompositorNodeGroup', tree=scene.node_tree, location=(x_loc + 600, y_loc), width=400)
        ng.parent = frame
        ng.node_tree = ngroup
        ng.name = ngroup.name
        print('ng.node_tree: ', ng.node_tree)

        # add GROUP_INPUT(NodeGroupInput) && GROUP_OUTPUT(NodeGroupOutput)
        ng_in = create_node('NodeGroupInput', tree=ngroup, location=(-600,0))
        ng_out = create_node('NodeGroupOutput', tree=ngroup, location=(600,0))

        # add AA and connect
        aa = new_aa_node(ngroup)
        ngroup.inputs.new('NodeSocketColor', vl_name)
        ngroup.outputs.new('NodeSocketColor', vl_name)

        scene.node_tree.links.new(cp.outputs[0], ng.inputs[0])

        ngroup.links.new(ng_in.outputs[0], aa.inputs[0])  # node_tree.
        ngroup.links.new(aa.outputs[0], ng_out.inputs[0])  # node_tree.

        # --- add fileout node
        # CompositorNodeOutputFile OUTPUT_FILE

        out = create_node('CompositorNodeOutputFile', tree=scene.node_tree, location=(x_loc+1200, y_loc+50), width=600)  # color = (0.2,0.3,0.5)
        out.name = f'OUT_{vl_name}'
        out.parent = frame
        out.base_path = f'//render/{bpy.path.clean_name(ob.name)}'  # TODO hardcoded base path
        out.file_slots[0].path = f'{bpy.path.clean_name(l.info)}/{bpy.path.clean_name(l.info)}_'

        scene.node_tree.links.new(ng.outputs[0], out.inputs[0])
        """
        return vl, cp

    print(f'\n {ob.name} -> {l.info} (connect to existing)')
    # ng = nodes.get(f'NG_{vl_name}')
    # if not ng:
    #     print('nodegroup not found')  # TODO generate if necessary
    #     return

    # out = nodes.get(f'OUT_{vl_name}')
    # if not out:
    #     print('output not found')  # TODO generate if necessary
    #     return

    ## object frame exists: get framing and insert
    cp = add_rlayer(vl_name, scene=scene, location=(0, 0))
    if cp.layer != vl_name:
        print(f'problem with {cp}: {cp.layer} != {vl_name}')
        return

    frame = [f for f in nodes if f.type == 'FRAME' and f.label == ob.name][0]
    rl_nodes = frame_dic[frame.label]
    if rl_nodes:
        # get nodes order to insert
        rl_nodes.sort(key=lambda n: real_location(n).y, reverse=True)
        top_loc = real_location(rl_nodes[0])
    else:
        # frame exists but is empty: start just below the frame's top-left corner
        f_loc, _f_dim = get_frame_transform(frame, scene.node_tree)
        top_loc = Vector((f_loc.x, f_loc.y - 60))

    # cp.location = (top_loc[0], top_loc[1] + 100)  # temp location to adjust x loc

    # list of layer names in nodes order
    rl_names = [n.layer.split(' / ')[1] for n in rl_nodes]  # get True layer name from rl
    # names with the right order WITH the new layer included
    names = [lay.info for lay in ob.data.layers if lay.info in rl_names or lay == l]

    rl_nodes.append(cp)

    # filter by getting index(layer_name)
    cp.parent = frame
    rl_nodes.sort(key=lambda x: names.index(x.layer.split(' / ')[1]))  # sort by True layer name from rl

    offset = 0
    print(f'number of nodes in frame: {len(rl_nodes)}')
    ref_node = rl_nodes[0]

    print('ref_node: ', ref_node.name, ref_node.location)
    for n in rl_nodes:
        # set x loc from first node in list (maybe use leftmost ?)
        n.location = (ref_node.location[0], top_loc[1] - offset)
        offset += 180

    # reorder render layers nodes within frame

    connect_render_layer(cp, frame=frame)

    # re-arrange all frames (since the offset probably overlapped)
    rearrange_frames(scene.node_tree)

    return vl, cp

# def generate_all_layer(ob):
#     '''Basic layer generation'''
#     bpy.context.scene.use_nodes = True
#     for l in ob.data.layers:
#         if l.hide:
#             continue
#         get_set_viewlayer_from_gp(ob, l)


def generate_full_render_output(ob):
    bpy.context.scene.use_nodes = True

    # Create another scene, link the GP collection (or selected GP objects) in it

    # create the render scene
    rds = get_render_scene()
    scn = bpy.data.scenes.get('Scene')
    if not scn:
        if bpy.context.scene != rds:
            scn = bpy.context.scene
        else:
            # return
            all_scenes = [s for s in bpy.data.scenes if s != rds]
            if not all_scenes:
                print('! there is no default scene !')
                return
            scn = all_scenes[0]

    # bpy.context.window.scene = rds  # switch to render scene ?

    # Link GP or 2D collections ? or GP by GP (in a dedicated collection ? not necessary)
    # gp_col = bpy.data.collections.get('GP')
    # if gp_col:
    #     rds.collection.children.link(gp_col)
    # two_d = bpy.data.collections.get('2D')
    # if two_d:
    #     rds.collection.children.link(two_d)

    ## better to link selected objects (or all GP objects) on the fly...
    set_settings(scene=rds)

    ## setup world, link a specific world or use the current one
    if not rds.world:
        rds.world = scn.world

    ## put in an "output" collection ?
    # out_col = rds.collections.children.get('output')
    # if not out_col:
    #     out_col = bpy.data.collections.new('output')
    #     rds.collection.children.link(out_col)

    # Clear this/all object(s) and start from scratch

    # nodes = rds.node_tree.nodes

    exclude_list = ['MA', 'IN']
    for l in ob.data.layers:
        if any(x + '_' in l.info for x in exclude_list):
            continue
        if l.hide:
            continue
        ## Create associated nodegroup later if needed (same operation as selecting multiple layers and running)
        vl, cp = get_set_viewlayer_from_gp(ob, l, scene=rds)


## if an object's nodes are already there, create in the same area (and offset all the unrelated bottom nodes)


# def generate_all_objects():

#     ## filter the objects by depth ? -> not reliable since animators use the X-ray sometimes...
#     for o in bpy.context.selected_objects:
#         if o.type != 'GPENCIL':
#             continue
#         if not o.select_get():
#             continue
#         generate_all_layer(o)


# generate_full_render_output(C.object)
get_set_viewlayer_from_gp(C.object, C.object.data.layers.active)

@@ -0,0 +1,159 @@
info = {
    'icon': 'TRIA_DOWN_BAR',
    'description': 'Merge layers',
    }

import fnmatch
import glob
import os
import re
from math import degrees, radians
from os import listdir
from os.path import basename, dirname, exists, isdir, isfile, join, splitext
from pathlib import Path
import bpy
C = bpy.context
D = bpy.data


def random_color(alpha=False):
    import random
    if alpha:
        return (random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1), 1)
    return (random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1))


def create_node(type, tree=None, **kargs):
    tree = tree or bpy.context.scene.node_tree

    node = tree.nodes.new(type)
    for k, v in kargs.items():
        setattr(node, k, v)

    return node


def new_aa_node(tree):
    '''create AA node'''
    aa = create_node('CompositorNodeAntiAliasing', tree)  # type = ANTIALIASING
    aa.threshold = 0.5
    aa.contrast_limit = 0.5
    aa.corner_rounding = 0.25
    aa.hide = True
    return aa


def get_render_scene():
    render = bpy.data.scenes.get('Render')
    if not render:
        render = bpy.data.scenes.new('Render')
        render.use_nodes = True
    return render


def merge_layers(rlayers, obname=None):
    print(f'Merging {len(rlayers)} layers')
    print('->', [r.layer for r in rlayers])
    print()

    if not rlayers:
        return ('ERROR', 'No render layer sent to merge')

    ng = rlayers[0].outputs[0].links[0].to_node
    rlayers.sort(key=lambda x: x.location.y, reverse=True)

    # change colors of those nodes
    color = random_color()
    for n in rlayers:
        n.use_custom_color = True
        n.color = color

    # get inside sockets (group input) from the outside socket list (should already be ordered)

    ## by name
    # for i, inp in enumerate(ng.node_tree.inputs):
    #     if inp.name ==

    # by connection order

    socket_list = []
    grp_sockets = []

    for n in rlayers:
        if n.outputs[0].links[0].to_node != ng:
            print(f'Skip {n.layer}, connected to {n.outputs[0].links[0].to_node} instead of {ng.name}')
            continue

        sock_in = n.outputs[0].links[0].to_socket
        for i, s in enumerate(ng.inputs):
            if s == sock_in:
                print(i, s.name)
                socket_list.append(s)
                grp_sockets.append(ng.node_tree.nodes['Group Input'].outputs[i])
                break

    # debug
    for inp, grps in zip(socket_list, grp_sockets):
        if inp.name != grps.name:
            print(f'\n! Problem ! : {inp.name}, {grps.name}')
            return

    ##
    # JUST CREATE ANOTHER GROUP NODE FOR THE MERGE !
    ##

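# A possible follow-up for the TODO above (illustrative sketch, not wired in yet):
# the merge could be done by chaining AlphaOver nodes inside a dedicated group,
# fed with the already-ordered group-input sockets collected in grp_sockets.
def merge_with_alpha_over(tree, out_sockets):
    '''Chain the given output sockets with AlphaOver nodes,
    return the output socket of the last AlphaOver node'''
    last = out_sockets[0]
    for sock in out_sockets[1:]:
        ao = tree.nodes.new('CompositorNodeAlphaOver')
        ao.hide = True
        tree.links.new(last, ao.inputs[1])  # background image
        tree.links.new(sock, ao.inputs[2])  # foreground image
        last = ao.outputs[0]
    return last
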
def merge_selected_layers():
    '''Merge command from selected GP layers'''
    ob = bpy.context.object
    layer_names = [l.info for l in ob.data.layers if l.select and not l.hide]
    print("layer_names", layer_names)  # Dbg

    if len(layer_names) < 2:
        print('Should select multiple layers for merging')
        return

    render = bpy.data.scenes.get('Render')
    if not render:
        print('No render scene')
        return
    nodes = render.node_tree.nodes

    clean_ob_name = bpy.path.clean_name(ob.name)
    rlayers = []
    for l in layer_names:
        ## identifier is clean_name(ob.name).layer_name

        idname = f'{clean_ob_name}.{l}'

        # check the render layers that have a parent frame
        rlayer = [n for n in nodes if n.type == 'R_LAYERS' and n.layer == idname and n.parent]
        if not rlayer:
            # send to function to generate the rlayer and connect
            # rlayer = creation
            continue

        rlayers.append(rlayer[0])

    merge_layers(rlayers, obname=clean_ob_name)


def merge_selected_render_layers():
    '''Merge command from selected render layer nodes'''
    render = bpy.data.scenes.get('Render')
    if not render:
        print('No render scene')
        return

    nodes = render.node_tree.nodes
    selection = [n for n in nodes if n.select and n.type == 'R_LAYERS']

    # should be from the same object:
    assert all(selection[0].layer.split('.')[0] == n.layer.split('.')[0] for n in selection), 'Not all nodes start with the same object'

    # obname = selection[0].layer.split('.')[0]
    merge_layers(selection)


# merge_selected_layers()  # function to merge from GP dopesheet

merge_selected_render_layers()  # function to merge from nodegroup

@@ -0,0 +1,463 @@
info = {
    'icon': 'NODE_COMPOSITING',
    'description': 'Setup GP compositing passes',
    }

import fnmatch
import glob
import os
import re
from math import degrees, radians
from os import listdir
from os.path import basename, dirname, exists, isdir, isfile, join, splitext
from pathlib import Path

import bpy
from mathutils import Matrix, Vector

C = bpy.context
D = bpy.data
scene = C.scene

## GLOBAL VARIABLES
rebuild = True
white = (1, 1, 1)
black = (0, 0, 0)
rest = re.compile(r'^[A-Z]{2}_')
# allowed_prefixes = ['SP','LN','LT','DK','DE','TX','CO','MA','SH','CC',]  # Mars express  # 'PO','AN'  # AN and posing are not rendered

# Unicorn wars tag set
allowed_prefixes = ['CU', 'TO', 'CO', 'FX']  # 'MA',
excluded_prefixes = ['PR', 'RG', 'TD',]  # not used

## TODO
# - create a json file with frame per GP and layer order
# - rules should be dynamic so the setup can be regenerated

def link_node_group(filepath, group_name, link=True):
    '''Link a node_group by name from a file, if link is False, append instead of linking'''

    with bpy.data.libraries.load(filepath, link=link) as (data_from, data_to):
        # data_to.node_groups = [c for c in data_from.node_groups if c.startswith(group_name)]
        data_to.node_groups = [c for c in data_from.node_groups if c == group_name]

    if data_to.node_groups:
        return data_to.node_groups[0]
    # return data_to.node_groups

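# Illustrative usage (the path and group name below are placeholders, not project paths):
# ng_tree = link_node_group('/path/to/library.blend', 'my_nodegroup', link=False)
# link=True keeps the datablock linked to the library file, link=False appends a local copy.
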
def clear_view_layer():
    for i in range(len(C.scene.view_layers))[::-1]:
        vl = C.scene.view_layers[i]
        if not '_' in vl.name:
            continue
        if not vl.name.startswith('View'):  # maybe not needed...
            C.scene.view_layers.remove(vl)


def get_view_layer(name):
    '''get viewlayer by name
    return existing/created viewlayer
    '''
    ### pass double letter prefix as suffix
    ## pass_name = re.sub(r'^([A-Z]{2})(_)(.*)', r'\3\2\1', 'name')
    ## pass_name = f'{name}_{passe}'
    pass_vl = scene.view_layers.get(name)
    if not pass_vl:
        pass_vl = scene.view_layers.new(name)
    return pass_vl


def linkin(col, parent):
    '''take two collections, link col into parent's children'''
    if not col in [c for c in parent.children]:
        parent.children.link(col)


def get_col(name):
    '''get collection by name (create if not found)'''
    col = bpy.data.collections.get(name)
    if not col:
        col = bpy.data.collections.new(name)
    return col


def set_layer_col_attr(attr, value, lcol=None, filter=None):
    '''recursively set attribute attr with a value on a viewlayer collection lcol (and its children)'''
    lcol = lcol or bpy.context.view_layer.layer_collection
    for c in lcol.children:
        if filter is None or filter(c):
            setattr(c, attr, value(c) if callable(value) else value)
        set_layer_col_attr(attr, value, lcol=c, filter=filter)

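# Illustrative usage (collection name is a placeholder): exclude every layer collection
# of the current view layer, then re-enable only the ones named 'MyObject_PASSES':
# set_layer_col_attr('exclude', True)
# set_layer_col_attr('exclude', False, filter=lambda c: c.name == 'MyObject_PASSES')
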
def set_passes_gp(ob):
    if not ob.name.endswith('_PASSES'):
        print(f'{ob.name} has no _PASSES suffix')
        return

    collec_name = ob.name
    pass_name = ob.name.replace('_PASSES', '')

    for l in ob.data.layers:
        vl = None
        ## Color to white
        if l.info.startswith('CO_'):  # Colors
            # l.tint_factor = 1
            # l.tint_color = white
            vl = get_view_layer(pass_name + '_CO')
            l.viewlayer_render = ''  # remove viewlayer (should be on all VL)

        elif l.info.startswith('TO_'):  # Tones
            # l.tint_color = black
            # l.tint_factor = 1
            vl = get_view_layer(pass_name + '_TO')
            l.viewlayer_render = vl.name

        ## line at full opacity
        elif l.info.startswith('CU_'):  # CleanUp
            vl = get_view_layer(pass_name + '_CU')
            l.viewlayer_render = vl.name
            l.opacity = 1

        ## spec switched to black (else white on white), full opacity
        # elif l.info.startswith('SP_'):
        #     vl = get_view_layer(pass_name+'_SP')
        #     l.viewlayer_render = vl.name
        #     l.tint_color = black
        #     l.tint_factor = 1
        #     l.opacity = 1

        #?# opacity to max ??
        elif l.info.startswith('FX_'):  # FX
            vl = get_view_layer(pass_name + '_FX')
            l.viewlayer_render = vl.name

        elif l.info.startswith('MA_'):  # Masks
            l.opacity = 0  # put masks opacity to 0
            if l.hide:
                print(f'{l.info} is hidden')

        ## Add other prefixes even if they have no specific rules yet
        # elif l.info.split('_')[0] in allowed_prefixes:
        #     pfix = l.info.split('_')[0]
        #     vl = get_view_layer(pass_name+f'_{pfix}')
        #     l.viewlayer_render = vl.name

        # elif l.info.startswith(('PR','RG','TD'))
        else:
            # assign excluded viewlayer
            # vl = get_view_layer('_excluded')
            # l.viewlayer_render = vl.name
            l.viewlayer_render = get_view_layer('_excluded').name  # do not assign vl to the local variable

        ## enable only the _PASSES col in those viewlayers
        # TODO ! exclude viewlayer collections other than PASSES and its parents
        if vl:
            set_layer_col_attr('exclude', True, vl.layer_collection)
            set_layer_col_attr('exclude', False, vl.layer_collection, filter=lambda x: x.name == collec_name)


def clear_gp(name):
    ob = bpy.data.objects.get(name)
    if ob:
        dat = ob.data
        bpy.data.objects.remove(ob)
        bpy.data.grease_pencils.remove(dat)


def dup_gp(ob, name):
    nob = ob.copy()
    nob.name = name
    nob.data = ob.data.copy()
    nob.data.name = name
    return nob

def add_rlayer(layer_name, location=None, color=None, node_name=None):
    '''create a render layer node
    if node_name is not specified, use passed layer name
    '''

    # connect to fileoutput
    if not node_name:
        node_name = layer_name  # 'RL_' +

    nodes = bpy.context.scene.node_tree.nodes
    comp = nodes.get(node_name)
    if comp:
        if rebuild:
            location = comp.location.copy()  # keep previous loc
            nodes.remove(comp)
        else:
            return comp

    comp = nodes.new('CompositorNodeRLayers')
    comp.name = node_name
    comp.layer = layer_name
    comp.label = layer_name
    if location:
        comp.location = location
    if color:
        comp.color = color
    return comp


def get_create_composite():
    '''return the composite output node (create if needed) and reposition the node feeding it'''
    nodes = bpy.context.scene.node_tree.nodes
    compout = [n for n in nodes if n.type == 'COMPOSITE']
    if compout:
        compout = compout[0]
        for lnk in compout.inputs[0].links:
            lnk.from_node.location.y = 1000
    else:
        compout = nodes.new('CompositorNodeComposite')
        compout.location = (1000, 1000)
    return compout


def connect_node_group(out_socket, name, source_path):
    '''get a node socket to connect from, the name of the node group, and the source path where to find the nodegroup'''

    nodes = bpy.context.scene.node_tree.nodes
    links = bpy.context.scene.node_tree.links
    ### TODO get/create nodegroup node and connect from node socket

    # check if node group exists in file
    tree = bpy.data.node_groups.get(name)
    print('tree')
    if tree:
        print('in tree')
        # if the group tree exists, delete the already connected node to recreate it
        for n in nodes:
            if n.type != 'GROUP':
                continue
            if not n.node_tree or n.node_tree != tree:
                continue
            print('same group', n.name)
            if len(n.inputs[0].links) < 1:
                continue
            print('has links')
            if out_socket.node == n.inputs[0].links[0].from_node:
                print(n.name)
                nodes.remove(n)
                break
            print('not the same from_node:', n.inputs[0].links[0].from_node)

    else:
        # always relink tree ??
        tree = link_node_group(source_path, name, link=False)  # should not duplicate
    ng = nodes.new('CompositorNodeGroup')
    ng.node_tree = tree
    # create the link
    links.new(out_socket, ng.inputs[0])
    return ng

## create individual collection
def gp_output(gpo):
    # get / create grease pencil passes
    out = get_col('OUTPUT')
    linkin(out, bpy.context.scene.collection)
    name = gpo.name
    col_out_name = name + '_OUTPUT'
    passe_name = name + '_PASSES'

    # create and link a collection
    gpout = get_col(col_out_name)
    linkin(gpout, out)

    ## Passes
    col_passe = get_col(passe_name)
    linkin(col_passe, gpout)

    ## Clean
    clear_gp(passe_name)

    ## duplicate
    gp_passe = dup_gp(gpo, passe_name)
    col_passe.objects.link(gp_passe)

    ## Set the passes in layers
    set_passes_gp(gp_passe)

    ## create viewlayers and compo_tree
    prefixes = [l.info.split('_')[0] for l in gpo.data.layers if rest.match(l.info.strip(' -'))]
    prefixes = list(set(prefixes))

    nodes = bpy.context.scene.node_tree.nodes
    links = bpy.context.scene.node_tree.links

    ## get composite output
    # compout = get_create_composite()

    bottom = min([n.location.y for n in nodes]) - 250

    x_rlayers_loc = [n.location.x for n in nodes if n.type == 'R_LAYERS']
    if x_rlayers_loc:
        left_rlayer = min(x_rlayers_loc)
    else:
        left_rlayer = 0

    ## sort prefixes according to the given prefix list and keep non-listed ones at the tail
    new_prefixes = sorted([p for p in prefixes if p not in allowed_prefixes])  # non prelisted prefixes
    prefixes = [p for p in allowed_prefixes if p in prefixes]  # sorted prelisted prefixes

    # prefixes += new_prefixes  # add new prefixes to the end of the list
    if new_prefixes:
        print(r'/!\ warning, some prefixes are not listed :', new_prefixes)

    ### ------------------
    ## fileoutput

    fo_name = name + '_FILEOUT'
    fo = nodes.get(fo_name)
    if not fo:
        fo = nodes.new('CompositorNodeOutputFile')
        fo.location = (left_rlayer + 800, bottom)
        fo.name = fo_name
        fo.width = 400
        fo.file_slots.remove(fo.inputs[0])  # remove default Image first slot
    else:
        # clear all inputs (could also fully delete node and recreate...)
        for i in range(0, len(fo.file_slots))[::-1]:
            print(i, fo.file_slots[i].path)
            for lnk in fo.inputs[i].links:
                links.remove(lnk)
            fo.file_slots.remove(fo.inputs[i])  # fo.file_slots[i]

    # TODO specify an output path via env/template
    fo.base_path = f'//sequences/{name}'

    # Create render layer nodes from available prefixes

    print('prefixes:', prefixes)
    first = True
    for pfix in prefixes:
        ## get previously created render layer and connect to file out
        passe = f'{name}_{pfix}'
        if first:  # no vertical offset for the first one
            first = False
        else:
            bottom -= 200

        comp = add_rlayer(passe, location=(left_rlayer, bottom), color=None)
        comp.show_preview = False
        rl_node = nodes.get(passe)
        if not rl_node:
            print(rf'/!\ missing {passe}')
            continue

        # Connect to fileoutput (link through the input socket matching the slot path)
        subpath = f'{passe}/{passe}_'
        sl = fo.inputs.get(subpath)
        if not sl:
            fo.file_slots.new(subpath)
            sl = fo.inputs[-1]

        ng = None
        ## TODO conditions according to type (8/16 bits, png, alpha...)
        if pfix == 'SP':
            # TODO need to pass link as True and dynamically define nodegroup library (using env)
            ng = connect_node_group(rl_node.outputs[0], 'invert_keep_alpha', r'/z/___LONGS/UNICORN_WARS/library/nodegroups/invert_keep_alpha.blend')
            links.new(ng.outputs[0], sl)
        else:
            links.new(rl_node.outputs[0], sl)

        # reposition node_group if any
        if ng:
            rloc = rl_node.location
            ng.location = (rloc.x + 300, rloc.y + 60)


## generate compo

def connect_main_vl():
    nodes = bpy.context.scene.node_tree.nodes
    links = bpy.context.scene.node_tree.links

    vl = bpy.context.scene.view_layers.get('View Layer')

    if not vl:
        print('No viewlayer named "View Layer" !')
        # trying to autofetch
        vlist = [vl for vl in bpy.context.scene.view_layers if not re.search(r'_[A-Z]{2}$', vl.name)]
        if not vlist:
            print('Cancelling, no candidate found...')
            return
        if len(vlist) > 1:
            print('Cancelling, multiple candidates found :', vlist)
            return

        vl = vlist[0]
        print('Using autodetected view layer name:', vl.name)

    render_vl = [n for n in nodes if n.type == 'R_LAYERS' and n.layer == vl.name]

    compout = get_create_composite()

    main_loc = (compout.location.x - 1000, compout.location.y - 24)
    if not render_vl:
        render_vl = add_rlayer(vl.name, location=main_loc, node_name='Render Layers')
    else:
        render_vl = render_vl[0]
        render_vl.location = main_loc

    is_linked = [lnk for lnk in render_vl.outputs[0].links if lnk.to_node == compout]
    if not is_linked:
        outlinks = [lnk for lnk in compout.inputs[0].links]
        if outlinks:
            print(f'cannot link {render_vl.name} to composite, already linked from {outlinks[0].from_node.name}')
            return
        links.new(render_vl.outputs[0], compout.inputs[0])
    else:
        print(f'{vl.name} already linked to composite')


def generate_all_comp():
    bpy.context.scene.use_nodes = True
    ## special check : mandatory 2D collection (Mars express)
    col = bpy.data.collections.get('2D')
    if not col:
        print('No 2D collection in file (grease pencil comp is created from GP objects within this collection)')
        col = bpy.data.collections.get('GP')
        if not col:
            print('\n\nNo GP collection in file (need 2D or GP)\n\n')
            return

    connect_main_vl()

    # exclude_filter = ('old',)
    # fetch targets
    gp_objects = [o for o in col.all_objects if o.type == 'GPENCIL'
                  and not bpy.context.view_layer.objects[o.name].hide_get()]  # and not any(x in o.name.lower() for x in exclude_filter)
    print()
    print(f'Working on {len(gp_objects)} GP objects:')
    print('\n'.join([o.name for o in gp_objects]))

    # build comp for every GP
    for gpo in gp_objects:
        gp_output(gpo)

    vl = bpy.context.scene.view_layers.get('View Layer')
    if vl:
        set_layer_col_attr('exclude', True, vl.layer_collection, filter=lambda x: x.name == 'OUTPUT')

# export

def single_comp(ob):
    if not ob:
        print('No active object')
        return

    if ob.type != 'GPENCIL':
        print('current active object is not a grease pencil')
        return

    bpy.context.scene.use_nodes = True

    col_out = bpy.data.collections.get('OUTPUT')
    if col_out and ob in col_out.all_objects[:]:
        print('WARNING', f'Object {ob.name} is part of the OUTPUT collection !')
        return
    gp_output(ob)


generate_all_comp()

## from selection
# for ob in bpy.context.selected_objects:
#     if ob.type == 'GPENCIL':
#         single_comp(ob)

@@ -0,0 +1,39 @@
info = {
    'icon': 'X',
    'description': 'Clear',
    }

import bpy
C = bpy.context

# clear passes
for i in range(len(C.scene.view_layers))[::-1]:
    vl = C.scene.view_layers[i]
    if not vl.name.startswith('View'):
        C.scene.view_layers.remove(vl)


# clear nodes
if C.scene.use_nodes:
    for i in range(len(C.scene.node_tree.nodes))[::-1]:
        C.scene.node_tree.nodes.remove(C.scene.node_tree.nodes[i])


def get_cols(c, cols=None):
    '''recursively get all collections in the passed collection'''
    if cols is None:
        cols = []
    for child in c.children:
        cols.append(child)
        get_cols(child, cols)
    return cols


# delete OUTPUT collection
out = bpy.data.collections.get('OUTPUT')
if out:
    for o in [o for o in out.all_objects][::-1]:
        bpy.data.objects.remove(o)

    col_list = get_cols(out)
    print("col_list", [c.name for c in col_list])  # Dbg
    for c in col_list:
        bpy.data.collections.remove(c)
    bpy.data.collections.remove(out)