2021-09-07 18:06:54 +02:00
|
|
|
import bpy
|
2021-09-10 18:32:50 +02:00
|
|
|
import re
|
2021-09-17 16:31:26 +02:00
|
|
|
from math import isclose
|
2023-04-04 11:09:00 +02:00
|
|
|
from itertools import groupby
|
2021-09-08 18:29:10 +02:00
|
|
|
from . import fn
|
2021-09-16 00:19:57 +02:00
|
|
|
from . import gen_vlayer
|
2021-09-07 18:06:54 +02:00
|
|
|
|
2021-09-17 16:31:26 +02:00
|
|
|
# TODO : make a merge compatible with already merged nodegroup (or even other node type)
|
|
|
|
# --> need to delete/mute AA internal node
|
|
|
|
|
|
|
|
def merge_layers(rlayers, obname=None, active=None, disconnect=True, color=None):
    '''Merge render layers into a new nodegroup feeding a dedicated file output.

    node_tree is found using first render layer (with id_data).
    :rlayers: list of 'R_LAYERS' nodes to merge (re-sorted top-down by node y location)
    :obname: optional object name, recomputed when the layer name contains ' / '
    :active: optional active render layer node whose layer name drives output naming
    :disconnect: unlink merged render layers from their previous 'NG_' nodegroups
    :color: custom color applied on merged nodes (random color when None)
    :return: (group node, file output node),
             or ('ERROR', message) tuple when rlayers is empty
    '''

    print(f'Merging {len(rlayers)} layers')
    print('->', [r.layer for r in rlayers])
    print()

    if not rlayers:
        return ('ERROR', 'No render layer sent to merge')

    # sort RL descending (top-most node first)
    rlayers.sort(key=lambda n: fn.real_loc(n).y, reverse=True)

    node_tree = rlayers[0].id_data
    links = node_tree.links

    if active:
        vl_name = active.layer
    else:
        vl_name = rlayers[-1].layer # -1 : bottom node == upper layer

    if ' / ' in vl_name:
        # viewlayer names follow the 'object / layer' convention
        obname, lname = vl_name.split(' / ')
        lname = bpy.path.clean_name(lname)
        base_path = f'//render/{bpy.path.clean_name(obname)}'
        slot_name = f'{lname}/{lname}_'
    else:
        # directly use full vlname for both base output and subfolder ?? (or return error)
        obname = lname = bpy.path.clean_name(vl_name)
        base_path = '//render/'
        slot_name = f'{lname}/{lname}_'

    # change colors of those nodes
    disconnected_groups = []
    if not color:
        color = fn.random_color()
    for n in rlayers:
        n.use_custom_color = True
        n.color = color

        if disconnect:
            if n.outputs[0].is_linked:
                # iterate reversed: we remove links from the collection while looping
                for lnk in reversed(n.outputs[0].links):
                    if lnk.to_node.name.startswith('NG_'):
                        disconnected_groups.append(lnk.to_node)
                    links.remove(lnk)

    disconnected_groups = list(set(disconnected_groups))

    ng_name = f'merge_NG_{obname}' # only object name

    ## clear unused nodes groups duplication
    fn.clear_nodegroup(ng_name, full_clear=False)

    ### always create a new nodegroup (never call an existing one)

    # need a unique nodegroup name
    # increment trailing number while nodegroup exists
    while bpy.data.node_groups.get(ng_name): # nodes.get(ng_name)
        if not re.search(r'(\d+)$', ng_name):
            ng_name += '_02' # if not ending with a number add _02
        ng_name = re.sub(r'(\d+)(?!.*\d)', lambda x: str(int(x.group(1))+1).zfill(len(x.group(1))), ng_name)

    # print(f'create merge nodegroup {ng_name}')
    ngroup = bpy.data.node_groups.new(ng_name, 'CompositorNodeTree')
    ng = fn.create_node('CompositorNodeGroup', tree=node_tree, location=(fn.real_loc(rlayers[0]).x + 1900, fn.real_loc(rlayers[0]).y - 200), width=400)
    ng.node_tree = ngroup
    ng.name = ngroup.name

    _ng_in = fn.create_node('NodeGroupInput', tree=ngroup, location=(-600,0))
    _ng_out = fn.create_node('NodeGroupOutput', tree=ngroup, location=(600,0))

    # Create inputs and links to node_group
    for rln in rlayers:
        # Declare the socket on the group's tree interface.
        # Blender 4.0+ replaced tree.inputs with tree.interface
        # (same version handling as in merge_compositor_preview)
        if bpy.app.version < (4, 0, 0):
            ng.node_tree.inputs.new('NodeSocketColor', rln.layer)
        else:
            ng.node_tree.interface.new_socket(rln.layer, in_out='INPUT', socket_type='NodeSocketColor')
        sockin = ng.inputs[-1] # freshly exposed socket on the group node instance
        links.new(rln.outputs['Image'], sockin)

    fn.nodegroup_merge_inputs(ng.node_tree)
    ng.update()

    # create dedicated fileout
    out = fn.create_node('CompositorNodeOutputFile', tree=node_tree, location=(ng.location[0]+450, ng.location[1]+50), width=600)
    fn.set_file_output_format(out)
    out_name = f'merge_OUT_{vl_name}' # or get output from frame
    out.name = out_name
    out.base_path = base_path
    out.file_slots.new(slot_name)
    links.new(ng.outputs[0], out.inputs[-1])

    fn.clear_disconnected(out)
    out.update()

    ## Clear node_group after disconnect
    # for dg in disconnected_groups:
    #     fn.clean_nodegroup_inputs(dg)
    #     # fn.clear_nodegroup_content_if_disconnected(dg.node_tree)

    bpy.context.scene.gp_render_settings.use_aa = False # trigger fn.scene_aa(toggle=False)

    return ng, out
|
2021-09-07 18:06:54 +02:00
|
|
|
|
2023-04-04 11:41:18 +02:00
|
|
|
def merge_compositor_preview(scene=None, clear=False):
    '''Merge all active render layers with alpha over for a compositor preview.

    Create a dedicated node group and connect to compositor output.
    :scene: scene to operate on (defaults to current scene)
    :clear: only remove previously generated preview nodes and return
    return tuple(merge nodegroup, compositor out), or None when clear is True
    '''

    scene = scene or bpy.context.scene
    node_tree = scene.node_tree
    nodes = node_tree.nodes
    links = node_tree.links

    ## identify all duplicated render layer and remove them, then recreate the preview
    for n in reversed(nodes):
        if n.type in ('R_LAYERS', 'GROUP') and n.get('is_preview'):
            nodes.remove(n)

    ng_name = 'merge_NG_preview'
    ## clear unused nodes groups duplication
    fn.clear_nodegroup(ng_name, full_clear=True)

    if clear:
        ## Restore to jpg out ?
        # im_settings = context.scene.render.image_settings
        # im_settings.file_format = 'JPEG'
        # im_settings.color_mode = 'RGB'
        # im_settings.quality = 0
        return

    ## Get all RL node per object block (node frames), sort object by name
    all_rlayers = [n for n in nodes if n.type == 'R_LAYERS']
    all_rlayers.sort(key=lambda x: x.label, reverse=True)

    ## ! All at once does not work, need to separate by individual object first
    # all_rlayers.sort(key=lambda x: (x.label, -fn.real_loc(x).y))

    ## Sort all render layer by object (either by start name or by frames)
    ## sort order within by order in layer stack (check -n.location.y or associated gp object)

    # groupby needs the list pre-sorted on the same key (done just above on label)
    grps = groupby(all_rlayers, key=lambda x : x.label.split(' /')[0])

    rlayers_groups = {k : sorted(list(grp), key=lambda x: x.location.y, reverse=True) for k, grp in grps if k}

    # Debug prints
    # for ob_key_name, rl_group in rlayers_groups.items():
    #     print(ob_key_name)
    #     for n in rl_group:
    #         print(f'- {n.label}')
    #     print()
    # print('Done')

    ## Recreate the render layer nodes duplicated at the side of the frames
    pos_x = 2400
    pos_y = 30
    offset_y = 180
    comp_list = []
    # only the grouped/sorted nodes are needed, keys are unused here
    for rl_group in rlayers_groups.values():
        for rl in rl_group:
            comp = nodes.new('CompositorNodeRLayers')
            comp['is_preview'] = 1 # tag so the duplicate is found and removed on rebuild
            comp.label = rl.label # f'.{rl.label}'
            comp.name = f'.{rl.name}'
            comp.scene = rl.scene
            comp.layer = rl.layer
            comp.color = rl.color
            comp.use_custom_color = True
            comp.width = rl.width
            comp.show_preview = False
            comp.location = (pos_x, pos_y)
            comp_list.append(comp)
            pos_y -= offset_y

    ### Create the nodegroup for clean alpha over merge

    ## Need a unique nodegroup name, increment name while nodegroup exists
    # while bpy.data.node_groups.get(ng_name): # nodes.get(ng_name)
    #     if not re.search(r'(\d+)$', ng_name):
    #         ng_name += '_02' # if not ending with a number add _02
    #     ng_name = re.sub(r'(\d+)(?!.*\d)', lambda x: str(int(x.group(1))+1).zfill(len(x.group(1))), ng_name)

    print(f'Create preview merge nodegroup {ng_name}')

    ngroup = bpy.data.node_groups.new(ng_name, 'CompositorNodeTree')
    ng = fn.create_node('CompositorNodeGroup', tree=node_tree, location=(3000, 0), width=400)
    ng.node_tree = ngroup
    ng.name = ngroup.name
    ng['is_preview'] = 1
    fn.create_node('NodeGroupInput', tree=ngroup, location=(-600,0))
    fn.create_node('NodeGroupOutput', tree=ngroup, location=(1000,0))
    ## ngroup.outputs.new('NodeSocketColor', 'Image') # generated in merge_inputs

    # Create inputs and links to node_group
    for rln in comp_list:
        # Blender 4.0+ replaced tree.inputs with tree.interface
        if bpy.app.version < (4,0,0):
            sockin = ng.node_tree.inputs.new('NodeSocketColor', rln.layer)
        else:
            sockin = ng.node_tree.interface.new_socket(rln.layer, in_out='INPUT', socket_type='NodeSocketColor')
        sockin = ng.inputs[-1] # freshly exposed socket on the group node instance
        links.new(rln.outputs['Image'], sockin)

    fn.nodegroup_merge_inputs(ng.node_tree, aa=False) # do not create AA node (needed ?)
    ng.update()

    # Create composite out (if needed) and connect
    composite_out = next((n for n in nodes if n.type == 'COMPOSITE'), None)
    if not composite_out:
        composite_out = fn.create_node('CompositorNodeComposite', tree=node_tree, location=(ng.location[0]+450, ng.location[1]+50), width=140)
    composite_out.use_alpha = True
    links.new(ng.outputs[0], composite_out.inputs[0])

    im_settings = scene.render.image_settings
    # im_settings.file_format = 'JPEG'
    # im_settings.color_mode = 'RGB'
    # im_settings.quality = 0
    im_settings.file_format = 'OPEN_EXR'
    im_settings.color_mode = 'RGBA'
    im_settings.color_depth = '16'
    im_settings.exr_codec = 'ZIP'

    return ng, composite_out
|
|
|
|
|
2021-09-21 18:23:25 +02:00
|
|
|
class GPEXP_OT_merge_viewlayers_to_active(bpy.types.Operator):
    bl_idname = "gp.merge_viewlayers_to_active"
    bl_label = "Merge selected layers view_layers"
    bl_description = "Merge view layers of selected gp layers to on the active one"
    bl_options = {"REGISTER"}

    @classmethod
    def poll(cls, context):
        return context.object and context.object.type == 'GPENCIL'

    # When True, collect selected layers across every selected GP object
    multi_object_merge : bpy.props.BoolProperty(default=False, options={'SKIP_SAVE'})

    def execute(self, context):
        ob = context.object
        act = ob.data.layers.active

        if self.multi_object_merge:
            # loop variable 'o' (not 'ob') to avoid shadowing the active object used below
            layers = [l for o in context.selected_objects if o.type == 'GPENCIL' for l in o.data.layers if l.select and l != act]
        else:
            layers = [l for l in ob.data.layers if l.select and l != act]

        # if not act.viewlayer_render:
        #     self.report({'ERROR'}, f'Active layer {act.info} has no viewlayer assigned')
        #     return {'CANCELLED'}

        # fn.merge_gplayer_viewlayers may return a (type, message) report tuple
        ret = fn.merge_gplayer_viewlayers(ob, act=act, layers=layers)
        if isinstance(ret, tuple):
            self.report(*ret)
        return {"FINISHED"}
|
|
|
|
|
2023-06-08 12:39:39 +02:00
|
|
|
class GPEXP_OT_remove_viewlayer_on_selected(bpy.types.Operator):
    bl_idname = "gp.remove_viewlayer_on_selected"
    bl_label = "Exclude Viewlayer"
    bl_description = "Set exclude view layers on selected gp layers\
\nRemove associated nodes in Render scene nodetree\
\nCtrl + Click : Affect selected GP objects, not only active"
    bl_options = {"REGISTER"}

    @classmethod
    def poll(cls, context):
        return context.object and context.object.type == 'GPENCIL'

    # Set from invoke (Ctrl modifier). Declared as a property (was commented out)
    # so a direct execute() call no longer raises AttributeError.
    multi_object : bpy.props.BoolProperty(default=False, options={'SKIP_SAVE'})

    remove_all_hidden : bpy.props.BoolProperty(default=False, options={'SKIP_SAVE'})

    @classmethod
    def description(cls, context, properties) -> str:
        # Dynamic tooltip depending on the 'remove_all_hidden' toggle
        if properties.remove_all_hidden:
            return "Set HIDDEN gp layers to 'exclude' viewlayers\
\nremoving associated nodes in Render scene nodetree\
\nCtrl + Click : Affect selected GP objects, else only active"
        else:
            return "Set SELECTED gp layers to 'exclude' viewlayers\
\nremoving associated nodes in Render scene nodetree\
\nCtrl + Click : Affect selected GP objects, else only active"

    def invoke(self, context, event):
        self.multi_object = event.ctrl
        return self.execute(context)

    def execute(self, context):
        ob = context.object
        scn = context.scene

        if self.remove_all_hidden:
            # target HIDDEN layers (see description classmethod)
            if self.multi_object:
                layers = [l for o in context.selected_objects if o.type == 'GPENCIL' for l in o.data.layers if l.hide]
            else:
                # fixed: was filtering on l.select, inconsistent with 'remove_all_hidden' semantics
                layers = [l for l in ob.data.layers if l.hide]
        else:
            # target SELECTED layers
            if self.multi_object:
                layers = [l for o in context.selected_objects if o.type == 'GPENCIL' for l in o.data.layers if l.select]
            else:
                layers = [l for l in ob.data.layers if l.select]

        if not layers:
            self.report({'ERROR'}, 'Some layers need to be selected to exclude render viewlayer')
            return {'CANCELLED'}

        layers = list(set(layers))

        ## Prepare report / prints in console
        exclude_message = ['Layer list set to exclude:']
        print('\nLayer list to exclude:')
        for l in layers:
            vl_name = l.viewlayer_render if l.viewlayer_render else 'None'
            mess = f'{l.id_data.name}: {l.info} (previous: {vl_name})'
            print(mess)
            exclude_message.append(mess)

        view_layers = [scn.view_layers.get(l.viewlayer_render) for l in layers
                       if l.viewlayer_render and scn.view_layers.get(l.viewlayer_render)]

        ## remove nodes associated with those viewlayers
        fn.remove_nodes_by_viewlayer(view_layers, scene=scn)

        ## Set those layers' viewlayer to the shared 'exclude' viewlayer
        for l in layers:
            l.viewlayer_render = fn.get_view_layer('exclude').name

        fn.show_message_box(exclude_message)
        return {"FINISHED"}
|
|
|
|
|
2023-04-04 11:09:00 +02:00
|
|
|
class GPEXP_OT_merge_preview_ouput(bpy.types.Operator):
    """Build a compositor preview merging every active render layer into one output."""
    bl_idname = "gp.merge_preview_ouput"
    bl_label = "Merge Preview Output"
    bl_description = "Merge all active render layers to an output"
    bl_options = {"REGISTER"}

    # When set, remove the generated preview nodes instead of (re)building them
    clear : bpy.props.BoolProperty(default=False, options={'SKIP_SAVE'})

    def execute(self, context):
        merge_compositor_preview(scene=context.scene, clear=self.clear)
        return {"FINISHED"}
|
2023-01-06 15:10:10 +01:00
|
|
|
|
|
|
|
class GPEXP_OT_auto_merge_adjacent_prefix(bpy.types.Operator):
    bl_idname = "gpexp.auto_merge_adjacent_prefix"
    bl_label = "Auto Merge Adjacent Prefix"
    bl_description = "Automatically merge viewlayer and renderlayer of grouped layer prefix"
    bl_options = {"REGISTER"}

    @classmethod
    def poll(cls, context):
        return context.object and context.object.type == 'GPENCIL'

    excluded_prefix : bpy.props.StringProperty(
        name='Excluded Prefix', default='GP,RG,PO',
        description='Exclude comma separated prefix from merging viewlayer')

    first_name : bpy.props.BoolProperty(name='Merge On Bottom Layer',
        default=True,
        description='Keep the viewlayer of the bottom layer in groups, else upper layer')

    def invoke(self, context, event):
        # show the settings popup; OK button then triggers execute
        # (removed an unreachable 'return self.execute(context)' after this return)
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        layout = self.layout
        layout.label(text='Settings for auto-merge:')
        layout.prop(self, 'excluded_prefix')
        layout.prop(self, 'first_name')

    def execute(self, context):
        # comma-separated prefixes -> stripped list
        prefix_list = [p.strip() for p in self.excluded_prefix.split(',')]
        for ob in [o for o in context.selected_objects if o.type == 'GPENCIL']:
            fn.group_adjacent_layer_prefix_rlayer(ob, excluded_prefix=prefix_list, first_name=self.first_name)
        return {"FINISHED"}
|
|
|
|
|
|
|
|
# unused
|
2021-09-08 18:29:10 +02:00
|
|
|
class GPEXP_OT_merge_selected_dopesheet_layers(bpy.types.Operator):
    bl_idname = "gp.merge_selected_dopesheet_layers"
    bl_label = "Merge selected layers nodes"
    bl_description = "Merge view layers of selected gp layers to a new dedicated file output"
    bl_options = {"REGISTER"}

    @classmethod
    def poll(cls, context):
        return context.object and context.object.type == 'GPENCIL'

    disconnect : bpy.props.BoolProperty(default=True, options={'SKIP_SAVE'})

    def execute(self, context):
        # use the context passed to execute rather than bpy.context
        ob = context.object
        layers = [l for l in ob.data.layers if l.select and not l.hide]
        act = ob.data.layers.active
        # merge_selected_layers() # function to merge from GP dopesheet
        if not act:
            self.report({'ERROR'}, 'An active layer is needed to set merge output name')
            return {"CANCELLED"}

        if len(layers) < 2:
            self.report({'ERROR'}, 'Should select multiple layers for merging')
            return {"CANCELLED"}

        rd_scene = fn.get_render_scene(create=False)
        if rd_scene:
            nodes = rd_scene.node_tree.nodes

        clean_ob_name = bpy.path.clean_name(ob.name)
        rlayers = []
        for l in layers:
            idname = f'{clean_ob_name} / {l.info}'
            rlayer = rl = None
            # check the rd_scene layer that have a parent frame
            if not rd_scene:
                # no render scene yet: generating the viewlayer creates it
                _vl, rl = gen_vlayer.get_set_viewlayer_from_gp(ob, l)
                rd_scene = fn.get_render_scene(create=False)
                nodes = rd_scene.node_tree.nodes

            if not rl:
                rlayer = [n for n in nodes if n.type == 'R_LAYERS' and n.layer == idname and n.parent]
                if not rlayer:
                    # send to function to generate the rlayer and connect
                    _vl, rl = gen_vlayer.get_set_viewlayer_from_gp(ob, l)
                else:
                    # several candidates: keep the top-most node
                    rlayer.sort(key=lambda n: n.location.y, reverse=True)
                    rl = rlayer[0]

            if act == l:
                nodes.active = rl # make it active so the merge use this one

            rlayers.append(rl)

        color = None
        if fn.has_channel_color(act): # and bpy.context.preferences.edit.use_anim_channel_group_colors
            color = act.channel_color
        merge_layers(rlayers, disconnect=self.disconnect, color=color)

        return {"FINISHED"}
|
|
|
|
|
|
|
|
|
|
|
|
class GPEXP_OT_merge_selected_viewlayer_nodes(bpy.types.Operator):
    bl_idname = "gp.merge_selected_viewlayer_nodes"
    bl_label = "Merge selected view_layers "
    bl_description = "Merge selected view layers to a new dedicated file output\nDisconnect single output unless using 'keep connect'"
    bl_options = {"REGISTER"}

    disconnect : bpy.props.BoolProperty(default=True, options={'SKIP_SAVE'})

    def execute(self, context):
        nodes = context.scene.node_tree.nodes
        selection = [n for n in nodes if n.select and n.type == 'R_LAYERS']

        if nodes.active not in selection:
            self.report({'ERROR'}, 'The active node not within the render layer selection (used to define out name)')
            return {'CANCELLED'}

        # should be from the same object:
        if not all(selection[0].layer.split('.')[0] == n.layer.split('.')[0] for n in selection):
            # '\\' escaped: '\ ' is an invalid escape sequence (same printed text)
            print('/!\\ Merge -> Not every nodes start with the same object')

        # reuse the active node's custom color on the merged result, if any
        color = None
        if nodes.active.use_custom_color and nodes.active.color:
            color = nodes.active.color

        merge_layers(selection, active=nodes.active, disconnect=self.disconnect, color=color)
        return {"FINISHED"}
|
2021-09-07 18:06:54 +02:00
|
|
|
|
2021-09-08 18:29:10 +02:00
|
|
|
# Operator classes of this module, in registration order
classes=(
    GPEXP_OT_merge_viewlayers_to_active,
    GPEXP_OT_auto_merge_adjacent_prefix,
    GPEXP_OT_merge_selected_dopesheet_layers,# unused
    GPEXP_OT_merge_selected_viewlayer_nodes,
    GPEXP_OT_remove_viewlayer_on_selected,
    GPEXP_OT_merge_preview_ouput,
    )
|
2021-09-07 18:06:54 +02:00
|
|
|
|
2023-01-18 14:28:27 +01:00
|
|
|
def register():
    """Register this module's operator classes with Blender."""
    for op_cls in classes:
        bpy.utils.register_class(op_cls)
|
2021-09-07 18:06:54 +02:00
|
|
|
|
2021-09-08 18:29:10 +02:00
|
|
|
def unregister():
    """Unregister operator classes in reverse registration order."""
    for op_cls in reversed(classes):
        bpy.utils.unregister_class(op_cls)
|