First Commit
parent
3c7f133a85
commit
e3d24aa4e3
|
@ -0,0 +1,2 @@
|
||||||
|
__pycache__
|
||||||
|
*.py[cod]
|
|
@ -0,0 +1,5 @@
|
||||||
|
{
|
||||||
|
"python.pythonPath": "/usr/bin/python3",
|
||||||
|
"python.linting.pylintEnabled": true,
|
||||||
|
"python.linting.enabled": true
|
||||||
|
}
|
|
@ -1,4 +1,4 @@
|
||||||
# asset_library
|
# asset_browser
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@ -15,14 +15,14 @@ Already a pro? Just edit this README.md and make it your own. Want to make it ea
|
||||||
|
|
||||||
```
|
```
|
||||||
cd existing_repo
|
cd existing_repo
|
||||||
git remote add origin https://gitlab.com/autour-de-minuit/blender/asset_library.git
|
git remote add origin https://gitlab.com/autour-de-minuit/blender/asset_browser.git
|
||||||
git branch -M main
|
git branch -M main
|
||||||
git push -uf origin main
|
git push -uf origin main
|
||||||
```
|
```
|
||||||
|
|
||||||
## Integrate with your tools
|
## Integrate with your tools
|
||||||
|
|
||||||
- [ ] [Set up project integrations](https://gitlab.com/autour-de-minuit/blender/asset_library/-/settings/integrations)
|
- [ ] [Set up project integrations](https://gitlab.com/autour-de-minuit/blender/asset_browser/-/settings/integrations)
|
||||||
|
|
||||||
## Collaborate with your team
|
## Collaborate with your team
|
||||||
|
|
||||||
|
@ -46,7 +46,7 @@ Use the built-in continuous integration in GitLab.
|
||||||
|
|
||||||
# Editing this README
|
# Editing this README
|
||||||
|
|
||||||
When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thank you to [makeareadme.com](https://www.makeareadme.com/) for this template.
|
When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thank you to [makeareadme.com](https://www.makeareadme.com/) for this template.
|
||||||
|
|
||||||
## Suggestions for a good README
|
## Suggestions for a good README
|
||||||
Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
|
Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
|
||||||
|
|
|
@ -0,0 +1,92 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Extending features of the Asset Browser for a studio use.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Add-on metadata read by Blender's add-on manager.
bl_info = {
    "name": "Asset Library",
    "description": "Asset Library based on the Asset Browser.",
    "author": "Sybren A. Stüvel, Clement Ducarteron, Christophe Seux, Samuel Bernou",
    "version": (2, 0),
    "blender": (3, 3, 0),
    "warning": "In development, things may change",
    "location": "Asset Browser -> Animations, and 3D Viewport -> Animation panel",
    "category": "Animation",
}
|
||||||
|
|
||||||
|
#from typing import List, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
from asset_library import pose
|
||||||
|
from asset_library import action
|
||||||
|
from asset_library import collection
|
||||||
|
from asset_library import file
|
||||||
|
from asset_library import (gui, keymaps, prefs, operators)
|
||||||
|
from asset_library import constants
|
||||||
|
#from asset_library.common.adapter import AssetLibraryAdapter
|
||||||
|
from asset_library.common.bl_utils import get_addon_prefs
|
||||||
|
from asset_library.common.functions import set_env_libraries
|
||||||
|
from asset_library.common.template import Template
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
if 'bpy' in locals():
|
||||||
|
print("Reload Addon Asset Library")
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
importlib.reload(gui)
|
||||||
|
importlib.reload(keymaps)
|
||||||
|
|
||||||
|
importlib.reload(prefs)
|
||||||
|
importlib.reload(operators)
|
||||||
|
importlib.reload(constants)
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
#addon_keymaps: List[Tuple[bpy.types.KeyMap, bpy.types.KeyMapItem]] = []
|
||||||
|
|
||||||
|
# Sub-modules driven by register()/unregister() below, in registration order.
bl_modules = (
    operators,
    pose,
    action,
    collection,
    file,
    keymaps,
    gui,
    prefs,
)
|
||||||
|
|
||||||
|
|
||||||
|
def load_handler():
    """Deferred start-up task run via bpy.app.timers (see register()).

    Resolves the environment-defined libraries, sets their paths, and
    kicks off a non-blocking automatic bundle.
    """
    print('load_handler')

    set_env_libraries()
    bpy.ops.assetlib.set_paths(all=True)
    #bpy.ops.assetlib.#(all=True, only_recent=True)
    bpy.ops.assetlib.bundle(blocking=False, mode='AUTO_BUNDLE')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def register() -> None:
    """Register every sub-module, then schedule load_handler one second
    later so it runs once Blender's context is fully initialised."""
    for module in bl_modules:
        module.register()

    bpy.app.timers.register(load_handler, first_interval=1)
|
||||||
|
|
||||||
|
|
||||||
|
def unregister() -> None:
    """Unregister sub-modules in reverse registration order."""
    for module in reversed(bl_modules):
        module.unregister()
|
|
@ -0,0 +1,30 @@
|
||||||
|
|
||||||
|
from asset_library.action import (
|
||||||
|
gui,
|
||||||
|
keymaps,
|
||||||
|
clear_asset,
|
||||||
|
concat_preview,
|
||||||
|
operators,
|
||||||
|
properties,
|
||||||
|
rename_pose,
|
||||||
|
render_preview,)
|
||||||
|
|
||||||
|
if 'bpy' in locals():
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
importlib.reload(gui)
|
||||||
|
importlib.reload(keymaps)
|
||||||
|
importlib.reload(clear_asset)
|
||||||
|
importlib.reload(concat_preview)
|
||||||
|
importlib.reload(operators)
|
||||||
|
importlib.reload(properties)
|
||||||
|
importlib.reload(rename_pose)
|
||||||
|
importlib.reload(render_preview)
|
||||||
|
|
||||||
|
def register():
    """Register the operators and keymaps of the action sub-package."""
    operators.register()
    keymaps.register()
|
||||||
|
|
||||||
|
def unregister():
    """Tear down in the same order as register()."""
    operators.unregister()
    keymaps.unregister()
|
|
@ -0,0 +1,48 @@
|
||||||
|
|
||||||
|
import argparse
import json
import sys

from pathlib import Path

import bpy

# Make the add-on package importable when this file is run as a standalone
# script (blender --python ...). sys.path entries must be plain str: the
# previous code appended a Path object, which the import machinery skips.
sys.path.append(str(Path(__file__).parents[3]))

from asset_library.common.bl_utils import (
    get_preview,
)
|
||||||
|
|
||||||
|
def clear_asset(action_name='', use_fake_user=False):
    """Clear the asset status of the action named *action_name*.

    With use_fake_user the action datablock is kept alive; otherwise the
    action and its on-disk preview are removed. Always saves the current
    blend file and quits Blender (this script runs in a headless
    Blender instance).
    """
    action = bpy.data.actions.get(action_name)
    if not action:
        # Fixed message (was the garbled "No ... not found.").
        print(f'Action {action_name} not found.')
        bpy.ops.wm.quit_blender()
        # quit_blender() does not abort the running script: bail out
        # explicitly so the code below never touches a missing action.
        return

    action.asset_clear()
    if use_fake_user:
        action.use_fake_user = True
    else:
        preview = get_preview(asset_path=bpy.data.filepath, asset_name=action_name)
        if preview:
            preview.unlink()
        bpy.data.actions.remove(action)

    bpy.ops.wm.save_mainfile(
        filepath=bpy.data.filepath, compress=True, exit=True
    )
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__' :
    # Blender passes script arguments after a '--' separator; strip
    # everything before it so argparse only sees our own flags.
    if '--' in sys.argv :
        sep = sys.argv.index('--')
        sys.argv = [sys.argv[sep - 1], *sys.argv[sep + 1:]]

    parser = argparse.ArgumentParser(description='Add Comment To the tracker',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--action-name')
    # json.loads turns the CLI strings 'true'/'false' into Python bools.
    parser.add_argument('--use-fake-user', type=json.loads, default='false')

    cli_args = parser.parse_args()
    clear_asset(**vars(cli_args))
|
|
@ -0,0 +1,146 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import math
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def alpha_to_color(pixels_data, color):
    """Composite each RGBA pixel array over a solid *color* background.

    pixels_data: list of (H, W, 4) float arrays.
    color: RGB triple used where the pixels are transparent.
    Returns a new list of arrays with the alpha baked in.
    """
    composited = []
    for pixels in pixels_data:
        height, width, _ = pixels.shape
        alpha = pixels[:, :, 3:]
        bg = np.array([color[0], color[1], color[2], 1], dtype=np.float32)
        bg = np.tile(bg, height * width).reshape(height, width, 4)
        # Standard "over" compositing against an opaque background.
        composited.append(pixels * alpha + bg * (1 - alpha))
    return composited
|
||||||
|
|
||||||
|
def create_array(height, width):
    """Return a flat zero-filled float32 buffer for height*width RGBA pixels."""
    return np.zeros(height * width * 4, dtype=np.float32)
|
||||||
|
|
||||||
|
def read_pixels_data(img, source_height, source_width):
    """Return *img*'s pixels as a (source_height, source_width, 4) float32
    array, downscaling to the reference width and padding the height with
    empty rows so every mosaic tile has identical dimensions."""
    width, height = img.size

    # Bring the image to the reference width first.
    if width != source_width :
        factor = abs(width / source_width)
        img.scale(int(width / factor), int(height / factor))
        width, height = img.size

    pixels = create_array(height, width)
    img.pixels.foreach_get(pixels)
    pixels = pixels.reshape(height, width, 4)

    # Pad top and bottom with transparent rows if the height still differs.
    if pixels.shape[0] != source_height:
        pad_rows = int(abs(source_height - height) / 2)
        padding = create_array(pad_rows, source_width)
        padding = padding.reshape(pad_rows, source_width, 4)
        pixels = np.vstack((padding, pixels, padding))

    return pixels.reshape(source_height, source_width, 4)
|
||||||
|
|
||||||
|
def create_final(output_name, pixels_data, final_height, final_width):
    """Create (or replace) an image datablock named *output_name* and fill
    it with *pixels_data*, a flat RGBA float buffer. Returns the image."""
    existing = bpy.data.images.get(output_name)
    if existing:
        # Replace any previous mosaic of the same name.
        bpy.data.images.remove(existing)

    image = bpy.data.images.new(output_name, final_width, final_height)
    image.generated_color = (0, 0, 0, 0)
    image.pixels.foreach_set(pixels_data)
    return image
|
||||||
|
|
||||||
|
def guess_input_format(img_list):
    """Return the (width, height) of the first square image in *img_list*,
    or None when no image is square."""
    return next((img.size for img in img_list if img.size[0] == img.size[1]), None)
|
||||||
|
|
||||||
|
def format_files(files, catalog_data):
    """Group preview file paths by catalog-entry name.

    Only nested catalog entries (key containing '/') are considered;
    a file belongs to an entry when the entry name appears in its path.
    """
    return {
        entry['name']: [f for f in files if entry['name'] in f]
        for key, entry in catalog_data.items()
        if '/' in key
    }
|
||||||
|
|
||||||
|
def mosaic_export(
    files, catalog_data, row=2, columns=2, auto_calculate=True,
    bg_color=(0.18, 0.18, 0.18,), resize_output=100,
    ):
    """Build one contact-sheet ("mosaic") JPEG per catalog entry.

    files: preview image filepaths.
    catalog_data: parsed asset catalog mapping (see format_files()).
    row, columns: grid size used when auto_calculate is False.
    auto_calculate: derive a near-square grid from the image count instead.
    bg_color: RGB used to composite out the alpha channel.
    resize_output: percentage scale applied to the final mosaic.
    """
    img_dict = format_files(files, catalog_data)

    for cat, files_list in img_dict.items():
        if not files_list:
            continue

        # Start from a clean slate so stale datablocks never leak
        # into the mosaic.
        for i in bpy.data.images:
            bpy.data.images.remove(i)

        img_list = []

        # NOTE(review): assumes the preview path layout .../<chars>/../..,
        # i.e. the character name is the 4th-from-last path component —
        # confirm against the publish directory structure.
        chars = Path(files_list[0]).parts[-4]
        output_dir = str(Path(files_list[0]).parent.parent)

        ext = 'jpg'
        output_name = f'{chars}_{cat}.{ext}'

        for img in files_list:
            img_list.append(bpy.data.images.load(img, check_existing=True))

        for i in img_list:
            i.colorspace_settings.name = 'Raw'

        # Fix: bind rows from the 'row' parameter up front. Previously the
        # name was only assigned when auto_calculate was True, so
        # auto_calculate=False raised NameError at `rows*columns`.
        rows = row
        if auto_calculate:
            rows = int(math.sqrt(len(img_list)))
            columns = math.ceil(len(img_list) / rows)

        if rows * columns < len(img_list):
            raise AttributeError('Grid too small for number of images')

        src_w, src_h = img_list[0].size
        final_w = src_w * columns
        final_h = src_h * rows

        img_pixels = [read_pixels_data(img, src_h, src_w) for img in img_list]

        # Pad with empty tiles so the grid is completely filled.
        h_stack = []
        total_len = rows * columns
        if len(img_pixels) < total_len:
            for i in range(total_len - len(img_pixels)):
                img_pixels.append(create_array(src_h, src_w).reshape(src_h, src_w, 4))

        img_pixels = alpha_to_color(img_pixels, bg_color)
        for i in range(0, len(img_pixels), columns):
            h_stack.append(np.hstack(img_pixels[i:i + columns]))
        if rows > 1:
            # Stack bottom-up: Blender's pixel origin is bottom-left.
            combined_stack = np.vstack(h_stack[::-1])
        else:
            combined_stack = np.hstack((h_stack[:]))

        combined_img = create_final(output_name, combined_stack.flatten(), final_h, final_w)

        if resize_output != 100:
            w, h = combined_img.size
            combined_img.scale(w * (resize_output * .01), h * (resize_output * .01))

        combined_img.filepath_raw = '/'.join([output_dir, output_name])
        combined_img.file_format = 'JPEG'
        combined_img.save()

        print(f"""
Image saved: {combined_img.filepath_raw}
""")
|
|
@ -0,0 +1,207 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Functions related to anim and pose.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Collection
|
||||||
|
|
||||||
|
from typing import Optional, FrozenSet, Set, Union, Iterable, cast
|
||||||
|
import dataclasses
|
||||||
|
import functools
|
||||||
|
import re
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.types import (
|
||||||
|
Action,
|
||||||
|
Bone,
|
||||||
|
Context,
|
||||||
|
FCurve,
|
||||||
|
Keyframe,
|
||||||
|
Object,
|
||||||
|
TimelineMarker
|
||||||
|
)
|
||||||
|
|
||||||
|
from asset_library.common.bl_utils import active_catalog_id, split_path
|
||||||
|
|
||||||
|
FCurveValue = Union[float, int]
|
||||||
|
|
||||||
|
pose_bone_re = re.compile(r'pose.bones\["([^"]+)"\]')
|
||||||
|
"""RegExp for matching FCurve data paths."""
|
||||||
|
|
||||||
|
def is_pose(action):
    """Return True when *action* is a pose: no F-Curve carries more than
    one keyframe."""
    return all(len(fc.keyframe_points) <= 1 for fc in action.fcurves)
|
||||||
|
|
||||||
|
def get_bone_visibility(data_path):
    """Return whether the bone addressed by FCurve *data_path* sits on a
    visible armature layer of the active object.

    Note: only the bone's first enabled layer is checked.
    """
    bone, prop = split_path(data_path)

    ob = bpy.context.object
    # Indices of the layers this bone is assigned to.
    b_layers = [i for i, val in enumerate(ob.pose.bones[bone].bone.layers) if val]

    # (Removed the unused rig_layers local present in the original.)
    return ob.data.layers[b_layers[0]]
|
||||||
|
|
||||||
|
def get_keyframes(action, selected=False, includes=None):
    """Collect keyframe frame numbers from *action*.

    selected: restrict to selected control points on visible bones whose
        name is in *includes*; falls back to the current frame when fewer
        than two keys match.
    includes: bone names to keep when selected=True. Default is None
        (treated as an empty list — fixes the shared mutable default).
    """
    includes = includes if includes is not None else []

    if selected:
        keyframes = []
        for f in action.fcurves:
            bone, prop = split_path(f.data_path)
            for k in f.keyframe_points:
                if bone not in includes:
                    continue
                if not k.select_control_point:
                    continue
                if not get_bone_visibility(f.data_path):
                    continue

                keyframes += [int(k.co[0])]
        if len(keyframes) <= 1:
            keyframes = [bpy.context.scene.frame_current]
    else:
        keyframes = sorted([int(k.co[0]) for f in action.fcurves for k in f.keyframe_points])

    return keyframes
|
||||||
|
|
||||||
|
def get_marker(action):
    """Return the name of the pose marker on the current frame, or None."""
    if action.pose_markers:
        frame = bpy.context.scene.frame_current
        return next((m.name for m in action.pose_markers if m.frame == frame), None)
|
||||||
|
|
||||||
|
def reset_bone(bone, transform=True, custom_props=True):
    """Reset a pose bone to its rest state.

    transform: clear location/rotation/scale.
    custom_props: restore numeric custom properties to their UI defaults.
    """
    if transform:
        bone.location = (0, 0, 0)
        if bone.rotation_mode == "QUATERNION":
            # Fix: the identity quaternion is (w=1, x=0, y=0, z=0); the
            # previous (0, 0, 0, 0) is a degenerate zero quaternion.
            bone.rotation_quaternion = (1, 0, 0, 0)
        elif bone.rotation_mode == "AXIS_ANGLE":
            # Angle 0 — NOTE(review): axis left at zero as in the original;
            # confirm against Blender's rest default if it matters.
            bone.rotation_axis_angle = (0, 0, 0, 0)
        else:
            bone.rotation_euler = (0, 0, 0)

        bone.scale = (1, 1, 1)

    if custom_props:
        for key, value in bone.items():
            try:
                id_prop = bone.id_properties_ui(key)
            except TypeError:
                # Property has no UI data (e.g. internal entries).
                continue

            if not isinstance(value, (int, float)) or not id_prop:
                continue
            bone[key] = id_prop.as_dict()['default']
|
||||||
|
|
||||||
|
def is_asset_action(action):
    """Return truthy when *action* is an asset assigned to a real catalog
    (i.e. its catalog_id is not the nil UUID)."""
    # Fix: 'uuid' was referenced but never imported in this module,
    # raising NameError at call time. Local import keeps the fix
    # self-contained.
    import uuid
    return action.asset_data and action.asset_data.catalog_id != str(uuid.UUID(int=0))
|
||||||
|
|
||||||
|
def conform_action(action):
    """Tag an action asset as 'pose' (single-frame) or 'anim' (animated).

    Leaves the action untouched when it already carries one of the tags.
    """
    tags = ('pose', 'anim')

    if any(tag in action.asset_data.tags.keys() for tag in tags):
        return

    # Fix: initialise before the loop so an action without any F-Curves
    # is still tagged (the old code only set the key inside the loop and
    # raised KeyError below for empty actions).
    action.asset_data['is_single_frame'] = True
    for fc in action.fcurves:
        if len(fc.keyframe_points) > 1:
            action.asset_data['is_single_frame'] = False
            break

    if action.asset_data['is_single_frame']:
        action.asset_data.tags.new('pose')
    else:
        action.asset_data.tags.new('anim')
|
||||||
|
|
||||||
|
def clean_action(action='', frame_start=0, frame_end=0, excludes=None, includes=None):
    """Trim *action* to [frame_start, frame_end].

    F-Curves whose bone is in *excludes* or not in *includes* are removed
    entirely; remaining curves get their evaluated value keyed at both
    range boundaries, then every key outside the range is deleted.

    excludes/includes default to None (treated as empty lists) — fixes
    the shared mutable defaults.
    """
    excludes = excludes if excludes is not None else []
    includes = includes if includes is not None else []

    ## Clean Keyframe Before/After Range
    # Fix: iterate over a snapshot — fcurves are removed inside the loop,
    # and mutating the live collection while iterating skips entries.
    for fc in list(action.fcurves):
        bone, prop = split_path(fc.data_path)

        # !! Mush Mush dependent. Need to be fix
        if bone in excludes or bone not in includes:
            action.fcurves.remove(fc)
            continue

        # Add Keyframe At Start/End Range
        for fr in (frame_start, frame_end):
            fc_val = fc.evaluate(fr)
            fc.keyframe_points.insert(frame=fr, value=fc_val)
            fc.update()

        # Remove Keyframe out of range
        for k in reversed(fc.keyframe_points):
            if int(k.co[0]) not in range(frame_start, frame_end + 1):
                fc.keyframe_points.remove(k)
            fc.update()
|
||||||
|
|
||||||
|
def append_action(action_path='', action_name=''):
    """Append (not link) the action *action_name* from the blend file at
    *action_path* and return the loaded datablock."""
    print(f'Loading {action_name} from: {action_path}')

    with bpy.data.libraries.load(str(action_path), link=False) as (data_from, data_to):
        data_to.actions = [action_name]

    return data_to.actions[0]
|
||||||
|
|
||||||
|
def apply_anim(action_lib, ob, bones=None):
    """Copy keyframes from the library action *action_lib* onto *ob*'s
    action, offset so the first key lands on the current frame.

    bones: optional bone-name filter; None or empty applies every F-Curve.
        (Default changed from the shared mutable [] to None —
        backward-compatible.)
    """
    from mathutils import Vector

    scn = bpy.context.scene
    bones = bones if bones is not None else []

    if not ob.animation_data:
        ob.animation_data_create()

    action = ob.animation_data.action
    if not action:
        action = bpy.data.actions.new(ob.name)
        ob.animation_data.action = action

    keys = sorted([k.co[0] for f in action_lib.fcurves for k in f.keyframe_points])
    if not keys:
        print(f'The action {action_lib.name} has no keyframes')
        return

    first_key = keys[0]
    key_offset = scn.frame_current - first_key

    # Keyframe attributes copied over; handle coordinates are shifted by
    # the same frame offset as the key itself.
    key_attr = ('type', 'interpolation', 'handle_left_type', 'handle_right_type',
        'amplitude', 'back', 'easing', 'period', 'handle_right', 'handle_left'
    )
    for fc in action_lib.fcurves:
        bone_name, prop_name = split_path(fc.data_path)

        if bones and bone_name not in bones:
            continue

        action_fc = action.fcurves.find(fc.data_path, index=fc.array_index)
        if not action_fc:
            action_fc = action.fcurves.new(
                fc.data_path,
                index=fc.array_index,
                action_group=fc.group.name if fc.group else fc.data_path.split('"')[1]
            )

        for kf_lib in fc.keyframe_points:
            kf = action_fc.keyframe_points.insert(
                frame=kf_lib.co[0] + key_offset,
                value=kf_lib.co[1]
            )
            for attr in key_attr:
                src_val = getattr(kf_lib, attr)
                if attr.startswith('handle') and 'type' not in attr:
                    src_val += Vector((key_offset, 0))

                setattr(kf, attr, src_val)

        fc.update()

    # Redraw graph editors so the new keys show up immediately.
    for window in bpy.context.window_manager.windows:
        screen = window.screen
        for area in screen.areas:
            if area.type == 'GRAPH_EDITOR':
                area.tag_redraw()
|
|
@ -0,0 +1,49 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
|
||||||
|
def draw_context_menu(layout):
    """Populate the Asset Browser context menu: open/preview entries,
    asset management, pose application, bone selection and display
    options."""
    params = bpy.context.space_data.params
    asset = bpy.context.asset_file_handle

    layout.operator("assetlib.open_blend", text="Open blend file")#.asset = asset.name
    layout.operator("assetlib.play_preview", text="Play Preview")

    layout.separator()

    # The operators below open dialogs and need INVOKE semantics.
    layout.operator_context = 'INVOKE_DEFAULT'

    #layout.operator("assetlib.rename_asset", text="Rename Action")
    layout.operator("assetlib.clear_asset", text="Remove Asset")
    layout.operator("assetlib.edit_data", text="Edit Asset data")

    #layout.operator("actionlib.clear_asset", text="Clear Asset (Fake User)").use_fake_user = True

    layout.separator()

    layout.operator("actionlib.apply_selected_action", text="Apply Pose").flipped = False
    layout.operator("actionlib.apply_selected_action", text="Apply Pose (Flipped)").flipped = True

    layout.separator()

    layout.operator("poselib.blend_pose_asset_for_keymap", text="Blend Pose").flipped = False
    layout.operator("poselib.blend_pose_asset_for_keymap", text="Blend Pose (Flipped)").flipped = True

    layout.separator()

    layout.operator("poselib.pose_asset_select_bones", text="Select Bones").selected_side = 'CURRENT'
    layout.operator("poselib.pose_asset_select_bones", text="Select Bones (Flipped)").selected_side = 'FLIPPED'
    layout.operator("poselib.pose_asset_select_bones", text="Select Bones (Both)").selected_side = 'BOTH'

    layout.separator()
    #layout.operator("asset.library_refresh")
    if params.display_type == 'THUMBNAIL':
        layout.prop_menu_enum(params, "display_size")
|
||||||
|
|
||||||
|
|
||||||
|
def draw_header(layout):
    '''Draw the header of the Asset Browser Window'''
    layout.separator()
    layout.operator("actionlib.store_anim_pose", text='Add Action', icon='FILE_NEW')
|
||||||
|
|
|
@ -0,0 +1,49 @@
|
||||||
|
|
||||||
|
|
||||||
|
from typing import List, Tuple
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
addon_keymaps: List[Tuple[bpy.types.KeyMap, bpy.types.KeyMapItem]] = []
|
||||||
|
|
||||||
|
def register():
    """Install the action-library shortcuts on the File Browser keymap."""
    wm = bpy.context.window_manager
    kc = wm.keyconfigs.addon
    if not kc:
        # No addon keyconfig (e.g. background mode): nothing to do.
        return

    km = kc.keymaps.new(name="File Browser Main", space_type="FILE_BROWSER")

    # Double-click applies a pose/anim; modifiers select the variant.
    # (operator idname, event modifiers, flipped value)
    click_bindings = (
        ("actionlib.apply_selected_action", dict(), False),
        ("actionlib.apply_selected_action", dict(alt=True), True),
        ("poselib.blend_pose_asset_for_keymap", dict(shift=True), False),
        ("poselib.blend_pose_asset_for_keymap", dict(alt=True, shift=True), True),
    )
    for idname, mods, flipped in click_bindings:
        kmi = km.keymap_items.new(idname, "LEFTMOUSE", "DOUBLE_CLICK", **mods)
        kmi.properties.flipped = flipped
        addon_keymaps.append((km, kmi))

    # 'S' selects the asset's bones; modifiers pick the side.
    select_bindings = (
        (dict(), 'CURRENT'),
        (dict(alt=True), 'FLIPPED'),
        (dict(alt=True, ctrl=True), 'BOTH'),
    )
    for mods, side in select_bindings:
        kmi = km.keymap_items.new("poselib.pose_asset_select_bones", "S", "PRESS", **mods)
        kmi.properties.selected_side = side
        addon_keymaps.append((km, kmi))
|
||||||
|
|
||||||
|
def unregister():
    """Remove every keymap item installed by register()."""
    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,34 @@
|
||||||
|
import bpy
|
||||||
|
from bpy.types import PropertyGroup
|
||||||
|
from bpy.props import PointerProperty, StringProperty, BoolProperty
|
||||||
|
|
||||||
|
class ACTIONLIB_PG_scene(PropertyGroup):
    """Scene-level settings of the action library."""

    # Whether poses are applied mirrored.
    flipped: BoolProperty(
        name="Flip Pose",
        default=False,
    )
    # Action that was active before applying/previewing one from the library.
    previous_action: PointerProperty(type=bpy.types.Action)
    publish_path: StringProperty(subtype='FILE_PATH')
    # poll restricts the pointer to camera objects only.
    camera: PointerProperty(type=bpy.types.Object, poll=lambda s, o: o.type == 'CAMERA')
    # poll restricts the pointer to actions marked as assets.
    rest_pose: PointerProperty(type=bpy.types.Action, poll=lambda s, a: a.asset_data)


classes = (
    ACTIONLIB_PG_scene,
)
|
||||||
|
|
||||||
|
|
||||||
|
def register():
    """Register the property groups and attach them to the Scene."""
    for c in classes:
        bpy.utils.register_class(c)

    bpy.types.Scene.actionlib = PointerProperty(type=ACTIONLIB_PG_scene)
|
||||||
|
|
||||||
|
def unregister():
    """Detach the Scene pointer and unregister classes in reverse order."""
    # The attribute may already be gone during Blender shutdown.
    try:
        del bpy.types.Scene.actionlib
    except AttributeError:
        pass

    for c in reversed(classes):
        bpy.utils.unregister_class(c)
|
|
@ -0,0 +1,44 @@
|
||||||
|
|
||||||
|
import argparse
import json
import re
import sys

from pathlib import Path

import bpy

# Make the add-on package importable when this file is run as a standalone
# script (blender --python ...). sys.path entries must be plain str: the
# previous code appended a Path object, which the import machinery skips.
sys.path.append(str(Path(__file__).parents[3]))

from asset_library.common.bl_utils import (
    get_preview,
)
|
||||||
|
|
||||||
|
def rename_pose(src_name='', dst_name=''):
    """Rename the action *src_name* to *dst_name*, rename its preview file
    to match, then save and quit Blender (runs in a headless instance).
    """
    action = bpy.data.actions.get(src_name)
    if not action:
        # Fixed message (was the garbled "No ... not found.").
        print(f'Action {src_name} not found.')
        bpy.ops.wm.quit_blender()
        # quit_blender() does not abort the running script: stop explicitly
        # so the code below never touches a missing action.
        return

    action.name = dst_name
    preview = get_preview(asset_path=bpy.data.filepath, asset_name=src_name)
    if preview:
        # Fix: escape the name — re.sub treated src_name as a regex, so
        # names containing '.', '+' etc. could match unintended text.
        preview.rename(re.sub(re.escape(src_name), dst_name, str(preview)))

    bpy.ops.wm.save_mainfile(
        filepath=bpy.data.filepath, compress=True, exit=True
    )
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__' :
    # Blender passes script arguments after a '--' separator; strip
    # everything before it so argparse only sees our own flags.
    if '--' in sys.argv :
        sep = sys.argv.index('--')
        sys.argv = [sys.argv[sep - 1], *sys.argv[sep + 1:]]

    parser = argparse.ArgumentParser(description='Add Comment To the tracker',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--src-name')
    parser.add_argument('--dst-name')

    cli_args = parser.parse_args()
    rename_pose(**vars(cli_args))
|
|
@ -0,0 +1,292 @@
|
||||||
|
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
sys.path.append(str(Path(__file__).parents[3]))
|
||||||
|
|
||||||
|
from asset_library.action.concat_preview import mosaic_export
|
||||||
|
from asset_library.common.file_utils import open_file
|
||||||
|
from asset_library.action.functions import reset_bone, get_keyframes
|
||||||
|
from asset_library.common.functions import read_catalog
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from tempfile import gettempdir
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def rm_tree(pth):
    """Recursively delete the directory *pth* (str or Path).

    Uses the stdlib shutil.rmtree (this module already imports shutil)
    instead of the previous hand-rolled recursion, which also mishandled
    symlinked directories by recursing into them.
    """
    shutil.rmtree(Path(pth))
|
||||||
|
|
||||||
|
def render_preview(directory, asset_catalog, render_actions, publish_actions, remove_folder):
    """Render preview stills/movies for the action assets of the open blend.

    directory: library root; previews are written under <directory>/preview.
    asset_catalog: path of the asset catalog file read by read_catalog().
    render_actions: names of the actions to (re-)render.
    publish_actions: names still published; stale preview files are removed.
    remove_folder: when True, wipe the existing preview folder first.

    Runs inside Blender and quits Blender at the end.
    """

    scn = bpy.context.scene
    rnd = bpy.context.scene.render
    rnd.resolution_x = rnd.resolution_y = 512

    report = []  # human-readable issues, written next to the blend at the end
    blendfile = Path(bpy.data.filepath)
    asset_catalog_data = read_catalog(asset_catalog)

    anim_render_dir = Path(gettempdir()) / 'actionlib_render' #/tmp/actionlib_render. Removed at the end
    anim_render_dir.mkdir(exist_ok=True, parents=True)

    preview_render_dir = Path(directory) / 'preview'

    if preview_render_dir.exists() and remove_folder:
        rm_tree(preview_render_dir)

    preview_render_dir.mkdir(exist_ok=True, parents=True)
    for i in ('anim', 'pose'):
        Path(preview_render_dir / i).mkdir(exist_ok=True, parents=True)

    # Remove previews of actions that are no longer in publish_actions.
    for f in preview_render_dir.rglob('*'):
        if f.is_dir():
            print(f'{f} is dir. Skipped.')
            continue
        if all(i not in f.parts for i in ('anim', 'pose')) and f.parent.parts[-1] != 'preview':
            print(f'{f} is out of pipe. Approved or Rtk pictures. Skipped.')
            continue
        if not any(f.stem.endswith(a) for a in publish_actions):
            print(f'{str(f)} not in publish actions anymore. Removing...')
            f.unlink()

    # Set Scene
    # ----------
    # Scene Setting
    scn.use_preview_range = True
    scn.eevee.use_gtao = True
    scn.tool_settings.use_keyframe_insert_auto = False

    # Render Setting
    rnd.engine = 'BLENDER_EEVEE'
    rnd.use_simplify = False
    rnd.use_stamp_date = True
    rnd.use_stamp_time = True
    rnd.use_stamp_render_time = False
    rnd.use_stamp_frame = True
    rnd.use_stamp_frame_range = False
    rnd.use_stamp_memory = False
    rnd.use_stamp_hostname = False
    rnd.use_stamp_camera = True
    rnd.use_stamp_lens = False
    rnd.use_stamp_scene = False
    rnd.use_stamp_marker = False
    rnd.use_stamp_filename = False
    rnd.use_stamp_sequencer_strip = False
    rnd.use_stamp_note = True
    rnd.use_stamp = True
    rnd.stamp_font_size = 16
    rnd.use_stamp_labels = False
    rnd.image_settings.file_format = 'JPEG'

    # Viewport Look
    # ----------
    """
    # Eevee
    for screen in bpy.data.screens:
        for area in screen.areas:
            for space in area.spaces:
                if space.type == 'VIEW_3D':
                    space.overlay.show_overlays = False
                    space.shading.type = 'RENDERED'
                    space.shading.use_scene_lights_render = False
                    space.shading.use_scene_world_render = False
                    space.region_3d.view_perspective = 'CAMERA'

    """
    # Cycles Mat Shading
    for a in bpy.context.screen.areas:
        if a.type == 'VIEW_3D':
            a.spaces[0].overlay.show_overlays = False
            a.spaces[0].region_3d.view_perspective = 'CAMERA'
            a.spaces[0].shading.show_cavity = True
            a.spaces[0].shading.cavity_type = 'WORLD'
            a.spaces[0].shading.cavity_ridge_factor = 0.75
            a.spaces[0].shading.cavity_valley_factor = 1.0

    # Add Subsurf
    # -----------
    # Objects driven by mesh-deform / surface-deform cages are excluded below.
    deform_ob = [m.object for o in scn.objects \
        for m in o.modifiers if m.type == 'MESH_DEFORM'
    ]
    deform_ob += [m.target for o in scn.objects \
        for m in o.modifiers if m.type == 'SURFACE_DEFORM'
    ]

    objects = [o for o in bpy.context.scene.objects if (o.type == 'MESH'
        and o not in deform_ob and o not in bpy.context.scene.collection.objects[:])
    ]

    for o in objects:
        subsurf = False
        for m in o.modifiers:
            if m.type == 'SUBSURF':
                # Match the viewport subdivision to the render settings so
                # the OpenGL capture looks like the final render.
                m.show_viewport = m.show_render
                m.levels = m.render_levels
                subsurf = True
                break

        if not subsurf:
            subsurf = o.modifiers.new('', 'SUBSURF')
            subsurf.show_viewport = subsurf.show_render
            subsurf.levels = subsurf.render_levels

    # Loop through action and render
    # ------------------------------
    # NOTE(review): assumes a single armature in the scene; rig may be None
    # here and the next line would raise — TODO confirm upstream guarantee.
    rig = next((o for o in scn.objects if o.type == 'ARMATURE'), None)
    # actions = [a for a in bpy.data.actions if a.asset_data]

    rig.animation_data_create()
    for action_name in render_actions:
        action = bpy.data.actions.get(action_name)

        if not action:
            print(f'\'{action_name}\' not found.')
            continue

        print(f"-- Current --: {action.name}")

        # Template: filled below with ANIM/POSE and the catalog-derived name.
        rnd.stamp_note_text = '{type} : {pose_name}'
        action_data = action.asset_data

        if 'camera' not in action_data.keys():
            report.append(f"'{action.name}' has no CameraData.")
            continue

        catalog_name = next((v['name'] for v in asset_catalog_data.values() if action_data.catalog_id == v['id']), None)
        pose_name = '/'.join([*catalog_name.split('-'), action.name])
        filename = bpy.path.clean_name(f'{catalog_name}_{action.name}')
        ext = 'jpg'

        # Reset the rig to a neutral state before applying this action.
        rig.animation_data.action = None
        bpy.context.view_layer.update()
        for b in rig.pose.bones:
            if re.match('^[A-Z]+\.', b.name):  # skip uppercase-prefixed bones
                continue
            reset_bone(b)

        # Apply an optional per-action rest pose first.
        rest_pose = None
        if isinstance(action.asset_data.get('rest_pose'), str):
            rest_pose = bpy.data.actions.get(action.asset_data['rest_pose'])

        rig.animation_data.action = rest_pose
        bpy.context.view_layer.update()

        rig.animation_data.action = action

        if 'camera' in action.asset_data.keys():
            action_cam = bpy.data.objects.get(action.asset_data['camera'], '')
            if action_cam:
                scn.camera = action_cam

        # Is Anim
        if not action_data['is_single_frame'] or 'anim' in action_data.tags.keys():
            keyframes = get_keyframes(action)
            if not keyframes:
                continue
            anim_start = keyframes[0]
            anim_end = keyframes[-1]

            if anim_start < scn.frame_start:
                report.append(f"Issue found for '{action.name}'. Has keyframes before 'Start Frame'.")
                continue

            scn.frame_preview_start = anim_start
            scn.frame_preview_end = anim_end

            rnd.stamp_note_text = rnd.stamp_note_text.format(
                type='ANIM',
                pose_name=pose_name,
            )

            # NOTE(review): '(unknown)' looks like a corrupted '{filename}'
            # placeholder in this literal — TODO confirm against history.
            rnd.filepath = f'{str(anim_render_dir)}/(unknown)_####.{ext}'

            bpy.ops.render.opengl(animation=True)

            # Encode the rendered frame sequence to a .mov in the anim folder.
            ffmpeg_cmd = [
                'ffmpeg', '-y',
                '-start_number', f'{anim_start:04d}',
                '-i', rnd.filepath.replace('####', '%04d'),
                '-c:v', 'libx264',
                str((preview_render_dir/'anim'/filename).with_suffix('.mov')),
            ]
            subprocess.call(ffmpeg_cmd)

        # Is Pose
        elif action_data['is_single_frame'] or 'pose' in action_data.tags.keys():
            scn.frame_preview_start = scn.frame_preview_end = scn.frame_start

            rnd.stamp_note_text = rnd.stamp_note_text.format(
                type='POSE',
                pose_name=pose_name,
            )

            # NOTE(review): same suspected '{filename}' placeholder corruption.
            rnd.filepath = f'{str(preview_render_dir)}/pose/(unknown)_####.{ext}'

            bpy.ops.render.opengl(animation=True)

            # Drop the 4-digit frame suffix from the single rendered image.
            filename = rnd.filepath.replace('####', f'{scn.frame_preview_end:04d}')
            Path(filename).rename(re.sub('_[0-9]{4}.', '.', filename))

    shutil.rmtree(anim_render_dir)

    # Report
    # ------
    # On failure open the report file, otherwise open the preview folder.
    if report:
        report_file = blendfile.parent / Path(f'{blendfile.stem}report').with_suffix('.txt')
        if not report_file.exists():
            report_file.touch(exist_ok=False)

        report_file.write_text('-')
        report_file.write_text('\n'.join(report))

        result = report_file

    else:
        result = preview_render_dir

    open_file(result)

    files = [str(f) for f in sorted((preview_render_dir/'pose').glob('*.jpg'))]

    mosaic_export(
        files=files, catalog_data=asset_catalog_data,
        row=2, columns=2, auto_calculate=True,
        bg_color=(0.18, 0.18, 0.18,), resize_output=100
    )

    bpy.ops.wm.quit_blender()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Fixed copy-pasted description ('Add Comment To the tracker') to match
    # what this script actually does.
    parser = argparse.ArgumentParser(
        description='Render preview images and movies for action assets',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    parser.add_argument('--directory')                    # library root directory
    parser.add_argument('--asset-catalog')                # catalog file path
    parser.add_argument('--render-actions', nargs='+')    # actions to render
    parser.add_argument('--publish-actions', nargs='+')   # actions still published
    # json.loads turns the string 'true'/'false' into a Python bool.
    parser.add_argument('--remove-folder', type=json.loads, default='false')

    # Blender forwards script arguments after a '--' separator:
    # strip everything before it so argparse only sees our own flags.
    if '--' in sys.argv:
        index = sys.argv.index('--')
        sys.argv = [sys.argv[index - 1], *sys.argv[index + 1:]]

    args = parser.parse_args()
    render_preview(**vars(args))
|
|
@ -0,0 +1,4 @@
|
||||||
|
|
||||||
|
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||||
|
from asset_library.adapters.copy_folder import CopyFolderLibrary
|
||||||
|
from asset_library.adapters.scan_folder import ScanFolderLibrary
|
|
@ -0,0 +1,545 @@
|
||||||
|
|
||||||
|
from asset_library.common.functions import (read_catalog, write_catalog, norm_asset_datas, get_catalog_path)
|
||||||
|
from asset_library.common.bl_utils import get_addon_prefs, load_datablocks
|
||||||
|
from asset_library.common.file_utils import read_file, write_file
|
||||||
|
from asset_library.common.template import Template
|
||||||
|
|
||||||
|
from asset_library import (action, collection, file)
|
||||||
|
|
||||||
|
from bpy.types import PropertyGroup
|
||||||
|
from bpy.props import StringProperty
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
from itertools import groupby
|
||||||
|
from pathlib import Path
|
||||||
|
import shutil
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
|
class AssetLibraryAdapter(PropertyGroup):
    """Base class of all asset-library adapters.

    An adapter defines how a library's assets are fetched (fetch), grouped
    into bundle blend files for the asset browser (bundle), and drawn in
    the UI (draw_header / draw_context_menu / draw_prefs).
    """

    #def __init__(self):
    name = "Base Adapter"
    #library = None

    # Directory holding the bundled library — presumably consumed via
    # prop_rel_path()/format_path() by subclasses; TODO confirm exact use.
    bundle_directory : StringProperty()
|
||||||
|
|
||||||
|
    @property
    def library(self):
        """Library (or conform library) in the addon prefs that owns this adapter."""
        prefs = self.addon_prefs
        for lib in prefs.libraries:
            if lib.adapter == self:
                return lib
            if lib.conform.adapter == self:
                return lib
        # Implicitly returns None when no library references this adapter.

    @property
    def library_path(self):
        # Root path of the owning library's bundle.
        return self.library.library_path

    @property
    def image_template(self):
        # Template resolving an asset's preview image path.
        return Template(self.library.image_template)

    @property
    def video_template(self):
        # Template resolving an asset's preview video path.
        return Template(self.library.video_template)

    @property
    def asset_description_template(self):
        # Template resolving the per-asset description file path.
        return Template(self.library.asset_description_template)

    @property
    def data_type(self):
        # Library asset data type, e.g. 'ACTION', 'FILE', 'COLLECTION' (see bundle()).
        return self.library.data_type

    @property
    def data_types(self):
        # Plural form used as a bpy.data collection name, e.g. getattr(bpy.data, self.data_types).
        return self.library.data_types

    @property
    def blend_depth(self):
        # Number of catalog levels grouped into one bundle blend (see group_key()).
        return self.library.blend_depth

    @property
    def externalize_data(self):
        # Whether preview data is written out as separate files —
        # only referenced from commented-out code in bundle(); TODO confirm.
        return self.library.externalize_data

    @property
    def catalog_path(self):
        # Path of the library's catalog file.
        return self.library.catalog_path

    def get_catalog_path(self, filepath):
        """Return the catalog file path associated with an arbitrary *filepath*."""
        return get_catalog_path(filepath)

    @property
    def cache_file(self):
        """Json cache file listing all asset datas of this library."""
        return Path(self.library_path) / f"blender_assets.{self.library.id}.json"
        #return get_asset_datas_file(self.library_path)

    @property
    def addon_prefs(self):
        # Preferences of the asset_library addon.
        return get_addon_prefs()

    @property
    def module_type(self):
        """Python sub-module (action / file / collection) matching the library data type."""
        lib_type = self.library.data_type
        if lib_type == 'ACTION':
            return action
        elif lib_type == 'FILE':
            return file
        elif lib_type == 'COLLECTION':
            return collection
|
||||||
|
|
||||||
|
    def to_dict(self):
        """Return this adapter's RNA properties as a plain dict (minus rna_type)."""
        return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}

    def fetch(self):
        """Scan the library source and return its asset descriptions.

        Abstract: concrete adapters must override this.
        """
        raise Exception('This method need to be define in the adapter')
|
||||||
|
def norm_file_name(self, name):
|
||||||
|
return name.replace(' ', '_')
|
||||||
|
|
||||||
|
def copy_file(self, source, destination):
|
||||||
|
src = Path(source)
|
||||||
|
dst = Path(destination)
|
||||||
|
|
||||||
|
if not source.exists():
|
||||||
|
print(f'Cannot copy file {source}: file not exist')
|
||||||
|
return
|
||||||
|
|
||||||
|
dst.parent.mkdir(exist_ok=True, parents=True)
|
||||||
|
|
||||||
|
if src == dst:
|
||||||
|
print(f'Cannot copy file {source}: source and destination are the same')
|
||||||
|
return
|
||||||
|
|
||||||
|
print(f'Copy file from {source} to {destination}')
|
||||||
|
shutil.copy2(str(source), str(destination))
|
||||||
|
|
||||||
|
    def load_datablocks(self, src, names=None, type='objects', link=True, expr=None):
        """Link or append a datablock from a blendfile"""
        # Thin delegation to the shared bl_utils helper.
        return load_datablocks(src, names=names, type=type, link=link, expr=expr)

    def get_asset_relative_path(self, name, catalog):
        '''Get a relative path for the asset'''
        # Layout: <catalog>/<name>/<name>.blend with spaces normalized.
        name = self.norm_file_name(name)
        return Path(catalog, name, name).with_suffix('.blend')
|
||||||
|
|
||||||
|
#def _get_file_name(self, name, filepath):
|
||||||
|
# '''Ensure having a unique name per asset if in the same folder by prefixing with the blend_file name'''
|
||||||
|
# file_name = name
|
||||||
|
# if filepath.stem != name:
|
||||||
|
# file_name = f'{file_name}_{name}'
|
||||||
|
#
|
||||||
|
# return file_name
|
||||||
|
|
||||||
|
def get_active_asset_library(self):
|
||||||
|
asset_handle = bpy.context.asset_file_handle
|
||||||
|
prefs = get_addon_prefs()
|
||||||
|
asset_handle = bpy.context.asset_file_handle
|
||||||
|
|
||||||
|
lib = None
|
||||||
|
if '.library_id' in asset_handle.asset_data:
|
||||||
|
lib_id = asset_handle.asset_data['.library_id']
|
||||||
|
lib = next((l for l in prefs.libraries if l.id == lib_id), None)
|
||||||
|
|
||||||
|
if not lib:
|
||||||
|
print(f"No library found for id {lib_id}")
|
||||||
|
|
||||||
|
if not lib:
|
||||||
|
lib = self
|
||||||
|
|
||||||
|
return lib
|
||||||
|
|
||||||
|
def get_active_asset_path(self):
|
||||||
|
'''Get the full path of the active asset_handle from the asset brower'''
|
||||||
|
prefs = get_addon_prefs()
|
||||||
|
asset_handle = bpy.context.asset_file_handle
|
||||||
|
|
||||||
|
lib = self.get_active_asset_library()
|
||||||
|
|
||||||
|
if 'filepath' in asset_handle.asset_data:
|
||||||
|
asset_path = asset_handle.asset_data['filepath']
|
||||||
|
asset_path = lib.adapter.format_path(asset_path)
|
||||||
|
else:
|
||||||
|
asset_path = bpy.types.AssetHandle.get_full_library_path(
|
||||||
|
asset_handle, bpy.context.asset_library_ref
|
||||||
|
)
|
||||||
|
|
||||||
|
return asset_path
|
||||||
|
|
||||||
|
    def get_path(self, type, name, asset_path, template=None) -> Path:
        """Resolve a templated path (image, video, ...) relative to *asset_path*.

        type: template kind; used to pick ``self.<type>_template`` when
        *template* is not supplied. *template* may be a str or a Template.
        """
        if not template:
            template = getattr(self, f'{type}_template')

        if isinstance(template, str):
            template = Template(template)

        filepath = Path(asset_path)
        return (filepath / template.format(name=name, path=Path(asset_path))).resolve()
|
||||||
|
|
||||||
|
#def get_image_path(self, name, asset_path):
|
||||||
|
# filepath = Path(asset_path)
|
||||||
|
# image_name = self._get_file_name(name, asset_path)
|
||||||
|
# return (filepath / self.image_template.format(name=image_name)).resolve()
|
||||||
|
|
||||||
|
def get_cache_image_path(self, name, catalog) -> Path:
|
||||||
|
""""Get the the cache path of a image for asset without an externalized image"""
|
||||||
|
return Path(self.library_path, '.previews', f"{catalog.replace('/', '_')}_{name}").with_suffix(('.png'))
|
||||||
|
|
||||||
|
    def get_cache_image(self, name, catalog):
        """Return the cached preview image path if it exists, else None."""
        cache_image_path = self.get_cache_image_path(name, catalog)
        if cache_image_path.exists():
            return cache_image_path
|
||||||
|
|
||||||
|
#def get_video_path(self, name, asset_path):
|
||||||
|
# filepath = Path(asset_path)
|
||||||
|
# video_name = self._get_file_name(name, asset_path)
|
||||||
|
# return (filepath / self.video_template.format(name=video_name)).resolve()
|
||||||
|
|
||||||
|
    def get_image(self, name, asset_path):
        """Return the asset's preview image path if it exists on disk, else None."""
        image_path = self.get_path('image', name, asset_path)
        if image_path.exists():
            return image_path

    def get_video(self, name, asset_path):
        """Return the asset's preview video path if it exists on disk, else None."""
        video_path = self.get_path('video', name, asset_path)
        if video_path.exists():
            return video_path
|
||||||
|
|
||||||
|
    def get_asset_description_path(self, asset_path) -> Path:
        """Get the path of the json or yaml describing all assets data in one file."""
        filepath = Path(asset_path)
        return (filepath / self.asset_description_template.format(name=filepath.stem)).resolve()

    def read_asset_description(self, asset_path) -> dict:
        """Read the description file of the asset"""

        asset_description_path = self.get_asset_description_path(asset_path)
        return read_file(asset_description_path)

    def write_asset_description(self, asset_data, asset_path) -> None:
        """Write *asset_data* into the asset's description file."""
        asset_description_path = self.get_asset_description_path(asset_path)
        return write_file(asset_description_path, asset_data)
|
||||||
|
|
||||||
|
    def write_asset(self, asset, asset_path):
        """Write the single datablock *asset* into its own compressed blend file."""
        bpy.data.libraries.write(
            str(asset_path),
            {asset},
            path_remap="NONE",
            fake_user=True,   # keep the datablock alive when the file is opened
            compress=True
        )
|
||||||
|
|
||||||
|
|
||||||
|
    def read_catalog(self, filepath=None):
        """Read the catalog file of the library bundle path or of the specified filepath"""

        catalog_path = self.catalog_path
        if filepath:
            catalog_path = self.get_catalog_path(filepath)
        return read_catalog(catalog_path)

    def write_catalog(self, catalog_data, filepath=None):
        """Write the catalog file in the library bundle path or of the specified filepath"""

        catalog_path = self.catalog_path
        if filepath:
            catalog_path = self.get_catalog_path(filepath)

        return write_catalog(catalog_path, catalog_data)
|
||||||
|
|
||||||
|
    def read_cache(self):
        """Read the library's json asset cache (None when it does not exist)."""
        return read_file(self.cache_file)

    def norm_asset_datas(self, asset_file_datas):
        ''' Return a new flat list of asset data
        the filepath keys are merge with the assets keys
        '''
        # Delegates to the shared common.functions helper.
        return norm_asset_datas(asset_file_datas)

    def write_cache(self, asset_datas):
        """Write *asset_datas* to the library's json cache file."""
        path = self.cache_file
        print(f'cache file writen to {path}')
        # NOTE(review): when called from bundle() with the dict returned by
        # diff(), list(asset_datas) serializes only the keys — confirm intended.
        return write_file(path, list(asset_datas))
|
||||||
|
|
||||||
|
    def prop_rel_path(self, path, prop):
        '''Get a filepath relative to a property of the adapter'''
        # e.g. prop='bundle_directory' -> '{bundle_directory}/rest/of/path'
        field_prop = '{%s}/'%prop

        prop_value = getattr(self, prop)
        prop_value = Path(os.path.expandvars(prop_value)).resolve()

        # Raises ValueError when *path* is not under the property's directory.
        rel_path = Path(path).resolve().relative_to(prop_value).as_posix()

        return field_prop + rel_path
|
||||||
|
|
||||||
|
    def write_preview(self, preview, filepath):
        """Save a datablock's generated *preview* as a .png next to *filepath*.

        No-op when either argument is falsy.
        """
        if not preview or not filepath:
            return

        filepath = Path(filepath)
        filepath.parent.mkdir(parents=True, exist_ok=True)

        img_size = preview.image_size

        # Copy the preview pixels into a real image datablock so it can be
        # saved to disk (previews themselves cannot be written directly).
        px = [0] * img_size[0] * img_size[1] * 4   # RGBA floats
        preview.image_pixels_float.foreach_get(px)
        img = bpy.data.images.new(name=filepath.name, width=img_size[0], height=img_size[1], is_data=True, alpha=True)
        img.pixels.foreach_set(px)
        img.filepath_raw = str(filepath.with_suffix('.png'))
        img.file_format = 'PNG'
        img.save()
|
||||||
|
|
||||||
|
    def draw_header(self, layout):
        """Draw the header of the Asset Browser Window"""
        #layout.separator()

        # Delegate to the gui module matching the library data type.
        self.module_type.gui.draw_header(layout)

    def draw_context_menu(self, layout):
        """Draw the context menu of the Asset Browser Window"""
        #layout.separator()
        self.module_type.gui.draw_context_menu(layout)
|
||||||
|
|
||||||
|
    def group_key(self, asset_data):
        """Key used to group assets inside one blend"""
        # Catalog levels plus the asset name, truncated to blend_depth so
        # assets sharing the first N levels land in the same bundle blend.
        catalog_parts = asset_data['catalog'].split('/') + [asset_data['name']]

        return catalog_parts[:self.blend_depth]
|
||||||
|
|
||||||
|
    def set_asset_preview(self, asset, asset_data):
        """Load an externalize image as preview for an asset"""

        # Relative image paths are resolved against the asset's blend file.
        image_path = Path(asset_data['image'])
        if not image_path.is_absolute():
            image_path = Path(asset_data['filepath'], image_path)

        image_path = self.format_path(image_path.as_posix())
        if image_path and image_path.exists():
            with bpy.context.temp_override(id=asset):
                bpy.ops.ed.lib_id_load_custom_preview(
                    filepath=str(image_path)
                )
            return

        # Keep a preview that has already been generated.
        if asset.preview:
            return

        #Creating the preview for collection, object or material
        src_asset = self.load_datablocks(asset_data['filepath'], names=asset_data['name'], link=True, type=self.data_types)
        if not src_asset:
            print(f'No asset named {asset_data["name"]} in {asset_data["filepath"]}')
            return

        bpy.ops.ed.lib_id_generate_preview({"id": src_asset})

        #Transfering pixels between previews
        # NOTE(review): assumes both previews are 128x128 RGBA — confirm.
        pixels = [0] * (128*128*4)
        src_asset.preview.image_pixels_float.foreach_get(pixels)
        asset.preview.image_pixels_float.foreach_set(pixels)

        # The linked source datablock is only needed for its preview.
        getattr(bpy.data, self.data_types).remove(src_asset)
|
||||||
|
|
||||||
|
|
||||||
|
def set_asset_catalog(self, asset, asset_data, catalog_data):
|
||||||
|
"""Find the catalog if already exist or create it"""
|
||||||
|
catalog_name = asset_data['catalog']
|
||||||
|
catalog = catalog_data.get(catalog_name)
|
||||||
|
if not catalog:
|
||||||
|
catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
|
||||||
|
catalog_data[catalog_name] = catalog
|
||||||
|
|
||||||
|
asset.asset_data.catalog_id = catalog['id']
|
||||||
|
|
||||||
|
def set_asset_metadata(self, asset, asset_data):
|
||||||
|
"""Create custom prop to an asset base on provided data"""
|
||||||
|
metadata = asset_data.get('metadata', {})
|
||||||
|
|
||||||
|
library_id = self.library.id
|
||||||
|
if 'library_id' in asset_data:
|
||||||
|
library_id = asset_data['library_id']
|
||||||
|
|
||||||
|
metadata['.library_id'] = library_id
|
||||||
|
metadata['filepath'] = asset_data['filepath']
|
||||||
|
for k, v in metadata.items():
|
||||||
|
asset.asset_data[k] = v
|
||||||
|
|
||||||
|
def set_asset_tags(self, asset, asset_data):
|
||||||
|
"""Create asset tags base on provided data"""
|
||||||
|
tags = asset_data.get('tags', [])
|
||||||
|
if tags:
|
||||||
|
#Clear all tags first
|
||||||
|
for tag in asset.asset_data.tags[:]:
|
||||||
|
asset.asset_data.tags.remove(tag)
|
||||||
|
|
||||||
|
for tag in tags:
|
||||||
|
if not tag:
|
||||||
|
continue
|
||||||
|
asset.asset_data.tags.new(tag, skip_if_exists=True)
|
||||||
|
|
||||||
|
    def bundle(self, cache_diff=None):
        """Group all new assets in one or multiple blends for the asset browser

        cache_diff: optional pre-computed diff (list of asset datas with an
        'operation' key) or a path to a json file containing one. When
        omitted, diff() is run and the full cache is written at the end.
        Quits Blender when done (meant to run in a background instance).
        """

        if self.data_type not in ('FILE', 'ACTION', 'COLLECTION'):
            print(f'{self.data_type} is not supported yet')
            return

        lib_path = self.library_path
        catalog_data = self.read_catalog() #TODO remove unused catalog

        write_cache = False
        if not cache_diff:
            # Get list of all modifications
            cache, cache_diff = self.diff()

            # Only write complete cache at the end
            write_cache = True

        elif isinstance(cache_diff, (Path, str)):
            cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))

        if self.blend_depth == 0:
            # NOTE(review): the for-loop below unpacks each group into
            # (sub_path, asset_datas); a bare [cache_diff] does not match
            # that shape — confirm the blend_depth == 0 path is exercised.
            groups = [(cache_diff)]
        else:
            # groupby requires its input sorted by the same key.
            cache_diff.sort(key=self.group_key)
            groups = groupby(cache_diff, key=self.group_key)

        total_assets = len(cache_diff)
        print(f'total_assets={total_assets}')

        if total_assets == 0:
            print('No assets found')
            return

        i = 0
        for sub_path, asset_datas in groups:
            # One bundle blend per catalog group, named after its last level.
            blend_name = sub_path[-1].replace(' ', '_').lower()
            blend_path = Path(lib_path, *sub_path, blend_name).with_suffix('.blend')

            if blend_path.exists():
                print(f'Opening existing bundle blend: {blend_path}')
                bpy.ops.wm.open_mainfile(filepath=str(blend_path))
            else:
                print(f'Create new bundle blend to: {blend_path}')
                bpy.ops.wm.read_homefile(use_empty=True)

            for asset_data in asset_datas:
                # Progress log roughly every 10% for big libraries.
                if total_assets <= 100 or i % int(total_assets / 10) == 0:
                    print(f'Progress: {int(i / total_assets * 100)+1}')

                operation = asset_data.get('operation', 'ADD')
                asset = getattr(bpy.data, self.data_types).get(asset_data['name'])

                if operation == 'REMOVE':
                    if asset:
                        getattr(bpy.data, self.data_types).remove(asset)
                    else:
                        print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
                    continue

                if operation == 'MODIFY' and not asset:
                    print(f'WARNING: Modifiy Asset: {asset_data["name"]} not found in {blend_path} it will be created')

                elif operation == 'ADD' or not asset:
                    if asset:
                        # Replace a stale datablock with a fresh one.
                        #raise Exception(f"Asset {asset_data['name']} Already in Blend")
                        getattr(bpy.data, self.data_types).remove(asset)

                    #print(f"INFO: Add new asset: {asset_data['name']}")
                    asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
                else:
                    print(f'operation {operation} not supported should be in (ADD, REMOVE, MODIFIED)')
                    continue

                # Flag the datablock as an asset and fill all its metadata.
                asset.asset_mark()

                self.set_asset_preview(asset, asset_data)
                #if self.externalize_data:
                #    self.write_preview(preview, filepath)

                self.set_asset_catalog(asset, asset_data, catalog_data)
                self.set_asset_metadata(asset, asset_data)
                self.set_asset_tags(asset, asset_data)
                asset.asset_data.description = asset_data.get('description', '')

                i += 1

            print(f'Saving Blend to {blend_path}')

            blend_path.parent.mkdir(exist_ok=True, parents=True)
            bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

        if write_cache:
            self.write_cache(cache)

        self.write_catalog(catalog_data)

        bpy.ops.wm.quit_blender()
|
||||||
|
|
||||||
|
|
||||||
|
def norm_cache(self, cache):
|
||||||
|
""" Return a new flat list of asset data
|
||||||
|
the filepath keys are merge with the assets keys"""
|
||||||
|
|
||||||
|
new_cache = []
|
||||||
|
for asset_description in cache:
|
||||||
|
asset_description = asset_description.copy()
|
||||||
|
if 'assets' in asset_description:
|
||||||
|
|
||||||
|
assets = asset_description.pop('assets')
|
||||||
|
for asset_data in assets:
|
||||||
|
new_cache.append({**asset_description, **asset_data})
|
||||||
|
else:
|
||||||
|
new_cache.append(asset_description)
|
||||||
|
|
||||||
|
return new_cache
|
||||||
|
|
||||||
|
    def diff(self):
        """Compare the library cache with it current state and return the difference

        Returns (new_cache, cache_diff) where new_cache is the freshly
        fetched state keyed by 'filepath/name' and cache_diff is a flat
        list of asset datas tagged with operation ADD/REMOVE/MODIFY.
        """

        cache = self.read_cache()

        if cache is None:
            print(f'Fetch The library {self.library.name} for the first time, might be long...')
            cache = []

        new_cache = self.fetch()

        # Key both states by a unique 'filepath/name' string for comparison.
        cache = {f"{a['filepath']}/{a['name']}": a for a in self.norm_cache(cache)}
        new_cache = {f"{a['filepath']}/{a['name']}" : a for a in self.norm_cache(new_cache)}

        assets_added = [v for k, v in new_cache.items() if k not in cache]
        assets_removed = [v for k, v in cache.items() if k not in new_cache]
        # Short-circuit: removed entries are excluded before indexing new_cache[k].
        assets_modified = [v for k, v in cache.items() if v not in assets_removed and v!= new_cache[k]]

        if assets_added:
            print(f'{len(assets_added)} Assets Added \n{tuple(a["name"] for a in assets_added[:10])}\n')
        if assets_removed:
            print(f'{len(assets_removed)} Assets Removed \n{tuple(a["name"] for a in assets_removed[:10])}\n')
        if assets_modified:
            print(f'{len(assets_modified)} Assets Modified \n{tuple(a["name"] for a in assets_modified[:10])}\n')

        # Tag each entry with its operation for bundle() to act upon.
        assets_added = [dict(a, operation='ADD') for a in assets_added]
        assets_removed = [dict(a, operation='REMOVE') for a in assets_removed]
        assets_modified = [dict(a, operation='MODIFY') for a in assets_modified]

        cache_diff = assets_added + assets_removed + assets_modified
        if not cache_diff:
            print('No change in the library')

        return new_cache, cache_diff
|
||||||
|
|
||||||
|
    def draw_prefs(self, layout):
        """Draw the options in the addon preference for this adapter"""
        # One UI field per annotated bpy property declared on the subclass.
        annotations = self.__class__.__annotations__
        for k, v in annotations.items():
            layout.prop(self, k, text=bpy.path.display_name(k))

    def format_path(self, template, **kargs):
        """Resolve a path template against this adapter's own property values."""
        return Template(template).format(self.to_dict(), **kargs).resolve()
|
|
@ -0,0 +1,34 @@
|
||||||
|
|
||||||
|
"""
|
||||||
|
Adapter for making an asset library of all blender file found in a folder
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||||
|
from asset_library.common.file_utils import copy_dir
|
||||||
|
from bpy.props import StringProperty
|
||||||
|
from os.path import expandvars
|
||||||
|
|
||||||
|
|
||||||
|
class CopyFolderLibrary(AssetLibraryAdapter):
    """Copy library folder from a server to a local disk for better performance"""

    name = "Copy Folder"
    source_directory : StringProperty()

    # Comma-separated filter patterns forwarded to copy_dir().
    includes : StringProperty()
    excludes : StringProperty()

    def bundle(self):
        """Mirror the source directory into the local library path.

        Environment variables in both paths are expanded, and only recent
        files are copied (``only_recent=True``).
        """
        src = expandvars(self.source_directory)
        dst = expandvars(self.library_path)

        # Drop empty entries: ''.split(',') yields [''], which would
        # otherwise pass a bogus empty pattern to copy_dir when the
        # includes/excludes properties are left unset.
        includes = [inc.strip() for inc in self.includes.split(',') if inc.strip()]
        excludes = [ex.strip() for ex in self.excludes.split(',') if ex.strip()]

        print(f'Copy Folder from {src} to {dst}...')
        copy_dir(
            src, dst, only_recent=True,
            excludes=excludes, includes=includes
        )
|
||||||
|
|
|
@ -0,0 +1,162 @@
|
||||||
|
|
||||||
|
"""
|
||||||
|
Plugin for making an asset library of all blender file found in a folder
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||||
|
from asset_library.common.template import Template
|
||||||
|
from asset_library.common.file_utils import install_module
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from itertools import groupby
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import json
|
||||||
|
import urllib3
|
||||||
|
import traceback
|
||||||
|
import time
|
||||||
|
|
||||||
|
gazu = install_module('gazu')
|
||||||
|
|
||||||
|
|
||||||
|
class KitsuLibrary(AssetLibraryAdapter):
    """Adapter building an asset library from a Kitsu production tracker.

    Assets are listed through the gazu API; their blend files are located
    on disk using the ``template_file`` template.
    """

    name = "Kitsu"
    # Template used to parse extra fields out of an asset's name.
    template_name : StringProperty()
    # Template used to locate the asset's blend file on disk.
    template_file : StringProperty()

    url: StringProperty()
    login: StringProperty()
    password: StringProperty(subtype='PASSWORD')
    project_name: StringProperty()

    def connect(self, url=None, login=None, password=None):
        '''Connect to kitsu api using provided url, login and password.

        Arguments default to the adapter's stored properties when omitted.
        Returns the logged-in user dict on success, None otherwise.
        '''
        urllib3.disable_warnings()

        # Fix: the parameters were previously accepted but ignored —
        # fall back to the stored properties only when not provided.
        url = url or self.url
        login = login or self.login
        password = password or self.password

        if not url:
            print(f'Kitsu Url: {url} is empty')
            return

        if not url.endswith('/api'):
            url += '/api'

        print(f'Info: Setting Host for kitsu {url}')
        gazu.client.set_host(url)

        if not gazu.client.host_is_up():
            print('Error: Kitsu Host is down')
            # Fix: was missing — logging in against a down host cannot succeed.
            return

        try:
            print(f'Info: Log in to kitsu as {login}')
            res = gazu.log_in(login, password)
            print(f'Info: Successfully login to Kitsu as {res["user"]["full_name"]}')
            return res['user']
        except Exception:
            # Log-in failure is non-fatal; the traceback is printed and
            # None is returned to the caller.
            print(f'Error: {traceback.format_exc()}')

    def get_asset_path(self, name, catalog, directory=None):
        """Return the absolute path of an asset inside *directory*
        (defaults to the adapter's source directory)."""
        directory = directory or self.source_directory
        return Path(directory, self.get_asset_relative_path(name, catalog))

    def get_asset_description(self, data, path):
        """Build the cache entry (dict) for one Kitsu asset.

        *data* is the raw gazu asset dict (with ``entity_type_name``
        already injected); *path* is the blend file found on disk.
        """
        # Kitsu does not expose a file mtime here, so "now" is recorded.
        modified = time.time_ns()
        catalog = data['entity_type_name']
        asset_path = Path(path)
        asset_name = self.norm_file_name(data['name'])

        asset_description = dict(
            # '{source_directory}' is kept as a template variable so the
            # cache stays relocatable.
            filepath='{source_directory}/' + asset_path.as_posix(),
            modified=modified,
            library_id=self.library.id,
            assets=[dict(
                catalog=catalog,
                metadata=data.get('data', {}),
                description=data['description'],
                tags=[],
                type=self.data_type,
                image=str(self.image_template.format(name=asset_name)),
                video=str(self.video_template.format(name=asset_name)),
                name=data['name'])
            ]
        )

        return asset_description

    def bundle(self, cache_diff=None):
        """Group all asset in one or multiple blends for the asset browser"""
        # The generic bundling logic lives in the base adapter.
        return super().bundle(cache_diff=cache_diff)

    def get_preview(self, asset_data):
        """Return the preview image path for *asset_data*.

        FIXME(review): ``f``, ``image_template`` and ``preview_blend_file``
        are not defined in this scope — calling this method raises
        NameError. Kept as-is pending clarification of the intended source.
        """
        name = asset_data['name']
        preview = (f / image_template.format(name=name)).resolve()
        if not preview.exists():
            preview_blend_file(f, preview)

        return preview

    def conform(self, directory, templates):
        """Split each assets per blend and externalize preview.

        NOTE(review): not implemented for this adapter yet — only logs.
        """
        print(f'Conforming {self.library.name} to {directory}')

    def fetch(self):
        """Gather in a list all assets found for the Kitsu project.

        Returns the new cache: one asset description per asset whose blend
        file could be located via ``template_file``.
        """
        print(f'Fetch Assets for {self.library.name}')

        self.connect()

        template_file = Template(self.template_file)
        template_name = Template(self.template_name)

        project = gazu.client.fetch_first('projects', {'name': self.project_name})
        entity_types = gazu.client.fetch_all('entity-types')
        # Map entity-type ids to names so each asset can be catalogued.
        entity_types_ids = {e['id']: e['name'] for e in entity_types}

        new_cache = []
        for asset_data in gazu.asset.all_assets_for_project(project):
            asset_data['entity_type_name'] = entity_types_ids[asset_data.pop('entity_type_id')]
            asset_name = asset_data['name']

            asset_field_data = dict(name=asset_name, type=asset_data['entity_type_name'])

            try:
                # Enrich the field data with whatever the name template parses.
                asset_field_data.update(template_name.parse(asset_name))
            except Exception:
                print(f'Warning: Could not parse {asset_name} with template {template_name}')

            asset_path = template_file.find(asset_field_data)
            if not asset_path:
                # No blend file on disk for this asset: skip it.
                print(f'Warning: Could not find file for {template_file.format(asset_field_data)}')
                continue

            new_cache.append(self.get_asset_description(asset_data, asset_path))

        return new_cache
|
|
@ -0,0 +1,459 @@
|
||||||
|
|
||||||
|
"""
|
||||||
|
Plugin for making an asset library of all blender file found in a folder
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||||
|
from asset_library.common.bl_utils import load_datablocks
|
||||||
|
from asset_library.common.template import Template
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from itertools import groupby
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class ScanFolderLibrary(AssetLibraryAdapter):
    """Adapter that scans a folder of blend files and bundles the assets
    found inside them into library blends for the Asset Browser."""

    name = "Scan Folder"
    # Root folder that is scanned for blend files.
    source_directory : StringProperty(subtype='DIR_PATH')
    # Path template used to glob and parse blend files under the source.
    template : StringProperty()
    # How many catalog levels are merged into a single bundle blend.
    blend_depth : IntProperty()
    #externalize_preview : BoolProperty(default=True)

    #def draw_header(self, layout):
    #    '''Draw the header of the Asset Browser Window'''
    #    layout.separator()
    #    layout.operator("actionlib.store_anim_pose", text='Add Action', icon='FILE_NEW')

    #def update(self):
    #
    def get_asset_path(self, name, catalog, directory=None):
        """Return the absolute path of an asset inside *directory*
        (defaults to the adapter's source directory)."""
        directory = directory or self.source_directory
        return Path(directory, self.get_asset_relative_path(name, catalog))

    def get_asset_description(self, asset, catalog, modified):
        """Build the cache entry (dict) for one bpy asset datablock.

        *modified* is the mtime (ns) of the blend file the asset came from.
        """
        asset_path = self.get_asset_relative_path(name=asset.name, catalog=catalog)
        asset_name = self.norm_file_name(asset.name)

        asset_description = dict(
            # '{source_directory}' is kept as a template variable so the
            # cache stays relocatable.
            filepath='{source_directory}/' + asset_path.as_posix(),
            modified=modified,
            library_id=self.library.id,
            assets=[]
        )

        asset_description['assets'].append(dict(
            catalog=catalog,
            metadata=dict(asset.asset_data),
            tags=asset.asset_data.tags.keys(),
            type=self.data_type,
            image=str(self.image_template.format(name=asset_name)),
            video=str(self.video_template.format(name=asset_name)),
            name=asset.name)
        )

        return asset_description

    def _find_blend_files(self):
        '''Get a sorted list of all blender files found matching the template'''
        template = Template(self.template)

        print(f'Search for blend using glob template: {template.glob_pattern}')

        source_directory = Path(os.path.expandvars(self.source_directory))
        print(f'Scanning Folder {source_directory}...')
        blend_files = list(source_directory.glob(template.glob_pattern))

        blend_files.sort()

        return blend_files

    def _group_key(self, asset_data):
        """Group assets inside one blend.

        Returns the first ``blend_depth`` components of the asset's
        catalog path (plus its name), used both as sort key and as the
        sub-folder of the bundle blend.
        """
        catalog_parts = asset_data['catalog'].split('/') + [asset_data['name']]

        return catalog_parts[:self.blend_depth]

    def bundle(self, cache_diff=None):
        """Group all asset in one or multiple blends for the asset browser"""

        if self.data_type not in ('FILE', 'ACTION'):
            print(f'{self.data_type} is not supported yet')
            return

        lib_path = self.library_path
        catalog_data = self.read_catalog() # TODO remove unused catalog

        #asset_file_datas = self.fetch() # TODO replace to only change new assets

        if not cache_diff:
            # Get list of all modifications
            cache, cache_diff = self.diff()
            self.write_cache(cache)

        elif isinstance(cache_diff, (Path, str)):
            # A path to a pre-computed diff json was passed instead.
            cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))

        if self.blend_depth == 0:
            # NOTE(review): this wraps the whole diff in a 1-element list;
            # the loop below unpacks each group as (sub_path, asset_datas),
            # which cannot succeed for this shape — confirm intended value
            # (e.g. a (sub_path, datas) pair).
            groups = [(cache_diff)]
        else:
            # groupby requires its input pre-sorted by the same key.
            cache_diff.sort(key=self._group_key)
            groups = groupby(cache_diff, key=self._group_key)

        # #print(cache_diff)
        # print('\n')
        # for sub_path, asset_datas in groups:

        # print('\n')
        # print(f'{sub_path=}')
        # print(f'asset_datas={list(asset_datas)}')

        # raise Exception()

        #progress = 0
        total_assets = len(cache_diff)
        print(f'total_assets={total_assets}')

        if total_assets == 0:
            print('No assets found')
            return

        i = 0
        for sub_path, asset_datas in groups:

            # print('\n')
            # print(f'{sub_path=}')
            # print(f'asset_datas={list(asset_datas)}')

            # print('\n')

            # One bundle blend per group, named after the deepest level.
            blend_name = sub_path[-1].replace(' ', '_').lower()
            blend_path = Path(lib_path, *sub_path, blend_name).with_suffix('.blend')

            if blend_path.exists():
                print(f'Opening existing bundle blend: {blend_path}')
                bpy.ops.wm.open_mainfile(filepath=str(blend_path))
            else:
                print(f'Create new bundle blend to: {blend_path}')
                bpy.ops.wm.read_homefile(use_empty=True)

            for asset_data in asset_datas:
                # Print progress roughly every 10% for large libraries.
                if total_assets <= 100 or i % int(total_assets / 10) == 0:
                    print(f'Progress: {int(i / total_assets * 100)+1}')

                operation = asset_data.get('operation', 'ADD')
                asset = getattr(bpy.data, self.data_types).get(asset_data['name'])

                if operation == 'REMOVE':
                    if asset:
                        getattr(bpy.data, self.data_types).remove(asset)
                    else:
                        print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
                    continue

                elif operation == 'MODIFY':
                    # NOTE(review): when the asset is missing this only warns
                    # and does not create it, so `asset` stays None and
                    # asset.asset_mark() below would raise — confirm intent.
                    if not asset:
                        print(f'WARNING: Modifiy Asset: {asset_data["name"]} not found in {blend_path} it will be created')

                elif operation == 'ADD' or not asset:
                    if asset:
                        #raise Exception(f"Asset {asset_data['name']} Already in Blend")
                        getattr(bpy.data, self.data_types).remove(asset)

                    #print(f"INFO: Add new asset: {asset_data['name']}")
                    asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
                else:
                    print(f'operation {operation} not supported should be in (ADD, REMOVE, MODIFIED)')
                    continue

                asset.asset_mark()

                # Load external preview if exists
                #image_template = Template(asset_data['preview'])
                image_path = Path(asset_data['image'])
                if not image_path.is_absolute():
                    image_path = Path(asset_data['filepath'], image_path)

                image_path = self.format_path(image_path.as_posix())

                if image_path and image_path.exists():
                    with bpy.context.temp_override(id=asset):
                        bpy.ops.ed.lib_id_load_custom_preview(
                            filepath=str(image_path)
                        )
                #else:
                #    print(f'Preview {image_path} not found for asset {asset}')

                asset.asset_data.description = asset_data.get('description', '')

                # Resolve (or lazily create) the catalog for this asset.
                catalog_name = asset_data['catalog']
                catalog = catalog_data.get(catalog_name)
                if not catalog:
                    catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
                    catalog_data[catalog_name] = catalog

                asset.asset_data.catalog_id = catalog['id']

                metadata = asset_data.get('metadata', {})

                library_id = self.library.id
                if 'library_id' in asset_data:
                    library_id = asset_data['library_id']

                # '.library_id' marks which library the asset came from.
                metadata['.library_id'] = library_id

                #print(metadata)

                metadata['filepath'] = asset_data['filepath']
                for k, v in metadata.items():
                    asset.asset_data[k] = v

                # Set tags if specified the asset_description
                tags = asset_data.get('tags', [])
                if tags:
                    # Replace existing tags entirely (iterate over a copy
                    # because tags are removed while looping).
                    for tag in asset.asset_data.tags[:]:
                        asset.asset_data.tags.remove(tag)

                    for tag in tags:
                        if not tag:
                            continue
                        asset.asset_data.tags.new(tag, skip_if_exists=True)

                i += 1

            print(f'Saving Blend to {blend_path}')

            blend_path.parent.mkdir(exist_ok=True, parents=True)
            bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

        self.write_catalog(catalog_data)

        bpy.ops.wm.quit_blender()

    def get_preview(self, asset_data):
        """Return the preview image path for *asset_data*.

        FIXME(review): ``f``, ``image_template`` and ``preview_blend_file``
        are not defined in this scope — calling this method raises
        NameError. Kept as-is pending clarification of the intended source.
        """
        name = asset_data['name']
        preview = (f / image_template.format(name=name)).resolve()
        if not preview.exists():
            preview_blend_file(f, preview)

        return preview

    def conform(self, directory, templates):
        """Split each assets per blend and externalize preview"""

        print(f'Conforming {self.library.name} to {directory}')

        if self.data_type not in ('FILE', 'ACTION'):
            print(f'{self.data_type} is not supported yet')
            return

        #lib_path = self.library_path
        source_directory = Path(os.path.expandvars(self.source_directory))
        catalog_data = self.read_catalog(filepath=source_directory)
        # Reverse lookup: catalog uuid -> {'path', 'name'}.
        catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_data.items()}
        directory = Path(directory).resolve()

        # Caller-supplied templates win over the adapter defaults.
        image_template = templates.get('image') or self.image_template
        video_template = templates.get('video') or self.video_template

        # Get list of all modifications
        for blend_file in self._find_blend_files():

            modified = blend_file.stat().st_mtime_ns

            print(f'Scanning blendfile {blend_file}...')
            # Link every marked asset of the configured data type.
            with bpy.data.libraries.load(str(blend_file), link=True, assets_only=True) as (data_from, data_to):
                asset_names = getattr(data_from, self.data_types)
                print(f'Found {len(asset_names)} {self.data_types} inside')

                setattr(data_to, self.data_types, asset_names)

            assets = getattr(data_to, self.data_types)
            #print('assets', assets)

            for asset in assets:
                #TODO options for choose beetween asset catalog and filepath directory
                asset_catalog_data = catalog_ids.get(asset.asset_data.catalog_id)

                if not asset_catalog_data:
                    # Fall back to the blend file's folder as catalog path.
                    print(f'No catalog found for asset {asset.name}')
                    asset_catalog_data = {"path": blend_file.parent.relative_to(source_directory).as_posix()}

                catalog_path = asset_catalog_data['path']

                asset_path = self.get_asset_path(name=asset.name, catalog=catalog_path, directory=directory)
                asset_description = self.get_asset_description(asset, catalog=catalog_path, modified=modified)

                self.write_asset_description(asset_description, asset_path)
                #Write blend file containing only one asset
                self.write_asset(asset=asset, asset_path=asset_path)

                # Copy image if source image found else write the asset preview
                src_image_path = self.get_path('image', name=asset.name, asset_path=blend_file, template=image_template)
                dst_image_path = self.get_path('image', name=asset.name, asset_path=asset_path)

                if src_image_path.exists():
                    self.copy_file(src_image_path, dst_image_path)
                else:
                    self.write_preview(asset.preview, dst_image_path)

                # Copy video if source video found
                src_video_path = self.get_path('video', name=asset.name, asset_path=blend_file, template=video_template)

                #print('src_video_path', src_video_path)
                if src_video_path.exists():
                    dst_video_path = self.get_path('video', name=asset.name, asset_path=asset_path)
                    self.copy_file(src_video_path, dst_video_path)

        self.write_catalog(catalog_data, filepath=directory)

    def fetch(self):
        """Gather in a list all assets found in the folder"""

        print(f'Fetch Assets for {self.library.name}')

        source_directory = Path(os.path.expandvars(self.source_directory))
        template = Template(self.template)
        catalog_data = self.read_catalog(filepath=source_directory)
        # Reverse lookup: catalog uuid -> {'path', 'name'}.
        catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_data.items()}

        cache = self.read_cache() or []

        print(f'Search for blend using glob template: {template.glob_pattern}')

        print(f'Scanning Folder {source_directory}...')
        #blend_files = list(source_directory.glob(template.glob_pattern))

        # Remove delete blends for the list
        #blend_paths = [self.prop_rel_path(f, 'source_directory') for f in blend_files]
        #print('blend_paths', blend_paths)

        #cache = []
        #blend_paths = []
        new_cache = []

        for blend_file in template.glob(source_directory):#sorted(blend_files):

            source_rel_path = self.prop_rel_path(blend_file, 'source_directory')
            modified = blend_file.stat().st_mtime_ns

            # Reuse the previous cache entry when the blend is unchanged.
            asset_description = next((a for a in cache if a['filepath'] == source_rel_path), None)

            if asset_description and asset_description['modified'] >= modified:
                print(blend_file, 'is skipped because not modified')
                new_cache.append(asset_description)
                continue

            rel_path = blend_file.relative_to(source_directory).as_posix()
            #field_values = re.findall(re_pattern, rel_path)[0]
            #field_data = {k:v for k,v in zip(field_names, field_values)}
            field_data = template.parse(rel_path)

            if not field_data:
                raise Exception()

            #asset_data = (blend_file / prefs.asset_description_template.format(name=name)).resolve()

            # Numeric template fields become the catalog hierarchy,
            # normalized to title case with spaces.
            catalogs = [v for k,v in sorted(field_data.items()) if k.isdigit()]
            catalogs = [c.replace('_', ' ').title() for c in catalogs]

            if self.data_type == 'FILE':
                # FILE libraries reference the blend itself, no scan needed.
                name = field_data.get('name', blend_file.stem)
                image = self.get_path('image', name=name, asset_path=blend_file)

                asset_description = dict(
                    filepath=source_rel_path,
                    modified=modified,
                    catalog='/'.join(catalogs),
                    tags=[],
                    type=self.data_type,
                    image=self.prop_rel_path(image, 'source_directory'),
                    name=name
                )
                new_cache.append(asset_description)

                continue

            #First Check if there is a asset_data .json
            asset_description = self.read_asset_description(blend_file)

            if not asset_description:
                # Scan the blend file for assets inside and write a custom asset description for info found

                print(f'Scanning blendfile {blend_file}...')
                with bpy.data.libraries.load(str(blend_file), link=True, assets_only=True) as (data_from, data_to):
                    asset_names = getattr(data_from, self.data_types)
                    print(f'Found {len(asset_names)} {self.data_types} inside')

                    setattr(data_to, self.data_types, asset_names)
                assets = getattr(data_to, self.data_types)

                asset_description = dict(
                    filepath=source_rel_path,
                    modified=modified,
                    assets=[]
                )

                for asset in assets:
                    asset_catalog_data = catalog_ids.get(asset.asset_data.catalog_id)

                    if not asset_catalog_data:
                        # Fall back to the blend's relative path as catalog.
                        print(f'No catalog found for asset {asset.name}')
                        asset_catalog_data = {"path": blend_file.relative_to(self.source_directory).as_posix()}

                    catalog_path = asset_catalog_data['path']

                    image_path = self.get_path('image', asset.name, catalog_path)
                    image = self.prop_rel_path(image_path, 'source_directory')

                    # Write image only if no image was found
                    if not image_path.exists():
                        image_path = self.get_cache_image_path(asset.name, catalog_path)
                        image = self.prop_rel_path(image_path, 'library_path')
                        self.write_preview(asset.preview, image_path)

                    video_path = self.get_path('video', asset.name, catalog_path)
                    video = self.prop_rel_path(video_path, 'source_directory')

                    asset_data = dict(
                        filepath=self.prop_rel_path(blend_file, 'source_directory'),
                        modified=modified,
                        catalog=catalog_path,
                        tags=asset.asset_data.tags.keys(),
                        type=self.data_type,
                        image=image,
                        video=video,
                        name=asset.name
                    )
                    asset_description['assets'].append(asset_data)

                    # Unlink the asset again: this pass only collects data.
                    getattr(bpy.data, self.data_types).remove(asset)

            new_cache.append(asset_description)


        #cache = [a for a in cache if a['filepath'] in blend_paths]

        #for a in asset_data:
        #    print(a)

        #print(asset_data)
        new_cache.sort(key=lambda x:x['filepath'])

        return new_cache

        # Write json data file to store all asset found
        #print(f'Writing asset data file to, {asset_data_path}')
        #asset_data_path.write_text(json.dumps(asset_data, indent=4))
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
from asset_library.collection import (
|
||||||
|
gui,
|
||||||
|
operators,
|
||||||
|
keymaps,
|
||||||
|
build_collection_blends,
|
||||||
|
create_collection_library)
|
||||||
|
|
||||||
|
# When the add-on is re-registered in a running Blender session, 'bpy'
# is already bound here, so reload the submodules to pick up edits.
if 'bpy' in locals():
    import importlib

    for _module in (gui, operators, keymaps,
                    build_collection_blends, create_collection_library):
        importlib.reload(_module)
|
||||||
|
|
||||||
|
|
||||||
|
def register():
    """Register the collection-library operators and keymaps."""
    for module in (operators, keymaps):
        module.register()
|
||||||
|
|
||||||
|
def unregister():
    """Unregister the collection-library operators and keymaps."""
    for module in (operators, keymaps):
        module.unregister()
|
|
@ -0,0 +1,124 @@
|
||||||
|
import argparse
|
||||||
|
import bpy
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from time import time, sleep
|
||||||
|
from itertools import groupby
|
||||||
|
from asset_library.common.bl_utils import load_datablocks, col_as_asset
|
||||||
|
from asset_library.constants import ASSETLIB_FILENAME
|
||||||
|
|
||||||
|
""" blender_assets.libs.json data Structure
|
||||||
|
[
|
||||||
|
{
|
||||||
|
'name': 'chars/main',
|
||||||
|
'id': '013562-56315-4563156-123',
|
||||||
|
'children':
|
||||||
|
[
|
||||||
|
{
|
||||||
|
'filepath' : '/z/...',
|
||||||
|
'name' : 'collection name',
|
||||||
|
'tags' : ['variation', 'machin', 'chose'],
|
||||||
|
'metadata' : {'filepath': '$PROJECT/...', 'version' : 'mushable'}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'filepath' : '/z/...',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
|
||||||
|
def build_collection_blends(path, categories=None, clean=True):
    """Build one blend file per category from the library's json manifest.

    Reads ``ASSETLIB_FILENAME`` under *path*, links every listed collection,
    marks it as an asset with its catalog/metadata, generates previews, and
    saves ``<path>/<category>/<category>.blend`` for each category.

    :param path: library root folder containing the manifest json.
    :param categories: optional list of category names to restrict the build.
    :param clean: remove all linked libraries before saving (can be slow).
    """

    t0 = time()
    scn = bpy.context.scene
    # Square render resolution used for the generated asset previews.
    scn.render.resolution_x = scn.render.resolution_y = 1000

    json_path = Path(path) / ASSETLIB_FILENAME
    if not json_path.exists():
        # No manifest: nothing to build.
        return

    # _col_datas = json.loads(json_path.read())[category]
    category_datas = json.loads(json_path.read_text())

    for category_data in category_datas:
        if categories and category_data['name'] not in categories:
            continue

        # Start each category from an empty file.
        bpy.ops.wm.read_homefile(use_empty=True)

        #category_data = next(c for c in category_datas if c['name'] == category)
        #_col_datas = category_data['children']

        cat_name = category_data['name']
        build_path = Path(path) / cat_name / f'{cat_name}.blend'

        ## re-iterate in grouped filepath
        # groupby requires its input sorted by the same key (filepath).
        col_datas = sorted(category_data['children'], key=lambda x: x['filepath'])
        for filepath, col_data_groups in groupby(col_datas, key=lambda x: x['filepath']):
            #f = Path(f)
            if not Path(filepath).exists():
                print(f'Not exists: {filepath}')
                continue

            # Materialize the group: it is iterated by index below.
            col_data_groups = list(col_data_groups)

            col_names = [a['name'] for a in col_data_groups]
            linked_cols = load_datablocks(filepath, col_names, link=True, type='collections')

            for i, col in enumerate(linked_cols):
                # iterate in linked collection and associated data
                if not col:
                    # Collection listed in the json but absent in the blend.
                    continue
                asset_data = col_data_groups[i]

                ## asset_data -> {'filepath': str, 'tags': list, 'metadata': dict}

                ## Directly link as collection inside a marked collection with same name
                marked_col = col_as_asset(col, verbose=True)
                marked_col.asset_data.description = asset_data.get('description', '')
                marked_col.asset_data.catalog_id = category_data['id'] # assign catalog

                for k, v in asset_data.get('metadata', {}).items():
                    marked_col.asset_data[k] = v

                ## exclude collections and generate preview
                bpy.ops.ed.lib_id_generate_preview({"id": marked_col}) # preview gen
                vcol = bpy.context.view_layer.layer_collection.children[marked_col.name]
                vcol.exclude = True

        # NOTE(review): presumably gives preview generation time to finish
        # before saving — confirm whether this wait is still needed.
        sleep(1.0)

        ## clear all objects (can be very long with a lot of objects...):
        if clean:
            print('Removing links...')
            # reversed(): libraries are removed while iterating.
            for lib in reversed(bpy.data.libraries):
                bpy.data.libraries.remove(lib)

        # Create the intermediate folders
        build_path.parent.mkdir(parents=True, exist_ok=True)
        print('Saving to', build_path)
        bpy.ops.wm.save_as_mainfile(filepath=str(build_path), compress=False)

    print("build time:", f'{time() - t0:.1f}s')

    bpy.ops.wm.quit_blender()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # CLI entry point, typically invoked through Blender with
    # `blender ... -- -path <libdir> [--category <name>]`.
    parser = argparse.ArgumentParser(description='build_collection_blends',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('-path') # Find/create the assetlib json in a sub-folder of libdir
    # dest='categories' so vars(args) matches build_collection_blends'
    # signature (path, categories=None, ...). Without it the call below
    # raised TypeError: unexpected keyword argument 'category'.
    parser.add_argument('--category', dest='categories') # Read the category from the json and link everything into the blend

    # Blender forwards script arguments after a '--' separator; strip
    # everything up to it so argparse only sees our own flags.
    if '--' in sys.argv:
        index = sys.argv.index('--')
        sys.argv = [sys.argv[index - 1], *sys.argv[index + 1:]]

    args = parser.parse_args()
    build_collection_blends(**vars(args))
|
|
@ -0,0 +1,163 @@
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
import subprocess
|
||||||
|
import bpy
|
||||||
|
from pathlib import Path
|
||||||
|
from asset_library.common.functions import create_catalog_file
|
||||||
|
from asset_library.common.file_utils import get_last_files
|
||||||
|
from asset_library.constants import ASSETLIB_FILENAME
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
### Create asset collection
|
||||||
|
|
||||||
|
## create_collection_library: generate all category blend from json
|
||||||
|
## if source_directory is set, call create_collection_json
|
||||||
|
|
||||||
|
## # create_collection_json:
|
||||||
|
## # scan marked blend, create json and call create_catalog_file
|
||||||
|
|
||||||
|
## # create_catalog_file
|
||||||
|
## # create catalog file from json file
|
||||||
|
|
||||||
|
### Json Structure
|
||||||
|
[
|
||||||
|
{
|
||||||
|
'name': 'chars/main',
|
||||||
|
'id': '013562-56315-4563156-123',
|
||||||
|
'children':
|
||||||
|
[
|
||||||
|
{
|
||||||
|
'filepath' : '/z/...',
|
||||||
|
'name' : 'collection name',
|
||||||
|
'tags' : ['variation', 'machin', 'chose'],
|
||||||
|
'metadata' : {'filepath': '$PROJECT/...', 'version' : 'mushable'}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'filepath' : '/z/...',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
|
||||||
|
def create_collection_json(path, source_directory):
    '''Scan every "last-version" blend under *source_directory* and write an
    asset-library JSON (ASSETLIB_FILENAME) into *path*.

    For each blend, collections marked as assets are listed and grouped by
    category (the blend's top-level folder name). Finally the catalog file is
    (re)generated from the JSON.

    path -> str/Path: library root where the json is written.
    source_directory -> str/Path: directory tree to scan for blends.
    '''

    json_path = Path(path) / ASSETLIB_FILENAME

    # get all blends in dir and subdirs (only last when versionned _v???)
    blends = get_last_files(source_directory, pattern=r'(_v\d{3})?\.blend$', only_matching=True)

    root_path = Path(source_directory).as_posix().rstrip('/') + '/'
    print('root_path: ', root_path)

    # open each blend and list data-blocks marked as asset
    category_datas = []
    for i, blend in enumerate(blends):
        fp = Path(blend)
        print(f'{i+1}/{len(blends)}')

        ## Category = top level folder ('chars'); problem if blends at root
        category = fp.as_posix().replace(root_path, '').split('/')[0]

        print(category)

        # assets_only=True: only data-blocks marked as assets are visible
        with bpy.data.libraries.load(blend, link=True, assets_only=True) as (data_from, data_to):
            ## just listing, nothing is assigned to data_to
            col_name_list = [c for c in data_from.collections]

        if not col_name_list:
            continue

        # find (or lazily create) the category entry and its children list
        col_list = next((c['children'] for c in category_datas if c['name'] == category), None)
        if col_list is None:
            col_list = []
            category_data = {
                'name': category,
                'id': str(uuid.uuid4()),
                'children': col_list,
            }
            category_datas.append(category_data)

        # BUG FIX: `blend` is a plain str (get_last_files returns `f.path`
        # strings), so `blend.as_posix()` raised AttributeError; use `fp`.
        blend_source_path = fp.as_posix()
        if (project_root := os.environ.get('PROJECT_ROOT')):
            blend_source_path = blend_source_path.replace(project_root, '$PROJECT_ROOT')

        for name in col_name_list:
            data = {
                'filepath' : blend,
                'name' : name,
                # 'tags' : [],
                'metadata' : {'filepath': blend_source_path},
            }

            col_list.append(data)

    json_path.write_text(json.dumps(category_datas, indent='\t'))
    ## create text catalog from json (keep_existing_category ?)
    create_catalog_file(json_path, keep_existing_category=True)
|
||||||
|
|
||||||
|
|
||||||
|
def create_collection_library(path, source_directory=None):
    '''Build the collection library: one blend per category, driven by the
    library JSON.

    path: library root storing the collection database (json and blends).
    source_directory: if set, the json is rebuilt from that source tree first.
    '''

    if source_directory:
        if not Path(source_directory).exists():
            print(f'Source directory not exists: {source_directory}')
            return

        ## scan source and build json in assetlib dir root
        create_collection_json(path, source_directory)

    json_path = Path(path) / ASSETLIB_FILENAME
    if not json_path.exists():
        print(f'No json found at: {json_path}')
        return

    # BUG FIX: Path has no .read(); read_text() is the correct API.
    file_datas = json.loads(json_path.read_text())

    ## For each category in json, execute build_collection_blends script
    ## in a blender subprocess (args after '--' are for the script)
    script = Path(__file__).parent / 'build_collection_blends.py'

    for category_data in file_datas:
        cmd = [bpy.app.binary_path, '--python', str(script), '--', '--path', path, '--category', category_data['name']]
        print('cmd: ', cmd)
        subprocess.call(cmd)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__' :
    parser = argparse.ArgumentParser(description='Create Collection Library',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('--path') # find/create the assetlib.json inside a sub-folder of the library dir

    # When launched through Blender, our own args come after the '--' separator
    if '--' in sys.argv :
        index = sys.argv.index('--')
        sys.argv = [sys.argv[index-1], *sys.argv[index+1:]]

    args = parser.parse_args()
    create_collection_library(**vars(args))
|
|
@ -0,0 +1,14 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
|
||||||
|
def draw_context_menu(layout):
    '''Draw extra entries in the Asset Browser context menu (currently a stub).'''
    # NOTE(review): `params` is fetched but unused — presumably kept for a
    # planned implementation; confirm before removing.
    params = bpy.context.space_data.params

    return
|
||||||
|
|
||||||
|
|
||||||
|
def draw_header(layout):
    '''Draw the header of the Asset Browser Window (currently a stub).'''

    return
|
|
@ -0,0 +1,22 @@
|
||||||
|
|
||||||
|
|
||||||
|
from typing import List, Tuple
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
addon_keymaps: List[Tuple[bpy.types.KeyMap, bpy.types.KeyMapItem]] = []
|
||||||
|
|
||||||
|
def register():
    '''Register the add-on keymap: double-click in the Asset Browser loads the asset.'''
    wm = bpy.context.window_manager
    addon = wm.keyconfigs.addon
    # addon keyconfig is None when Blender runs in background mode
    if not addon:
        return

    km = addon.keymaps.new(name="File Browser Main", space_type="FILE_BROWSER")
    kmi = km.keymap_items.new("assetlib.load_asset", "LEFTMOUSE", "DOUBLE_CLICK") # , shift=True
    # keep a reference so unregister() can remove the item
    addon_keymaps.append((km, kmi))
|
||||||
|
|
||||||
|
def unregister():
    '''Remove every keymap item added by register() and reset the registry.'''
    while addon_keymaps:
        keymap, keymap_item = addon_keymaps.pop()
        keymap.keymap_items.remove(keymap_item)
|
|
@ -0,0 +1,102 @@
|
||||||
|
import bpy
|
||||||
|
from bpy.types import Context, Operator
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
import os
|
||||||
|
import fnmatch
|
||||||
|
from os.path import expandvars
|
||||||
|
from typing import List, Tuple, Set
|
||||||
|
from pathlib import Path
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from asset_library.common.bl_utils import load_col
|
||||||
|
from asset_library.common.functions import get_active_library
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_load_asset(Operator):
    '''Link the active asset collection into the current file and override it
    (double-click handler registered in keymaps).'''

    bl_idname = "assetlib.load_asset"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}
    bl_label = 'Load Asset'
    bl_description = 'Link and override asset in current file'

    @classmethod
    def poll(cls, context: Context) -> bool:
        # Only usable from an Asset Browser editor
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            cls.poll_message_set("Current editor is not an asset browser")
            return False

        # Only for libraries of collections
        lib = get_active_library()
        if not lib or lib.data_type != 'COLLECTION':
            return False

        # The active asset must carry a custom 'filepath' asset property
        if not context.active_file or 'filepath' not in context.active_file.asset_data:
            cls.poll_message_set("Has not filepath property")
            return False

        return True

    def execute(self, context: Context) -> Set[str]:
        print('Load Asset')

        lib = get_active_library()
        print(lib, lib.data_type)

        asset = context.active_file
        if not asset:
            self.report({"ERROR"}, 'No asset selected')
            return {'CANCELLED'}

        # source blend path stored on the asset; may contain env vars ($PROJECT_ROOT)
        fp = expandvars(asset.asset_data['filepath'])
        name = asset.name

        ## set mode to object (overriding requires object mode)
        if context.mode != 'OBJECT':
            bpy.ops.object.mode_set(mode='OBJECT')

        print('path expanded: ', fp)

        if not Path(fp).exists():
            self.report({'ERROR'}, f'Not exists: {fp}')
            return {'CANCELLED'}

        # load_col returns the overridden rig, the instance empty, or None
        res = load_col(fp, name, link=True, override=True, rig_pattern='*_rig')
        if res:
            if res.type == 'ARMATURE':
                self.report({'INFO'}, f'Override rig {res.name}')
            elif res.type == 'EMPTY':
                self.report({'INFO'}, f'Instance collection {res.name}')

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
### --- REGISTER ---
|
||||||
|
|
||||||
|
# Operator classes handled by this module's (un)register
classes = (
    ASSETLIB_OT_load_asset,
)

def register():
    '''Register all operator classes of this module.'''
    for cls in classes:
        bpy.utils.register_class(cls)

def unregister():
    '''Unregister operator classes in reverse registration order.'''
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
|
@ -0,0 +1,26 @@
|
||||||
|
|
||||||
|
#from asset_library.bundle_blend import bundle_blend, bundle_library
|
||||||
|
#from file_utils import (norm_str, norm_value,
|
||||||
|
# norm_arg, get_bl_cmd, copy_file, copy_dir)
|
||||||
|
#from asset_library.functions import
|
||||||
|
|
||||||
|
#from asset_library.common import bundle_blend
|
||||||
|
from asset_library.common import file_utils
|
||||||
|
from asset_library.common import functions
|
||||||
|
from asset_library.common import synchronize
|
||||||
|
from asset_library.common import template
|
||||||
|
|
||||||
|
if 'bpy' in locals():
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
#importlib.reload(bundle_blend)
|
||||||
|
importlib.reload(file_utils)
|
||||||
|
importlib.reload(functions)
|
||||||
|
importlib.reload(synchronize)
|
||||||
|
importlib.reload(template)
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,465 @@
|
||||||
|
|
||||||
|
"""
|
||||||
|
Generic Blender functions
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from typing import Any, List, Iterable, Optional, Tuple
|
||||||
|
Datablock = Any
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from asset_library.constants import RESOURCES_DIR
|
||||||
|
#from asset_library.common.file_utils import no
|
||||||
|
from os.path import abspath
|
||||||
|
|
||||||
|
|
||||||
|
class attr_set():
    '''Context manager that temporarily overrides attributes.

    Receives a list of tuples ``(owner, "attribute"[, new_value])``.
    On enter: remembers the current value of each attribute and, when a third
    element is given, assigns it. On exit: restores every attribute to the
    value it had before.
    '''

    def __init__(self, attrib_list):
        self.store = []
        for entry in attrib_list:
            owner, attr_name = entry[0], entry[1]
            previous = getattr(owner, attr_name)
            self.store.append((owner, attr_name, previous))
            if len(entry) < 3:
                continue
            try:
                setattr(owner, attr_name, entry[2])
            except TypeError:
                print(f'Cannot set attribute {attr_name} to {owner}')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # restore in the recorded order
        for owner, attr_name, previous in self.store:
            setattr(owner, attr_name, previous)
|
||||||
|
|
||||||
|
# NOTE(review): this function is defined a second time (identically, with a
# docstring) later in this module; that later definition shadows this one at
# import time. Consider deleting one of the two.
def get_overriden_col(ob, scene=None):
    '''Return the first scene collection containing *ob* whose parent
    collections are all non-override (i.e. the collection used for the
    library override), or None.'''
    scn = scene or bpy.context.scene

    # collections actually referenced by the scene
    cols = [c for c in bpy.data.collections if scn.user_of_id(c)]

    return next((c for c in cols if ob in c.all_objects[:]
        if all(not c.override_library for c in get_col_parents(c))), None)
|
||||||
|
|
||||||
|
def get_view3d_persp():
    '''Return a VIEW_3D area, preferring one in perspective view.

    Falls back to the first 3D viewport found; raises IndexError if no
    VIEW_3D area exists in any window.
    '''
    windows = bpy.context.window_manager.windows
    view_3ds = [a for w in windows for a in w.screen.areas if a.type == 'VIEW_3D']
    view_3d = next((a for a in view_3ds if a.spaces.active.region_3d.view_perspective == 'PERSP'), view_3ds[0])
    return view_3d
|
||||||
|
|
||||||
|
def biggest_asset_browser_area(screen: bpy.types.Screen) -> Optional[bpy.types.Area]:
    """Return the asset browser Area that's largest on screen.

    :param screen: context.window.screen

    :return: the Area, or None if no Asset Browser area exists.
    """

    # FIX: annotation said Tuple[bool, int] but the key is a plain pixel count
    def area_sorting_key(area: bpy.types.Area) -> int:
        """Return area size in pixels."""
        return (area.width * area.height)

    areas = list(suitable_areas(screen))
    if not areas:
        return None

    return max(areas, key=area_sorting_key)
|
||||||
|
|
||||||
|
def suitable_areas(screen: bpy.types.Screen) -> Iterable[bpy.types.Area]:
    """Generator, yield Asset Browser areas of *screen*."""

    for area in screen.areas:
        space_data = area.spaces[0]
        # skip file browsers that are not in asset-browsing mode
        if not asset_utils.SpaceAssetInfo.is_asset_browser(space_data):
            continue
        yield area
|
||||||
|
|
||||||
|
def area_from_context(context: bpy.types.Context) -> Optional[bpy.types.Area]:
    """Return an Asset Browser area suitable for the given context.

    Prefers the current Asset Browser if available, otherwise the biggest one
    on the current screen, then on any other window's screen. Returns None if
    no Asset Browser exists anywhere.
    """

    space_data = context.space_data
    if asset_utils.SpaceAssetInfo.is_asset_browser(space_data):
        return context.area

    # Try the current screen first.
    browser_area = biggest_asset_browser_area(context.screen)
    if browser_area:
        return browser_area

    # Then any other window's screen.
    for win in context.window_manager.windows:
        if win.screen == context.screen:
            continue
        browser_area = biggest_asset_browser_area(win.screen)
        if browser_area:
            return browser_area

    return None
|
||||||
|
|
||||||
|
def activate_asset(
    asset: bpy.types.Action, asset_browser: bpy.types.Area, *, deferred: bool
) -> None:
    """Select & focus the asset in the browser.

    deferred: when True, activation happens on the browser's next redraw
    (useful when the asset list is not yet up to date).
    """

    space_data = asset_browser.spaces[0]
    assert asset_utils.SpaceAssetInfo.is_asset_browser(space_data)
    space_data.activate_asset_by_id(asset, deferred=deferred)
|
||||||
|
|
||||||
|
def active_catalog_id(asset_browser: bpy.types.Area) -> str:
    """Return the ID of the catalog shown in the asset browser."""
    # BUG FIX: previously called `params(...)`, an undefined name; the helper
    # defined below (get_asset_space_params) is the intended accessor.
    return get_asset_space_params(asset_browser).catalog_id
|
||||||
|
|
||||||
|
def get_asset_space_params(asset_browser: bpy.types.Area) -> bpy.types.FileAssetSelectParams:
    """Return the asset browser parameters given its Area."""
    browser_space = asset_browser.spaces[0]
    assert asset_utils.SpaceAssetInfo.is_asset_browser(browser_space)
    return browser_space.params
|
||||||
|
|
||||||
|
def refresh_asset_browsers():
    '''Trigger a library refresh in every Asset Browser of the current screen.'''
    for area in suitable_areas(bpy.context.screen):
        # NOTE(review): dict-style context override is deprecated in newer
        # Blender versions (use context.temp_override) — confirm target version.
        bpy.ops.asset.library_refresh({"area": area, 'region': area.regions[3]})
|
||||||
|
|
||||||
|
def tag_redraw(screen: bpy.types.Screen) -> None:
    """Tag every Asset Browser area of *screen* for redrawing."""
    for browser_area in suitable_areas(screen):
        browser_area.tag_redraw()
|
||||||
|
|
||||||
|
# def get_blender_command(file=None, script=None, background=True, **args):
|
||||||
|
# '''Return a Blender Command as a list to be used in a subprocess'''
|
||||||
|
|
||||||
|
# cmd = [bpy.app.binary_path]
|
||||||
|
|
||||||
|
# if file:
|
||||||
|
# cmd += [str(file)]
|
||||||
|
# if background:
|
||||||
|
# cmd += ['--background']
|
||||||
|
# if script:
|
||||||
|
# cmd += ['--python', str(script)]
|
||||||
|
# if args:
|
||||||
|
# cmd += ['--']
|
||||||
|
# for k, v in args.items():
|
||||||
|
# cmd += [f"--{k.replace('_', '-')}", str(v)]
|
||||||
|
|
||||||
|
# return cmd
|
||||||
|
|
||||||
|
def norm_value(value):
    '''Normalize *value* into a string (or list of strings) for use as a
    command-line argument.

    - list/tuple: each non-str element is JSON-encoded, returns a list of str.
    - Path: converted with str().
    - any other non-str: JSON-encoded (e.g. 3 -> '3', True -> 'true').
    - str: returned unchanged.
    '''
    # BUG FIX: this module never imports json at top level; import locally.
    import json

    if isinstance(value, (tuple, list)):
        values = []
        for v in value:
            if not isinstance(v, str):
                v = json.dumps(v)
            values.append(v)

        return values

    if isinstance(value, Path):
        return str(value)

    if not isinstance(value, str):
        value = json.dumps(value)
    return value
|
||||||
|
|
||||||
|
def norm_arg(arg_name, format=str.lower, prefix='--', separator='-'):
    '''Normalize a name into a CLI flag, e.g. "My Arg" -> "--my-arg".'''
    # NOTE(review): norm_str is defined in asset_library.common.file_utils but
    # is NOT imported in this module (the import is commented out at the top) —
    # this call will raise NameError; confirm and restore the import.
    arg_name = norm_str(arg_name, format=format, separator=separator)

    return prefix + arg_name
|
||||||
|
|
||||||
|
def get_bl_cmd(blender=None, background=False, focus=True, blendfile=None, script=None, **kargs):
    '''Build a Blender command line (list form for subprocess).

    blender: path to the blender binary (defaults to the running binary).
    background: add --background.
    focus: when False (and not background) open an off-screen, unfocused window.
    blendfile: file to open.
    script: python script to run with --python.
    **kargs: forwarded after '--' as normalized "--key value" pairs.
    '''
    cmd = [str(blender)] if blender else [bpy.app.binary_path]

    if background:
        cmd += ['--background']

    if not focus and not background:
        # tiny window pushed off-screen so it does not steal focus
        cmd += ['--no-window-focus']
        cmd += ['--window-geometry', '5000', '0', '10', '10']

    if blendfile:
        cmd += [str(blendfile)]

    if script:
        cmd += ['--python', str(script)]

    if kargs:
        # everything after '--' is ignored by Blender and read by the script
        cmd += ['--']
        for k, v in kargs.items():
            k = norm_arg(k)
            v = norm_value(v)

            cmd += [k]
            if isinstance(v, (tuple, list)):
                cmd += v
            else:
                cmd += [v]

    return cmd
|
||||||
|
|
||||||
|
def get_addon_prefs():
    '''Return the preferences object of the top-level add-on package.'''
    root_package = __package__.split('.')[0]
    return bpy.context.preferences.addons[root_package].preferences
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def thumbnail_blend_file(input_blend, output_img):
    '''Generate a preview image for *input_blend* using Blender's bundled
    blender-thumbnailer executable.

    Falls back to copying a generic "empty preview" image when the
    thumbnailer produced nothing. Returns True on success, False otherwise.
    '''
    # BUG FIX: neither shutil nor subprocess is imported at this module's top
    # level; import them locally so the function actually runs.
    import shutil
    import subprocess

    input_blend = Path(input_blend).resolve()
    output_img = Path(output_img).resolve()

    print(f'Thumbnailing {input_blend} to {output_img}')
    # the thumbnailer ships next to the blender binary
    blender_thumbnailer = Path(bpy.app.binary_path).parent / 'blender-thumbnailer'

    output_img.parent.mkdir(exist_ok=True, parents=True)

    subprocess.call([blender_thumbnailer, str(input_blend), str(output_img)])

    success = output_img.exists()

    if not success:
        empty_preview = RESOURCES_DIR / 'empty_preview.png'
        shutil.copy(str(empty_preview), str(output_img))

    return success
|
||||||
|
|
||||||
|
def get_col_parents(col, root=None, cols=None):
    '''Return the list of collections that directly contain *col*.

    root: collection to search in (recursive); defaults to the scene's
          master collection.
    cols: accumulator used internally by the recursion.
    '''
    if cols is None:
        cols = []

    # idiom fix: identity comparison with None (was `root == None`)
    if root is None:
        root = bpy.context.scene.collection

    for sub in root.children:
        if sub == col:
            cols.append(root)

        if len(sub.children):
            cols = get_col_parents(col, root=sub, cols=cols)
    return cols
|
||||||
|
|
||||||
|
# NOTE(review): second, identical definition of get_overriden_col — this one
# wins at import time; the earlier duplicate should be removed.
def get_overriden_col(ob, scene=None):
    '''Get the collection use for making the override'''
    scn = scene or bpy.context.scene

    # collections actually referenced by the scene
    cols = [c for c in bpy.data.collections if scn.user_of_id(c)]

    # first scene collection containing ob whose parent chain has no override
    return next((c for c in cols if ob in c.all_objects[:]
        if all(not c.override_library for c in get_col_parents(c))), None)
|
||||||
|
|
||||||
|
def load_assets_from(filepath: Path) -> List[Datablock]:
    '''Append every asset-marked datablock from *filepath* into the current
    file and return them (with fake user set).'''
    if not has_assets(filepath):
        # Avoid loading any datablocks when there are none marked as asset.
        return []

    # Append everything from the file.
    with bpy.data.libraries.load(str(filepath)) as (
        data_from,
        data_to,
    ):
        for attr in dir(data_to):
            setattr(data_to, attr, getattr(data_from, attr))

    # Iterate over the appended datablocks to find assets.
    def loaded_datablocks() -> Iterable[Datablock]:
        '''Yield every datablock that was just appended, across all types.'''
        for attr in dir(data_to):
            datablocks = getattr(data_to, attr)
            for datablock in datablocks:
                yield datablock

    loaded_assets = []
    for datablock in loaded_datablocks():
        if not getattr(datablock, "asset_data", None):
            continue

        # Fake User is lost when appending from another file.
        datablock.use_fake_user = True
        loaded_assets.append(datablock)
    return loaded_assets
|
||||||
|
|
||||||
|
def has_assets(filepath: Path) -> bool:
    '''Return True if the blend at *filepath* contains at least one datablock
    marked as an asset (nothing is actually loaded).'''
    with bpy.data.libraries.load(str(filepath), assets_only=True) as (
        data_from,
        _,
    ):
        for attr in dir(data_from):
            data_names = getattr(data_from, attr)
            if data_names:
                return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def copy_frames(start, end, offset, path):
    '''Duplicate numbered frame files, shifting their frame number by *offset*.

    path: filepath template containing '####' as the frame-number placeholder.
    NOTE(review): range(start, end) excludes `end` — confirm this is intended.
    '''
    # BUG FIX: shutil is used but never imported at this module's top level.
    import shutil

    for i in range(start, end):
        src = path.replace('####', f'{i:04d}')
        dst = src.replace(src.split('_')[-1].split('.')[0], f'{i+offset:04d}')
        shutil.copy2(src, dst)
|
||||||
|
|
||||||
|
def split_path(path):
    '''Split an RNA data path like 'pose.bones["Bone"]["prop"]' into
    (bone_name, prop_name).

    bone_name is None when the path contains no bracketed segment; prop_name
    falls back to the last dotted component (e.g. 'location').
    '''
    # narrowed from bare `except:` — only IndexError from the split can occur
    try:
        bone_name = path.split('["')[1].split('"]')[0]
    except IndexError:
        bone_name = None
    try:
        prop_name = path.split('["')[2].split('"]')[0]
    except IndexError:
        prop_name = path.split('.')[-1]

    return bone_name, prop_name
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def load_datablocks(src, names=None, type='objects', link=True, expr=None) -> list:
    '''Link or append datablocks of a given *type* from the blend at *src*.

    names: a name, a list of names, or None (= all datablocks of that type).
    type: bpy.data collection attribute name ('objects', 'collections', ...).
    link: link (True) or append (False).
    expr: fnmatch pattern string or predicate used to select names.

    Returns a list when *names* was not a single string, otherwise the first
    loaded datablock (or None when nothing was loaded).
    '''
    # single-string input means the caller wants a single datablock back
    return_list = not isinstance(names, str)
    names = names or []

    if not isinstance(names, (list, tuple)):
        names = [names]

    if isinstance(expr, str):
        pattern = expr
        expr = lambda x : fnmatch(x, pattern)

    with bpy.data.libraries.load(str(src), link=link) as (data_from, data_to):
        datablocks = getattr(data_from, type)
        if expr:
            names += [i for i in datablocks if expr(i)]
        elif not names:
            names = datablocks

        # assigning to data_to triggers the actual load on context exit
        setattr(data_to, type, names)

    datablocks = getattr(data_to, type)

    if return_list:
        return datablocks

    elif datablocks:
        return datablocks[0]
|
||||||
|
|
||||||
|
"""
|
||||||
|
# --- Collection handling
|
||||||
|
"""
|
||||||
|
|
||||||
|
def col_as_asset(col, verbose=False):
    '''Wrap *col* in a new parent collection linked to the scene and mark that
    parent as an asset. Returns the new parent collection (or None).'''
    if col is None:
        return
    if verbose:
        print('linking:', col.name)
    pcol = bpy.data.collections.new(col.name)
    bpy.context.scene.collection.children.link(pcol)
    pcol.children.link(col)
    pcol.asset_mark()
    return pcol
|
||||||
|
|
||||||
|
def load_col(filepath, name, link=True, override=True, rig_pattern=None, context=None):
    '''Link a collection by name from a file and override it if it contains an
    armature.

    Returns the overridden rig object when an armature was found and the
    override succeeded, otherwise the collection-instance empty, or None when
    the collection was not found (or the override produced no collection).
    '''
    context = context or bpy.context

    collections = load_datablocks(filepath, name, link=link, type='collections')
    if not collections:
        print(f'No collection "{name}" found in: {filepath}')
        return

    col = collections[0]
    print('collection:', col.name)

    ## create instance object (empty instancing the linked collection)
    inst = bpy.data.objects.new(col.name, None)
    inst.instance_collection = col
    inst.instance_type = 'COLLECTION'
    context.scene.collection.objects.link(inst)

    # make active
    inst.select_set(True)
    context.view_layer.objects.active = inst

    ## simple object (no armatures): nothing more to do
    if not link or not override:
        return inst
    if not next((o for o in col.all_objects if o.type == 'ARMATURE'), None):
        return inst

    ## Create the override
    # snapshot children before the operator, to detect the new collection after
    parent_cols = inst.users_collection
    child_cols = [child for pcol in parent_cols for child in pcol.children]

    params = {'active_object': inst, 'selected_objects': [inst]}
    try:
        # NOTE(review): dict context override is deprecated in newer Blender
        # versions (use context.temp_override) — confirm target version.
        bpy.ops.object.make_override_library(params)

        ## check which collection is new in parents collection
        asset_col = next((c for pcol in parent_cols for c in pcol.children if c not in child_cols), None)
        if not asset_col:
            print('Overriden, but no collection found !!')
            return

        # select/activate the rig so the user can animate right away
        for ob in asset_col.all_objects:
            if ob.type != 'ARMATURE':
                continue
            if rig_pattern and not fnmatch(ob.name, rig_pattern):
                continue

            ob.hide_select = ob.hide_viewport = False
            ob.select_set(True)
            context.view_layer.objects.active = ob
            print(ob.name)
            return ob

    except Exception as e:
        print(f'Override failed on {col.name}')
        print(e)

    return inst
|
||||||
|
|
||||||
|
|
||||||
|
def get_preview(asset_path='', asset_name=''):
    '''Find a preview image for an asset by scanning two directory levels above
    *asset_path* for a file whose stem ends with the (lowercased) asset name.
    Returns a Path or None.'''
    asset_preview_dir = Path(asset_path).parents[1]
    name = asset_name.lower()
    return next((f for f in asset_preview_dir.rglob('*') if f.stem.lower().endswith(name)), None)
|
||||||
|
|
||||||
|
def get_object_libraries(ob):
    '''Return the unique absolute filepaths of every library used by *ob*:
    its own library, its data's library, and (for meshes/curves) its
    materials' libraries.'''
    if not ob :
        return []

    libraries = [ob.library, ob.data.library]

    if ob.type in ('MESH', 'CURVE'):
        libraries += [m.library for m in ob.data.materials if m]

    filepaths = []
    for l in libraries:
        # skip local datablocks (no library) and libraries without a path
        if not l or not l.filepath:
            continue

        # resolve the '//'-relative blender path against the library file
        absolute_filepath = abspath(bpy.path.abspath(l.filepath, library=l))
        if absolute_filepath in filepaths:
            continue

        filepaths.append(absolute_filepath)

    return filepaths
|
|
@ -0,0 +1,307 @@
|
||||||
|
|
||||||
|
"""Generic python functions to make operation on file and names"""
|
||||||
|
|
||||||
|
import fnmatch
|
||||||
|
import json
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import unicodedata
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
import importlib
|
||||||
|
import sys
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
|
||||||
|
def install_module(module_name, package_name=None):
    '''Install a python module with pip or return it if already installed.

    module_name: importable module name.
    package_name: pip package name when it differs from the module name.
    '''
    try:
        module = importlib.import_module(module_name)
    except ModuleNotFoundError:
        print(f'Installing Module {module_name} ....')

        # make sure pip itself is available in this interpreter, then install
        subprocess.call([sys.executable, '-m', 'ensurepip'])
        subprocess.call([sys.executable, '-m', 'pip', 'install', package_name or module_name])

        module = importlib.import_module(module_name)

    return module
|
||||||
|
|
||||||
|
def import_module_from_path(path):
    '''Import a python file as a module named after its stem.

    Returns the module, or None when loading fails (error is printed).
    '''
    from importlib import util

    try:
        path = Path(path)
        spec = util.spec_from_file_location(path.stem, str(path))
        mod = util.module_from_spec(spec)

        spec.loader.exec_module(mod)

        return mod
    except Exception as e:
        print(f'Cannot import file {path}')
        print(e)
|
||||||
|
|
||||||
|
def norm_str(string, separator='_', format=str.lower, padding=0):
    '''Normalize a name into a filesystem/identifier-friendly string.

    Underscores and dashes become spaces, runs of spaces collapse, the string
    is stripped and case-formatted, the right-most number is zero-padded to
    *padding*, spaces become *separator*, and accents are stripped to ASCII.
    '''
    text = str(string).replace('_', ' ').replace('-', ' ')
    text = re.sub('[ ]+', ' ', text)
    text = re.sub(r'[ ]+\/[ ]+', '/', text)
    text = text.strip()

    if format:
        text = format(text)

    # zero-pad the right-most number only
    text = re.sub(r'(\d+)(?!.*\d)', lambda m: m.group(1).zfill(padding), text)

    text = text.replace(' ', separator)
    return unicodedata.normalize('NFKD', text).encode('ASCII', 'ignore').decode("utf-8")
|
||||||
|
|
||||||
|
def remove_version(filepath):
    '''Return the file name of *filepath* with its version suffix removed,
    e.g. '/a/b/file_v001.blend' -> 'file.blend'.'''
    # raw string: '_v[0-9]+\.' as a plain literal is an invalid escape sequence
    pattern = r'_v[0-9]+\.'
    search = re.search(pattern, filepath)

    if search:
        # drop the '_vNNN' part (the trailing dot is kept)
        filepath = filepath.replace(search.group()[:-1], '')

    return Path(filepath).name
|
||||||
|
|
||||||
|
def is_exclude(name, patterns) -> bool:
    '''Return True if *name* matches any of the fnmatch *patterns*
    (a single pattern string or a list/tuple of them).'''
    if not isinstance(patterns, (list, tuple)) :
        patterns = [patterns]
    # BUG FIX: this module does `import fnmatch` (the module), so calling
    # `fnmatch(name, p)` raised TypeError; the function is fnmatch.fnmatch.
    return any(fnmatch.fnmatch(name, p) for p in patterns)
|
||||||
|
|
||||||
|
def get_last_files(root, pattern=r'_v\d{3}\.\w+', only_matching=False, ex_file=None, ex_dir=None, keep=1, verbose=False) -> list:
    '''Recursively get last(s) file(s) (when there is multiple versions) in passed directory

    root -> str: Filepath of the folder to scan.
    pattern -> str: Regex pattern to group files.
    only_matching -> bool: Discard files that aren't matched by regex pattern.
    ex_file -> list : List of fn_match pattern to exclude files.
    ex_dir -> list : List of fn_match pattern of directory name to skip.
    keep -> int: Number of lasts versions to keep when there are mutliple versionned files (e.g: 1 keep only last).
    verbose -> bool: Print infos in console.
    '''
    # BUG FIX: groupby was used but itertools was never imported in this module
    from itertools import groupby

    files = []
    if ex_file is None:
        all_items = [f for f in os.scandir(root)]
    else:
        all_items = [f for f in os.scandir(root) if not is_exclude(f.name, ex_file)]

    allfiles = [f for f in all_items if f.is_file()]
    # Need to sort to effectively group separated key in list
    allfiles.sort(key=lambda x: x.name)

    dirs = [f for f in all_items if f.is_dir()]

    for i in range(len(allfiles)-1, -1, -1): # fastest way to iterate on index in reverse
        if not re.search(pattern, allfiles[i].name):
            if only_matching:
                allfiles.pop(i)
            else:
                files.append(allfiles.pop(i).path)

    # separate remaining files in prefix grouped lists
    lilist = [list(v) for k, v in groupby(allfiles, key=lambda x: re.split(pattern, x.name)[0])]

    # get only last item(s) of each sorted group list
    for l in lilist:
        versions = sorted(l, key=lambda x: x.name)[-keep:] # exclude older
        for f in versions:
            files.append(f.path)

        if verbose and len(l) > 1:
            print(f'{root}: keep {str([x.name for x in versions])} out of {len(l)} elements')

    for d in dirs: # recursively treat all detected directory
        if ex_dir and is_exclude(d.name, ex_dir):
            # skip folder with excluded name
            continue
        # NOTE(review): `verbose` is intentionally not forwarded to recursive
        # calls in the original — kept as-is; confirm if that is wanted.
        files += get_last_files(
            d.path, pattern=pattern, only_matching=only_matching, ex_file=ex_file, ex_dir=ex_dir, keep=keep)

    return sorted(files)
|
||||||
|
|
||||||
|
def copy_file(src, dst, only_new=False, only_recent=False):
    '''Copy a single file from src to dst.

    src, dst: pathlib.Path of the source and destination files.
    only_new: if True, never overwrite an existing destination.
    only_recent: if True, overwrite only when the source is strictly newer
        than the destination (based on modification time).
    '''
    if dst.exists():
        if only_new:
            return
        elif only_recent and dst.stat().st_mtime >= src.stat().st_mtime:
            return

    dst.parent.mkdir(exist_ok=True, parents=True)
    print(f'Copy file from {src} to {dst}')

    # shutil.copy2 is portable (no shelling out to `copy`/`cp`) and preserves
    # the source modification time, which the only_recent check relies on.
    import shutil
    shutil.copy2(str(src), str(dst))
|
||||||
|
|
||||||
|
def copy_dir(src, dst, only_new=False, only_recent=False, excludes=('.*',), includes=()):
    '''Recursively copy src (a file or a directory) to dst.

    only_new / only_recent: forwarded to copy_file to control overwrites.
    excludes / includes: fnmatch patterns matched against each file *name*
        (not the full path). Defaults are immutable tuples — the previous
        list defaults were a mutable-default-argument hazard.
    '''
    src, dst = Path(src), Path(dst)

    # Compile each pattern collection into a single alternation regex.
    if includes:
        includes = r'|'.join(fnmatch.translate(x) for x in includes)
    if excludes:
        excludes = r'|'.join(fnmatch.translate(x) for x in excludes)

    if dst.is_dir():
        dst.mkdir(exist_ok=True, parents=True)
    else:
        dst.parent.mkdir(exist_ok=True, parents=True)

    if src.is_file():
        copy_file(src, dst, only_new=only_new, only_recent=only_recent)

    elif src.is_dir():
        src_files = list(src.rglob('*'))
        if excludes:
            src_files = [f for f in src_files if not re.match(excludes, f.name)]

        if includes:
            src_files = [f for f in src_files if re.match(includes, f.name)]

        # Mirror the relative layout of src under dst.
        dst_files = [dst / f.relative_to(src) for f in src_files]

        for src_file, dst_file in zip(src_files, dst_files):
            if src_file.is_dir():
                dst_file.mkdir(exist_ok=True, parents=True)
            else:
                copy_file(src_file, dst_file, only_new=only_new, only_recent=only_recent)
|
||||||
|
|
||||||
|
|
||||||
|
def open_file(filepath, select=False):
    '''Reveal a filepath inside the OS file explorer.

    select: when supported, highlight the file instead of opening it.
    '''
    system = platform.system()

    if system == 'Darwin':  # macOS
        args = ['open']
    elif system == 'Windows':  # Windows
        args = ['explorer']
        if select:
            args += ['/select,']
    else:  # linux variants
        args = ['nemo'] if select else ['xdg-open']

    args.append(str(filepath))
    subprocess.Popen(args)
|
||||||
|
|
||||||
|
def open_blender_file(filepath=None):
    '''Re-launch Blender on the given blend file (defaults to the current file).'''
    filepath = filepath or bpy.data.filepath

    launch_args = sys.argv

    # With no filepath, reuse the command line untouched to reopen Blender.
    if filepath != '':
        if len(launch_args) > 1 and launch_args[1].endswith('.blend'):
            launch_args[1] = str(filepath)
        else:
            launch_args.insert(1, str(filepath))

    subprocess.Popen(launch_args)
|
||||||
|
|
||||||
|
def read_file(path):
    '''Read and parse a file with an extension in (json, yaml, yml, txt).

    Returns the parsed data (dict/list for json and yaml, str for txt),
    or None when the path is empty, missing, unsupported, or fails to parse.
    '''

    exts = ('.json', '.yaml', '.yml', '.txt')

    if not path:
        print('Try to read empty file')
        return  # was falling through and building Path('') — bail out instead

    path = Path(path)
    if not path.exists():
        print('File not exist', path)
        return

    suffix = path.suffix.lower()  # case-insensitive, matching the branches below
    if suffix not in exts:
        print(f'Cannot read file {path}, extension must be in {exts}')
        return

    txt = path.read_text()
    data = None

    if suffix in ('.yaml', '.yml'):
        yaml = install_module('yaml')
        try:
            data = yaml.safe_load(txt)
        except Exception:
            print(f'Could not load yaml file {path}')
            return
    elif suffix == '.json':
        try:
            data = json.loads(txt)
        except Exception:
            print(f'Could not load json file {path}')
            return
    else:
        data = txt

    return data
|
||||||
|
|
||||||
|
def write_file(path, data, indent=4):
    '''Serialize data to a file with an extension in (json, yaml, yml, txt).

    indent: indentation level used for json output.
    Prints and returns None on any failure.
    '''

    exts = ('.json', '.yaml', '.yml', '.txt')

    if not path:
        print('Try to write empty file')
        return  # was falling through and building Path('') — bail out instead

    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)

    suffix = path.suffix.lower()  # case-insensitive, matching the branches below
    if suffix not in exts:
        # was "Cannot read file" — wrong message for a write function
        print(f'Cannot write file {path}, extension must be in {exts}')
        return

    if suffix in ('.yaml', '.yml'):
        yaml = install_module('yaml')
        try:
            path.write_text(yaml.dump(data), encoding='utf8')
        except Exception as e:
            print(e)
            print(f'Could not write yaml file {path}')
            return
    elif suffix == '.json':
        try:
            path.write_text(json.dumps(data, indent=indent), encoding='utf8')
        except Exception as e:
            print(e)
            print(f'Could not write json file {path}')
            return
    else:
        # write_text returns a character count; no point keeping it
        path.write_text(data, encoding='utf8')
|
||||||
|
|
||||||
|
|
||||||
|
def synchronize(src, dst, only_new=False, only_recent=False, clear=False):
    '''Launch the synchronize.py helper script in a detached process.

    clear: remove the destination tree first, forcing a full re-copy.
    Errors are printed rather than raised (best-effort operation).
    '''

    try:
        if clear and Path(dst).exists():
            shutil.rmtree(dst)

        helper = Path(__file__).parent / 'synchronize.py'

        args = [
            sys.executable, helper,
            '--src', str(src),
            '--dst', str(dst),
            '--only-new', json.dumps(only_new),
            '--only-recent', json.dumps(only_recent),
        ]

        subprocess.Popen(args)

    except Exception as e:
        print(e)
|
||||||
|
|
|
@ -0,0 +1,465 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Function relative to the asset browser addon
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import time
|
||||||
|
#from asset_library.constants import ASSETLIB_FILENAME
|
||||||
|
import inspect
|
||||||
|
from asset_library.common.file_utils import read_file
|
||||||
|
from asset_library.common.bl_utils import get_addon_prefs
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
|
||||||
|
def command(func):
    '''Decorator that prints the wrapped function's arguments and run time.'''
    import functools

    func_name = func.__name__.replace('_', ' ').title()

    # functools.wraps preserves __name__/__doc__ of the wrapped function
    # (the original decorator hid them behind "_command").
    @functools.wraps(func)
    def _command(*args, **kargs):

        bound = inspect.signature(func).bind(*args, **kargs)
        bound.apply_defaults()

        args_str = ', '.join(f'{k}={v}' for k, v in bound.arguments.items())
        print(f'\n[>-] {func_name} ({args_str}) --- Start ---')

        t0 = time.time()
        result = func(*args, **kargs)

        print(f'[>-] {func_name} --- Finished (total time : {time.time() - t0:.2f}s) ---')
        return result

    return _command
|
||||||
|
|
||||||
|
def asset_warning_callback(self, context):
    """Update-callback: fill self.warning when adding or modifying an asset with incomplete info."""
    self.warning = ''

    if not self.name:
        self.warning = 'You need to specify a name'
        return
    if not self.catalog:
        self.warning = 'You need to specify a catalog'
        return

    lib = get_active_library()
    rel_path = lib.adapter.get_asset_relative_path(self.name, self.catalog)
    self.path = rel_path.as_posix()

    # Merged libraries store their assets in the configured target library.
    if lib.merge_libraries:
        lib = get_addon_prefs().libraries[lib.store_library]

    if not lib.adapter.get_asset_path(self.name, self.catalog).parents[1].exists():
        self.warning = 'A new folder will be created'
|
||||||
|
|
||||||
|
def get_active_library():
    '''Return the addon-pref library matching the asset browser's active library (None if absent).'''
    prefs = get_addon_prefs()
    active_ref = bpy.context.space_data.params.asset_library_ref

    # Check for merged library: match on each pref library's display name.
    return next((lib for lib in prefs.libraries if lib.library_name == active_ref), None)
|
||||||
|
|
||||||
|
def get_active_catalog():
    '''Return the catalog path of the asset browser's active catalog, or '' if unknown.'''

    lib = get_active_library()
    # Invert the catalog mapping to look up by catalog id.
    path_by_id = {v['id']: k for k, v in lib.adapter.read_catalog().items()}

    return path_by_id.get(bpy.context.space_data.params.catalog_id, '')
|
||||||
|
|
||||||
|
|
||||||
|
def norm_asset_datas(asset_file_datas):
    '''Return a new flat list of asset data.

    File-level keys are merged into each of the file's asset dicts;
    entries without an 'assets' key are kept as (copied) single entries.
    '''
    flat = []
    for file_data in asset_file_datas:
        file_data = dict(file_data)
        if 'assets' in file_data:
            sub_assets = file_data.pop('assets')
            flat.extend({**file_data, **asset} for asset in sub_assets)
        else:
            flat.append(file_data)

    return flat
|
||||||
|
|
||||||
|
def cache_diff(cache, new_cache):
    '''Compare two asset data lists; return entries tagged with an ADD/REMOVE/MODIFY operation.'''

    #TODO use an id to be able to tell modified asset if renamed

    # Key each flattened asset by filepath + name.
    key = lambda a: f"{a['filepath']}/{a['name']}"
    old_by_key = {key(a): a for a in norm_asset_datas(cache)}
    new_by_key = {key(a): a for a in norm_asset_datas(new_cache)}

    added = [a for k, a in new_by_key.items() if k not in old_by_key]
    removed = [a for k, a in old_by_key.items() if k not in new_by_key]
    # `a not in removed` short-circuits first, so new_by_key[k] is always valid.
    modified = [a for k, a in old_by_key.items() if a not in removed and a != new_by_key[k]]

    if added:
        print(f'{len(added)} Assets Added \n{tuple(a["name"] for a in added[:10])}\n')
    if removed:
        print(f'{len(removed)} Assets Removed \n{tuple(a["name"] for a in removed[:10])}\n')
    if modified:
        print(f'{len(modified)} Assets Modified \n{tuple(a["name"] for a in modified[:10])}\n')

    diff = (
        [dict(a, operation='ADD') for a in added]
        + [dict(a, operation='REMOVE') for a in removed]
        + [dict(a, operation='MODIFY') for a in modified]
    )
    if not diff:
        print('No change in the library')

    return diff
|
||||||
|
|
||||||
|
def clean_default_lib():
    '''Remove Blender's default "User Library" entry from the asset library paths.'''
    asset_libraries = bpy.context.preferences.filepaths.asset_libraries

    if not asset_libraries:
        print('[>-] No Asset Libraries Filepaths Setted.')
        return

    lib, lib_id = get_lib_id(library_name='User Library', asset_libraries=asset_libraries)

    if lib:
        bpy.ops.preferences.asset_library_remove(index=lib_id)
|
||||||
|
|
||||||
|
def get_asset_source(replace_local=False):
    '''Return the source path of the active asset handle.

    For a local (not yet published) asset, returns its publish path as a Path.
    replace_local: map a local library path back to the shared library path.
    Returns None when no asset is active or no publish path is set.
    '''
    sp = bpy.context.space_data
    prefs = bpy.context.preferences.addons[__package__].preferences
    asset_file_handle = bpy.context.asset_file_handle

    if asset_file_handle is None:
        return None

    if asset_file_handle.local_id:
        # was: scn.actionlib — `scn` was never defined (NameError)
        publish_path = os.path.expandvars(bpy.context.scene.actionlib.get('publish_path'))
        if not publish_path:
            print('[>.] No \'Publish Dir\' found. Publish file first.' )
            return None

        return Path(publish_path)

    asset_library_ref = bpy.context.asset_library_ref
    source_path = bpy.types.AssetHandle.get_full_library_path(asset_file_handle, asset_library_ref)

    if replace_local:
        if 'custom' in sp.params.asset_library_ref.lower():
            actionlib_path = prefs.action.custom_path
            actionlib_path_local = prefs.action.custom_path_local
        else:
            actionlib_path = prefs.action.path
            actionlib_path_local = prefs.action.path_local

        # was: re.sub(actionlib_dir_local, actionlib_dir, ...) — those names
        # were never assigned (NameError); use the paths selected above
        source_path = re.sub(actionlib_path_local, actionlib_path, source_path)

    return source_path
|
||||||
|
|
||||||
|
def get_catalog_path(filepath=None):
    '''Return (creating it if needed) the blender_assets.cats.txt next to *filepath*.

    filepath: a file or directory; defaults to the current blend file.
    '''
    target = Path(filepath or bpy.data.filepath)

    if target.is_file():
        target = target.parent

    target.mkdir(parents=True, exist_ok=True)

    catalog = target / 'blender_assets.cats.txt'
    if not catalog.exists():
        catalog.touch(exist_ok=False)

    return catalog
|
||||||
|
|
||||||
|
|
||||||
|
# def read_catalog(path, key='path'):
|
||||||
|
# cat_data = {}
|
||||||
|
|
||||||
|
# supported_keys = ('path', 'id', 'name')
|
||||||
|
|
||||||
|
# if key not in supported_keys:
|
||||||
|
# raise Exception(f'Not supported key: {key} for read catalog, supported keys are {supported_keys}')
|
||||||
|
|
||||||
|
# for line in Path(path).read_text(encoding="utf-8").split('\n'):
|
||||||
|
# if line.startswith(('VERSION', '#')) or not line:
|
||||||
|
# continue
|
||||||
|
|
||||||
|
# cat_id, cat_path, cat_name = line.split(':')
|
||||||
|
|
||||||
|
# if key == 'id':
|
||||||
|
# cat_data[cat_id] = {'path':cat_path, 'name':cat_name}
|
||||||
|
# elif key == 'path':
|
||||||
|
# cat_data[cat_path] = {'id':cat_id, 'name':cat_name}
|
||||||
|
# elif key =='name':
|
||||||
|
# cat_data[cat_name] = {'id':cat_id, 'path':cat_path}
|
||||||
|
|
||||||
|
# return cat_data
|
||||||
|
|
||||||
|
def read_catalog(path):
    '''Parse a blender_assets.cats.txt file into {catalog_path: {'id', 'name'}}.

    Lines have the form "uuid:catalog/path:Simple Name"; VERSION, comment
    and empty lines are skipped.
    '''
    cat_data = {}

    for line in Path(path).read_text(encoding="utf-8").split('\n'):
        if line.startswith(('VERSION', '#')) or not line:
            continue

        # maxsplit=2: tolerate ':' inside the trailing simple-name field
        cat_id, cat_path, cat_name = line.split(':', 2)
        cat_data[cat_path] = {'id': cat_id, 'name': cat_name}

    return cat_data
|
||||||
|
|
||||||
|
def write_catalog(path, data):
|
||||||
|
lines = ['VERSION 1', '']
|
||||||
|
|
||||||
|
# Add missing parents catalog
|
||||||
|
norm_data = {}
|
||||||
|
for cat_path, cat_data in data.items():
|
||||||
|
norm_data[cat_path] = cat_data
|
||||||
|
for p in Path(cat_path).parents[:-1]:
|
||||||
|
if p in data or p in norm_data:
|
||||||
|
continue
|
||||||
|
|
||||||
|
norm_data[p.as_posix()] = {'id': str(uuid.uuid4()), 'name': '-'.join(p.parts)}
|
||||||
|
|
||||||
|
for cat_path, cat_data in sorted(norm_data.items()):
|
||||||
|
cat_name = cat_data['name'].replace('/', '-')
|
||||||
|
lines.append(f"{cat_data['id']}:{cat_path}:{cat_name}")
|
||||||
|
|
||||||
|
print(f'Catalog writen at: {path}')
|
||||||
|
Path(path).write_text('\n'.join(lines), encoding="utf-8")
|
||||||
|
|
||||||
|
def create_catalog_file(json_path : str|Path, keep_existing_category : bool = True):
    '''Create an asset catalog file from a json category description.

    When keep_existing_category is True, categories already present in the
    catalog keep their existing uid, and catalog-only categories are retained.
    '''

    json_path = Path(json_path)
    assert json_path.exists(), 'Json not exists !'

    category_datas = json.loads(json_path.read_text(encoding="utf-8"))

    catalog_path = json_path.parent / 'blender_assets.cats.txt'
    ## read_catalog returns {path: {'id': id, 'name': name}}
    ## note: 'path' in catalog is 'name' in category_datas
    existing = read_catalog(catalog_path) if catalog_path.exists() else {}

    lines = ['VERSION 1', '']

    for c in category_datas:
        known = existing.get(c['name']) if keep_existing_category else None
        if known:
            # keep same catalog line (same uid) for existing category keys
            print(c['name'], 'category exists')
            lines.append(f"{known['id']}:{c['name']}:{known['name']}")
        else:
            # add new category
            print(c['name'], 'new category')
            lines.append(f"{c['id']}:{c['name']}:{c['name'].replace('/', '-')}")

    ## keep categories that exist in the catalog but not in the json
    if keep_existing_category:
        for key, cat in existing.items():
            if next((c['name'] for c in category_datas if c['name'] == key), None):
                continue
            print(key, 'category not existing in json')
            # rebuild the existing line unchanged
            lines.append(f"{cat['id']}:{key}:{cat['name']}")

    ## write_text overwrites the file
    catalog_path.write_text('\n'.join(lines), encoding="utf-8")

    print(f'Catalog saved at: {catalog_path}')

    return
|
||||||
|
|
||||||
|
def clear_env_libraries():
    '''Remove the env-defined libraries from Blender's prefs and from the addon prefs.'''
    print('clear_env_libraries')

    prefs = get_addon_prefs()
    bl_libraries = bpy.context.preferences.filepaths.asset_libraries

    for env_lib in prefs.env_libraries:
        lib_name = env_lib.get('asset_library')
        if not lib_name:
            continue

        bl_lib = bl_libraries.get(lib_name)
        if not bl_lib:
            continue

        lib_index = list(bl_libraries).index(bl_lib)
        bpy.ops.preferences.asset_library_remove(index=lib_index)

    prefs.env_libraries.clear()
|
||||||
|
|
||||||
|
'''
|
||||||
|
env_libs = get_env_libraries()
|
||||||
|
paths = [Path(l['path']).resolve().as_posix() for n, l in env_libs.items()]
|
||||||
|
|
||||||
|
for i, l in reversed(enumerate(libs)):
|
||||||
|
lib_path = Path(l.path).resolve().as_posix()
|
||||||
|
|
||||||
|
if (l.name in env_libs or lib_path in paths):
|
||||||
|
libs.remove(i)
|
||||||
|
'''
|
||||||
|
|
||||||
|
def set_env_libraries(path=None) -> list:
    '''Read the library config file and (re)create the env libraries from it.

    path: config file to read; defaults to the addon's config_directory.
    Returns the list of created library entries (None when nothing was read).
    '''
    prefs = get_addon_prefs()

    library_data = read_file(path or prefs.config_directory)

    clear_env_libraries()

    if not library_data:
        return

    created = []
    for lib_info in library_data:
        lib = prefs.env_libraries.add()
        lib.set_dict(lib_info)
        created.append(lib)

    return created
|
||||||
|
|
||||||
|
'''
|
||||||
|
def get_env_libraries():
|
||||||
|
env_libraries = {}
|
||||||
|
|
||||||
|
for k, v in os.environ.items():
|
||||||
|
if not re.findall('ASSET_LIBRARY_[0-9]', k):
|
||||||
|
continue
|
||||||
|
|
||||||
|
lib_infos = v.split(os.pathsep)
|
||||||
|
|
||||||
|
if len(lib_infos) == 5:
|
||||||
|
name, data_type, tpl, src_path, bdl_path = lib_infos
|
||||||
|
elif len(lib_infos) == 4:
|
||||||
|
name, data_type, tpl, src_path = lib_infos
|
||||||
|
bdl_path = ''
|
||||||
|
else:
|
||||||
|
print(f'Wrong env key {k}', lib_infos)
|
||||||
|
continue
|
||||||
|
|
||||||
|
source_type = 'TEMPLATE'
|
||||||
|
if tpl.lower().endswith(('.json', '.yml', 'yaml')):
|
||||||
|
source_type = 'DATA_FILE'
|
||||||
|
|
||||||
|
env_libraries[name] = {
|
||||||
|
'data_type': data_type,
|
||||||
|
'source_directory': src_path,
|
||||||
|
'bundle_directory': bdl_path,
|
||||||
|
'source_type': source_type,
|
||||||
|
'template': tpl,
|
||||||
|
}
|
||||||
|
|
||||||
|
return env_libraries
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def resync_lib(name, waiting_time):
    '''Schedule a deferred re-synchronisation of the library *name*.

    waiting_time: delay in seconds before the sync operator is invoked.
    '''
    # NOTE(review): the lambda returns the operator's result set rather than
    # None/float — confirm bpy.app.timers accepts this return value.
    bpy.app.timers.register(
        lambda: bpy.ops.assetlib.synchronize(only_recent=True, name=name),
        first_interval=waiting_time
    )
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
'''
|
||||||
|
def set_assetlib_paths():
|
||||||
|
prefs = bpy.context.preferences
|
||||||
|
|
||||||
|
assetlib_name = 'Assets'
|
||||||
|
assetlib = prefs.filepaths.asset_libraries.get(assetlib_name)
|
||||||
|
|
||||||
|
if not assetlib:
|
||||||
|
bpy.ops.preferences.asset_library_add(directory=str(assetlib_path))
|
||||||
|
assetlib = prefs.filepaths.asset_libraries[-1]
|
||||||
|
assetlib.name = assetlib_name
|
||||||
|
|
||||||
|
assetlib.path = str(actionlib_dir)
|
||||||
|
|
||||||
|
def set_actionlib_paths():
|
||||||
|
prefs = bpy.context.preferences
|
||||||
|
|
||||||
|
actionlib_name = 'Action Library'
|
||||||
|
actionlib_custom_name = 'Action Library Custom'
|
||||||
|
|
||||||
|
actionlib = prefs.filepaths.asset_libraries.get(actionlib_name)
|
||||||
|
|
||||||
|
if not assetlib:
|
||||||
|
bpy.ops.preferences.asset_library_add(directory=str(assetlib_path))
|
||||||
|
assetlib = prefs.filepaths.asset_libraries[-1]
|
||||||
|
assetlib.name = assetlib_name
|
||||||
|
|
||||||
|
actionlib_dir = get_actionlib_dir(custom=custom)
|
||||||
|
local_actionlib_dir = get_actionlib_dir(local=True, custom=custom)
|
||||||
|
|
||||||
|
if local_actionlib_dir:
|
||||||
|
actionlib_dir = local_actionlib_dir
|
||||||
|
|
||||||
|
if actionlib_name not in prefs.filepaths.asset_libraries:
|
||||||
|
bpy.ops.preferences.asset_library_add(directory=str(actionlib_dir))
|
||||||
|
|
||||||
|
#lib, lib_id = get_lib_id(
|
||||||
|
# library_path=actionlib_dir,
|
||||||
|
# asset_libraries=prefs.filepaths.asset_libraries
|
||||||
|
#)
|
||||||
|
|
||||||
|
#if not lib:
|
||||||
|
# print(f'Cannot set dir for {actionlib_name}')
|
||||||
|
# return
|
||||||
|
|
||||||
|
prefs.filepaths.asset_libraries[lib_id].name = actionlib_name
|
||||||
|
#prefs.filepaths.asset_libraries[lib_id].path = str(actionlib_dir)
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,42 @@
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import fnmatch
|
||||||
|
import importlib.util
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Import the sibling file_utils module straight from its path so the package
# __init__ (which would pull in bpy) is never executed — this script runs in
# a plain Python interpreter, outside Blender.
spec = importlib.util.spec_from_file_location(
    "utils", Path(__file__).parent/"file_utils.py"
)
utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
|
||||||
|
|
||||||
|
|
||||||
|
def synchronize(src, dst, only_new=False, only_recent=False):
    '''Mirror blend files and the catalog file from src into dst.

    only_new / only_recent: forwarded to copy_dir to control overwrites.
    '''
    exclude_patterns = ['*.sync-conflict-*', '.*']
    include_patterns = ['*.blend', 'blender_assets.cats.txt']

    utils.copy_dir(
        src, dst,
        only_new=only_new, only_recent=only_recent,
        excludes=exclude_patterns, includes=include_patterns,
    )
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__' :
    # was "Add Comment To the tracker" — a copy-pasted description from an
    # unrelated script; it is what `--help` shows to the user
    parser = argparse.ArgumentParser(description='Synchronize an asset library directory into another',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('--src')
    parser.add_argument('--dst')
    # json.loads turns the literal strings "true"/"false" into real booleans
    parser.add_argument('--only-new', type=json.loads, default='false')
    parser.add_argument('--only-recent', type=json.loads, default='false')

    args = parser.parse_args()
    synchronize(**vars(args))
|
|
@ -0,0 +1,89 @@
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from glob import glob
|
||||||
|
|
||||||
|
|
||||||
|
class Template:
    '''Path template with `{field}` placeholders, e.g. "lib/{category}/{name}.blend".

    `{field**}` marks a recursive segment that may span sub-directories.
    Supports formatting field values into a path and parsing a path back
    into its field values.
    '''

    # {field} or {field**}
    field_pattern = re.compile(r'{(\w+)\*{0,2}}')
    # only the recursive {field**} form
    field_pattern_recursive = re.compile(r'{(\w+)\*{2}}')

    def __init__(self, template):
        self.template = template

    @property
    def glob_pattern(self):
        '''Template with fields replaced by glob wildcards (** for recursive fields).'''
        pattern = self.field_pattern_recursive.sub('**', self.template)
        pattern = self.field_pattern.sub('*', pattern)
        return pattern

    @property
    def re_pattern(self):
        '''Template compiled to a regex with one capture group per field.'''
        pattern = self.field_pattern_recursive.sub('([\\\w -_.\/]+)', self.template)
        pattern = self.field_pattern.sub('([\\\w -_.]+)', pattern)
        pattern = pattern.replace('?', '.')
        pattern = pattern.replace('*', '.*')

        return re.compile(pattern)

    @property
    def fields(self):
        '''Ordered list of the field names appearing in the template.'''
        return self.field_pattern.findall(self.template)

    def parse(self, path):
        '''Extract the field values from *path*; return {} when it does not match.'''

        path = Path(path).as_posix()

        res = self.re_pattern.findall(path)
        if not res:
            # was a plain string literal — the f-prefix was missing, so the
            # placeholders were printed verbatim
            print(f'Could not parse {path} with {self.re_pattern}')
            return {}

        fields = self.fields

        # findall yields plain strings for a single group, tuples otherwise
        if len(fields) == 1:
            field_values = res
        else:
            field_values = res[0]

        return {k:v for k,v in zip(fields, field_values)}

    def format(self, data=None, **kargs):
        '''Fill the template with *data* / keyword values and expand env vars.

        Returns a Path, or None when a required field is missing.
        '''
        data = {**(data or {}), **kargs}

        try:
            path = self.template.format(**data)
        except KeyError:
            print(f'Cannot format {self.template} with {data}')
            return

        path = os.path.expandvars(path)
        return Path(path)

    def glob(self, directory, pattern=None):
        '''Yield the files under *directory* that match the template.

        If pattern is given it need to be absolute.
        '''
        if pattern is None:
            pattern = Path(directory, self.glob_pattern).as_posix()

        for entry in os.scandir(directory):
            entry_path = Path(entry.path)
            if entry.is_file() and fnmatch(entry_path.as_posix(), pattern):
                yield entry_path
            elif entry.is_dir():
                yield from self.glob(entry.path, pattern)

    def find(self, data, **kargs):
        '''Return the first existing file matching the formatted template, else None.'''
        pattern = self.format(data, **kargs)
        paths = glob(pattern.as_posix())
        if paths:
            return Path(paths[0])

    def __repr__(self):
        return f'Template({self.template})'
|
|
@ -0,0 +1,15 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
# Enum items: (identifier, UI label, description, icon name, unique number).
DATA_TYPE_ITEMS = [
    ("ACTION", "Action", "", "ACTION", 1),
    ("COLLECTION", "Collection", "", "OUTLINER_OB_GROUP_INSTANCE", 2),
    ("FILE", "File", "", "FILE", 3)
]
# Just the identifiers, e.g. for validating a data_type value.
DATA_TYPES = [i[0] for i in DATA_TYPE_ITEMS]
# identifier -> icon name lookup.
ICONS = {identifier: icon for identifier, name, description, icon, number in DATA_TYPE_ITEMS}

# File name of the bundled asset description json written into a library.
ASSETLIB_FILENAME = "blender_assets.libs.json"
MODULE_DIR = Path(__file__).parent
RESOURCES_DIR = MODULE_DIR / 'resources'
ADAPTER_DIR = MODULE_DIR / 'adapters'
|
|
@ -0,0 +1,18 @@
|
||||||
|
|
||||||
|
from asset_library.file import (
    operators, gui, keymaps)

# Reload the submodules when the addon is re-registered in the same session.
# NOTE(review): 'bpy' is never imported in this module, so this condition
# looks like it can never be true — confirm the intended sentinel name.
if 'bpy' in locals():
    import importlib

    importlib.reload(operators)
    importlib.reload(gui)
    importlib.reload(keymaps)
|
||||||
|
|
||||||
|
def register():
    '''Register the operators and keymaps of the file sub-package.'''
    for module in (operators, keymaps):
        module.register()
|
||||||
|
|
||||||
|
def unregister():
    '''Unregister the operators and keymaps of the file sub-package.'''
    for module in (operators, keymaps):
        module.unregister()
|
|
@ -0,0 +1,171 @@
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
import bpy
|
||||||
|
import re
|
||||||
|
import uuid
|
||||||
|
from itertools import groupby
|
||||||
|
|
||||||
|
from asset_library.constants import ASSETLIB_FILENAME, MODULE_DIR
|
||||||
|
from asset_library.common.bl_utils import thumbnail_blend_file
|
||||||
|
from asset_library.common.functions import (read_catalog, get_catalog_path,
|
||||||
|
command, write_catalog)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@command
def bundle_library(source_directory, bundle_directory, asset_description_template, thumbnail_template,
        template=None, data_file=None):
    '''Scan source_directory using *template* and write the asset data json file.

    template: path template with {field} placeholders locating the asset
        folders; numeric fields ({0}, {1}, ...) become the catalog hierarchy.
    thumbnail_template: file-name template (receives name=...) resolved
        inside each matched folder; missing thumbnails are rendered.
    asset_description_template / data_file: kept for interface compatibility
        (not used by the current implementation).
    '''
    field_pattern = r'{(\w+)}'
    asset_data_path = Path(bundle_directory) / ASSETLIB_FILENAME

    # Derive a glob pattern (fields -> *) and a capture regex (fields -> groups).
    glob_pattern = re.sub(field_pattern, '*', template)
    re_pattern = re.sub(field_pattern, r'([\\w -_.]+)', template)
    re_pattern = re_pattern.replace('?', '.')

    field_names = re.findall(field_pattern, template)

    asset_file_datas = []
    for f in sorted(Path(source_directory).glob(glob_pattern)):
        rel_path = f.relative_to(source_directory).as_posix()

        field_values = re.findall(re_pattern, rel_path)[0]
        field_data = {k:v for k,v in zip(field_names, field_values)}

        name = field_data.get('name', f.stem)
        thumbnail = (f / thumbnail_template.format(name=name)).resolve()
        # (removed a dead assignment that resolved asset_description_template
        # into a variable immediately overwritten by the dict below)

        # Numeric fields, in order, form the catalog path.
        catalogs = sorted([v for k,v in sorted(field_data.items()) if k.isdigit()])
        catalogs = [c.replace('_', ' ').title() for c in catalogs]

        if not thumbnail.exists():
            thumbnail_blend_file(f, thumbnail)

        asset_data = {
            'catalog' : '/'.join(catalogs),
            'preview' : thumbnail.as_posix(), #'./' + bpy.path.relpath(str(thumbnail), start=str(f))[2:],
            'filepath' : f.as_posix(), #'./' + bpy.path.relpath(str(f), start=str(asset_data_path))[2:],
            'name': name,
            'tags': [],
            'metadata': {'filepath': f.as_posix()}
        }

        asset_file_datas.append(asset_data)

    # Write json data file to store all asset found
    print(f'Writing asset data file to, {asset_data_path}')
    asset_data_path.write_text(json.dumps(asset_file_datas, indent=4))

    #script = MODULE_DIR / 'common' / 'bundle_blend.py'
    #cmd = [bpy.app.binary_path, '--python', str(script), '--', '--filepath', str(filepath)]
    #print(cmd)
    #subprocess.call(cmd)
|
||||||
|
|
||||||
|
@command
def bundle_blend(filepath, depth=0):
    """Bundle the assets listed in the library's json data file into .blend files.

    Reads the asset listing (ASSETLIB_FILENAME) from `filepath`, groups the
    assets by the first `depth` components of their catalog path (depth=0
    bundles everything into a single blend at the library root), creates one
    marked asset collection per asset (preview, description, catalog id,
    metadata), saves one .blend per group, then writes the catalog file.

    Args:
        filepath: Root directory of the bundled library.
        depth: Number of leading catalog components used to split assets
            into separate blend files; 0 means one single file.

    Raises:
        Exception: If the asset data file does not exist.
    """
    filepath = Path(filepath)

    asset_data_path = filepath / ASSETLIB_FILENAME

    # Default blend file at the library root, used for the depth == 0 case.
    blend_name = filepath.name.replace(' ', '_').lower()
    blend_path = (filepath / blend_name).with_suffix('.blend')

    if not asset_data_path.exists():
        raise Exception(f'The file {asset_data_path} does not exist')

    catalog_path = get_catalog_path(filepath)
    catalog_data = read_catalog(catalog_path)

    asset_file_data = json.loads(asset_data_path.read_text())

    if depth == 0:
        # Single group; keep the (sub_path, assets) shape the loop expects.
        # (The previous `groups = [asset_file_data]` could not be unpacked.)
        groups = [((), asset_file_data)]
    else:
        # groupby() only groups *consecutive* items: sort by the same key first.
        asset_file_data.sort(key=lambda x: x['catalog'].split('/')[:depth])
        groups = groupby(asset_file_data, key=lambda x: x['catalog'].split('/')[:depth])

    total_assets = len(asset_file_data)
    # Print progress roughly every 1%; max(1, ...) avoids a modulo-by-zero
    # when there are fewer than 100 assets.
    progress_step = max(1, total_assets // 100)

    i = 0
    for sub_path, asset_datas in groups:
        # Start each bundle from an empty file.
        bpy.ops.wm.read_homefile(use_empty=True)

        for asset_data in asset_datas:
            if sub_path:
                blend_name = sub_path[-1].replace(' ', '_').lower()
                blend_path = Path(filepath, *sub_path, blend_name).with_suffix('.blend')

            if total_assets and i % progress_step == 0:
                print(f'Progress: {int(i / total_assets * 100)}')

            col = bpy.data.collections.new(name=asset_data['name'])

            # Linking into the scene seems slow and is not required to mark
            # the collection as an asset.
            #bpy.context.scene.collection.children.link(col)
            col.asset_mark()

            with bpy.context.temp_override(id=col):
                bpy.ops.ed.lib_id_load_custom_preview(
                    filepath=asset_data['preview']
                )

            col.asset_data.description = asset_data.get('description', '')

            catalog_name = asset_data['catalog']
            catalog = catalog_data.get(catalog_name)
            if not catalog:
                # Unknown catalog: create an entry with a fresh uuid.
                catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
                catalog_data[catalog_name] = catalog

            col.asset_data.catalog_id = catalog['id']

            # Copy custom metadata onto the asset as id properties.
            for k, v in asset_data.get('metadata', {}).items():
                col.asset_data[k] = v

            i += 1

        print(f'Saving Blend to {blend_path}')

        # Create the parent directories; mkdir() on blend_path itself would
        # create a *directory* named like the blend file and break the save.
        blend_path.parent.mkdir(exist_ok=True, parents=True)
        bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

    write_catalog(catalog_path, catalog_data)

    bpy.ops.wm.quit_blender()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='bundle_blend',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Flag names must match the attributes read below: argparse turns
    # --source-directory into args.source_directory. The previous flags
    # (--source-path, --bundle-path, --asset-data-template) produced
    # attributes that were never read, causing an AttributeError.
    parser.add_argument('--source-directory')
    parser.add_argument('--bundle-directory')
    parser.add_argument('--asset-description-template')
    parser.add_argument('--thumbnail-template')
    parser.add_argument('--template', default=None)
    parser.add_argument('--data-file', default=None)
    parser.add_argument('--depth', default=0, type=int)

    # When launched through Blender, only the arguments after '--' belong
    # to this script.
    if '--' in sys.argv:
        index = sys.argv.index('--')
        sys.argv = [sys.argv[index - 1], *sys.argv[index + 1:]]

    args = parser.parse_args()

    bundle_library(
        source_directory=args.source_directory,
        bundle_directory=args.bundle_directory,
        asset_description_template=args.asset_description_template,
        thumbnail_template=args.thumbnail_template,
        template=args.template,
        data_file=args.data_file)

    bundle_blend(filepath=args.bundle_directory, depth=args.depth)
|
|
@ -0,0 +1,30 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from bpy.types import (
|
||||||
|
Context,
|
||||||
|
Header,
|
||||||
|
Menu,
|
||||||
|
Panel,
|
||||||
|
UIList,
|
||||||
|
WindowManager,
|
||||||
|
WorkSpace,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from asset_library.common.bl_utils import get_object_libraries, get_addon_prefs
|
||||||
|
|
||||||
|
|
||||||
|
def draw_context_menu(self, context):
    """Populate the asset browser context menu for file-type libraries."""
    menu = self.layout
    menu.operator("assetlib.open_blend_file", text="Open Blend File")
|
||||||
|
|
||||||
|
|
||||||
|
def draw_header(layout):
    """Draw this adapter's part of the Asset Browser window header."""
    layout.separator()
    # Placeholder: no adapter-specific header buttons yet.
    #layout.operator("actionlib.store_anim_pose", text='Add Action', icon='FILE_NEW')
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
|
||||||
|
from typing import List, Tuple
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.app.handlers import persistent
|
||||||
|
|
||||||
|
addon_keymaps: List[Tuple[bpy.types.KeyMap, bpy.types.KeyMapItem]] = []
|
||||||
|
|
||||||
|
def register() -> None:
    """Bind double-click in the asset browser to the open-blend-file operator."""
    keyconfigs = bpy.context.window_manager.keyconfigs
    if not keyconfigs.addon:
        # No addon keyconfig exists when Blender runs in the background.
        return

    keymap = keyconfigs.addon.keymaps.new(
        name="File Browser Main", space_type="FILE_BROWSER")
    item = keymap.keymap_items.new(
        "assetlib.open_blend_file", "LEFTMOUSE", "DOUBLE_CLICK")
    addon_keymaps.append((keymap, item))
|
||||||
|
|
||||||
|
|
||||||
|
def unregister() -> None:
    """Remove every shortcut this module registered."""
    while addon_keymaps:
        keymap, item = addon_keymaps.pop()
        keymap.keymap_items.remove(item)
|
|
@ -0,0 +1,57 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.types import Context, Operator
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from bpy.props import StringProperty
|
||||||
|
from typing import List, Tuple, Set
|
||||||
|
|
||||||
|
from asset_library.common.file_utils import (open_blender_file,
|
||||||
|
synchronize, open_blender_file)
|
||||||
|
|
||||||
|
from asset_library.common.functions import get_active_library
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_open_blend_file(Operator):
    """Open the blend file referenced by the active asset's 'filepath' property."""

    bl_idname = "assetlib.open_blend_file"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Open Blender File'
    bl_description = 'Open blender file'

    @classmethod
    def poll(cls, context: Context) -> bool:
        # Only available from an asset browser editor.
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            cls.poll_message_set("Current editor is not an asset browser")
            return False

        # Only 'FILE' type libraries reference a standalone blend per asset.
        lib = get_active_library()
        if not lib or lib.data_type != 'FILE':
            return False

        if not context.active_file or 'filepath' not in context.active_file.asset_data:
            cls.poll_message_set("Has not filepath property")
            return False

        return True

    def execute(self, context: Context) -> Set[str]:
        # NOTE: removed a leftover debug print and an unused library lookup.
        filepath = context.active_file.asset_data['filepath']
        open_blender_file(filepath)

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
# Classes (un)registered by this module.
classes = (
    ASSETLIB_OT_open_blend_file,
)

def register():
    # Register every operator class exposed by this module.
    for cls in classes:
        bpy.utils.register_class(cls)

def unregister():
    # Unregister in reverse registration order.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
|
@ -0,0 +1,333 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Action Library - GUI definition.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from bpy.types import (
|
||||||
|
AssetHandle,
|
||||||
|
Context,
|
||||||
|
Header,
|
||||||
|
Menu,
|
||||||
|
Panel,
|
||||||
|
UIList,
|
||||||
|
WindowManager,
|
||||||
|
WorkSpace,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from asset_library.common.bl_utils import (
|
||||||
|
get_addon_prefs,
|
||||||
|
get_object_libraries,
|
||||||
|
)
|
||||||
|
|
||||||
|
from asset_library.common.functions import (
|
||||||
|
get_active_library
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def pose_library_panel_poll():
    """Tell whether the pose-library panels apply (active object in pose mode)."""
    active = bpy.context.object
    return active and active.mode == 'POSE'
|
||||||
|
|
||||||
|
class PoseLibraryPanel:
    """Mixin restricting panels to pose mode via their poll()."""

    @classmethod
    def pose_library_panel_poll(cls, context: Context) -> bool:
        obj = context.object
        return bool(obj and obj.mode == 'POSE')

    @classmethod
    def poll(cls, context: Context) -> bool:
        return cls.pose_library_panel_poll(context)
|
||||||
|
|
||||||
|
|
||||||
|
class AssetLibraryMenu:
    # Mixin restricting menus to asset browser editors.
    @classmethod
    def poll(cls, context):
        # Local import mirrors the upstream Blender pose-library code style.
        from bpy_extras.asset_utils import SpaceAssetInfo
        return SpaceAssetInfo.is_asset_browser_poll(context)
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_PT_libraries(Panel):
    """3D View sidebar panel listing the library files used by the active object."""

    bl_label = "Libraries"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Item'

    @classmethod
    def poll(cls, context: Context) -> bool:
        # bool(): the annotation promises a bool; the bare `and` expression
        # previously leaked None or a list to the caller.
        return bool(context.object and get_object_libraries(context.object))

    def draw(self, context: Context) -> None:
        layout = self.layout

        for f in get_object_libraries(context.object):
            row = layout.row(align=True)
            row.label(text=f)
            # Button to open this library file in a new Blender instance.
            row.operator("assetlib.open_blend", icon='FILE_BLEND', text='').filepath = f
|
||||||
|
|
||||||
|
'''
|
||||||
|
class ASSETLIB_PT_pose_library_usage(Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = "TOOLS"
|
||||||
|
bl_label = "Action Library"
|
||||||
|
# asset_categories = {'ANIMATIONS'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context: Context) -> bool:
|
||||||
|
sp = context.space_data
|
||||||
|
|
||||||
|
if not context.object or not context.object.mode == 'POSE':
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not (sp and sp.type == 'FILE_BROWSER' and sp.browse_mode == 'ASSETS'):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def draw(self, context: Context) -> None:
|
||||||
|
layout = self.layout
|
||||||
|
wm = context.window_manager
|
||||||
|
|
||||||
|
sp = context.space_data
|
||||||
|
sp.params.asset_library_ref
|
||||||
|
|
||||||
|
if sp.params.asset_library_ref == 'LOCAL':
|
||||||
|
col = layout.column(align=True)
|
||||||
|
row = col.row(align=True)
|
||||||
|
row.operator("poselib.create_pose_asset", text="Create Pose", icon='POSE_HLT').activate_new_action = False
|
||||||
|
row.operator("actionlib.replace_pose", text='Replace Pose', icon='FILE_REFRESH')
|
||||||
|
col.operator("actionlib.create_anim_asset", text="Create Anim", icon='ANIM')
|
||||||
|
|
||||||
|
col.separator()
|
||||||
|
row = col.row(align=True)
|
||||||
|
row.operator("actionlib.edit_action", text='Edit Action', icon='ACTION')
|
||||||
|
row.operator("actionlib.clear_action", text='Finish Edit', icon='CHECKBOX_HLT')
|
||||||
|
|
||||||
|
col.separator()
|
||||||
|
col.operator("actionlib.generate_preview", icon='RESTRICT_RENDER_OFF', text="Generate Thumbnail")
|
||||||
|
col.operator("actionlib.update_action_data", icon='FILE_TEXT', text="Update Action Data")
|
||||||
|
else:
|
||||||
|
col = layout.column(align=True)
|
||||||
|
row = col.row(align=True)
|
||||||
|
row.operator("actionlib.store_anim_pose", text='Store Anim/Pose', icon='ACTION')
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_PT_pose_library_editing(PoseLibraryPanel, asset_utils.AssetBrowserPanel, Panel):
    """Asset browser side panel exposing the custom metadata of the active asset."""

    bl_space_type = 'FILE_BROWSER'
    bl_region_type = "TOOL_PROPS"
    bl_label = "Metadata"
    #bl_options = {'HIDE_HEADER'}
    # asset_categories = {'ANIMATIONS'}

    @classmethod
    def poll(cls, context: Context) -> bool:
        sp = context.space_data

        # Only inside an asset browser (not a plain file browser).
        if not (sp and sp.type == 'FILE_BROWSER' and sp.browse_mode == 'ASSETS'):
            return False

        # Only when an asset carrying asset data is active.
        if not (context.active_file and context.active_file.asset_data):
            return False

        return True

    def draw(self, context: Context) -> None:
        layout = self.layout
        layout.use_property_split = True

        asset_data = context.active_file.asset_data

        # Show each known custom property when present on the asset.
        # (Removed the unused `metadata` list and spurious f-string prefixes.)
        if 'camera' in asset_data.keys():
            layout.prop(asset_data, '["camera"]', text='Camera', icon='CAMERA_DATA')
        if 'is_single_frame' in asset_data.keys():
            layout.prop(asset_data, '["is_single_frame"]', text='Is Single Frame')
        if 'rest_pose' in asset_data.keys():
            layout.prop(asset_data, '["rest_pose"]', text='Rest Pose', icon='ACTION')
        if 'filepath' in asset_data.keys():
            layout.prop(asset_data, '["filepath"]', text='Filepath')
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_MT_context_menu(AssetLibraryMenu, Menu):
    """Asset browser right-click menu; contents are delegated to the library adapter."""

    bl_label = "Asset Library Menu"

    @classmethod
    def poll(cls, context):
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            cls.poll_message_set("Current editor is not an asset browser")
            return False

        # A custom library must be active for the adapter menu to exist.
        # (Removed unused prefs/asset_lib_ref lookups.)
        lib = get_active_library()
        if not lib:
            return False

        return True

    def draw(self, context):
        lib = get_active_library()
        lib.adapter.draw_context_menu(self.layout)
|
||||||
|
|
||||||
|
|
||||||
|
def is_option_region_visible(context, space):
    """Tell whether the options (TOOL_PROPS) region of `space` is shown."""
    from bpy_extras.asset_utils import SpaceAssetInfo

    # In a plain File Browser the options region only exists while an
    # operator is active; the Asset Browser always has one.
    if not SpaceAssetInfo.is_asset_browser(space) and not space.active_operator:
        return False

    # A collapsed region keeps a width of at most 1 pixel.
    return not any(
        region.type == 'TOOL_PROPS' and region.width <= 1
        for region in context.area.regions
    )
|
||||||
|
|
||||||
|
|
||||||
|
def draw_assetbrowser_header(self, context):
    """Replacement draw function for the asset browser header.

    Falls back to Blender's stock header when no custom library is active;
    otherwise draws the bundle button, the adapter's own header widgets,
    the active asset name, the search field and the display/options toggles.
    """
    lib = get_active_library()

    if not lib:
        # Not one of our libraries: draw the original header saved in register().
        bpy.types.FILEBROWSER_HT_header._draw_asset_browser_buttons(self, context)
        return

    space_data = context.space_data
    params = context.space_data.params

    row = self.layout.row(align=True)
    row.separator()

    # Re-bundle the active library.
    row.operator("assetlib.bundle", icon='UV_SYNC_SELECT', text='').name = lib.name
    #op
    #op.clean = False
    #op.only_recent = True

    # Library-type specific widgets provided by the adapter.
    lib.adapter.draw_header(row)

    if context.selected_files and context.active_file:
        row.separator()
        row.label(text=context.active_file.name)

    row.separator_spacer()

    # Search field, fixed width.
    sub = row.row()
    sub.ui_units_x = 10
    sub.prop(params, "filter_search", text="", icon='VIEWZOOM')

    row.separator_spacer()

    row.prop_with_popover(
        params,
        "display_type",
        panel="ASSETBROWSER_PT_display",
        text="",
        icon_only=True,
    )

    # Toggle for the options (TOOL_PROPS) region; depressed while visible.
    row.operator(
        "screen.region_toggle",
        text="",
        icon='PREFERENCES',
        depress=is_option_region_visible(context, space_data)
    ).region_type = 'TOOL_PROPS'
|
||||||
|
|
||||||
|
|
||||||
|
### Messagebus subscription to monitor asset library changes.
|
||||||
|
_msgbus_owner = object()
|
||||||
|
|
||||||
|
def _on_asset_library_changed() -> None:
    """Update areas when a different asset library is selected."""
    areas_to_refresh = {'DOPESHEET_EDITOR', 'VIEW_3D'}
    for window in bpy.context.window_manager.windows:
        for area in window.screen.areas:
            if area.type in areas_to_refresh:
                area.tag_redraw()
|
||||||
|
|
||||||
|
def register_message_bus() -> None:
    """Subscribe to changes of the selected asset library in any browser."""

    bpy.msgbus.subscribe_rna(
        key=(bpy.types.FileAssetSelectParams, "asset_library_ref"),
        owner=_msgbus_owner,
        args=(),
        notify=_on_asset_library_changed,
        # Keep the subscription alive across blend-file loads.
        options={'PERSISTENT'},
    )
|
||||||
|
|
||||||
|
def unregister_message_bus() -> None:
    """Drop every message-bus subscription owned by this module."""
    bpy.msgbus.clear_by_owner(_msgbus_owner)
|
||||||
|
|
||||||
|
@bpy.app.handlers.persistent
def _on_blendfile_load_pre(none, other_none) -> None:
    # Unsubscribe before the file load; the subscription would not survive it.
    # The parameters are required, but both are None.
    unregister_message_bus()

@bpy.app.handlers.persistent
def _on_blendfile_load_post(none, other_none) -> None:
    # Re-subscribe once the new blend file has been loaded.
    # The parameters are required, but both are None.
    register_message_bus()
|
||||||
|
|
||||||
|
|
||||||
|
# GUI classes (un)registered by this module.
classes = (
    ASSETLIB_PT_pose_library_editing,
    #ASSETLIB_PT_pose_library_usage,
    ASSETLIB_MT_context_menu,
    ASSETLIB_PT_libraries
)
|
||||||
|
|
||||||
|
|
||||||
|
def register() -> None:
    """Register GUI classes, patch the asset browser header and hook handlers."""
    for cls in classes:
        bpy.utils.register_class(cls)

    # Monkey-patch the header draw function; the original is stashed under a
    # private name so draw_assetbrowser_header can fall back to it and
    # unregister() can restore it.
    bpy.types.FILEBROWSER_HT_header._draw_asset_browser_buttons = bpy.types.FILEBROWSER_HT_header.draw_asset_browser_buttons
    bpy.types.FILEBROWSER_HT_header.draw_asset_browser_buttons = draw_assetbrowser_header

    #WorkSpace.active_pose_asset_index = bpy.props.IntProperty(
    #    name="Active Pose Asset",
    #    # TODO explain which list the index belongs to, or how it can be used to get the pose.
    #    description="Per workspace index of the active pose asset"
    #)
    # Register for window-manager. This is a global property that shouldn't be
    # written to files.
    #WindowManager.pose_assets = bpy.props.CollectionProperty(type=AssetHandle)

    # bpy.types.UI_MT_list_item_context_menu.prepend(pose_library_list_item_context_menu)
    # bpy.types.ASSETLIB_MT_context_menu.prepend(pose_library_list_item_context_menu)
    # bpy.types.ACTIONLIB_MT_context_menu.prepend(pose_library_list_item_context_menu)
    #bpy.types.ASSETBROWSER_MT_editor_menus.append(draw_assetbrowser_header)

    # Keep the msgbus subscription alive across blend-file loads.
    register_message_bus()
    bpy.app.handlers.load_pre.append(_on_blendfile_load_pre)
    bpy.app.handlers.load_post.append(_on_blendfile_load_post)
|
||||||
|
|
||||||
|
|
||||||
|
def unregister() -> None:
    """Unregister GUI classes, restore the original header and remove hooks."""
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)

    # Restore the header draw function stashed by register().
    bpy.types.FILEBROWSER_HT_header.draw_asset_browser_buttons = bpy.types.FILEBROWSER_HT_header._draw_asset_browser_buttons
    del bpy.types.FILEBROWSER_HT_header._draw_asset_browser_buttons

    unregister_message_bus()

    # Remove the load handlers added in register(); the original code leaked
    # them, so disabling the add-on left dangling callbacks into this module.
    if _on_blendfile_load_pre in bpy.app.handlers.load_pre:
        bpy.app.handlers.load_pre.remove(_on_blendfile_load_pre)
    if _on_blendfile_load_post in bpy.app.handlers.load_post:
        bpy.app.handlers.load_post.remove(_on_blendfile_load_post)

    #del WorkSpace.active_pose_asset_index
    #del WindowManager.pose_assets

    # bpy.types.UI_MT_list_item_context_menu.remove(pose_library_list_item_context_menu)
    # bpy.types.ASSETLIB_MT_context_menu.remove(pose_library_list_item_context_menu)
    # bpy.types.ACTIONLIB_MT_context_menu.remove(pose_library_list_item_context_menu)
    #bpy.types.ASSETBROWSER_MT_editor_menus.remove(draw_assetbrowser_header)
|
|
@ -0,0 +1,58 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
from typing import List, Tuple
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.app.handlers import persistent
|
||||||
|
|
||||||
|
addon_keymaps: List[Tuple[bpy.types.KeyMap, bpy.types.KeyMapItem]] = []
|
||||||
|
|
||||||
|
|
||||||
|
@persistent
def copy_play_anim(dummy):
    """Mirror the user's play-animation shortcut into the asset browser.

    Looks up the key bound to `screen.animation_play` in the user's 'Frames'
    keymap and binds `assetlib.play_preview` to the same key (with the same
    modifiers) in the File Browser keymap.
    """
    wm = bpy.context.window_manager
    km = wm.keyconfigs.addon.keymaps.new(name="File Browser Main", space_type="FILE_BROWSER")

    km_frames = wm.keyconfigs.user.keymaps.get('Frames')
    if km_frames:
        play = km_frames.keymap_items.get('screen.animation_play')
        if play:
            # Copy the event type/value and every modifier of the binding.
            kmi = km.keymap_items.new(
                "assetlib.play_preview",
                play.type, play.value,
                any=play.any, shift=play.shift, ctrl=play.ctrl, alt=play.alt,
                oskey=play.oskey, key_modifier=play.key_modifier,
            )
            addon_keymaps.append((km, kmi))
|
||||||
|
|
||||||
|
|
||||||
|
def register() -> None:
    """Register the asset browser shortcuts of this module."""
    wm = bpy.context.window_manager
    if wm.keyconfigs.addon is None:
        # This happens when Blender is running in the background.
        return

    km = wm.keyconfigs.addon.keymaps.new(name="File Browser Main", space_type="FILE_BROWSER")

    # Right mouse button: open the custom context menu.
    kmi = km.keymap_items.new("wm.call_menu", "RIGHTMOUSE", "PRESS")
    kmi.properties.name = 'ASSETLIB_MT_context_menu'
    addon_keymaps.append((km, kmi))

    # Space: play the preview of the active asset.
    kmi = km.keymap_items.new("assetlib.play_preview", "SPACE", "PRESS")
    addon_keymaps.append((km, kmi))

    # km = addon.keymaps.new(name = "Grease Pencil Stroke Paint Mode", space_type = "EMPTY")
    # kmi = km.keymap_items.new('wm.call_panel', type='F2', value='PRESS')

    # Compare handlers by name rather than identity: an add-on reload
    # replaces the function object but keeps the name.
    if 'copy_play_anim' not in [hand.__name__ for hand in bpy.app.handlers.load_post]:
        bpy.app.handlers.load_post.append(copy_play_anim)
|
||||||
|
|
||||||
|
def unregister() -> None:
    """Remove the load handler and every shortcut this module registered."""
    # Clear shortcuts from the keymap.
    # NOTE(review): membership is checked by name but removal is by identity;
    # after a module reload the stale handler would not be removable — confirm.
    if 'copy_play_anim' in [hand.__name__ for hand in bpy.app.handlers.load_post]:
        bpy.app.handlers.load_post.remove(copy_play_anim)

    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()
|
|
@ -0,0 +1,562 @@
|
||||||
|
|
||||||
|
|
||||||
|
from typing import Set
|
||||||
|
#import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
import subprocess
|
||||||
|
import importlib
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from bpy.types import Context, Operator
|
||||||
|
from bpy.props import (
|
||||||
|
BoolProperty,
|
||||||
|
EnumProperty,
|
||||||
|
StringProperty,
|
||||||
|
IntProperty)
|
||||||
|
|
||||||
|
#from asset_library.constants import (DATA_TYPES, DATA_TYPE_ITEMS, MODULE_DIR)
|
||||||
|
import asset_library
|
||||||
|
from asset_library.common.bl_utils import (
|
||||||
|
get_addon_prefs,
|
||||||
|
get_bl_cmd,
|
||||||
|
#suitable_areas,
|
||||||
|
refresh_asset_browsers,
|
||||||
|
load_datablocks)
|
||||||
|
|
||||||
|
from asset_library.common.file_utils import open_blender_file, synchronize
|
||||||
|
from asset_library.common.functions import get_active_library, asset_warning_callback
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
from tempfile import gettempdir
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_clear_asset(Operator):
    """Delete the active asset's files from disk and re-bundle the library."""

    bl_idname = "assetlib.clear_asset"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}
    bl_label = 'Clear Asset'
    bl_description = 'Clear Selected Assets'

    @classmethod
    def poll(cls, context):
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            return False

        # Assets of the 'Current File' library have no file on disk to clear.
        sp = context.space_data
        if sp.params.asset_library_ref == 'LOCAL':
            return False

        return True

    def execute(self, context: Context) -> Set[str]:
        asset = context.active_file

        lib = get_active_library()

        filepath = lib.adapter.format_path(asset.asset_data['filepath'])
        asset_image = lib.adapter.get_path('image', asset.name, filepath)
        asset_video = lib.adapter.get_path('video', asset.name, filepath)

        if filepath:
            if filepath.exists():
                filepath.unlink()
            # Preview files may never have been generated: only unlink
            # paths that actually exist (bare unlink() raised before).
            if asset_image and asset_image.exists():
                asset_image.unlink()
            if asset_video and asset_video.exists():
                asset_video.unlink()

        # Rebuild the bundled library without the removed asset.
        bpy.ops.assetlib.bundle(name=lib.name, blocking=True)

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_edit_data(Operator):
    """Rename/re-catalog the active asset.

    invoke() loads the asset and its on-disk sidecar files, execute() moves
    everything to the new name/catalog and triggers a differential bundle,
    cancel() discards the datablock loaded in invoke().
    """

    bl_idname = "assetlib.edit_data"
    bl_label = "Edit Asset Data"
    bl_description = "Edit Current Asset Data"
    bl_options = {"REGISTER", "UNDO"}

    # Dialog fields; catalog/name refresh the warning line while typing.
    warning: StringProperty(name='')
    path: StringProperty(name='Path')
    catalog: StringProperty(name='Catalog', update=asset_warning_callback, options={'TEXTEDIT_UPDATE'})
    name: StringProperty(name='Name', update=asset_warning_callback, options={'TEXTEDIT_UPDATE'})
    tags: StringProperty(name='Tags', description='Tags need to separate with a comma (,)')

    @classmethod
    def poll(cls, context):
        # Only available from an asset browser editor.
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            return False
        return True

    def execute(self, context: Context) -> Set[str]:
        prefs = get_addon_prefs()

        lib = get_active_library()
        if lib.merge_libraries:
            # Merged view: write into the configured storage library instead.
            lib = prefs.libraries[lib.store_library]

        new_name = lib.adapter.norm_file_name(self.name)
        new_asset_path = lib.adapter.get_asset_path(name=new_name, catalog=self.catalog)

        # Move the asset file: delete the old one, write the new one.
        self.old_asset_path.unlink()
        lib.adapter.write_asset(asset=self.asset, asset_path=new_asset_path)

        # Rename the preview image/video next to the asset, when present.
        if self.old_image_path.exists():
            new_img_path = lib.adapter.get_path('image', new_name, new_asset_path)
            self.old_image_path.rename(new_img_path)

        if self.old_video_path.exists():
            new_video_path = lib.adapter.get_path('video', new_name, new_asset_path)
            self.old_video_path.rename(new_video_path)

        # Replace the sidecar description file.
        if self.old_asset_description_path.exists():
            self.old_asset_description_path.unlink()

        new_asset_description = lib.adapter.get_asset_description(
            asset=self.asset,
            catalog=self.catalog,
            modified=time.time_ns()
        )

        lib.adapter.write_asset_description(new_asset_description, new_asset_path)

        # Drop the old directory if the move left it empty.
        if not list(self.old_asset_path.parent.iterdir()):
            self.old_asset_path.parent.rmdir()

        # Write a REMOVE/ADD diff so the bundle only reprocesses this asset.
        diff_path = Path(bpy.app.tempdir, 'diff.json')

        diff = [dict(self.old_asset_description, operation='REMOVE')]
        diff += [dict(lib.adapter.norm_asset_datas([new_asset_description])[0], operation='ADD')]

        diff_path.write_text(json.dumps(diff, indent=4), encoding='utf-8')

        bpy.ops.assetlib.bundle(name=lib.name, diff=str(diff_path), blocking=True)

        return {"FINISHED"}

    def draw(self, context):
        # Dialog layout for invoke_props_dialog().
        layout = self.layout
        layout.separator()

        layout.use_property_split = True

        lib = get_active_library()

        if lib.merge_libraries:
            # Let the user pick which underlying library stores the asset.
            layout.prop(lib, 'store_library', expand=False)

        layout.prop(self, "catalog", text="Catalog")
        layout.prop(self, "name", text="Name")
        layout.prop(self, 'tags')

        #layout.prop()

        layout.separator()
        col = layout.column()
        col.use_property_split = False
        #row.enabled = False

        # Informational lines: resolved path and any warning raised while typing.
        if self.path:
            col.label(text=self.path)

        if self.warning:
            col.label(icon='ERROR', text=self.warning)

    def invoke(self, context, event):
        lib = get_active_library()

        active_lib = lib.adapter.get_active_asset_library()

        lib.store_library = active_lib.name

        asset_handle = context.asset_file_handle

        # Map catalog uuid -> {path, name} to resolve the asset's catalog path.
        catalog_file = lib.adapter.read_catalog()
        catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_file.items()}

        #asset_handle = context.asset_file_handle
        self.old_asset_name = asset_handle.name
        self.old_asset_path = lib.adapter.get_active_asset_path()

        # Load the datablock being edited; it is removed again in cancel().
        self.asset = load_datablocks(self.old_asset_path, self.old_asset_name, type=lib.data_types)

        self.old_image_path = lib.adapter.get_path('image', self.old_asset_name, self.old_asset_path)
        self.old_video_path = lib.adapter.get_path('video', self.old_asset_name, self.old_asset_path)

        self.old_asset_description_path = lib.adapter.get_asset_description_path(self.old_asset_path)

        self.old_asset_description = lib.adapter.read_asset_description(self.old_asset_path)
        self.old_asset_description = lib.adapter.norm_asset_datas([self.old_asset_description])[0]

        # NOTE(review): execution continues even when no asset was loaded —
        # the next line would then fail on self.asset being None; confirm.
        if not self.asset:
            self.report({'ERROR'}, 'No asset found')

        self.name = self.old_asset_name
        self.tags = ', '.join(self.asset.asset_data.tags.keys())
        #asset_path
        self.catalog = catalog_ids[asset_handle.asset_data.catalog_id]['path']

        return context.window_manager.invoke_props_dialog(self)

    def cancel(self, context):
        print('Cancel Edit Data, removing the asset')

        lib = get_active_library()
        active_lib = lib.adapter.get_active_asset_library()

        # Discard the datablock loaded by invoke().
        getattr(bpy.data, active_lib.data_types).remove(self.asset)
|
||||||
|
|
||||||
|
class ASSETLIB_OT_remove_user_library(Operator):
    """Delete one user library entry from the add-on preferences."""

    bl_idname = "assetlib.remove_user_library"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Remove User Library'
    bl_description = 'Remove User Library'

    # Index of the entry to delete in prefs.user_libraries.
    index : IntProperty(default=-1)

    def execute(self, context: Context) -> Set[str]:
        get_addon_prefs().user_libraries.remove(self.index)
        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_add_user_library(Operator):
    """Append a new, expanded user library entry to the add-on preferences."""

    bl_idname = "assetlib.add_user_library"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Add User Library'
    bl_description = 'Add User Library'

    def execute(self, context: Context) -> Set[str]:
        new_lib = get_addon_prefs().user_libraries.add()
        # Expand the freshly added entry so it is immediately editable.
        new_lib.expand = True
        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_open_blend(Operator):
    """Open the blend file that stores the active asset in a new Blender instance."""

    bl_idname = "assetlib.open_blend"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Open Blender File'
    bl_description = 'Open blender file'

    def execute(self, context: Context) -> Set[str]:
        # Removed stale commented-out code; the adapter resolves the
        # active asset's source blend file directly.
        lib = get_active_library()

        filepath = lib.adapter.get_active_asset_path()
        open_blender_file(filepath)

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_set_paths(Operator):
    """(Re)compute the Blender library paths for one or all configured libraries."""

    bl_idname = "assetlib.set_paths"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}
    bl_label = 'Set Paths'
    bl_description = 'Set Library Paths'

    # Name of a single library to process; ignored when `all` is set.
    name: StringProperty()
    all: BoolProperty(default=False)

    def execute(self, context: Context) -> Set[str]:
        prefs = get_addon_prefs()

        print('Set Paths')
        if self.all:
            libs = prefs.libraries
        else:
            # Look the library up safely: the previous subscript raised an
            # uncaught KeyError for an unknown name.
            lib = prefs.libraries.get(self.name)
            if lib is None:
                self.report({'ERROR'}, f'No library named "{self.name}"')
                return {'CANCELLED'}
            libs = [lib]

        for lib in libs:
            # Clear first so set_library_path starts from a clean state.
            lib.clear_library_path()
            lib.set_library_path()

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_bundle_library(Operator):
    """Bundle the assets of one or more libraries in a background Blender process.

    Writes a small driver script to the temp directory and runs it with a
    headless Blender (blocking or not, per ``blocking``).
    """

    bl_idname = "assetlib.bundle"
    bl_options = {"INTERNAL"}
    bl_label = 'Bundle Library'
    bl_description = 'Bundle all matching asset found inside one blend'

    # Name of a single library to bundle (optional, combined with `mode`).
    name : StringProperty()
    # Path of a cache-diff file forwarded to the adapter.
    diff : StringProperty()
    # When True, wait for the background Blender to finish and refresh browsers.
    blocking : BoolProperty(default=False)
    mode : EnumProperty(items=[(i.replace(' ', '_').upper(), i, '') for i in ('None', 'All', 'Auto Bundle')], default='NONE')
    directory : StringProperty(subtype='DIR_PATH')

    def execute(self, context: Context) -> Set[str]:
        prefs = get_addon_prefs()

        libs = []
        if self.name:
            libs += [prefs.libraries[self.name]]

        if self.mode == 'ALL':
            libs += prefs.libraries.values()
        elif self.mode == 'AUTO_BUNDLE':
            libs += [l for l in prefs.libraries if l.auto_bundle]

        # A library could be collected twice (by name *and* by mode) and
        # then bundled twice; keep the first occurrence of each, in order.
        seen_names = set()
        unique_libs = []
        for lib in libs:
            if lib.name not in seen_names:
                seen_names.add(lib.name)
                unique_libs.append(lib)
        libs = unique_libs

        lib_datas = [l.to_dict() for l in libs]

        print(f'Bundle Libraries: {[l.name for l in libs]}')

        script_path = Path(gettempdir()) / 'bundle_library.py'
        # Embed the diff path with repr() so backslashes or quotes in the
        # path cannot break the generated script (previously it was
        # interpolated into a bare single-quoted string).
        script_code = dedent(f"""
            import bpy
            prefs = bpy.context.preferences.addons["asset_library"].preferences

            for lib_data in {lib_datas}:
                lib = prefs.env_libraries.add()
                lib.set_dict(lib_data)
                lib.adapter.bundle(cache_diff={self.diff!r})
        """)

        script_path.write_text(script_code)

        cmd = get_bl_cmd(script=str(script_path), background=True)

        if self.blocking:
            subprocess.call(cmd)
            # Refresh asset browsers shortly after the bundle completes.
            bpy.app.timers.register(refresh_asset_browsers, first_interval=0.2)
        else:
            subprocess.Popen(cmd)

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_reload_addon(Operator):
    """Unregister, reload and re-register the whole asset_library add-on."""

    bl_idname = "assetlib.reload_addon"
    bl_options = {"UNDO"}
    bl_label = 'Reload Asset Library Addon'
    bl_description = 'Reload The Asset Library Addon and the addapters'

    def execute(self, context: Context) -> Set[str]:
        print('Execute reload')

        # Tear the add-on down, reload its module from disk, then bring
        # it back up so code changes take effect without restarting.
        asset_library.unregister()
        importlib.reload(asset_library)
        asset_library.register()

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_diff(Operator):
    """Compute the diff of a library (or its conform target) against its cache."""

    bl_idname = "assetlib.diff"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Synchronize'
    bl_description = 'Synchronize Action Lib to Local Directory'

    name : StringProperty()
    # When True, diff the library's conform adapter instead.
    conform : BoolProperty(default=False)

    def execute(self, context: Context) -> Set[str]:
        prefs = get_addon_prefs()

        lib = prefs.libraries.get(self.name)
        # .get() returns None for an unknown name; fail with a clear
        # message instead of the previous AttributeError.
        if lib is None:
            self.report({'ERROR'}, f'No library named "{self.name}"')
            return {'CANCELLED'}

        if self.conform:
            lib.conform.adapter.diff()
        else:
            lib.adapter.diff()

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_conform_library(Operator):
    """Conform a library in a background Blender process.

    Splits each asset into its own blend file and externalizes previews,
    using the user-picked destination directory.
    """

    bl_idname = "assetlib.conform_library"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = "Conform Library"
    bl_description = "Split each assets per blend and externalize preview"

    name : StringProperty()
    image_template : StringProperty()
    video_template : StringProperty()
    directory : StringProperty(subtype='DIR_PATH', name='Filepath')

    def execute(self, context: Context) -> Set[str]:
        prefs = get_addon_prefs()

        lib = prefs.libraries.get(self.name)
        # Unknown name previously caused an AttributeError on None below.
        if lib is None:
            self.report({'ERROR'}, f'No library named "{self.name}"')
            return {'CANCELLED'}

        # Only pass the templates the user actually filled in.
        templates = {}
        if self.image_template:
            templates['image'] = self.image_template
        if self.video_template:
            templates['video'] = self.video_template

        script_path = Path(gettempdir()) / 'bundle_library.py'
        # repr() the directory so backslashes/quotes in the path cannot
        # break the generated script (it was interpolated into a bare
        # single-quoted string before).
        script_code = dedent(f"""
            import bpy
            prefs = bpy.context.preferences.addons["asset_library"].preferences
            lib = prefs.env_libraries.add()
            lib.set_dict({lib.to_dict()})
            lib.adapter.conform(directory={self.directory!r}, templates={templates})
        """)

        script_path.write_text(script_code)

        cmd = get_bl_cmd(script=str(script_path), background=True)

        subprocess.Popen(cmd)

        return {'FINISHED'}

    def invoke(self, context, event):
        # Let the user pick the destination directory first.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_play_preview(Operator):
    """Open the image or video preview of the active asset with the configured player."""

    bl_idname = "assetlib.play_preview"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}
    bl_label = 'Play Preview'
    bl_description = 'Play Preview'

    @classmethod
    def poll(cls, context: Context) -> bool:
        # An asset must be active inside an Asset Browser editor of an
        # active library.
        if not context.active_file:
            return False

        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            cls.poll_message_set("Current editor is not an asset browser")
            return False

        return bool(get_active_library())

    def execute(self, context: Context) -> Set[str]:
        active_file = context.active_file
        prefs = get_addon_prefs()
        lib = get_active_library()

        asset_path = lib.adapter.get_active_asset_path()

        image = lib.adapter.get_image(active_file.name, asset_path)
        video = lib.adapter.get_video(active_file.name, asset_path)

        if not (image or video):
            self.report({'ERROR'}, f'Preview for {active_file.name} not found.')
            return {"CANCELLED"}

        # Prefer the video preview when both exist; fall back to the OS
        # default handler when no explicit player is configured.
        if video:
            self.report({'INFO'}, f'Video found. {video}.')
            if prefs.video_player:
                subprocess.Popen([prefs.video_player, video])
            else:
                bpy.ops.wm.path_open(filepath=str(video))
        else:
            self.report({'INFO'}, f'Image found. {image}.')
            if prefs.image_player:
                subprocess.Popen([prefs.image_player, image])
            else:
                bpy.ops.wm.path_open(filepath=str(image))

        return {"FINISHED"}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSETLIB_OT_synchronize(Operator):
    """Synchronize a library to its local directory.

    Currently a stub: the operator keeps its properties for UI/keymap
    compatibility but performs no work. It is meant to be superseded by
    ``assetlib.bundle``.
    """

    bl_idname = "assetlib.synchronize"
    bl_options = {"REGISTER", "UNDO"}
    bl_label = 'Synchronize'
    bl_description = 'Synchronize Action Lib to Local Directory'

    clean : BoolProperty(default=False)
    only_new : BoolProperty(default=False)
    only_recent : BoolProperty(default=False)
    name: StringProperty()
    all: BoolProperty(default=False)

    def execute(self, context: Context) -> Set[str]:
        # NOTE: the synchronize implementation that used to follow this
        # unconditional return was unreachable dead code (it iterated the
        # libraries and called synchronize()); it has been removed.
        print('Not yet Implemented, have to be replace by Bundle instead')
        return {'FINISHED'}
|
||||||
|
|
||||||
|
# Operator classes handled by register()/unregister(), in registration order.
classes = (
    ASSETLIB_OT_play_preview,
    ASSETLIB_OT_open_blend,
    ASSETLIB_OT_set_paths,
    ASSETLIB_OT_synchronize,
    ASSETLIB_OT_add_user_library,
    ASSETLIB_OT_remove_user_library,
    ASSETLIB_OT_diff,
    ASSETLIB_OT_bundle_library,
    ASSETLIB_OT_clear_asset,
    ASSETLIB_OT_edit_data,
    ASSETLIB_OT_conform_library,
    ASSETLIB_OT_reload_addon,
)
|
||||||
|
|
||||||
|
def register():
    """Register every operator class of this module with Blender."""
    for operator_cls in classes:
        bpy.utils.register_class(operator_cls)
|
||||||
|
|
||||||
|
def unregister():
    """Unregister the operator classes in reverse registration order."""
    for operator_cls in reversed(classes):
        bpy.utils.unregister_class(operator_cls)
|
|
@ -0,0 +1,14 @@
|
||||||
|
|
||||||
|
from asset_library.pose import operators

# Support add-on reload: when this module is executed a second time,
# refresh the submodule as well so its changes take effect.
if 'bpy' in locals():
    import importlib

    importlib.reload(operators)
|
||||||
|
|
||||||
|
def register():
    """Register the pose operators."""
    operators.register()
|
||||||
|
|
||||||
|
def unregister():
    """Unregister the pose operators."""
    operators.unregister()
|
|
@ -0,0 +1,63 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Pose Library - Conversion of old pose libraries.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
from collections.abc import Collection
|
||||||
|
|
||||||
|
if "pose_creation" not in locals():
|
||||||
|
from . import pose_creation
|
||||||
|
else:
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
pose_creation = importlib.reload(pose_creation)
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.types import (
|
||||||
|
Action,
|
||||||
|
TimelineMarker,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_old_poselib(old_poselib: Action) -> Collection[Action]:
    """Convert an old-style pose library to a set of pose Actions.

    Old pose libraries were one Action with multiple pose markers. Each pose
    marker will be converted to an Action by itself and marked as asset.
    """

    pose_assets = []
    for marker in old_poselib.pose_markers:
        pose_action = convert_old_pose(old_poselib, marker)
        if pose_action:
            pose_assets.append(pose_action)

    # Mark all Actions as assets in one go. Ideally this would be done on an
    # appropriate frame in the scene (to set up things like the background
    # colour), but the old-style poselib doesn't contain such information. All
    # we can do is just render on the current frame.
    bpy.ops.asset.mark({'selected_ids': pose_assets})

    return pose_assets
|
||||||
|
|
||||||
|
|
||||||
|
def convert_old_pose(old_poselib: Action, marker: TimelineMarker) -> Optional[Action]:
    """Convert an old-style pose library pose to a pose action.

    Returns None when no F-curve has a keyframe on the marker's frame.
    """

    pose_frame: int = marker.frame
    new_action: Optional[Action] = None

    for fcurve in old_poselib.fcurves:
        keyframe = pose_creation.find_keyframe(fcurve, pose_frame)
        if not keyframe:
            continue

        # Create the destination Action lazily, only once a matching
        # keyframe is actually found.
        if new_action is None:
            new_action = bpy.data.actions.new(marker.name)

        pose_creation.create_single_key_fcurve(new_action, fcurve, keyframe)

    return new_action
|
|
@ -0,0 +1,615 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Pose Library - operators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from math import radians
|
||||||
|
from mathutils import Vector
|
||||||
|
from pathlib import Path
|
||||||
|
from tempfile import gettempdir
|
||||||
|
from typing import Optional, Set
|
||||||
|
|
||||||
|
from asset_library.pose import pose_creation, pose_usage
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import uuid
|
||||||
|
import time
|
||||||
|
|
||||||
|
from bpy.props import BoolProperty, CollectionProperty, EnumProperty, PointerProperty, StringProperty
|
||||||
|
from bpy.types import (
|
||||||
|
Action,
|
||||||
|
Context,
|
||||||
|
Event,
|
||||||
|
FileSelectEntry,
|
||||||
|
Object,
|
||||||
|
Operator,
|
||||||
|
PropertyGroup,
|
||||||
|
)
|
||||||
|
from bpy_extras import asset_utils
|
||||||
|
from bpy_extras.io_utils import ExportHelper, ImportHelper
|
||||||
|
|
||||||
|
from asset_library.action.functions import (
|
||||||
|
get_marker,
|
||||||
|
get_keyframes,
|
||||||
|
)
|
||||||
|
|
||||||
|
from asset_library.common.bl_utils import (
|
||||||
|
get_view3d_persp,
|
||||||
|
load_assets_from,
|
||||||
|
split_path
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_create_pose_asset(Operator):
    bl_idname = "poselib.create_pose_asset"
    bl_label = "Create Pose Asset"
    bl_description = (
        "Create a new Action that contains the pose of the selected bones, and mark it as Asset. "
        "The asset will be stored in the current blend file"
    )
    bl_options = {"REGISTER", "UNDO"}

    pose_name: StringProperty(name="Pose Name")  # type: ignore
    activate_new_action: BoolProperty(name="Activate New Action", default=True)  # type: ignore

    @classmethod
    def poll(cls, context: Context) -> bool:
        # Make sure that if there is an asset browser open, the artist can see the newly created pose asset.
        asset_browse_area: Optional[bpy.types.Area] = asset_browser.area_from_context(context)
        if not asset_browse_area:
            # No asset browser is visible, so there also aren't any expectations
            # that this asset will be visible.
            return True

        asset_space_params = asset_browser.params(asset_browse_area)
        if asset_space_params.asset_library_ref != 'LOCAL':
            cls.poll_message_set("Asset Browser must be set to the Current File library")
            return False

        return True

    def _pose_name_from_marker(self, context: Context, pose_name: str) -> str:
        """Derive the final pose name from an action-marker name.

        Strips the asset prefix and a single-letter side suffix, special-cases
        hand and lips actions, and re-prefixes with the asset name when needed.
        """
        prefix = True
        # By convention the blend filename starts with the asset name.
        asset_name = Path(bpy.data.filepath).stem.split('_')[0]

        # Escape the asset name: it comes from a filename and may contain
        # regex metacharacters (previously interpolated unescaped).
        action_asset_name = re.search(f'^{re.escape(asset_name)}.', pose_name)
        if action_asset_name:
            pose_name = pose_name.replace(action_asset_name.group(0), '')

        # Raw string: '_\w$' was a non-raw escape (SyntaxWarning on 3.12+).
        side = re.search(r'_\w$', pose_name)
        if side:
            pose_name = pose_name.replace(side.group(0), '')

        if 'hands' in context.object.animation_data.action.name.lower():
            pose_name = f'hand_{pose_name}'

        if pose_name.startswith('lips_'):
            # FIX: the original called pose_name.replace('lips_', '') and
            # discarded the result; assign it so the prefix is removed.
            pose_name = pose_name.replace('lips_', '')
            split = pose_name.split('_')
            pose_name = '-'.join([s for s in split if s.isupper()])
            pose_name = f'{pose_name}_{split[-1]}'
            prefix = False

        if prefix and not pose_name.startswith(asset_name):
            pose_name = f'{asset_name}_{pose_name}'

        return pose_name

    def execute(self, context: Context) -> Set[str]:
        # Prefer a name derived from the active action's marker; fall back
        # to the operator property or the object name.
        pose_name = False
        if context.object.animation_data:
            if context.object.animation_data.action:
                pose_name = get_marker(context.object.animation_data.action)

        if pose_name:
            pose_name = self._pose_name_from_marker(context, pose_name)
        else:
            pose_name = self.pose_name or context.object.name

        asset = pose_creation.create_pose_asset_from_context(context, pose_name)
        if not asset:
            self.report({"WARNING"}, "No keyframes were found for this pose")
            return {"CANCELLED"}

        ### ADD ADM
        data = asset.asset_data
        data.catalog_id = str(uuid.UUID(int=0))

        data_dict = dict(
            is_single_frame=True,
        )
        if context.scene.camera:
            data_dict.update(dict(camera=context.scene.camera.name))

        for k, v in data_dict.items():
            data[k] = v
        ###

        if self.activate_new_action:
            self._set_active_action(context, asset)
        self._activate_asset_in_browser(context, asset)
        return {'FINISHED'}

    def _set_active_action(self, context: Context, asset: Action) -> None:
        """Make the new asset the active Action, remembering the previous one."""
        self._prevent_action_loss(context.object)

        anim_data = context.object.animation_data_create()
        context.scene.actionlib.previous_action = anim_data.action
        anim_data.action = asset

    def _activate_asset_in_browser(self, context: Context, asset: Action) -> None:
        """Activate the new asset in the appropriate Asset Browser.

        This makes it possible to immediately check & edit the created pose asset.
        """

        asset_browse_area: Optional[bpy.types.Area] = asset_browser.area_from_context(context)
        if not asset_browse_area:
            return

        # After creating an asset, the window manager has to process the
        # notifiers before editors should be manipulated.
        pose_creation.assign_from_asset_browser(asset, asset_browse_area)

        # Pass deferred=True, because we just created a new asset that isn't
        # known to the Asset Browser space yet. That requires the processing of
        # notifiers, which will only happen after this code has finished
        # running.
        asset_browser.activate_asset(asset, asset_browse_area, deferred=True)

    def _prevent_action_loss(self, object: Object) -> None:
        """Mark the action with Fake User if necessary.

        This is to prevent action loss when we reduce its reference counter by one.
        """

        if not object.animation_data:
            return

        action = object.animation_data.action
        if not action:
            return

        if action.use_fake_user or action.users > 1:
            # Removing one user won't GC it.
            return

        action.use_fake_user = True
        self.report({'WARNING'}, "Action %s marked Fake User to prevent loss" % action.name)
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_restore_previous_action(Operator):
    """Switch back to the Action that was active before a pose asset was created."""

    bl_idname = "poselib.restore_previous_action"
    bl_label = "Restore Previous Action"
    bl_description = "Switch back to the previous Action, after creating a pose asset"
    bl_options = {"REGISTER", "UNDO"}

    @classmethod
    def poll(cls, context: Context) -> bool:
        obj = context.object
        return bool(
            context.scene.actionlib.previous_action
            and obj
            and obj.animation_data
            and obj.animation_data.action
            and obj.animation_data.action.asset_data is not None
        )

    def execute(self, context: Context) -> Set[str]:
        # This is the Action that was just created with "Create Pose Asset".
        # It has to be re-applied after switching to the previous action,
        # to ensure the character keeps the same pose.
        anim_data = context.object.animation_data
        self.pose_action = anim_data.action

        actionlib = context.scene.actionlib
        anim_data.action = actionlib.previous_action
        actionlib.previous_action = None

        # Wait a bit for the action assignment to be handled, before applying the pose.
        window_manager = context.window_manager
        self._timer = window_manager.event_timer_add(0.001, window=context.window)
        window_manager.modal_handler_add(self)

        return {'RUNNING_MODAL'}

    def modal(self, context, event):
        if event.type != 'TIMER':
            return {'RUNNING_MODAL'}

        context.window_manager.event_timer_remove(self._timer)

        # Timer fired: the action switch has been processed, so re-apply
        # the freshly created pose on top of the restored action.
        context.object.pose.apply_pose_from_action(self.pose_action)
        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class ASSET_OT_assign_action(Operator):
    """Assign (or clear) the pose Action under the cursor on the active Object."""

    bl_idname = "asset.assign_action"
    bl_label = "Assign Action"
    bl_description = "Set this pose Action as active Action on the active Object"
    bl_options = {"REGISTER", "UNDO"}

    # False clears the active action instead of assigning one.
    assign: BoolProperty(name="Assign", default=True)  # type: ignore

    @classmethod
    def poll(cls, context: Context) -> bool:
        if not isinstance(getattr(context, "id", None), Action):
            return False
        obj = context.object
        return bool(obj and obj.mode == "POSE")  # This condition may not be desired.

    def execute(self, context: Context) -> Set[str]:
        obj = context.object
        if self.assign:
            obj.animation_data_create().action = context.id
        elif obj.animation_data.action:
            obj.animation_data.action = None

        return {"FINISHED"}
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_copy_as_asset(Operator):
    """Save the current pose to a temporary blend file and put its path on the clipboard."""

    bl_idname = "poselib.copy_as_asset"
    bl_label = "Copy Pose As Asset"
    bl_description = "Create a new pose asset on the clipboard, to be pasted into an Asset Browser"
    bl_options = {"REGISTER"}

    # Prefix identifying asset paths on the clipboard.
    CLIPBOARD_ASSET_MARKER = "ASSET-BLEND="

    @classmethod
    def poll(cls, context: Context) -> bool:
        obj = context.object
        # There must be an object, in pose mode, with selected bones.
        if not obj or obj.mode != "POSE":
            return False
        return bool(obj.pose and context.selected_pose_bones_from_active_object)

    def execute(self, context: Context) -> Set[str]:
        asset = pose_creation.create_pose_asset_from_context(
            context,
            context.object.name,
        )
        if asset is None:
            self.report({"WARNING"}, "No animation data found to create asset from")
            return {"CANCELLED"}

        filepath = self.save_datablock(asset)

        context.window_manager.clipboard = "%s%s" % (
            self.CLIPBOARD_ASSET_MARKER,
            filepath,
        )
        asset_browser.tag_redraw(context.screen)
        self.report({"INFO"}, "Pose Asset copied, use Paste As New Asset in any Asset Browser to paste")

        # The asset has been saved to disk; to clean up it has to lose its
        # asset & fake-user status so it can be garbage-collected.
        asset.asset_clear()
        asset.use_fake_user = False

        # The asset can be removed from the main DB, as it was purely created
        # to be stored to disk, and not to be used in this file.
        if asset.users > 0:
            # This should never happen, and indicates a bug in the code. Having a warning about it is nice,
            # but it shouldn't stand in the way of actually cleaning up the meant-to-be-temporary datablock.
            self.report({"WARNING"}, "Unexpected non-zero user count for the asset, please report this as a bug")

        bpy.data.actions.remove(asset)
        return {"FINISHED"}

    def save_datablock(self, action: Action) -> Path:
        """Write *action* alone into a compressed temp blend file; return its path."""
        blend_path = Path(bpy.app.tempdir) / "copied_asset.blend"
        bpy.data.libraries.write(
            str(blend_path),
            datablocks={action},
            path_remap="NONE",
            fake_user=True,
            compress=True,  # Single-datablock blend file, likely little need to diff.
        )
        return blend_path
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_paste_asset(Operator):
    """Load the asset blend referenced on the clipboard into the current file."""

    bl_idname = "poselib.paste_asset"
    bl_label = "Paste As New Asset"
    bl_description = "Paste the Asset that was previously copied using Copy As Asset"
    bl_options = {"REGISTER", "UNDO"}

    @classmethod
    def poll(cls, context: Context) -> bool:
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            cls.poll_message_set("Current editor is not an asset browser")
            return False

        if context.space_data.params.asset_library_ref != 'LOCAL':
            cls.poll_message_set("Asset Browser must be set to the Current File library")
            return False

        # Delay checking the clipboard as much as possible, as it's CPU-heavier than the other checks.
        clipboard: str = context.window_manager.clipboard
        if not clipboard:
            cls.poll_message_set("Clipboard is empty")
            return False

        if not clipboard.startswith(POSELIB_OT_copy_as_asset.CLIPBOARD_ASSET_MARKER):
            cls.poll_message_set("Clipboard does not contain an asset")
            return False

        return True

    def execute(self, context: Context) -> Set[str]:
        clipboard = context.window_manager.clipboard
        marker_len = len(POSELIB_OT_copy_as_asset.CLIPBOARD_ASSET_MARKER)
        filepath = Path(clipboard[marker_len:])

        assets = load_assets_from(filepath)
        if not assets:
            self.report({"ERROR"}, "Did not find any assets on clipboard")
            return {"CANCELLED"}

        self.report({"INFO"}, "Pasted %d assets" % len(assets))

        bpy.ops.asset.library_refresh()

        asset_browser_area = asset_browser.area_from_context(context)
        if not asset_browser_area:
            return {"FINISHED"}

        # Assign same catalog_global as in asset browser.
        catalog_id = asset_browser.active_catalog_id(asset_browser_area)
        for pasted_asset in assets:
            pasted_asset.asset_data.catalog_id = catalog_id
        asset_browser.activate_asset(assets[0], asset_browser_area, deferred=True)

        return {"FINISHED"}
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_pose_asset_select_bones(Operator):
    """Select the bones used by the active pose asset."""

    bl_idname = "poselib.pose_asset_select_bones"
    bl_label = "Select Bones"
    bl_description = "Click: Select used Bones\nAlt+Click: Select Flipped Bones\nCtrl+Click: Select Both sides."
    bl_options = {"REGISTER", "UNDO"}

    selected_side: EnumProperty(
        name='Selected Side',
        items=(
            ('CURRENT', "Current", ""),
            ('FLIPPED', "Flipped", ""),
            ('BOTH', "Both", ""),
        )
    )

    @classmethod
    def poll(cls, context: Context) -> bool:
        if not (
            context.object
            and context.object.mode == "POSE"  # This condition may not be desired.
            and context.asset_library_ref
            and context.asset_file_handle
        ):
            return False
        return context.asset_file_handle.id_type == 'ACTION'

    def execute(self, context: Context) -> Set[str]:
        asset: FileSelectEntry = context.asset_file_handle
        # Local assets can be used directly; external ones must be loaded first.
        if asset.local_id:
            return self.use_pose(context, asset.local_id)
        return self._load_and_use_pose(context)

    # NOTE: the class previously defined use_pose() twice; the first stub
    # definition was dead code silently shadowed by the one kept below.

    def _load_and_use_pose(self, context: Context) -> Set[str]:
        """Load the asset's Action from its library file, then apply use_pose()."""
        asset_library_ref = context.asset_library_ref
        asset = context.asset_file_handle
        asset_lib_path = bpy.types.AssetHandle.get_full_library_path(asset, asset_library_ref)

        if not asset_lib_path:
            self.report(  # type: ignore
                {"ERROR"},
                # TODO: Add some way to get the library name from the library reference (just asset_library_ref.name?).
                f"Selected asset {asset.name} could not be located inside the asset library",
            )
            return {"CANCELLED"}
        if asset.id_type != 'ACTION':
            self.report(  # type: ignore
                {"ERROR"},
                f"Selected asset {asset.name} is not an Action",
            )
            return {"CANCELLED"}

        with bpy.types.BlendData.temp_data() as temp_data:
            with temp_data.libraries.load(asset_lib_path) as (data_from, data_to):
                data_to.actions = [asset.name]

            action: Action = data_to.actions[0]
            return self.use_pose(context, action)

    def use_pose(self, context: Context, pose_asset: Action) -> Set[str]:
        """Select the bones of the active armature used by *pose_asset*."""
        arm_object: Object = context.object
        pose_usage.select_bones(arm_object, pose_asset, selected_side=self.selected_side)
        return {"FINISHED"}
|
||||||
|
|
||||||
|
# This operator takes the Window Manager's `actionlib_flipped` property, and
# passes it to the `POSELIB_OT_blend_pose_asset` operator. This makes it
# possible to bind a key to the operator and still have it respect the global
# "Flip Pose" checkbox.
class POSELIB_OT_blend_pose_asset_for_keymap(Operator):
    """Keymap wrapper that forwards `flipped` to `poselib.blend_pose_asset`."""

    bl_idname = "poselib.blend_pose_asset_for_keymap"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}

    # Borrow the UI label/description from the wrapped built-in operator.
    _rna = bpy.ops.poselib.blend_pose_asset.get_rna_type()
    bl_label = _rna.name
    bl_description = _rna.description
    del _rna

    flipped: BoolProperty(name="Flipped", default=False)  # type: ignore

    @classmethod
    def poll(cls, context: Context) -> bool:
        # Delegate to the wrapped operator's own poll.
        return bpy.ops.poselib.blend_pose_asset.poll(context.copy())

    # NOTE: an older experimental invoke() (alt = flip, ctrl = blend both
    # sides) that was kept here as a dead string literal has been removed.

    def invoke(self, context: Context, event: Event) -> Set[str]:
        """Forward the interactive (blend-slider) invocation."""
        return bpy.ops.poselib.blend_pose_asset(context.copy(), 'INVOKE_DEFAULT', flipped=self.flipped)

    def execute(self, context: Context) -> Set[str]:
        """Forward the non-interactive execution."""
        return bpy.ops.poselib.blend_pose_asset(context.copy(), 'EXEC_DEFAULT', flipped=self.flipped)
|
||||||
|
|
||||||
|
|
||||||
|
# This operator takes the Window Manager's `actionlib_flipped` property, and
# passes it to the `POSELIB_OT_apply_pose_asset` operator. This makes it
# possible to bind a key to the operator and still have it respect the global
# "Flip Pose" checkbox.

class POSELIB_OT_apply_pose_asset_for_keymap(Operator):
    """Keymap wrapper around `poselib.apply_pose_asset`.

    When `flipped` is set, a hard-coded list of facial/accessory helper
    bones is sampled from the pose Action first, then restored after the
    flipped apply, manually mirroring `.L`/`.R` pairs (the built-in flip
    does not handle these bones as desired).
    """

    bl_idname = "poselib.apply_pose_asset_for_keymap"
    bl_options = {"REGISTER", "UNDO", "INTERNAL"}

    # Borrow the UI label from the wrapped built-in operator.
    _rna = bpy.ops.poselib.apply_pose_asset.get_rna_type()
    bl_label = _rna.name
    bl_description = 'Apply Pose to Bones'
    del _rna

    flipped: BoolProperty(name="Flipped", default=False)  # type: ignore

    @classmethod
    def poll(cls, context: Context) -> bool:
        if not asset_utils.SpaceAssetInfo.is_asset_browser(context.space_data):
            return False
        return bpy.ops.poselib.apply_pose_asset.poll(context.copy())

    def execute(self, context: Context) -> Set[str]:
        if not self.flipped:
            return bpy.ops.poselib.apply_pose_asset(context.copy(), 'EXEC_DEFAULT', flipped=False)

        action = bpy.data.actions.get(context.active_file.name)

        # Bones that need special mirroring treatment after a flipped apply.
        bones = [
            'blendshape-eyes', 'blendshape-eye.L', 'blendshape-eye.R',
            'blendshape-corner-mouth', 'blendshape-corner-mouth.L',
            'blendshape-corner-down-mouth.L', 'blendshape-corner-up-mouth.L',
            'blendshape-corner-mouth-add.L', 'blendshape-corner-mouth.R',
            'blendshape-corner-down-mouth.R', 'blendshape-corner-up-mouth.R',
            'blendshape-corner-mouth-add.R', 'blendshape-center-up-mouth',
            'blendshape-center-down-mouth',
            'hat1.R', 'hat2.R', 'hat3.R', 'hat1.L', 'hat2.L', 'hat3.L',
        ]

        # Transform channels worth capturing.
        attributes = [
            'location', 'rotation_quaternion',
            'rotation_euler', 'rotation_axis_angle', 'scale'
        ]

        # bone name -> {property name -> [channel values, in fcurve order]}
        store_bones = {}
        if action:
            for fc in action.fcurves:
                bone_name, prop_name = split_path(fc.data_path)
                if bone_name not in bones:
                    continue
                if prop_name not in attributes:
                    continue

                per_bone = store_bones.setdefault(bone_name, {})
                per_bone.setdefault(prop_name, []).append(
                    fc.evaluate(context.scene.frame_current))

        bpy.ops.poselib.apply_pose_asset(context.copy(), 'EXEC_DEFAULT', flipped=True)

        # Re-apply the captured values, swapping `.L`/`.R` bones so they end
        # up on the mirrored side.
        for bone, props in store_bones.items():
            for attr, attr_val in props.items():
                if re.search(r'\.[RL]$', bone):
                    flipped_bone = pose_usage.flip_side_name(bone)
                    mirror = store_bones.get(flipped_bone)
                    if mirror is not None and attr in mirror:
                        # Only the X location changes sign when mirroring.
                        factor = Vector((-1, 1, 1)) if attr == 'location' else 1
                        attr_val = Vector(mirror[attr]) * factor
                    # else: asymmetric selection — no opposite-side data was
                    # stored, so keep this bone's own captured values.

                setattr(context.object.pose.bones[bone], attr, attr_val)

        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class POSELIB_OT_convert_old_poselib(Operator):
    """Turn every pose marker of the active object's Action into a pose asset."""

    bl_idname = "poselib.convert_old_poselib"
    bl_label = "Convert Old-Style Pose Library"
    bl_description = "Create a pose asset for each pose marker in the current action"
    bl_options = {"REGISTER", "UNDO"}

    @classmethod
    def poll(cls, context: Context) -> bool:
        ob = context.object
        action = ob and ob.animation_data and ob.animation_data.action
        if not action:
            cls.poll_message_set("Active object has no Action")
            return False
        if not action.pose_markers:
            cls.poll_message_set("Action %r is not a old-style pose library" % action.name)
            return False
        return True

    def execute(self, context: Context) -> Set[str]:
        from . import conversion

        source_action = context.object.animation_data.action
        converted = conversion.convert_old_poselib(source_action)

        if not converted:
            self.report({'ERROR'}, "Unable to convert to pose assets")
            return {'CANCELLED'}

        self.report({'INFO'}, "Converted %d poses to pose assets" % len(converted))
        return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Operator classes registered by this module. Several of these
# (POSELIB_OT_copy_as_asset, POSELIB_OT_create_pose_asset, ...) are defined
# earlier in this file, outside this excerpt.
classes = (
    POSELIB_OT_apply_pose_asset_for_keymap,
    POSELIB_OT_blend_pose_asset_for_keymap,
    POSELIB_OT_convert_old_poselib,
    POSELIB_OT_copy_as_asset,
    POSELIB_OT_create_pose_asset,
    POSELIB_OT_paste_asset,
    POSELIB_OT_pose_asset_select_bones,
    POSELIB_OT_restore_previous_action
)

# Standard Blender helper generating register()/unregister() for `classes`.
register, unregister = bpy.utils.register_classes_factory(classes)
|
|
@ -0,0 +1,423 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Pose Library - creation functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses
|
||||||
|
import functools
|
||||||
|
import re
|
||||||
|
|
||||||
|
from typing import Optional, FrozenSet, Set, Union, Iterable, cast
|
||||||
|
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.types import (
|
||||||
|
Action,
|
||||||
|
Bone,
|
||||||
|
Context,
|
||||||
|
FCurve,
|
||||||
|
Keyframe,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Value of one FCurve sample: Blender animates both float and int channels.
FCurveValue = Union[float, int]

pose_bone_re = re.compile(r'pose.bones\["([^"]+)"\]')
"""RegExp for matching FCurve data paths."""
# NOTE(review): the dots in `pose.bones` are unescaped, so they match any
# character — harmless for real RNA data paths, but worth confirming.
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass(unsafe_hash=True, frozen=True)
class PoseCreationParams:
    """Immutable bundle of inputs for creating a pose-asset Action."""

    # Armature object whose current pose is captured.
    armature_ob: bpy.types.Object
    # Action currently assigned to the armature (source of animated values),
    # or None when the armature has no animation data.
    src_action: Optional[Action]
    # Frame at which the pose is sampled.
    src_frame_nr: float
    # Names of the pose bones to include in the asset.
    bone_names: FrozenSet[str]
    # Name for the newly created Action datablock.
    new_asset_name: str
|
||||||
|
|
||||||
|
|
||||||
|
class UnresolvablePathError(ValueError):
    """Raised when a data_path cannot be resolved to a current value.

    Wraps the generic ValueError raised by Blender's path_resolve() so that
    callers can catch this specific failure.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass(unsafe_hash=True)
class PoseActionCreator:
    """Create an Action that's suitable for marking as Asset.

    Does not mark as asset yet, nor does it add asset metadata.
    """

    params: PoseCreationParams

    # These were taken from Blender's Action baking code in `anim_utils.py`.
    # Items are (name, array_length) tuples; array_length None means scalar.
    _bbone_props = [
        ("bbone_curveinx", None),
        ("bbone_curveoutx", None),
        ("bbone_curveinz", None),
        ("bbone_curveoutz", None),
        ("bbone_rollin", None),
        ("bbone_rollout", None),
        ("bbone_scalein", 3),
        ("bbone_scaleout", 3),
        ("bbone_easein", None),
        ("bbone_easeout", None),
    ]

    def create(self) -> Optional[Action]:
        """Create a single-frame Action containing only the given bones, or None if no anim data was found."""

        try:
            dst_action = self._create_new_action()
            self._store_pose(dst_action)
        finally:
            # Prevent next instantiations of this class from reusing pointers to
            # bones. They may not be valid by then any more.
            self._find_bone.cache_clear()

        if len(dst_action.fcurves) == 0:
            # Nothing was keyed; don't leave an empty Action behind.
            bpy.data.actions.remove(dst_action)
            return None

        return dst_action

    def _create_new_action(self) -> Action:
        """Create the destination Action, matching the source's id_root."""
        dst_action = bpy.data.actions.new(self.params.new_asset_name)
        if self.params.src_action:
            dst_action.id_root = self.params.src_action.id_root
        dst_action.user_clear()  # actions.new() sets users=1, but marking as asset also increments user count.
        return dst_action

    def _store_pose(self, dst_action: Action) -> None:
        """Store the current pose into the given action."""
        self._store_bone_pose_parameters(dst_action)
        self._store_animated_parameters(dst_action)
        self._store_parameters_from_callback(dst_action)

    def _store_bone_pose_parameters(self, dst_action: Action) -> None:
        """Store loc/rot/scale/bbone values in the Action."""

        for bone_name in sorted(self.params.bone_names):
            self._store_location(dst_action, bone_name)
            self._store_rotation(dst_action, bone_name)
            self._store_scale(dst_action, bone_name)
            self._store_bbone(dst_action, bone_name)

    def _store_animated_parameters(self, dst_action: Action) -> None:
        """Store the current value of any animated bone properties."""
        if self.params.src_action is None:
            return

        armature_ob = self.params.armature_ob
        for fcurve in self.params.src_action.fcurves:
            match = pose_bone_re.match(fcurve.data_path)
            if not match:
                # Not animating a bone property.
                continue

            bone_name = match.group(1)
            if bone_name not in self.params.bone_names:
                # Bone is not our export set.
                continue

            if dst_action.fcurves.find(fcurve.data_path, index=fcurve.array_index):
                # This property is already handled by a previous _store_xxx() call.
                continue

            # Only include in the pose if there is a key on this frame.
            if not self._has_key_on_frame(fcurve):
                continue

            try:
                value = self._current_value(armature_ob, fcurve.data_path, fcurve.array_index)
            except UnresolvablePathError:
                # A once-animated property no longer exists.
                continue

            dst_fcurve = dst_action.fcurves.new(
                fcurve.data_path, index=fcurve.array_index, action_group=bone_name
            )
            dst_fcurve.keyframe_points.insert(self.params.src_frame_nr, value=value)
            dst_fcurve.update()

    def _store_parameters_from_callback(self, dst_action: Action) -> None:
        """Store extra parameters in the pose based on arbitrary callbacks.

        Not implemented yet, needs a proper design & some user stories.
        """
        pass

    def _store_location(self, dst_action: Action, bone_name: str) -> None:
        """Store bone location."""
        self._store_bone_array(dst_action, bone_name, "location", 3)

    def _store_rotation(self, dst_action: Action, bone_name: str) -> None:
        """Store bone rotation given current rotation mode."""
        bone = self._find_bone(bone_name)
        if bone.rotation_mode == "QUATERNION":
            self._store_bone_array(dst_action, bone_name, "rotation_quaternion", 4)
        elif bone.rotation_mode == "AXIS_ANGLE":
            self._store_bone_array(dst_action, bone_name, "rotation_axis_angle", 4)
        else:
            self._store_bone_array(dst_action, bone_name, "rotation_euler", 3)

    def _store_scale(self, dst_action: Action, bone_name: str) -> None:
        """Store bone scale."""
        self._store_bone_array(dst_action, bone_name, "scale", 3)

    def _store_bbone(self, dst_action: Action, bone_name: str) -> None:
        """Store bendy-bone parameters."""
        for prop_name, array_length in self._bbone_props:
            if array_length:
                self._store_bone_array(dst_action, bone_name, prop_name, array_length)
            else:
                self._store_bone_property(dst_action, bone_name, prop_name)

    def _store_bone_array(
        self, dst_action: Action, bone_name: str, property_name: str, array_length: int
    ) -> None:
        """Store all elements of an array property."""
        for array_index in range(array_length):
            self._store_bone_property(dst_action, bone_name, property_name, array_index)

    def _store_bone_property(
        self,
        dst_action: Action,
        bone_name: str,
        property_path: str,
        array_index: int = -1,
    ) -> None:
        """Store the current value of a single bone property."""

        bone = self._find_bone(bone_name)
        value = self._current_value(bone, property_path, array_index)

        # Get the full 'pose.bones["bone_name"].blablabla' path suitable for FCurves.
        rna_path = bone.path_from_id(property_path)

        fcurve: Optional[FCurve] = dst_action.fcurves.find(rna_path, index=array_index)
        if fcurve is None:
            fcurve = dst_action.fcurves.new(rna_path, index=array_index, action_group=bone_name)

        fcurve.keyframe_points.insert(self.params.src_frame_nr, value=value)
        fcurve.update()

    @classmethod
    def _current_value(
        cls, datablock: bpy.types.ID, data_path: str, array_index: int
    ) -> FCurveValue:
        """Resolve an RNA path + array index to an actual value."""
        value_or_array = cls._path_resolve(datablock, data_path)

        # Both indices -1 and 0 are used for non-array properties.
        # -1 cannot be used in arrays, whereas 0 can be used in both arrays and non-arrays.

        if array_index == -1:
            return cast(FCurveValue, value_or_array)

        if array_index == 0:
            # The path was already resolved above; a redundant second
            # _path_resolve() call that used to be here was removed.
            try:
                # MyPy doesn't understand this try/except is to determine the type.
                value = value_or_array[array_index]  # type: ignore
            except TypeError:
                # Not an array after all.
                return cast(FCurveValue, value_or_array)
            return cast(FCurveValue, value)

        # MyPy doesn't understand that array_index>0 implies this is indexable.
        return cast(FCurveValue, value_or_array[array_index])  # type: ignore

    @staticmethod
    def _path_resolve(
        datablock: bpy.types.ID, data_path: str
    ) -> Union[FCurveValue, Iterable[FCurveValue]]:
        """Wrapper for datablock.path_resolve(data_path).

        Raise UnresolvablePathError when the path cannot be resolved.
        This is easier to deal with upstream than the generic ValueError raised
        by Blender.
        """
        try:
            return datablock.path_resolve(data_path)  # type: ignore
        except ValueError as ex:
            raise UnresolvablePathError(str(ex)) from ex

    # NOTE: lru_cache on an instance method keeps `self` (and cached bones)
    # alive in the cache; create() clears it in its `finally` block to avoid
    # stale bone pointers across instantiations.
    @functools.lru_cache(maxsize=1024)
    def _find_bone(self, bone_name: str) -> Bone:
        """Find a bone by name.

        Assumes the named bone exists, as the bones this class handles comes
        from the user's selection, and you can't select a non-existent bone.
        """

        bone: Bone = self.params.armature_ob.pose.bones[bone_name]
        return bone

    def _has_key_on_frame(self, fcurve: FCurve) -> bool:
        """Return True iff the FCurve has a key on the source frame."""

        points = fcurve.keyframe_points
        if not points:
            return False

        # Binary search over the sorted keyframes, accepting a small margin
        # to absorb float rounding in stored frame numbers.
        frame_to_find = self.params.src_frame_nr
        margin = 0.001
        high = len(points) - 1
        low = 0
        while low <= high:
            mid = (high + low) // 2
            diff = points[mid].co.x - frame_to_find
            if abs(diff) < margin:
                return True
            if diff < 0:
                # Frame to find is bigger than the current middle.
                low = mid + 1
            else:
                # Frame to find is smaller than the current middle
                high = mid - 1
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def create_pose_asset(
    params: PoseCreationParams,
) -> Optional[Action]:
    """Create a single-frame Action containing only the pose of the given bones.

    DOES mark as asset, DOES NOT configure asset metadata.
    """

    new_action = PoseActionCreator(params).create()
    if new_action is None:
        # No animation data was found for the requested bones.
        return None

    new_action.asset_mark()
    new_action.asset_generate_preview()
    return new_action
|
||||||
|
|
||||||
|
def create_pose_asset_from_context(context: Context, new_asset_name: str) -> Optional[Action]:
    """Create Action asset from active object & selected bones.

    Samples the current frame of the active object's Action (if any) for the
    selected pose bones, and returns the new asset Action or None when there
    is nothing to store.
    """

    bones = context.selected_pose_bones_from_active_object
    bone_names = {bone.name for bone in bones}

    params = PoseCreationParams(
        context.object,
        # The object may have no animation_data at all.
        getattr(context.object.animation_data, "action", None),
        context.scene.frame_current,
        frozenset(bone_names),
        new_asset_name,
    )

    return create_pose_asset(params)
|
||||||
|
|
||||||
|
|
||||||
|
def copy_fcurves(
    dst_action: Action,
    src_action: Action,
    src_frame_nr: float,
    bone_names: Set[str],
) -> int:
    """Copy FCurves, returning number of curves copied.

    Only curves that animate one of `bone_names` AND have a keyframe on
    `src_frame_nr` are copied, each reduced to that single keyframe.
    """
    num_fcurves_copied = 0
    for fcurve in src_action.fcurves:
        match = pose_bone_re.match(fcurve.data_path)
        if not match:
            # Not a pose-bone channel.
            continue

        bone_name = match.group(1)
        if bone_name not in bone_names:
            continue

        # Check if there is a keyframe on this frame.
        keyframe = find_keyframe(fcurve, src_frame_nr)
        if keyframe is None:
            continue
        create_single_key_fcurve(dst_action, fcurve, keyframe)
        num_fcurves_copied += 1
    return num_fcurves_copied
|
||||||
|
|
||||||
|
|
||||||
|
def create_single_key_fcurve(
    dst_action: Action, src_fcurve: FCurve, src_keyframe: Keyframe
) -> FCurve:
    """Create a copy of the source FCurve, but only for the given keyframe.

    Returns a new FCurve with just one keyframe.
    """

    dst_fcurve = copy_fcurve_without_keys(dst_action, src_fcurve)
    copy_keyframe(dst_fcurve, src_keyframe)
    return dst_fcurve
|
||||||
|
|
||||||
|
|
||||||
|
def copy_fcurve_without_keys(dst_action: Action, src_fcurve: FCurve) -> FCurve:
    """Create an FCurve on *dst_action* mirroring *src_fcurve*, minus its keys.

    The data path, array index, action group, and display/extrapolation
    settings are carried over; keyframes are not.
    """
    group_name = src_fcurve.group.name if src_fcurve.group else ""
    new_curve = dst_action.fcurves.new(
        src_fcurve.data_path,
        index=src_fcurve.array_index,
        action_group=group_name,
    )
    for prop in ("auto_smoothing", "color", "color_mode", "extrapolation"):
        setattr(new_curve, prop, getattr(src_fcurve, prop))
    return new_curve
|
||||||
|
|
||||||
|
|
||||||
|
def copy_keyframe(dst_fcurve: FCurve, src_keyframe: Keyframe) -> Keyframe:
    """Copy a keyframe from one FCurve to the other.

    Returns the newly inserted keyframe on `dst_fcurve`.
    """

    # options={'FAST'} defers curve recalculation; dst_fcurve.update() below
    # finalizes it once all properties are copied.
    dst_keyframe = dst_fcurve.keyframe_points.insert(
        src_keyframe.co.x, src_keyframe.co.y, options={'FAST'}, keyframe_type=src_keyframe.type
    )

    # Carry over interpolation/easing/handle settings so the copied key
    # behaves exactly like the original.
    for propname in {
        "amplitude",
        "back",
        "easing",
        "handle_left",
        "handle_left_type",
        "handle_right",
        "handle_right_type",
        "interpolation",
        "period",
    }:
        setattr(dst_keyframe, propname, getattr(src_keyframe, propname))
    dst_fcurve.update()
    return dst_keyframe
|
||||||
|
|
||||||
|
|
||||||
|
def find_keyframe(fcurve: FCurve, frame: float) -> Optional[Keyframe]:
    """Return the keyframe of *fcurve* lying on *frame*, or None if absent.

    A keyframe within 1e-4 of the requested frame is accepted, to absorb
    rounding errors in stored keyframe coordinates. Binary search adapted
    from https://pythonguides.com/python-binary-search/
    """
    points = fcurve.keyframe_points
    tolerance = 1e-4

    lo = 0
    hi = len(points) - 1
    while lo <= hi:
        middle = (lo + hi) // 2
        candidate = points[middle]
        delta = candidate.co.x - frame
        if delta < -tolerance:
            lo = middle + 1
        elif delta > tolerance:
            hi = middle - 1
        else:
            return candidate
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def assign_from_asset_browser(asset: Action, asset_browser_area: bpy.types.Area) -> None:
    """Assign some things from the asset browser to the asset.

    This sets the current catalog ID, and in the future could include tags
    from the active dynamic catalog, etc.
    """

    # NOTE(review): `asset_browser` is not imported in this module's visible
    # import block — confirm it is in scope at runtime.
    cat_id = asset_browser.active_catalog_id(asset_browser_area)
    asset.asset_data.catalog_id = cat_id
|
|
@ -0,0 +1,180 @@
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Pose Library - usage functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Set
|
||||||
|
import re
|
||||||
|
|
||||||
|
from bpy.types import (
|
||||||
|
Action,
|
||||||
|
Object,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def select_bones(arm_object: Object, action: Action, *, selected_side, toggle=True):
    """Select the pose bones of *arm_object* that *action* animates.

    selected_side: 'CURRENT' (the animated bones themselves), 'FLIPPED'
    (their L/R mirrored counterparts) or 'BOTH'. With toggle=True the
    selection state of each target bone is inverted instead of forced on.
    """
    bone_path_re = re.compile(r'pose.bones\["([^"]+)"\]')

    handled: Set[str] = set()
    targets = set()

    for curve in action.fcurves:
        hit = bone_path_re.match(curve.data_path)
        if not hit:
            continue

        name = hit.group(1)
        mirrored = flip_side_name(name)

        # Each bone usually has several fcurves; process it only once.
        if name in handled:
            continue
        handled.add(name)

        if selected_side == 'FLIPPED':
            targets.add(mirrored)
        elif selected_side == 'BOTH':
            targets.update((name, mirrored))
        elif selected_side == 'CURRENT':
            targets.add(name)

    armature_pose = arm_object.pose
    for name in targets:
        pbone = armature_pose.bones.get(name)
        if not pbone:
            # Mirrored name may not exist on this rig.
            continue
        if toggle:
            pbone.bone.select = not pbone.bone.select
        else:
            pbone.bone.select = True
|
||||||
|
|
||||||
|
|
||||||
|
# Characters that may separate a side tag (l/L/r/R) from the rest of a name.
_FLIP_SEPARATORS = set(". -_")

# These are single-character replacements, others are handled differently.
_FLIP_REPLACEMENTS = {
    "l": "r",
    "L": "R",
    "r": "l",
    "R": "L",
}
|
||||||
|
|
||||||
|
|
||||||
|
def flip_side_name(to_flip: str) -> str:
    """Flip left and right indicators in the name.

    Basically a Python implementation of BLI_string_flip_side_name.

    >>> flip_side_name('bone_L.004')
    'bone_R.004'
    >>> flip_side_name('left_bone')
    'right_bone'
    >>> flip_side_name('Left_bone')
    'Right_bone'
    >>> flip_side_name('LEFT_bone')
    'RIGHT_bone'
    >>> flip_side_name('some.bone-RIGHT.004')
    'some.bone-LEFT.004'
    >>> flip_side_name('some.bone-right.004')
    'some.bone-left.004'
    >>> flip_side_name('some.bone-Right.004')
    'some.bone-Left.004'
    >>> flip_side_name('some.bone-LEFT.004')
    'some.bone-RIGHT.004'
    >>> flip_side_name('some.bone-left.004')
    'some.bone-right.004'
    >>> flip_side_name('some.bone-Left.004')
    'some.bone-Right.004'
    >>> flip_side_name('.004')
    '.004'
    >>> flip_side_name('L.004')
    'R.004'
    """
    import string

    if len(to_flip) < 3:
        # we don't flip names like .R or .L
        return to_flip

    # We first check the case with a .### extension, let's find the last period.
    # `number` holds the numeric extension (including the dot), `replace` the
    # part of the name that may actually be flipped.
    number = ""
    replace = to_flip
    if to_flip[-1] in string.digits:
        try:
            index = to_flip.rindex(".")
        except ValueError:
            # No period at all: treat the whole name as flippable.
            pass
        else:
            if to_flip[index + 1] in string.digits:
                # TODO(Sybren): this doesn't handle "bone.1abc2" correctly.
                number = to_flip[index:]
                replace = to_flip[:index]

    if not replace:
        # Nothing left after the number, so no flips necessary.
        return replace + number

    if len(replace) == 1:
        # Single character: flip it directly when it is a side letter.
        replace = _FLIP_REPLACEMENTS.get(replace, replace)
        return replace + number

    # First case; separator . - _ with extensions r R l L.
    if replace[-2] in _FLIP_SEPARATORS and replace[-1] in _FLIP_REPLACEMENTS:
        replace = replace[:-1] + _FLIP_REPLACEMENTS[replace[-1]]
        return replace + number

    # Second case; beginning with r R l L, with separator after it.
    if replace[1] in _FLIP_SEPARATORS and replace[0] in _FLIP_REPLACEMENTS:
        replace = _FLIP_REPLACEMENTS[replace[0]] + replace[1:]
        return replace + number

    # Third case: spelled-out 'left'/'right' as prefix or suffix; the
    # replacement preserves the capitalisation style (Right/RIGHT/right).
    lower = replace.lower()
    prefix = suffix = ""
    if lower.startswith("right"):
        bit = replace[0:2]
        if bit == "Ri":
            prefix = "Left"
        elif bit == "RI":
            prefix = "LEFT"
        else:
            prefix = "left"
        replace = replace[5:]
    elif lower.startswith("left"):
        bit = replace[0:2]
        if bit == "Le":
            prefix = "Right"
        elif bit == "LE":
            prefix = "RIGHT"
        else:
            prefix = "right"
        replace = replace[4:]
    elif lower.endswith("right"):
        bit = replace[-5:-3]
        if bit == "Ri":
            suffix = "Left"
        elif bit == "RI":
            suffix = "LEFT"
        else:
            suffix = "left"
        replace = replace[:-5]
    elif lower.endswith("left"):
        bit = replace[-4:-2]
        if bit == "Le":
            suffix = "Right"
        elif bit == "LE":
            suffix = "RIGHT"
        else:
            suffix = "right"
        replace = replace[:-4]

    return prefix + replace + suffix + number
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    import doctest

    # Run the doctests embedded in flip_side_name() when executed directly.
    print(f"Test result: {doctest.testmod()}")
|
|
@ -0,0 +1,777 @@
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
import os
|
||||||
|
from os.path import abspath, join
|
||||||
|
|
||||||
|
from bpy.types import (AddonPreferences, PointerProperty, PropertyGroup)
|
||||||
|
from bpy.props import (BoolProperty, StringProperty, CollectionProperty,
|
||||||
|
EnumProperty, IntProperty)
|
||||||
|
|
||||||
|
from asset_library.constants import (DATA_TYPES, DATA_TYPE_ITEMS,
|
||||||
|
ICONS, RESOURCES_DIR, ADAPTER_DIR)
|
||||||
|
|
||||||
|
from asset_library.common.file_utils import import_module_from_path, norm_str
|
||||||
|
from asset_library.common.bl_utils import get_addon_prefs
|
||||||
|
from asset_library.common.functions import get_catalog_path
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import importlib
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
|
||||||
|
ADAPTERS = []
|
||||||
|
|
||||||
|
def update_library_config(self, context):
    """Update callback for the library configuration; not implemented yet."""
    # Stub: just announce that nothing is synced for now.
    print('update_library_config not yet implemented')
|
||||||
|
|
||||||
|
def update_library_path(self, context):
    """Update callback: derive a library's bundle fields from `library_path`.

    Writes via item assignment (self['...']) rather than attribute
    assignment, to avoid re-triggering those properties' own update
    callbacks.
    """
    prefs = get_addon_prefs()

    self['bundle_directory'] = str(self.library_path)

    if not self.custom_bundle_name:
        self['custom_bundle_name'] = self.name

    if not self.custom_bundle_directory:
        # NOTE(review): uses `self.library_name` here but `self.name` above —
        # confirm both attributes exist and the asymmetry is intended.
        custom_bundle_dir = Path(prefs.bundle_directory, self.library_name).resolve()
        self['custom_bundle_directory'] = str(custom_bundle_dir)

    self.set_library_path()
|
||||||
|
|
||||||
|
def update_all_library_path(self, context):
    """Update callback: re-derive paths for every configured library."""

    prefs = get_addon_prefs()

    for lib in prefs.libraries:
        # Call the single-library callback directly instead of relying on
        # property update triggers.
        update_library_path(lib, context)
|
||||||
|
|
||||||
|
def get_adapter_items(self, context):
    """EnumProperty items callback: 'NONE' plus one entry per loaded adapter."""
    adapters = [(norm_str(a.name, format=str.upper), a.name, "", i + 1)
                for i, a in enumerate(ADAPTERS)]
    return [('NONE', 'None', '', 0)] + adapters
|
||||||
|
|
||||||
|
def get_library_items(self, context):
    """EnumProperty items callback listing every library except this one."""
    prefs = get_addon_prefs()

    others = [(l.name, l.name, "", i + 1)
              for i, l in enumerate(prefs.libraries) if l != self]
    return [('NONE', 'None', '', 0)] + others
|
||||||
|
|
||||||
|
def get_store_library_items(self, context):
    """EnumProperty items callback: this library plus those merged into it."""
    prefs = get_addon_prefs()

    # NOTE(review): relies on a `merge_library` property that appears to be
    # commented out on AssetLibrary — confirm this callback is still reachable.
    merged = [l for l in prefs.libraries if l.merge_library == self.name]
    return [(l.name, l.name, "", i) for i, l in enumerate([self] + merged)]
|
||||||
|
|
||||||
|
|
||||||
|
class AssetLibraryAdapters(PropertyGroup):
    """Container group the loaded adapters are attached to as PointerProperties
    (see AssetLibraryPrefs.load_adapters)."""

    parent = None

    def __iter__(self):
        """Yield each attached adapter group, skipping rna bookkeeping props."""
        skip = ('rna_type', 'name')
        return (getattr(self, key)
                for key in self.bl_rna.properties.keys() if key not in skip)
|
||||||
|
|
||||||
|
|
||||||
|
class ConformAssetLibrary(PropertyGroup):
    """Settings used to conform (re-export) a library to another location."""

    adapters : bpy.props.PointerProperty(type=AssetLibraryAdapters)
    adapter_name : EnumProperty(items=get_adapter_items)
    directory : StringProperty(
        name="Destination Directory",
        subtype='DIR_PATH',
        default=''
    )
    image_template : StringProperty()
    video_template : StringProperty()

    externalize_data: BoolProperty(default=False, name='Externalize Data')
    blend_depth: IntProperty(default=1, name='Blend Depth')

    @property
    def adapter(self):
        """Active adapter PropertyGroup, or None when unset or unknown."""
        key = norm_str(self.adapter_name)
        if hasattr(self.adapters, key):
            return getattr(self.adapters, key)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
class AssetLibrary(PropertyGroup):
    """Preferences entry describing one asset library: bundle location and
    naming, data type, active adapter, conform settings and the UI to edit
    it all in the addon preferences.
    """

    name : StringProperty(name='Name', default='Action Library', update=update_library_path)
    id : StringProperty()
    auto_bundle : BoolProperty(name='Auto Bundle', default=True)
    expand : BoolProperty(name='Expand', default=False)
    use : BoolProperty(name='Use', default=True, update=update_library_path)
    data_type : EnumProperty(name='Type', items=DATA_TYPE_ITEMS, default='ACTION')

    bundle_directory : StringProperty(
        name="Bundle Directory",
        subtype='DIR_PATH',
        default=''
    )

    use_custom_bundle_directory : BoolProperty(default=False, update=update_library_path)
    custom_bundle_directory : StringProperty(
        name="Bundle Directory",
        subtype='DIR_PATH',
        default='',
        update=update_library_path
    )

    use_custom_bundle_name : BoolProperty(default=False, update=update_library_path)
    custom_bundle_name : StringProperty(name='Merge Name', update=update_library_path)

    # Library used when adding an asset while this library is merged with another.
    store_library: EnumProperty(items=get_store_library_items, name="Library")

    template: StringProperty()
    expand_extra : BoolProperty(name='Expand', default=False)
    blend_depth : IntProperty(name='Blend Depth', default=0)

    adapters : bpy.props.PointerProperty(type=AssetLibraryAdapters)
    adapter_name : EnumProperty(items=get_adapter_items)

    conform: bpy.props.PointerProperty(type=ConformAssetLibrary)

    @property
    def merge_libraries(self):
        """Other libraries that resolve to the same bundle path as this one."""
        prefs = get_addon_prefs()
        return [l for l in prefs.libraries
                if l != self and (l.library_path == self.library_path)]

    @property
    def data_types(self):
        """Plural identifier of the data type, e.g. 'ACTION' -> 'actions'."""
        return f'{self.data_type.lower()}s'

    @property
    def adapter(self):
        """Adapter PropertyGroup matching adapter_name, or None if unknown."""
        name = norm_str(self.adapter_name)
        if not hasattr(self.adapters, name):
            return
        return getattr(self.adapters, name)

    @property
    def library(self):
        """Library matching the active asset browser reference, or None."""
        prefs = get_addon_prefs()
        asset_lib_ref = bpy.context.space_data.params.asset_library_ref

        # TODO: also work outside an asset browser area.
        if asset_lib_ref not in prefs.libraries:
            return None

        return prefs.libraries[asset_lib_ref]

    @property
    def library_path(self):
        """Resolved bundle path: the custom directory when enabled, otherwise
        a normalized folder under the global bundle directory."""
        prefs = get_addon_prefs()
        library_name = self.library_name

        if self.use_custom_bundle_directory:
            return Path(self.custom_bundle_directory, library_name).resolve()
        else:
            library_name = norm_str(library_name)
            return Path(prefs.bundle_directory, library_name).resolve()

    @property
    def library_name(self):
        """Display/bundle name: the custom name when enabled, else `name`."""
        if self.use_custom_bundle_name:
            return self.custom_bundle_name
        return self.name

    @property
    def image_template(self):
        """Global image preview template from the addon preferences."""
        prefs = get_addon_prefs()
        return prefs.image_template

    @property
    def video_template(self):
        """Global video preview template from the addon preferences."""
        prefs = get_addon_prefs()
        return prefs.video_template

    @property
    def asset_description_template(self):
        """Global asset description template from the addon preferences."""
        prefs = get_addon_prefs()
        return prefs.asset_description_template

    @property
    def catalog_path(self):
        """Path of the asset catalog file for this library's bundle."""
        return get_catalog_path(self.library_path)

    @property
    def options(self):
        """Active adapter's properties as a plain dict (rna_type excluded).

        BUGFIX: the original comprehension referenced an undefined name
        ``p``, tuple-unpacked ``keys()`` and recursed through
        ``self.options`` itself — it raised on every call.
        """
        if not self.adapter:
            return {}
        return {p: getattr(self.adapter, p)
                for p in self.adapter.bl_rna.properties.keys() if p != 'rna_type'}

    def clear_library_path(self):
        """Remove this library's entry from Preferences > File Paths."""
        prefs = bpy.context.preferences
        libs = prefs.filepaths.asset_libraries

        # The entry may have been created under a previous name.
        # Hoisted out of the loop: this value is loop-invariant.
        prev_name = self.get('asset_library') or self.library_name

        for l in reversed(libs):
            if l.name == prev_name:
                index = list(libs).index(l)
                try:
                    bpy.ops.preferences.asset_library_remove(index=index)
                    return
                except AttributeError:
                    # Operator unavailable (e.g. during registration); keep looking.
                    pass

    def set_dict(self, data, obj=None):
        """Recursively apply a (possibly nested) dict of settings to this library.

        Nested dicts are applied to the matching sub-PropertyGroup; a 'name'
        key inside a nested dict selects e.g. the adapter via ``<key>_name``.
        Values are written as ID-properties so update callbacks do not fire.
        """
        obj = obj or self

        # Work on a copy so the caller's dict is not mutated (pop below).
        data = data.copy()

        for key, value in data.items():
            if isinstance(value, dict):
                if 'name' in value:
                    setattr(obj, f'{key}_name', value.pop('name'))

                self.set_dict(value, obj=getattr(obj, key))

            elif key in obj.bl_rna.properties.keys():
                if key == 'id':
                    value = str(value)

                elif key == 'custom_bundle_name':
                    # BUGFIX: the original tested `in data.values()`, i.e. it
                    # looked for the key among the *values*, so the flag was
                    # enabled even when 'use_custom_bundle_name' was given.
                    if 'use_custom_bundle_name' not in data:
                        obj["use_custom_bundle_name"] = True

                elif isinstance(value, str):
                    # Allow $VARS and ~ in path-like string values.
                    # NOTE(review): `os` must be imported at module top
                    # (not visible in this chunk) — confirm.
                    value = os.path.expandvars(value)
                    value = os.path.expanduser(value)

                obj[key] = value

            else:
                print(f'Prop {key} of {obj} not exist')

        self['bundle_directory'] = str(self.library_path)

        if not self.custom_bundle_name:
            self['custom_bundle_name'] = self.name

    def to_dict(self):
        """Serialize this library (and its adapter options) to a plain dict."""
        data = {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p != 'rna_type'}
        data['options'] = self.adapter.to_dict()
        data['adapter'] = data.pop('adapter_name')
        del data['adapters']
        return data

    def set_library_path(self):
        '''Update the Blender Preference Filepaths tab with the addon libraries'''
        prefs = bpy.context.preferences

        name = self.library_name
        prev_name = self.get('asset_library') or name

        lib = prefs.filepaths.asset_libraries.get(prev_name)
        lib_path = self.library_path

        # NOTE(review): library_path returns a Path, which is always truthy,
        # so this guard probably never triggers — confirm intended check.
        if not lib_path:
            self.clear_library_path()
            return

        if not self.use:
            # Only drop the entry when no merged library still needs it.
            if all(not l.use for l in self.merge_libraries):
                self.clear_library_path()
            return

        # Create the Asset Library entry if it does not exist yet.
        if not lib:
            try:
                bpy.ops.preferences.asset_library_add(directory=str(lib_path))
            except AttributeError:
                return

            lib = prefs.filepaths.asset_libraries[-1]

        lib.name = name
        self['asset_library'] = name
        lib.path = str(lib_path)

    @property
    def is_user(self):
        """True when this library belongs to the user-defined collection."""
        prefs = get_addon_prefs()
        return self in prefs.user_libraries.values()

    @property
    def is_env(self):
        """True when this library comes from the environment config."""
        prefs = get_addon_prefs()
        return self in prefs.env_libraries.values()

    def add_row(self, layout, data=None, prop=None, label='',
                boolean=None, factor=0.39):
        '''Act like the use_property_split but with much more control'''

        enabled = True
        split = layout.split(factor=factor, align=True)

        # Left side: label and optional enabling checkbox.
        row = split.row(align=False)
        row.use_property_split = False
        row.alignment = 'RIGHT'
        row.label(text=str(label))
        if boolean:
            boolean_data = self
            # `boolean` may be a (data, prop_name) pair.
            if isinstance(boolean, (list, tuple)):
                boolean_data, boolean = boolean

            row.prop(boolean_data, boolean, text='')
            enabled = getattr(boolean_data, boolean)

        # Right side: the property itself (greyed out when disabled).
        row = split.row(align=True)
        row.enabled = enabled

        if isinstance(data, str):
            row.label(text=data)
        else:
            row.prop(data or self, prop, text='')

        return split

    def draw_operators(self, layout):
        """Draw the per-library adapter selector and diff/bundle buttons."""
        row = layout.row(align=True)
        row.alignment = 'RIGHT'
        row.prop(self, 'adapter_name', text='')
        row.prop(self, 'auto_bundle', text='', icon='UV_SYNC_SELECT')

        row.operator("assetlib.diff", text='', icon='FILE_REFRESH').name = self.name

        op = row.operator("assetlib.bundle", icon='MOD_BUILD', text='')
        op.name = self.name

        layout.separator(factor=3)

    def draw_extra(self, layout):
        """Draw the collapsible 'Conform Options' sub-panel."""
        col = layout.column(align=False)

        row = col.row(align=True)
        row.use_property_split = False
        icon = "DISCLOSURE_TRI_DOWN" if self.expand_extra else "DISCLOSURE_TRI_RIGHT"
        row.label(icon='BLANK1')
        subrow = row.row(align=True)
        subrow.alignment = 'LEFT'
        subrow.prop(self, 'expand_extra', icon=icon, emboss=False, text="Conform Options")
        subrow = row.row(align=True)
        subrow.alignment = 'RIGHT'
        subrow.prop(self.conform, "adapter_name", text='')

        op = subrow.operator('assetlib.diff', text='', icon='FILE_REFRESH')
        op.name = self.name
        op.conform = True

        op = subrow.operator('assetlib.bundle', text='', icon='MOD_BUILD')
        op.name = self.name
        op.directory = self.conform.directory
        subrow.label(icon='BLANK1')

        if self.expand_extra and self.conform.adapter:
            col.separator()
            col.prop(self.conform, "directory")
            col.prop(self.conform, "blend_depth")
            col.prop(self.conform, "externalize_data")
            col.prop(self.conform, "image_template", text='Image Template')
            col.prop(self.conform, "video_template", text='Video Template')

            col.separator()
            self.conform.adapter.draw_prefs(col)

            col.separator()

    def draw(self, layout):
        """Draw this library's box in the addon preferences."""
        prefs = get_addon_prefs()

        box = layout.box()

        row = box.row(align=True)
        icon = "DISCLOSURE_TRI_DOWN" if self.expand else "DISCLOSURE_TRI_RIGHT"
        row.prop(self, 'expand', icon=icon, emboss=False, text='')

        if self.is_user:
            # User libraries are editable and removable.
            row.prop(self, 'use', text='')
            row.prop(self, 'data_type', icon_only=True, emboss=False)
            row.prop(self, 'name', text='')

            self.draw_operators(row)

            index = prefs.user_libraries.index(self)
            row.operator("assetlib.remove_user_library", icon="X", text='', emboss=False).index = index

        else:
            # Env libraries: name/type are read-only.
            row.prop(self, 'use', text='')
            row.label(icon=ICONS[self.data_type])
            row.label(text=self.name)

            self.draw_operators(row)

            sub_row = row.row()
            sub_row.enabled = False
            sub_row.label(icon='FAKE_USER_ON')

        if self.expand:
            col = box.column(align=False)
            col.use_property_split = True

            row = self.add_row(col,
                prop="custom_bundle_name",
                boolean="use_custom_bundle_name",
                label='Custom Bundle Name')

            row.enabled = not self.use_custom_bundle_directory

            prop = "bundle_directory"
            if self.use_custom_bundle_directory:
                prop = "custom_bundle_directory"

            self.add_row(col, prop=prop,
                boolean="use_custom_bundle_directory",
                label='Custom Bundle Directory',
            )

            if self.adapter:
                col.separator()
                self.adapter.draw_prefs(col)

            col.separator()
            self.draw_extra(col)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Collections:
    '''Util class presenting several collections as one merged, ordered view.

    Used to expose ``env_libraries`` and ``user_libraries`` as a single
    ``libraries`` object. Lookup is by key (str) or index (int).
    '''

    collections = []

    def __init__(self, *collection):
        self.collections = collection

        # Mirror every public attribute of each source collection onto this
        # object so the underlying collection API keeps working on the view.
        # NOTE(review): later collections shadow earlier ones, and copied
        # attributes shadow same-named methods of this class (e.g. `get`);
        # presumed intentional — confirm.
        for col in collection:
            for attr in dir(col):
                if attr.startswith('_'):
                    continue

                setattr(self, attr, getattr(col, attr))

    def __contains__(self, item):
        if isinstance(item, str):
            return item in self.to_dict()
        # BUGFIX: was `item in self`, which re-invoked __contains__ and
        # recursed forever for any non-string item.
        return item in self.to_list()

    def __iter__(self):
        return self.to_list().__iter__()

    def __getitem__(self, item):
        if isinstance(item, int):
            return self.to_list()[item]
        else:
            return self.to_dict()[item]

    def get(self, item, fallback=None):
        """Value for key `item`, or `fallback` when missing or falsy."""
        return self.to_dict().get(item) or fallback

    def to_dict(self):
        """All collections merged into one dict (later keys win)."""
        return {k: v for c in self.collections for k, v in c.items()}

    def to_list(self):
        """All values of all collections, in collection order."""
        return [v for c in self.collections for v in c.values()]

    def get_parent(self, item):
        """The source collection containing `item` as a value, or None."""
        for c in self.collections:
            if item in c.values():
                return c

    def index(self, item):
        """Index of `item` within its source collection.

        NOTE(review): when the item is in no collection this returns a bool
        (membership result) rather than raising — kept as-is, callers may
        rely on it.
        """
        c = self.get_parent(item)

        if not c:
            return item in self

        return list(c.values()).index(item)
|
||||||
|
|
||||||
|
|
||||||
|
#class AssetLibraryOptions(PropertyGroup):
|
||||||
|
# pass
|
||||||
|
|
||||||
|
|
||||||
|
class AssetLibraryPrefs(AddonPreferences):
    """Addon preferences: global bundle/config paths, preview template
    strings, the env/user library collections and adapter discovery."""

    bl_idname = __package__

    image_player: StringProperty(default='')
    video_player: StringProperty(default='')

    adapter_directory : StringProperty(
        name="Adapter Directory",
        subtype='DIR_PATH'
    )

    env_libraries : CollectionProperty(type=AssetLibrary)
    user_libraries : CollectionProperty(type=AssetLibrary)
    expand_settings: BoolProperty(default=False)

    bundle_directory : StringProperty(
        name="Path",
        subtype='DIR_PATH',
        default='',
        update=update_all_library_path
    )

    use_single_path : BoolProperty(default=True)
    asset_description_template : StringProperty(default='../{name}_asset_description.json')
    image_template : StringProperty(default='../{name}_image.png')
    video_template : StringProperty(default='../{name}_video.mov')

    config_directory : StringProperty(
        name="Config Path",
        subtype='FILE_PATH',
        default=str(RESOURCES_DIR/"asset_library_config.json"),
        update=update_library_config
    )

    def load_adapters(self):
        """Scan the built-in and user adapter directories for *.py files and
        register every AssetLibraryAdapter subclass found, attaching each as
        a PointerProperty on AssetLibraryAdapters and recording it in
        ADAPTERS."""
        from asset_library.adapters.adapter import AssetLibraryAdapter

        print('\n------Load Adapters')

        ADAPTERS.clear()

        adapter_files = list(ADAPTER_DIR.glob('*.py'))
        if self.adapter_directory:
            user_adapter_dir = Path(self.adapter_directory)
            if user_adapter_dir.exists():
                adapter_files += list(user_adapter_dir.glob('*.py'))

        for adapter_file in adapter_files:
            # BUGFIX: skip private modules *before* importing them; the
            # original imported every file and only then checked the name,
            # executing modules that were meant to be skipped.
            if adapter_file.stem.startswith('_'):
                continue

            mod = import_module_from_path(adapter_file)

            print(adapter_file)
            for name, obj in inspect.getmembers(mod):

                if not inspect.isclass(obj):
                    continue

                print(obj.__bases__)
                if not AssetLibraryAdapter in obj.__mro__:
                    continue

                # Skip the abstract base and already-registered names.
                if obj is AssetLibraryAdapter or obj.name in (a.name for a in ADAPTERS):
                    continue

                try:
                    print(f'Register Plugin {name}')
                    bpy.utils.register_class(obj)
                    setattr(AssetLibraryAdapters, norm_str(obj.name), bpy.props.PointerProperty(type=obj))
                    ADAPTERS.append(obj)

                except Exception as e:
                    print(f'Could not register adapter {name}')
                    print(e)

    @property
    def libraries(self):
        """Env and user libraries merged into a single collection view."""
        return Collections(self.env_libraries, self.user_libraries)

    def draw(self, context):
        """Draw the full preferences UI: settings box, one box per library,
        and the add-library button."""
        prefs = get_addon_prefs()

        layout = self.layout

        main_col = layout.column(align=False)

        box = main_col.box()
        row = box.row(align=True)
        icon = "DISCLOSURE_TRI_DOWN" if self.expand_settings else "DISCLOSURE_TRI_RIGHT"
        row.prop(self, 'expand_settings', icon=icon, emboss=False, text='')
        row.label(icon='PREFERENCES')
        row.label(text='Settings')
        subrow = row.row()
        subrow.alignment = 'RIGHT'
        subrow.operator("assetlib.reload_addon", text='Reload Addon')

        if prefs.expand_settings:
            col = box.column(align=True)
            col.use_property_split = True

            col.prop(self, 'bundle_directory', text='Bundle Directory')

            col.separator()

            col.prop(self, 'adapter_directory')
            col.prop(self, 'config_directory')

            col.separator()

            col.prop(self, 'asset_description_template', text='Asset Description Template', icon='COPY_ID')

            col.separator()

            col.prop(self, 'image_template', text='Image Template', icon='COPY_ID')
            col.prop(self, 'image_player', text='Image Player')

            col.separator()

            col.prop(self, 'video_template', text='Video Template', icon='COPY_ID')
            col.prop(self, 'video_player', text='Video Player')

            col.separator()

            # NOTE(review): label says 'Bundle All Libraries' but the
            # operator is assetlib.add_user_library (same as the add button
            # below) — looks like the wrong operator id; confirm before
            # changing.
            col.operator("assetlib.add_user_library", text='Bundle All Libraries', icon='MOD_BUILD')

        for lib in self.libraries:
            lib.draw(main_col)

        row = main_col.row()
        row.alignment = 'RIGHT'
        row.operator("assetlib.add_user_library", icon="ADD", text='', emboss=False)
|
||||||
|
|
||||||
|
|
||||||
|
# Registration order matters: PointerProperty targets must be registered
# before the classes that reference them.
classes = (
    AssetLibraryAdapters,
    ConformAssetLibrary,
    AssetLibrary,
    AssetLibraryPrefs,
)
|
||||||
|
|
||||||
|
def register():
    """Register preference classes, apply env-var overrides, load adapters."""
    for cls in classes:
        bpy.utils.register_class(cls)

    prefs = get_addon_prefs()

    # Environment variables override the saved preferences.
    # NOTE(review): `os` must be imported at module top (not visible in
    # this chunk) — confirm.
    overrides = (
        ('ASSETLIB_BUNDLE_DIR', 'bundle_directory'),
        ('ASSETLIB_CONFIG_DIR', 'config_directory'),
        ('ASSETLIB_ADAPTER_DIR', 'adapter_directory'),
    )
    for env_var, attr in overrides:
        value = os.getenv(env_var)
        if value:
            setattr(prefs, attr, os.path.expandvars(value))

    prefs.load_adapters()
|
||||||
|
|
||||||
|
def unregister():
    """Unregister dynamically loaded adapters and addon classes, newest first."""
    all_classes = list(classes) + ADAPTERS
    for cls in reversed(all_classes):
        bpy.utils.unregister_class(cls)

    ADAPTERS.clear()
|
Binary file not shown.
After Width: | Height: | Size: 731 B |
Loading…
Reference in New Issue