Make all features working

parent 060aa80c9b
commit d1c17581ff

__init__.py (19 lines changed)
|
@ -23,9 +23,9 @@ from asset_library import pose
|
|||
from asset_library import action
|
||||
from asset_library import collection
|
||||
from asset_library import file
|
||||
from asset_library import (gui, keymaps, prefs, operators)
|
||||
from asset_library import (gui, keymaps, preferences, operators)
|
||||
from asset_library import constants
|
||||
#from asset_library.common.adapter import AssetLibraryAdapter
|
||||
#from asset_library.common.library_type import LibraryType
|
||||
from asset_library.common.bl_utils import get_addon_prefs
|
||||
from asset_library.common.functions import set_env_libraries
|
||||
from asset_library.common.template import Template
|
||||
|
@ -38,10 +38,11 @@ if 'bpy' in locals():
|
|||
|
||||
import importlib
|
||||
|
||||
importlib.reload(constants)
|
||||
importlib.reload(gui)
|
||||
importlib.reload(keymaps)
|
||||
|
||||
importlib.reload(prefs)
|
||||
importlib.reload(preferences)
|
||||
importlib.reload(operators)
|
||||
importlib.reload(constants)
|
||||
|
||||
|
@ -63,7 +64,7 @@ bl_modules = (
|
|||
file,
|
||||
keymaps,
|
||||
gui,
|
||||
prefs
|
||||
preferences
|
||||
)
|
||||
|
||||
|
||||
|
@ -72,13 +73,14 @@ def load_handler():
|
|||
|
||||
set_env_libraries()
|
||||
bpy.ops.assetlib.set_paths(all=True)
|
||||
#bpy.ops.assetlib.#(all=True, only_recent=True)
|
||||
|
||||
bpy.ops.assetlib.bundle(blocking=False, mode='AUTO_BUNDLE')
|
||||
if not bpy.app.background:
|
||||
bpy.ops.assetlib.bundle(blocking=False, mode='AUTO_BUNDLE')
|
||||
|
||||
|
||||
|
||||
def register() -> None:
|
||||
|
||||
|
||||
for m in bl_modules:
|
||||
m.register()
|
||||
|
@ -92,5 +94,10 @@ def register() -> None:
|
|||
|
||||
|
||||
def unregister() -> None:
|
||||
prefs = get_addon_prefs()
|
||||
bpy.utils.previews.remove(prefs.previews)
|
||||
|
||||
for m in reversed(bl_modules):
|
||||
m.unregister()
|
||||
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ def draw_context_menu(layout):
|
|||
layout.operator_context = 'INVOKE_DEFAULT'
|
||||
|
||||
#layout.operator("assetlib.rename_asset", text="Rename Action")
|
||||
layout.operator("assetlib.clear_asset", text="Remove Asset")
|
||||
layout.operator("assetlib.remove_assets", text="Remove Assets")
|
||||
layout.operator("assetlib.edit_data", text="Edit Asset data")
|
||||
|
||||
#layout.operator("actionlib.clear_asset", text="Clear Asset (Fake User)").use_fake_user = True
|
||||
|
|
|
@ -21,6 +21,7 @@ import uuid
|
|||
import time
|
||||
from pathlib import Path
|
||||
from functools import partial
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
from asset_library.pose.pose_creation import(
|
||||
|
@ -90,7 +91,8 @@ from asset_library.common.bl_utils import (
|
|||
split_path,
|
||||
get_preview,
|
||||
get_view3d_persp,
|
||||
load_assets_from,
|
||||
get_viewport,
|
||||
#load_assets_from,
|
||||
get_asset_space_params,
|
||||
get_bl_cmd,
|
||||
get_overriden_col
|
||||
|
@ -275,7 +277,7 @@ class ACTIONLIB_OT_apply_anim(Operator):
|
|||
lib = get_active_library()
|
||||
if 'filepath' in asset_file_handle.asset_data:
|
||||
action_path = asset_file_handle.asset_data['filepath']
|
||||
action_path = lib.adapter.format_path(action_path)
|
||||
action_path = lib.library_type.format_path(action_path)
|
||||
else:
|
||||
action_path = bpy.types.AssetHandle.get_full_library_path(
|
||||
asset_file_handle, asset_library_ref
|
||||
|
@ -778,14 +780,68 @@ class ACTIONLIB_OT_open_blendfile(Operator):
|
|||
return {'FINISHED'}
|
||||
|
||||
|
||||
|
||||
|
||||
#LIBRARY_ITEMS = []
|
||||
'''
|
||||
def callback_operator(modal_func, operator, override={}):
|
||||
|
||||
def wrap(self, context, event):
|
||||
ret, = retset = modal_func(self, context, event)
|
||||
if ret in {'FINISHED'}:
|
||||
|
||||
with context.temp_override(**override):
|
||||
callback()
|
||||
|
||||
return retset
|
||||
return wrap
|
||||
'''
|
||||
|
||||
|
||||
class ACTIONLIB_OT_make_custom_preview(Operator):
|
||||
bl_idname = "actionlib.make_custom_preview"
|
||||
bl_label = "Custom Preview"
|
||||
bl_description = "Set a camera to preview an asset"
|
||||
|
||||
def modal(self, context, event):
|
||||
prefs = get_addons_prefs()
|
||||
|
||||
if not prefs.preview_modal:
|
||||
with context.temp_override(area=self.source_area, region=self.source_area.regions[-1]):
|
||||
bpy.ops.actionlib.store_anim_pose("INVOKE_DEFAULT", clear_previews=False, **prefs.add_asset_dict)
|
||||
return {"FINISHED"}
|
||||
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
|
||||
self.source_area = bpy.context.area
|
||||
|
||||
view3d = get_viewport()
|
||||
with context.temp_override(area=view3d, region=view3d.regions[-1], window=context.window):
|
||||
# To close the popup
|
||||
bpy.ops.screen.screen_full_area()
|
||||
bpy.ops.screen.back_to_previous()
|
||||
|
||||
view3d = get_viewport()
|
||||
with context.temp_override(area=view3d, region=view3d.regions[-1], window=context.window):
|
||||
bpy.ops.assetlib.make_custom_preview('INVOKE_DEFAULT', modal=True)
|
||||
|
||||
context.window_manager.modal_handler_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
|
||||
def get_preview_items(self, context):
|
||||
prefs = get_addon_prefs()
|
||||
return sorted([(k, k, '', v.icon_id, index) for index, (k, v) in enumerate(prefs.previews.items())], reverse=True)
|
||||
|
||||
|
||||
|
||||
class ACTIONLIB_OT_store_anim_pose(Operator):
|
||||
bl_idname = "actionlib.store_anim_pose"
|
||||
bl_label = "Add Action to the current library"
|
||||
bl_description = "Store current pose/anim to local library"
|
||||
|
||||
#use_new_folder: BoolProperty(default=False)
|
||||
warning: StringProperty(name='')
|
||||
path: StringProperty(name='Path')
|
||||
catalog: StringProperty(name='Catalog', update=asset_warning_callback, options={'TEXTEDIT_UPDATE'})
|
||||
|
@ -795,13 +851,12 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
frame_end: IntProperty(name="Frame End")
|
||||
tags: StringProperty(name='Tags', description='Tags need to separate with a comma (,)')
|
||||
description: StringProperty(name='Description')
|
||||
preview : EnumProperty(items=get_preview_items)
|
||||
clear_previews : BoolProperty(default=True)
|
||||
store_library: StringProperty(name='Store Library')
|
||||
|
||||
#library: EnumProperty(items=lambda s, c: s.library_items, name="Library")
|
||||
#library: EnumProperty(items=lambda s, c: LIBRARY_ITEMS, name="Library")
|
||||
#CLIPBOARD_ASSET_MARKER = "ASSET-BLEND="
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context: Context) -> bool:
|
||||
def poll(cls, context: Context) -> bool:
|
||||
ob = context.object
|
||||
if not ob:
|
||||
cls.poll_message_set(f'You have no active object')
|
||||
|
@ -826,16 +881,36 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
# col.operator("asset.tag_add", icon='ADD', text="")
|
||||
# col.operator("asset.tag_remove", icon='REMOVE', text="")
|
||||
|
||||
def to_dict(self):
|
||||
keys = ("catalog", "name", "action_type", "frame_start", "frame_end", "tags", "description", "store_library")
|
||||
return {k : getattr(self, k) for k in keys}
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
layout.separator()
|
||||
prefs = get_addon_prefs()
|
||||
|
||||
#row = layout.row(align=True)
|
||||
layout.use_property_split = True
|
||||
#layout.alignment = 'LEFT'
|
||||
split = layout.split(factor=0.39, align=True)
|
||||
#row = split.row(align=False)
|
||||
#split.use_property_split = False
|
||||
split.alignment = 'RIGHT'
|
||||
|
||||
split.label(text='Preview')
|
||||
|
||||
sub = split.row(align=True)
|
||||
sub.template_icon_view(self, "preview", show_labels=False)
|
||||
sub.separator()
|
||||
sub.operator("actionlib.make_custom_preview", icon='RESTRICT_RENDER_OFF', text='')
|
||||
|
||||
prefs.add_asset_dict.clear()
|
||||
prefs.add_asset_dict.update(self.to_dict())
|
||||
|
||||
sub.label(icon='BLANK1')
|
||||
#layout.ui_units_x = 50
|
||||
|
||||
#row = layout.row(align=True)
|
||||
layout.use_property_split = True
|
||||
|
||||
if self.current_library.merge_libraries:
|
||||
layout.prop(self.current_library, 'store_library', expand=False)
|
||||
|
||||
|
@ -890,7 +965,14 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
self.asset_action.asset_mark()
|
||||
self.area = context.area
|
||||
self.current_library = get_active_library()
|
||||
#self.sce
|
||||
|
||||
if self.store_library:
|
||||
self.current_library.store_library = self.store_library
|
||||
else:
|
||||
lib = self.current_library.library_type.get_active_asset_library()
|
||||
if lib.name:
|
||||
self.current_library.store_library = lib.name
|
||||
self.store_library = lib.name
|
||||
|
||||
#lib = self.current_library
|
||||
self.tags = ''
|
||||
|
@ -899,9 +981,21 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
#print(self, self.library_items)
|
||||
|
||||
self.catalog = get_active_catalog()
|
||||
self.set_action_type()
|
||||
self.set_action_type()
|
||||
|
||||
return context.window_manager.invoke_props_dialog(self, width=450)
|
||||
if self.clear_previews:
|
||||
prefs.previews.clear()
|
||||
|
||||
view3d = get_viewport()
|
||||
with context.temp_override(area=view3d, region=view3d.regions[-1]):
|
||||
bpy.ops.assetlib.make_custom_preview('INVOKE_DEFAULT')
|
||||
|
||||
else:
|
||||
preview_items = get_preview_items(self, context)
|
||||
if preview_items:
|
||||
self.preview = preview_items[0][0]
|
||||
|
||||
return context.window_manager.invoke_props_dialog(self, width=350)
|
||||
|
||||
def action_to_asset(self, action):
|
||||
#action.asset_mark()
|
||||
|
@ -945,34 +1039,20 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
|
||||
return action
|
||||
|
||||
def render_preview(self, image_path, video_path):
|
||||
def render_animation(self, video_path):
|
||||
ctx = bpy.context
|
||||
scn = ctx.scene
|
||||
vl = ctx.view_layer
|
||||
area = get_view3d_persp()
|
||||
space = area.spaces.active
|
||||
|
||||
preview_attrs = [
|
||||
attrs = [
|
||||
(scn, 'use_preview_range', True),
|
||||
(scn, 'frame_preview_start', self.frame_start),
|
||||
(scn, 'frame_preview_end', self.frame_end),
|
||||
(scn.render, 'resolution_percentage', 100),
|
||||
(space.overlay, 'show_overlays', False),
|
||||
(space.region_3d, 'view_perspective', 'CAMERA'),
|
||||
]
|
||||
|
||||
image_attrs = [
|
||||
(scn.render, 'resolution_x', 512),
|
||||
(scn.render, 'resolution_y', 512),
|
||||
(scn.render, 'film_transparent', True),
|
||||
(scn.render.image_settings, 'file_format', 'PNG'),
|
||||
(scn.render.image_settings, 'color_mode', 'RGBA'),
|
||||
(scn.render.image_settings, 'color_depth', 8),
|
||||
(scn.render, 'use_overwrite', True),
|
||||
(scn.render, 'filepath', str(image_path))
|
||||
]
|
||||
|
||||
video_attrs = [
|
||||
(scn.render, 'resolution_x', 1280),
|
||||
(scn.render, 'resolution_y', 720),
|
||||
(scn.render.image_settings, 'file_format', 'FFMPEG'),
|
||||
|
@ -984,10 +1064,6 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
(scn.render, 'filepath', str(video_path)),
|
||||
]
|
||||
|
||||
with attr_set(preview_attrs+image_attrs):
|
||||
with ctx.temp_override(area=area):
|
||||
bpy.ops.render.opengl(write_still=True)
|
||||
|
||||
if self.action_type == "ANIMATION":
|
||||
with attr_set(preview_attrs+video_attrs):
|
||||
with ctx.temp_override(area=area):
|
||||
|
@ -1000,7 +1076,8 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
bpy.ops.asset.library_refresh({"area": area, 'region': area.regions[3]})
|
||||
#space_data.activate_asset_by_id(asset, deferred=deferred)
|
||||
|
||||
def execute(self, context: Context):
|
||||
def execute(self, context: Context):
|
||||
|
||||
scn = context.scene
|
||||
vl = context.view_layer
|
||||
ob = context.object
|
||||
|
@ -1011,11 +1088,11 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
if lib.merge_libraries:
|
||||
lib = prefs.libraries[self.current_library.store_library]
|
||||
|
||||
#lib_path = lib.library_path
|
||||
#name = lib.adapter.norm_file_name(self.name)
|
||||
asset_path = lib.adapter.get_asset_path(name=self.name, catalog=self.catalog)
|
||||
img_path = lib.adapter.get_image_path(name=self.name, catalog=self.catalog, filepath=asset_path)
|
||||
video_path = lib.adapter.get_video_path(name=self.name, catalog=self.catalog, filepath=asset_path)
|
||||
lib_type = lib.library_type
|
||||
|
||||
asset_path = lib_type.get_asset_path(name=self.name, catalog=self.catalog)
|
||||
img_path = lib_type.get_image_path(name=self.name, catalog=self.catalog, filepath=asset_path)
|
||||
video_path = lib_type.get_video_path(name=self.name, catalog=self.catalog, filepath=asset_path)
|
||||
|
||||
## Copy Action
|
||||
current_action = ob.animation_data.action
|
||||
|
@ -1025,31 +1102,31 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
|
||||
self.action_to_asset(asset_action)
|
||||
|
||||
#lib.adapter.new_asset()
|
||||
#lib_type.new_asset()
|
||||
|
||||
#Saving the preview
|
||||
self.render_preview(img_path, video_path)
|
||||
with context.temp_override(id=asset_action):
|
||||
bpy.ops.ed.lib_id_load_custom_preview(
|
||||
filepath=str(img_path)
|
||||
)
|
||||
#Saving the video
|
||||
if self.action_type == "ANIMATION":
|
||||
self.render_animation(video_path)
|
||||
|
||||
#Saving the preview image
|
||||
preview = prefs.previews[self.preview]
|
||||
lib_type.write_preview(preview, img_path)
|
||||
|
||||
lib.adapter.write_asset(asset=asset_action, asset_path=asset_path)
|
||||
# Transfer the pixels to the action preview
|
||||
pixels = [0] * preview.image_size[0] * preview.image_size[1] * 4
|
||||
preview.image_pixels_float.foreach_get(pixels)
|
||||
asset_action.preview_ensure().image_pixels_float.foreach_set(pixels)
|
||||
|
||||
lib_type.write_asset(asset=asset_action, asset_path=asset_path)
|
||||
|
||||
asset_data = lib.adapter.get_asset_data(asset_action)
|
||||
asset_data = dict(lib_type.get_asset_data(asset_action), catalog=self.catalog)
|
||||
asset_info = lib_type.format_asset_info([asset_data], asset_path=asset_path)
|
||||
|
||||
diff = [dict(asset_data,
|
||||
image=str(img_path),
|
||||
filepath=str(asset_path),
|
||||
type='ACTION',
|
||||
library_id=lib.id,
|
||||
catalog=self.catalog,
|
||||
operation='ADD'
|
||||
)]
|
||||
# lib.adapter.write_description_file(asset_description, asset_path)
|
||||
#print('asset_info')
|
||||
#pprint(asset_info)
|
||||
|
||||
diff = [dict(a, operation='ADD') for a in lib_type.flatten_cache([asset_info])]
|
||||
|
||||
# Restore action and cleanup
|
||||
ob.animation_data.action = current_action
|
||||
|
||||
asset_action.asset_clear()
|
||||
|
@ -1059,7 +1136,7 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
|
|||
# TODO Write a proper method for this
|
||||
diff_path = Path(bpy.app.tempdir, 'diff.json')
|
||||
|
||||
#diff = [dict(a, operation='ADD') for a in [asset_description])]
|
||||
#diff = [dict(a, operation='ADD') for a in [asset_info])]
|
||||
diff_path.write_text(json.dumps(diff, indent=4))
|
||||
|
||||
bpy.ops.assetlib.bundle(name=lib.name, diff=str(diff_path), blocking=True)
|
||||
|
@ -1085,6 +1162,7 @@ classes = (
|
|||
ACTIONLIB_OT_update_action_data,
|
||||
ACTIONLIB_OT_assign_rest_pose,
|
||||
ACTIONLIB_OT_store_anim_pose,
|
||||
ACTIONLIB_OT_make_custom_preview
|
||||
)
|
||||
|
||||
register, unregister = bpy.utils.register_classes_factory(classes)
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
|
||||
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||
from asset_library.adapters.copy_folder import CopyFolderLibrary
|
||||
from asset_library.adapters.scan_folder import ScanFolderLibrary
|
|
@ -1,772 +1,12 @@
|
|||
|
||||
#from asset_library.common.functions import (norm_asset_datas,)
|
||||
from asset_library.common.bl_utils import get_addon_prefs, load_datablocks
|
||||
from asset_library.common.file_utils import read_file, write_file
|
||||
from asset_library.common.template import Template
|
||||
from asset_library.constants import (MODULE_DIR, RESOURCES_DIR)
|
||||
|
||||
from asset_library import (action, collection, file)
|
||||
|
||||
from bpy.types import PropertyGroup
|
||||
from bpy.props import StringProperty
|
||||
import bpy
|
||||
|
||||
from itertools import groupby
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import os
|
||||
import json
|
||||
import uuid
|
||||
import time
|
||||
from functools import partial
|
||||
import subprocess
|
||||
from glob import glob
|
||||
|
||||
|
||||
class AssetLibraryAdapter(PropertyGroup):
|
||||
class Adapter(PropertyGroup):
|
||||
|
||||
#def __init__(self):
|
||||
name = "Base Adapter"
|
||||
#library = None
|
||||
|
||||
@property
|
||||
def library(self):
|
||||
prefs = self.addon_prefs
|
||||
for lib in prefs.libraries:
|
||||
if lib.adapter == self:
|
||||
return lib
|
||||
|
||||
@property
|
||||
def bundle_directory(self):
|
||||
return self.library.library_path
|
||||
|
||||
@property
|
||||
def data_type(self):
|
||||
return self.library.data_type
|
||||
|
||||
@property
|
||||
def data_types(self):
|
||||
return self.library.data_types
|
||||
|
||||
def get_catalog_path(self, directory=None):
|
||||
directory = directory or self.bundle_directory
|
||||
return Path(directory, 'blender_assets.cats.txt')
|
||||
|
||||
@property
|
||||
def cache_file(self):
|
||||
return Path(self.bundle_directory) / f"blender_assets.{self.library.id}.json"
|
||||
|
||||
@property
|
||||
def tmp_cache_file(self):
|
||||
return Path(bpy.app.tempdir) / f"blender_assets.{self.library.id}.json"
|
||||
|
||||
@property
|
||||
def diff_file(self):
|
||||
return Path(bpy.app.tempdir, 'diff.json')
|
||||
|
||||
@property
|
||||
def preview_blend(self):
|
||||
return MODULE_DIR / self.data_type.lower() / "preview.blend"
|
||||
|
||||
@property
|
||||
def preview_assets_file(self):
|
||||
return Path(bpy.app.tempdir, "preview_assets_file.json")
|
||||
|
||||
@property
|
||||
def addon_prefs(self):
|
||||
return get_addon_prefs()
|
||||
|
||||
@property
|
||||
def module_type(self):
|
||||
lib_type = self.library.data_type
|
||||
if lib_type == 'ACTION':
|
||||
return action
|
||||
elif lib_type == 'FILE':
|
||||
return file
|
||||
elif lib_type == 'COLLECTION':
|
||||
return collection
|
||||
|
||||
def to_dict(self):
|
||||
return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
|
||||
|
||||
@property
|
||||
def format_data(self):
|
||||
"""Dict for formating template"""
|
||||
return dict(self.to_dict(), bundle_dir=self.library.library_path)
|
||||
|
||||
def fetch(self):
|
||||
raise Exception('This method needs to be defined in the adapter')
|
||||
|
||||
def norm_file_name(self, name):
|
||||
return name.replace(' ', '_')
|
||||
|
||||
def read_file(self, file):
|
||||
return read_file(file)
|
||||
|
||||
def write_file(self, file, data):
|
||||
return write_file(file, data)
|
||||
|
||||
def copy_file(self, source, destination):
|
||||
src = Path(source)
|
||||
dst = Path(destination)
|
||||
|
||||
if not src.exists():
|
||||
print(f'Cannot copy file {src}: file does not exist')
|
||||
return
|
||||
|
||||
dst.parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
if src == dst:
|
||||
print(f'Cannot copy file {src}: source and destination are the same')
|
||||
return
|
||||
|
||||
print(f'Copy file from {src} to {dst}')
|
||||
shutil.copy2(str(src), str(dst))
|
||||
|
||||
def load_datablocks(self, src, names=None, type='objects', link=True, expr=None, assets_only=False):
|
||||
"""Link or append a datablock from a blendfile"""
|
||||
|
||||
if type.isupper():
|
||||
type = f'{type.lower()}s'
|
||||
|
||||
return load_datablocks(src, names=names, type=type, link=link, expr=expr, assets_only=assets_only)
|
||||
|
||||
def get_asset_data(self, asset):
|
||||
"""Extract asset information on a datablock"""
|
||||
|
||||
return dict(
|
||||
name=asset.name,
|
||||
author=asset.asset_data.author,
|
||||
tags=list(asset.asset_data.tags.keys()),
|
||||
metadata=dict(asset.asset_data),
|
||||
description=asset.asset_data.description,
|
||||
)
|
||||
|
||||
def get_asset_relative_path(self, name, catalog):
|
||||
'''Get a relative path for the asset'''
|
||||
name = self.norm_file_name(name)
|
||||
return Path(catalog, name, name).with_suffix('.blend')
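For illustration, the relative layout this produces (editor's sketch; the adapter instance and values are hypothetical):

# adapter.get_asset_relative_path(name='My Hero', catalog='chars/main')
# -> Path('chars/main/My_Hero/My_Hero.blend')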
|
||||
|
||||
def get_active_asset_library(self):
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
prefs = get_addon_prefs()
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
|
||||
lib = None
|
||||
if '.library_id' in asset_handle.asset_data:
|
||||
lib_id = asset_handle.asset_data['.library_id']
|
||||
lib = next((l for l in prefs.libraries if l.id == lib_id), None)
|
||||
|
||||
if not lib:
|
||||
print(f"No library found for id {lib_id}")
|
||||
|
||||
if not lib:
|
||||
lib = self
|
||||
|
||||
return lib
|
||||
|
||||
def get_active_asset_path(self):
|
||||
'''Get the full path of the active asset_handle from the asset browser'''
|
||||
prefs = get_addon_prefs()
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
|
||||
lib = self.get_active_asset_library()
|
||||
|
||||
if 'filepath' in asset_handle.asset_data:
|
||||
asset_path = asset_handle.asset_data['filepath']
|
||||
asset_path = lib.adapter.format_path(asset_path)
|
||||
else:
|
||||
asset_path = bpy.types.AssetHandle.get_full_library_path(
|
||||
asset_handle, bpy.context.asset_library_ref
|
||||
)
|
||||
|
||||
return asset_path
|
||||
|
||||
def get_image_path(self, name, catalog, filepath):
|
||||
raise Exception('Needs to be defined in the adapter')
|
||||
|
||||
def get_video_path(self, name, catalog, filepath):
|
||||
raise Exception('Needs to be defined in the adapter')
|
||||
|
||||
def new_asset(self, asset, asset_data):
|
||||
raise Exception('Needs to be defined in the adapter')
|
||||
|
||||
def remove_asset(self, asset, asset_data):
|
||||
raise Exception('Needs to be defined in the adapter')
|
||||
|
||||
|
||||
def format_asset_data(self, data):
|
||||
"""Get a dict for use in template fields"""
|
||||
return {
|
||||
'asset_name': data['name'],
|
||||
'asset_path': Path(data['filepath']),
|
||||
'catalog': data['catalog'],
|
||||
'catalog_name': data['catalog'].replace('/', '_'),
|
||||
}
|
||||
|
||||
def format_path(self, template, data={}, **kargs):
|
||||
if not template:
|
||||
return None
|
||||
|
||||
if data:
|
||||
data = self.format_asset_data(dict(data, **kargs))
|
||||
else:
|
||||
data = kargs
|
||||
|
||||
|
||||
if template.startswith('.'): #the template is relative
|
||||
template = Path(data['asset_path'], template).as_posix()
|
||||
|
||||
params = dict(
|
||||
**data,
|
||||
**self.format_data,
|
||||
)
|
||||
|
||||
return Template(template).format(params).resolve()
|
||||
|
||||
def find_path(self, template, data, **kargs):
|
||||
path = self.format_path(template, data, **kargs)
|
||||
paths = glob(str(path))
|
||||
if paths:
|
||||
return Path(paths[0])
|
||||
|
||||
def read_asset_description_file(self, asset_path) -> dict:
|
||||
"""Read the description file of the asset"""
|
||||
|
||||
description_path = self.get_description_path(asset_path)
|
||||
return self.read_file(description_path)
|
||||
|
||||
def write_description_file(self, asset_data, asset_path) -> None:
|
||||
description_path = self.get_description_path(asset_path)
|
||||
return write_file(description_path, asset_data)
|
||||
|
||||
def write_asset(self, asset, asset_path):
|
||||
|
||||
Path(asset_path).parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
bpy.data.libraries.write(
|
||||
str(asset_path),
|
||||
{asset},
|
||||
path_remap="NONE",
|
||||
fake_user=True,
|
||||
compress=True
|
||||
)
|
||||
|
||||
def read_catalog(self, directory=None):
|
||||
"""Read the catalog file of the library target directory or of the specified directory"""
|
||||
catalog_path = self.get_catalog_path(directory)
|
||||
|
||||
if not catalog_path.exists():
|
||||
return {}
|
||||
|
||||
cat_data = {}
|
||||
|
||||
for line in catalog_path.read_text(encoding="utf-8").split('\n'):
|
||||
if line.startswith(('VERSION', '#')) or not line:
|
||||
continue
|
||||
|
||||
cat_id, cat_path, cat_name = line.split(':')
|
||||
cat_data[cat_path] = {'id':cat_id, 'name':cat_name}
|
||||
|
||||
return cat_data
|
||||
|
||||
def write_catalog(self, catalog_data, directory=None):
|
||||
"""Write the catalog file in the library target directory or of the specified directory"""
|
||||
|
||||
catalog_path = self.get_catalog_path(directory)
|
||||
|
||||
lines = ['VERSION 1', '']
|
||||
|
||||
# Add missing parent catalogs
|
||||
norm_data = {}
|
||||
for cat_path, cat_data in catalog_data.items():
|
||||
norm_data[cat_path] = cat_data
|
||||
for p in Path(cat_path).parents[:-1]:
|
||||
if p in cat_data or p in norm_data:
|
||||
continue
|
||||
|
||||
norm_data[p.as_posix()] = {'id': str(uuid.uuid4()), 'name': '-'.join(p.parts)}
|
||||
|
||||
for cat_path, cat_data in sorted(norm_data.items()):
|
||||
cat_name = cat_data['name'].replace('/', '-')
|
||||
lines.append(f"{cat_data['id']}:{cat_path}:{cat_name}")
|
||||
|
||||
print(f'Catalog written at: {catalog_path}')
|
||||
catalog_path.write_text('\n'.join(lines), encoding="utf-8")
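For reference, the catalog file read and written here follows the plain-text format used by the code above: a VERSION header followed by one `id:catalog/path:simple-name` entry per line. A minimal sketch of what write_catalog emits (the UUIDs are placeholders):

VERSION 1

2d4c6d2e-3f5a-4f7e-9b1a-111111111111:chars:chars
7e8f9a0b-1c2d-4e3f-8a9b-222222222222:chars/main:chars-main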
|
||||
|
||||
def read_cache(self, cache_path=None):
|
||||
cache_path = cache_path or self.cache_file
|
||||
print(f'Read cache from {cache_path}')
|
||||
return self.read_file(cache_path)
|
||||
|
||||
def write_cache(self, asset_descriptions, cache_path=None):
|
||||
cache_path = cache_path or self.cache_file
|
||||
print(f'Cache file written to {cache_path}')
|
||||
return write_file(cache_path, list(asset_descriptions))
|
||||
|
||||
def prop_rel_path(self, path, prop):
|
||||
'''Get a filepath relative to a property of the adapter'''
|
||||
field_prop = '{%s}/'%prop
|
||||
|
||||
prop_value = getattr(self, prop)
|
||||
prop_value = Path(os.path.expandvars(prop_value)).resolve()
|
||||
|
||||
rel_path = Path(path).resolve().relative_to(prop_value).as_posix()
|
||||
|
||||
return field_prop + rel_path
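In other words, prop_rel_path re-anchors an absolute path on one of the adapter's own properties; a small sketch with made-up paths:

# assuming source_directory = '/projects/lib'
# adapter.prop_rel_path('/projects/lib/chars/hero/hero.blend', 'source_directory')
# -> '{source_directory}/chars/hero/hero.blend'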
|
||||
|
||||
def write_preview(self, preview, filepath):
|
||||
if not preview or not filepath:
|
||||
return
|
||||
|
||||
filepath = Path(filepath)
|
||||
filepath.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
img_size = preview.image_size
|
||||
|
||||
px = [0] * img_size[0] * img_size[1] * 4
|
||||
preview.image_pixels_float.foreach_get(px)
|
||||
img = bpy.data.images.new(name=filepath.name, width=img_size[0], height=img_size[1], is_data=True, alpha=True)
|
||||
img.pixels.foreach_set(px)
|
||||
img.filepath_raw = str(filepath.with_suffix('.png'))
|
||||
img.file_format = 'PNG'
|
||||
img.save()
|
||||
|
||||
def draw_header(self, layout):
|
||||
"""Draw the header of the Asset Browser Window"""
|
||||
#layout.separator()
|
||||
|
||||
self.module_type.gui.draw_header(layout)
|
||||
|
||||
def draw_context_menu(self, layout):
|
||||
"""Draw the context menu of the Asset Browser Window"""
|
||||
self.module_type.gui.draw_context_menu(layout)
|
||||
|
||||
def generate_blend_preview(self, asset_description):
|
||||
asset_name = asset_description['name']
|
||||
catalog = asset_description['catalog']
|
||||
|
||||
asset_path = self.format_path(asset_description['filepath'])
|
||||
dst_image_path = self.get_image_path(asset_name, asset_path, catalog)
|
||||
|
||||
if dst_image_path.exists():
|
||||
return
|
||||
|
||||
# Check if a source image exists and, if so, copy it into the new directory
|
||||
src_image_path = asset_description.get('image')
|
||||
if src_image_path:
|
||||
src_image_path = self.get_template_path(src_image_path, asset_name, asset_path, catalog)
|
||||
if src_image_path and src_image_path.exists():
|
||||
self.copy_file(src_image_path, dst_image_path)
|
||||
return
|
||||
|
||||
print(f'Thumbnailing {asset_path} to {dst_image_path}')
|
||||
blender_thumbnailer = Path(bpy.app.binary_path).parent / 'blender-thumbnailer'
|
||||
|
||||
dst_image_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
subprocess.call([blender_thumbnailer, str(asset_path), str(dst_image_path)])
|
||||
|
||||
success = dst_image_path.exists()
|
||||
|
||||
if not success:
|
||||
empty_preview = RESOURCES_DIR / 'empty_preview.png'
|
||||
self.copy_file(str(empty_preview), str(dst_image_path))
|
||||
|
||||
return success
|
||||
|
||||
def generate_asset_preview(self, asset_description):
|
||||
"""Only generate preview when conforming a library"""
|
||||
|
||||
#print('\ngenerate_preview', asset_description['filepath'])
|
||||
|
||||
scn = bpy.context.scene
|
||||
#Creating the preview for collection, object or material
|
||||
camera = scn.camera
|
||||
vl = bpy.context.view_layer
|
||||
|
||||
data_type = self.data_type #asset_description['data_type']
|
||||
asset_path = self.format_path(asset_description['filepath'])
|
||||
|
||||
# Check if a source video exists and, if so, copy it into the new directory
|
||||
if self.library.template_video:
|
||||
for asset_data in asset_description['assets']:
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
dst_video_path = self.format_path(self.library.template_video, asset_data, filepath=dst_asset_path) #Template(src_video_path).find(asset_data, asset_path=dst_asset_path, **self.format_data)
|
||||
|
||||
if dst_video_path.exists():
|
||||
print(f'The dest video {dst_video_path} already exists')
|
||||
continue
|
||||
|
||||
src_video_template = asset_data.get('video')
|
||||
if not src_video_template:
|
||||
continue
|
||||
|
||||
src_video_path = self.find_path(src_video_template, asset_data, filepath=asset_path)#Template(src_video_path).find(asset_data, asset_path=dst_asset_path, **self.format_data)
|
||||
if src_video_path:
|
||||
print(f'Copy video from {src_video_path} to {dst_video_path}')
|
||||
self.copy_file(src_video_path, dst_video_path)
|
||||
|
||||
# Check if the asset has a preview image or needs one to be generated
|
||||
asset_data_names = {}
|
||||
|
||||
if self.library.template_image:
|
||||
for asset_data in asset_description['assets']:
|
||||
name = asset_data['name']
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
|
||||
dst_image_path = self.format_path(self.library.template_image, asset_data, filepath=dst_asset_path)
|
||||
if dst_image_path.exists():
|
||||
print(f'The dest image {dst_image_path} already exists')
|
||||
continue
|
||||
|
||||
# Check if a source image exists and, if so, copy it into the new directory
|
||||
src_image_template = asset_data.get('image')
|
||||
if src_image_template:
|
||||
src_image_path = self.find_path(src_image_template, asset_data, filepath=asset_path)
|
||||
|
||||
if src_image_path:
|
||||
self.copy_file(src_image_path, dst_image_path)
|
||||
#print(f'Copy image from {src_image_path} to {dst_image_path}')
|
||||
return
|
||||
|
||||
# Store in a dict all asset_data that do not have a preview
|
||||
asset_data_names[name] = dict(asset_data, image_path=dst_image_path)
|
||||
|
||||
|
||||
if not asset_data_names:
|
||||
# No preview to generate
|
||||
return
|
||||
|
||||
#print('Making Preview for', asset_data_names)
|
||||
|
||||
asset_names = list(asset_data_names.keys())
|
||||
assets = self.load_datablocks(asset_path, names=asset_names, link=True, type=data_type)
|
||||
|
||||
for asset in assets:
|
||||
if not asset:
|
||||
continue
|
||||
|
||||
asset_data = asset_data_names[asset.name]
|
||||
image_path = asset_data['image_path']
|
||||
|
||||
if asset.preview:
|
||||
print(f'Writing asset preview to {image_path}')
|
||||
self.write_preview(asset.preview, image_path)
|
||||
continue
|
||||
|
||||
if data_type == 'COLLECTION':
|
||||
|
||||
bpy.ops.object.collection_instance_add(name=asset.name)
|
||||
|
||||
bpy.ops.view3d.camera_to_view_selected()
|
||||
instance = vl.objects.active
|
||||
|
||||
#scn.collection.children.link(asset)
|
||||
|
||||
scn.render.filepath = str(image_path)
|
||||
|
||||
print(f'Render asset {asset.name} to {image_path}')
|
||||
bpy.ops.render.render(write_still=True)
|
||||
|
||||
#instance.user_clear()
|
||||
asset.user_clear()
|
||||
|
||||
bpy.data.objects.remove(instance)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
|
||||
|
||||
def generate_previews(self, cache=None):
|
||||
|
||||
print('Generate previews')
|
||||
|
||||
if cache in (None, ''):
|
||||
cache = self.fetch()
|
||||
elif isinstance(cache, (Path, str)):
|
||||
cache = self.read_cache(cache)
|
||||
|
||||
#cache_diff.sort(key=lambda x :x['filepath'])
|
||||
#blend_groups = groupby(cache_diff, key=lambda x :x['filepath'])
|
||||
|
||||
#TODO Support all multiple data_type
|
||||
for asset_description in cache:
|
||||
|
||||
if asset_description.get('type', self.data_type) == 'FILE':
|
||||
self.generate_blend_preview(asset_description)
|
||||
else:
|
||||
self.generate_asset_preview(asset_description)
|
||||
|
||||
# filepath = asset_description['filepath']
|
||||
|
||||
# asset_datas = asset_description["assets"]
|
||||
|
||||
# asset_datas.sort(key=lambda x :x.get('type', self.data_type))
|
||||
# data_type_groups = groupby(asset_datas, key=lambda x :x.get('type', self.data_type))
|
||||
|
||||
# for data_type, same_type_asset_datas in data_type_groups:
|
||||
|
||||
# asset_names = [a['name'] for a in same_type_asset_datas]
|
||||
# self.generate_preview(filepath, asset_names, data_type)
|
||||
|
||||
def set_asset_preview(self, asset, asset_data):
|
||||
'''Load an externalize image as preview for an asset'''
|
||||
|
||||
asset_path = self.format_path(asset_data['filepath'])
|
||||
|
||||
image_template = asset_data.get('image')
|
||||
if self.library.template_image:
|
||||
asset_path = self.get_asset_bundle_path(asset_data)
|
||||
image_template = self.library.template_image
|
||||
|
||||
image_path = self.find_path(image_template, asset_data, filepath=asset_path)
|
||||
|
||||
if image_path:
|
||||
#print(f'Set asset preview for {image_path} for {asset}')
|
||||
with bpy.context.temp_override(id=asset):
|
||||
bpy.ops.ed.lib_id_load_custom_preview(
|
||||
filepath=str(image_path)
|
||||
)
|
||||
|
||||
if asset.preview:
|
||||
return asset.preview
|
||||
|
||||
def set_asset_catalog(self, asset, asset_data, catalog_data):
|
||||
"""Find the catalog if already exist or create it"""
|
||||
catalog_name = asset_data['catalog']
|
||||
catalog = catalog_data.get(catalog_name)
|
||||
if not catalog:
|
||||
catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
|
||||
catalog_data[catalog_name] = catalog
|
||||
|
||||
asset.asset_data.catalog_id = catalog['id']
|
||||
|
||||
def set_asset_metadata(self, asset, asset_data):
|
||||
"""Create custom prop to an asset base on provided data"""
|
||||
metadata = asset_data.get('metadata', {})
|
||||
|
||||
library_id = self.library.id
|
||||
if 'library_id' in asset_data:
|
||||
library_id = asset_data['library_id']
|
||||
|
||||
metadata['.library_id'] = library_id
|
||||
metadata['filepath'] = asset_data['filepath']
|
||||
for k, v in metadata.items():
|
||||
asset.asset_data[k] = v
|
||||
|
||||
def set_asset_tags(self, asset, asset_data):
|
||||
"""Create asset tags base on provided data"""
|
||||
|
||||
if 'tags' in asset_data:
|
||||
for tag in asset.asset_data.tags[:]:
|
||||
asset.asset_data.tags.remove(tag)
|
||||
|
||||
for tag in asset_data['tags']:
|
||||
if not tag:
|
||||
continue
|
||||
asset.asset_data.tags.new(tag, skip_if_exists=True)
|
||||
|
||||
def set_asset_info(self, asset, asset_data):
|
||||
"""Set asset description base on provided data"""
|
||||
|
||||
for key in ('author', 'description'):
|
||||
if key in asset_data:
|
||||
setattr(asset.asset_data, key, asset_data.get(key) or '')
|
||||
|
||||
def get_asset_bundle_path(self, asset_data):
|
||||
|
||||
catalog_parts = asset_data['catalog'].split('/') + [asset_data['name']]
|
||||
|
||||
sub_path = catalog_parts[:self.library.blend_depth]
|
||||
|
||||
blend_name = sub_path[-1].replace(' ', '_').lower()
|
||||
return Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
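A worked example of how the bundle path is truncated by blend_depth (illustrative values):

# catalog='chars/main', name='Hero', blend_depth=2
# catalog_parts = ['chars', 'main', 'Hero'] -> sub_path = ['chars', 'main']
# -> Path(bundle_directory, 'chars', 'main', 'main.blend')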
|
||||
|
||||
def bundle(self, cache_diff=None):
|
||||
"""Group all new assets in one or multiple blends for the asset browser"""
|
||||
|
||||
if self.data_type not in ('FILE', 'ACTION', 'COLLECTION'):
|
||||
print(f'{self.data_type} is not supported yet')
|
||||
return
|
||||
|
||||
catalog_data = self.read_catalog() #TODO remove unused catalog
|
||||
|
||||
write_cache = False
|
||||
if not cache_diff:
|
||||
# Get list of all modifications
|
||||
asset_descriptions = self.fetch()
|
||||
|
||||
|
||||
cache, cache_diff = self.diff(asset_descriptions)
|
||||
|
||||
# Only write complete cache at the end
|
||||
write_cache = True
|
||||
|
||||
#self.generate_previews(asset_descriptions)
|
||||
self.write_cache(asset_descriptions, self.tmp_cache_file)
|
||||
bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(self.tmp_cache_file))
|
||||
|
||||
#print()
|
||||
#print(cache)
|
||||
#raise Exception()
|
||||
|
||||
elif isinstance(cache_diff, (Path, str)):
|
||||
cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))
|
||||
|
||||
|
||||
if self.library.blend_depth == 0:
|
||||
raise Exception('blend_depth must be at least 1')
|
||||
#groups = [(cache_diff)]
|
||||
else:
|
||||
cache_diff.sort(key=self.get_asset_bundle_path)
|
||||
groups = groupby(cache_diff, key=self.get_asset_bundle_path)
|
||||
|
||||
total_assets = len(cache_diff)
|
||||
print(f'total_assets={total_assets}')
|
||||
|
||||
if total_assets == 0:
|
||||
print('No assets found')
|
||||
return
|
||||
|
||||
#data_types = self.data_types
|
||||
#if self.data_types == 'FILE'
|
||||
|
||||
i = 0
|
||||
#assets_to_preview = []
|
||||
for blend_path, asset_datas in groups:
|
||||
#blend_name = sub_path[-1].replace(' ', '_').lower()
|
||||
#blend_path = Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
|
||||
|
||||
if blend_path.exists():
|
||||
print(f'Opening existing bundle blend: {blend_path}')
|
||||
bpy.ops.wm.open_mainfile(filepath=str(blend_path))
|
||||
else:
|
||||
print(f'Create new bundle blend to: {blend_path}')
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
|
||||
for asset_data in asset_datas:
|
||||
if total_assets <= 100 or i % int(total_assets / 10) == 0:
|
||||
print(f'Progress: {int(i / total_assets * 100)+1}')
|
||||
|
||||
operation = asset_data.get('operation', 'ADD')
|
||||
asset = getattr(bpy.data, self.data_types).get(asset_data['name'])
|
||||
|
||||
if operation == 'REMOVE':
|
||||
if asset:
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
else:
|
||||
print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
|
||||
continue
|
||||
|
||||
if operation == 'MODIFY' and not asset:
|
||||
print(f'WARNING: Modify Asset: {asset_data["name"]} not found in {blend_path}, it will be created')
|
||||
|
||||
elif operation == 'ADD' or not asset:
|
||||
if asset:
|
||||
#raise Exception(f"Asset {asset_data['name']} Already in Blend")
|
||||
print(f"Asset {asset_data['name']} Already in Blend")
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
|
||||
print(f"INFO: Add new asset: {asset_data['name']}")
|
||||
asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
|
||||
else:
|
||||
print(f'Operation {operation} not supported, should be one of (ADD, REMOVE, MODIFY)')
|
||||
continue
|
||||
|
||||
asset.asset_mark()
|
||||
|
||||
self.set_asset_preview(asset, asset_data)
|
||||
|
||||
#if not asset_preview:
|
||||
# assets_to_preview.append((asset_data['filepath'], asset_data['name'], asset_data['data_type']))
|
||||
#if self.externalize_data:
|
||||
# self.write_preview(preview, filepath)
|
||||
|
||||
self.set_asset_catalog(asset, asset_data, catalog_data)
|
||||
self.set_asset_metadata(asset, asset_data)
|
||||
self.set_asset_tags(asset, asset_data)
|
||||
self.set_asset_info(asset, asset_data)
|
||||
|
||||
|
||||
i += 1
|
||||
|
||||
#self.write_asset_preview_file()
|
||||
|
||||
print(f'Saving Blend to {blend_path}')
|
||||
|
||||
blend_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)
|
||||
|
||||
if write_cache:
|
||||
self.write_cache(asset_descriptions)
|
||||
|
||||
self.write_catalog(catalog_data)
|
||||
|
||||
|
||||
bpy.ops.wm.quit_blender()
|
||||
|
||||
def norm_cache(self, cache):
|
||||
""" Return a new flat list of asset data
|
||||
the filepath keys are merged with the assets keys"""
|
||||
|
||||
if not cache or not isinstance(cache[0], dict):
|
||||
return []
|
||||
|
||||
new_cache = []
|
||||
|
||||
for asset_description in cache:
|
||||
asset_description = asset_description.copy()
|
||||
if 'assets' in asset_description:
|
||||
|
||||
assets = asset_description.pop('assets')
|
||||
for asset_data in assets:
|
||||
new_cache.append({**asset_description, **asset_data})
|
||||
else:
|
||||
new_cache.append(asset_description)
|
||||
|
||||
return new_cache
|
||||
|
||||
def diff(self, asset_descriptions=None):
|
||||
"""Compare the library cache with it current state and return the difference"""
|
||||
|
||||
cache = self.read_cache()
|
||||
|
||||
if cache is None:
|
||||
print(f'Fetching the library {self.library.name} for the first time, this might take a while...')
|
||||
cache = []
|
||||
|
||||
asset_descriptions = asset_descriptions or self.fetch()
|
||||
|
||||
#print('\n-------------------------', cache)
|
||||
|
||||
cache = {f"{a['filepath']}/{a['name']}": a for a in self.norm_cache(cache)}
|
||||
new_cache = {f"{a['filepath']}/{a['name']}" : a for a in self.norm_cache(asset_descriptions)}
|
||||
|
||||
assets_added = [v for k, v in new_cache.items() if k not in cache]
|
||||
assets_removed = [v for k, v in cache.items() if k not in new_cache]
|
||||
assets_modified = [v for k, v in cache.items() if v not in assets_removed and v!= new_cache[k]]
|
||||
|
||||
if assets_added:
|
||||
print(f'{len(assets_added)} Assets Added \n{tuple(a["name"] for a in assets_added[:10])}\n')
|
||||
if assets_removed:
|
||||
print(f'{len(assets_removed)} Assets Removed \n{tuple(a["name"] for a in assets_removed[:10])}\n')
|
||||
if assets_modified:
|
||||
print(f'{len(assets_modified)} Assets Modified \n{tuple(a["name"] for a in assets_modified[:10])}\n')
|
||||
|
||||
assets_added = [dict(a, operation='ADD') for a in assets_added]
|
||||
assets_removed = [dict(a, operation='REMOVE') for a in assets_removed]
|
||||
assets_modified = [dict(a, operation='MODIFY') for a in assets_modified]
|
||||
|
||||
cache_diff = assets_added + assets_removed + assets_modified
|
||||
if not cache_diff:
|
||||
print('No change in the library')
|
||||
|
||||
return new_cache, cache_diff
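The returned cache_diff is a flat list of asset entries tagged with an operation, which bundle() later groups by target blend file. A representative entry (fields abbreviated, values illustrative) looks like:

# {'name': 'Hero', 'catalog': 'chars/main', 'type': 'COLLECTION', 'tags': [],
#  'filepath': '{source_directory}/chars/hero/hero.blend', 'operation': 'ADD'}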
|
||||
|
||||
def draw_prefs(self, layout):
|
||||
"""Draw the options in the addon preference for this adapter"""
|
||||
|
||||
annotations = self.__class__.__annotations__
|
||||
for k, v in annotations.items():
|
||||
layout.prop(self, k, text=bpy.path.display_name(k))
|
||||
|
||||
return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
|
|
@ -1,153 +0,0 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library of all blender files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||
from asset_library.common.template import Template
|
||||
from asset_library.common.file_utils import install_module
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import urllib3
|
||||
import traceback
|
||||
import time
|
||||
|
||||
|
||||
class KitsuLibrary(AssetLibraryAdapter):
|
||||
|
||||
name = "Kitsu"
|
||||
template_name : StringProperty()
|
||||
template_file : StringProperty()
|
||||
source_directory : StringProperty(subtype='DIR_PATH')
|
||||
#blend_depth: IntProperty(default=1)
|
||||
|
||||
url: StringProperty()
|
||||
login: StringProperty()
|
||||
password: StringProperty(subtype='PASSWORD')
|
||||
project_name: StringProperty()
|
||||
|
||||
def connect(self, url=None, login=None, password=None):
|
||||
'''Connect to kitsu api using provided url, login and password'''
|
||||
|
||||
gazu = install_module('gazu')
|
||||
urllib3.disable_warnings()
|
||||
|
||||
if not self.url:
|
||||
print(f'Kitsu Url: {self.url} is empty')
|
||||
return
|
||||
|
||||
url = self.url
|
||||
if not url.endswith('/api'):
|
||||
url += '/api'
|
||||
|
||||
print(f'Info: Setting Host for kitsu {url}')
|
||||
gazu.client.set_host(url)
|
||||
|
||||
if not gazu.client.host_is_up():
|
||||
print('Error: Kitsu Host is down')
|
||||
|
||||
try:
|
||||
print(f'Info: Logging in to Kitsu as {self.login}')
|
||||
res = gazu.log_in(self.login, self.password)
|
||||
print(f'Info: Successfully logged in to Kitsu as {res["user"]["full_name"]}')
|
||||
return res['user']
|
||||
except Exception as e:
|
||||
print(f'Error: {traceback.format_exc()}')
|
||||
|
||||
def get_asset_path(self, name, catalog, directory=None):
|
||||
directory = directory or self.source_directory
|
||||
return Path(directory, self.get_asset_relative_path(name, catalog))
|
||||
|
||||
def get_asset_description(self, data, asset_path):
|
||||
|
||||
modified = time.time_ns()
|
||||
catalog = data['entity_type_name']
|
||||
asset_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
#asset_name = self.norm_file_name(data['name'])
|
||||
|
||||
asset_description = dict(
|
||||
filepath=asset_path,
|
||||
modified=modified,
|
||||
library_id=self.library.id,
|
||||
assets=[dict(
|
||||
catalog=catalog,
|
||||
metadata=data.get('data', {}),
|
||||
description=data['description'],
|
||||
tags=[],
|
||||
type=self.data_type,
|
||||
image=self.library.template_image,
|
||||
video=self.library.template_video,
|
||||
name=data['name'])
|
||||
]
|
||||
)
|
||||
|
||||
return asset_description
|
||||
|
||||
# def bundle(self, cache_diff=None):
|
||||
# """Group all asset in one or multiple blends for the asset browser"""
|
||||
|
||||
# return super().bundle(cache_diff=cache_diff)
|
||||
|
||||
def fetch(self):
|
||||
"""Gather in a list all assets found in the folder"""
|
||||
|
||||
print(f'Fetch Assets for {self.library.name}')
|
||||
|
||||
gazu = install_module('gazu')
|
||||
self.connect()
|
||||
|
||||
template_file = Template(self.template_file)
|
||||
template_name = Template(self.template_name)
|
||||
|
||||
project = gazu.client.fetch_first('projects', {'name': self.project_name})
|
||||
entity_types = gazu.client.fetch_all('entity-types')
|
||||
entity_types_ids = {e['id']: e['name'] for e in entity_types}
|
||||
|
||||
asset_descriptions = []
|
||||
for asset_data in gazu.asset.all_assets_for_project(project):
|
||||
asset_data['entity_type_name'] = entity_types_ids[asset_data.pop('entity_type_id')]
|
||||
asset_name = asset_data['name']
|
||||
|
||||
asset_field_data = dict(asset_name=asset_name, type=asset_data['entity_type_name'], source_directory=self.source_directory)
|
||||
|
||||
try:
|
||||
asset_field_data.update(template_name.parse(asset_name))
|
||||
except Exception:
|
||||
print(f'Warning: Could not parse {asset_name} with template {template_name}')
|
||||
|
||||
asset_path = template_file.find(asset_field_data)
|
||||
if not asset_path:
|
||||
print(f'Warning: Could not find file for {template_file.format(asset_field_data)}')
|
||||
continue
|
||||
|
||||
#print(asset_path)
|
||||
|
||||
# TODO group when multiple asset are store in the same blend
|
||||
asset_descriptions.append(self.get_asset_description(asset_data, asset_path))
|
||||
|
||||
#asset = load_datablocks(asset_path, data_type='collections', names=asset_data['name'], link=True)
|
||||
#if not asset:
|
||||
# print(f"Asset {asset_name} not found in {asset_path}")
|
||||
|
||||
|
||||
#asset_description = self.get_asset_description(asset)
|
||||
|
||||
#asset_descriptions.append(asset_description)
|
||||
|
||||
#print(assets)
|
||||
# for k, v in assets[0].items():
|
||||
# print(f'- {k} {v}')
|
||||
|
||||
#print('+++++++++++++')
|
||||
#print(asset_descriptions)
|
||||
|
||||
return asset_descriptions
|
|
@ -1,165 +0,0 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library of all blender files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||
from asset_library.common.bl_utils import load_datablocks
|
||||
from asset_library.common.template import Template
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import time
|
||||
|
||||
|
||||
class ScanFolderLibrary(AssetLibraryAdapter):
|
||||
|
||||
name = "Scan Folder"
|
||||
source_directory : StringProperty(subtype='DIR_PATH')
|
||||
template_file : StringProperty()
|
||||
template_image : StringProperty()
|
||||
template_video : StringProperty()
|
||||
template_description : StringProperty()
|
||||
|
||||
def get_asset_path(self, name, catalog, directory=None):
|
||||
directory = directory or self.source_directory
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
|
||||
return Path(directory, self.get_asset_relative_path(name, catalog))
|
||||
|
||||
def get_image_path(self, name, catalog, filepath):
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
return self.format_path(self.template_image, dict(name=name, catalog=catalog, filepath=filepath))
|
||||
|
||||
def get_video_path(self, name, catalog, filepath):
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
return self.format_path(self.template_video, dict(name=name, catalog=catalog, filepath=filepath))
|
||||
|
||||
def format_asset_description(self, asset_description, asset_path):
|
||||
|
||||
asset_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
modified = asset_description.get('modified', time.time_ns())
|
||||
|
||||
if self.data_type == 'FILE':
|
||||
return dict(
|
||||
filepath=asset_path,
|
||||
author=asset_description.get('author'),
|
||||
modified=modified,
|
||||
catalog=asset_description['catalog'],
|
||||
tags=[],
|
||||
description=asset_description.get('description', ''),
|
||||
type=self.data_type,
|
||||
image=self.template_image,
|
||||
name=asset_description['name']
|
||||
)
|
||||
|
||||
return dict(
|
||||
filepath=asset_path,
|
||||
modified=modified,
|
||||
library_id=self.library.id,
|
||||
assets=[dict(
|
||||
catalog=asset_data.get('catalog', asset_description['catalog']),
|
||||
author=asset_data.get('author'),
|
||||
metadata=asset_data.get('metadata', {}),
|
||||
description=asset_data.get('description', ''),
|
||||
tags=asset_data.get('tags', []),
|
||||
type=self.data_type,
|
||||
image=self.template_image,
|
||||
video=self.template_video,
|
||||
name=asset_data['name']) for asset_data in asset_description['assets']
|
||||
]
|
||||
)
|
||||
|
||||
def fetch(self):
|
||||
"""Gather in a list all assets found in the folder"""
|
||||
|
||||
print(f'Fetch Assets for {self.library.name}')
|
||||
|
||||
source_directory = Path(self.source_directory)
|
||||
template_file = Template(self.template_file)
|
||||
catalog_data = self.read_catalog(directory=source_directory)
|
||||
catalog_ids = {v['id']: k for k, v in catalog_data.items()}
|
||||
|
||||
cache = self.read_cache() or []
|
||||
|
||||
print(f'Search for blend using glob template: {template_file.glob_pattern}')
|
||||
print(f'Scanning Folder {source_directory}...')
|
||||
|
||||
new_cache = []
|
||||
|
||||
for asset_path in template_file.glob(source_directory):#sorted(blend_files):
|
||||
|
||||
source_rel_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
modified = asset_path.stat().st_mtime_ns
|
||||
|
||||
# Check if the asset description has already been cached
|
||||
asset_description = next((a for a in cache if a['filepath'] == source_rel_path), None)
|
||||
|
||||
if asset_description and asset_description['modified'] >= modified:
|
||||
print(asset_path, 'is skipped because not modified')
|
||||
new_cache.append(asset_description)
|
||||
continue
|
||||
|
||||
rel_path = asset_path.relative_to(source_directory).as_posix()
|
||||
field_data = template_file.parse(rel_path)
|
||||
|
||||
catalogs = [v for k,v in sorted(field_data.items()) if k.isdigit()]
|
||||
#catalogs = [c.replace('_', ' ').title() for c in catalogs]
|
||||
|
||||
asset_name = field_data.get('asset_name', asset_path.stem)
|
||||
|
||||
asset_description = {
|
||||
"name": asset_name,
|
||||
"catalog": '/'.join(catalogs),
|
||||
"assets": [],
|
||||
'modified': modified
|
||||
}
|
||||
|
||||
if self.data_type == 'FILE':
|
||||
asset_description = self.format_asset_description(asset_description, asset_path)
|
||||
new_cache.append(asset_description)
|
||||
continue
|
||||
|
||||
# Now check if there is an asset description file
|
||||
asset_description_path = self.find_path(self.template_description, asset_description, filepath=asset_path)
|
||||
if asset_description_path:
|
||||
new_cache.append(self.read_file(asset_description_path))
|
||||
continue
|
||||
|
||||
# Scan the blend file for assets inside and write a custom asset description for info found
|
||||
print(f'Scanning blendfile {asset_path}...')
|
||||
assets = self.load_datablocks(asset_path, type=self.data_types, link=True, assets_only=True)
|
||||
print(f'Found {len(assets)} {self.data_types} inside')
|
||||
|
||||
for asset in assets:
|
||||
#catalog_path = catalog_ids.get(asset.asset_data.catalog_id)
|
||||
|
||||
#if not catalog_path:
|
||||
# print(f'No catalog found for asset {asset.name}')
|
||||
#catalog_path = asset_description['catalog']#asset_path.relative_to(self.source_directory).as_posix()
|
||||
|
||||
# For now the catalog used is the one extracted from the template file
|
||||
asset_description['assets'].append(self.get_asset_data(asset))
|
||||
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
|
||||
asset_description = self.format_asset_description(asset_description, asset_path)
|
||||
|
||||
new_cache.append(asset_description)
|
||||
|
||||
|
||||
new_cache.sort(key=lambda x:x['filepath'])
|
||||
|
||||
return new_cache
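Each cache entry produced here mirrors format_asset_description: one record per blend file, with the per-datablock entries nested under "assets". A sketch of the shape (values are illustrative):

# {'filepath': '{source_directory}/chars/hero/hero.blend',
#  'modified': 1700000000000000000,
#  'library_id': 'lib-01',
#  'assets': [{'name': 'Hero', 'catalog': 'chars/main', 'tags': [], 'type': 'COLLECTION'}]}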
|
||||
|
|
@ -50,9 +50,9 @@ class ASSETLIB_OT_load_asset(Operator):
|
|||
self.report({"ERROR"}, 'No asset selected')
|
||||
return {'CANCELLED'}
|
||||
|
||||
active_lib = lib.adapter.get_active_asset_library()
|
||||
active_lib = lib.library_type.get_active_asset_library()
|
||||
asset_path = asset.asset_data['filepath']
|
||||
asset_path = active_lib.adapter.format_path(asset_path)
|
||||
asset_path = active_lib.library_type.format_path(asset_path)
|
||||
name = asset.name
|
||||
|
||||
## set mode to object
|
||||
|
|
Binary file not shown.
|
@ -28,6 +28,10 @@ class attr_set():
|
|||
for item in attrib_list:
|
||||
prop, attr = item[:2]
|
||||
self.store.append( (prop, attr, getattr(prop, attr)) )
|
||||
|
||||
for item in attrib_list:
|
||||
prop, attr = item[:2]
|
||||
|
||||
if len(item) >= 3:
|
||||
try:
|
||||
setattr(prop, attr, item[2])
|
||||
|
@ -38,6 +42,9 @@ class attr_set():
|
|||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
self.restore()
|
||||
|
||||
def restore(self):
|
||||
for prop, attr, old_val in self.store:
|
||||
setattr(prop, attr, old_val)
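A short usage sketch of this context manager, as it is used by render_preview above: each entry is a (data, attribute) pair with an optional temporary value, and the previous values are restored on exit.

# scn = bpy.context.scene
# with attr_set([(scn, 'use_preview_range', True), (scn.render, 'resolution_percentage', 50)]):
#     bpy.ops.render.opengl(write_still=True)  # runs with the temporary settings
# # use_preview_range and resolution_percentage are back to their previous values here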
|
||||
|
||||
|
@ -55,6 +62,15 @@ def get_view3d_persp():
|
|||
view_3d = next((a for a in view_3ds if a.spaces.active.region_3d.view_perspective == 'PERSP'), view_3ds[0])
|
||||
return view_3d
|
||||
|
||||
def get_viewport():
|
||||
screen = bpy.context.screen
|
||||
|
||||
areas = [a for a in screen.areas if a.type == 'VIEW_3D']
|
||||
areas.sort(key=lambda x : x.width*x.height)
|
||||
|
||||
return areas[-1]
|
||||
|
||||
|
||||
def biggest_asset_browser_area(screen: bpy.types.Screen) -> Optional[bpy.types.Area]:
|
||||
"""Return the asset browser Area that's largest on screen.
|
||||
|
||||
|
|
|
@ -52,14 +52,14 @@ def asset_warning_callback(self, context):
|
|||
return
|
||||
|
||||
lib = get_active_library()
|
||||
action_path = lib.adapter.get_asset_relative_path(self.name, self.catalog)
|
||||
action_path = lib.library_type.get_asset_relative_path(self.name, self.catalog)
|
||||
self.path = action_path.as_posix()
|
||||
|
||||
if lib.merge_libraries:
|
||||
prefs = get_addon_prefs()
|
||||
lib = prefs.libraries[lib.store_library]
|
||||
|
||||
if not lib.adapter.get_asset_path(self.name, self.catalog).parents[1].exists():
|
||||
if not lib.library_type.get_asset_path(self.name, self.catalog).parents[1].exists():
|
||||
self.warning = 'A new folder will be created'
|
||||
|
||||
def get_active_library():
|
||||
|
@ -76,7 +76,7 @@ def get_active_catalog():
|
|||
'''Get the active catalog path'''
|
||||
|
||||
lib = get_active_library()
|
||||
cat_data = lib.adapter.read_catalog()
|
||||
cat_data = lib.library_type.read_catalog()
|
||||
cat_data = {v['id']:k for k,v in cat_data.items()}
|
||||
|
||||
cat_id = bpy.context.space_data.params.catalog_id
|
||||
|
|
|
@ -3,8 +3,28 @@ import os
|
|||
from pathlib import Path
|
||||
from fnmatch import fnmatch
|
||||
from glob import glob
|
||||
import string
|
||||
|
||||
|
||||
class TemplateFormatter(string.Formatter):
|
||||
def format_field(self, value, format_spec):
|
||||
if isinstance(value, str):
|
||||
spec, sep = [*format_spec.split(':'), None][:2]
|
||||
|
||||
if sep:
|
||||
value = value.replace('_', ' ')
|
||||
value = re.sub(r'([a-z])([A-Z])', rf'\1{sep}\2', value)
|
||||
value = value.replace(' ', sep)
|
||||
|
||||
if spec == 'u':
|
||||
value = value.upper()
|
||||
elif spec == 'l':
|
||||
value = value.lower()
|
||||
elif spec == 't':
|
||||
value = value.title()
|
||||
|
||||
return super().format(value, format_spec)
|
||||
|
||||
class Template:
|
||||
field_pattern = re.compile(r'{(\w+)\*{0,2}}')
|
||||
field_pattern_recursive = re.compile(r'{(\w+)\*{2}}')
|
||||
|
@ -13,6 +33,7 @@ class Template:
|
|||
#asset_data_path = Path(lib_path) / ASSETLIB_FILENAME
|
||||
|
||||
self.raw = template
|
||||
self.formatter = TemplateFormatter()
|
||||
|
||||
@property
|
||||
def glob_pattern(self):
|
||||
|
@ -52,12 +73,24 @@ class Template:
|
|||
|
||||
return {k:v for k,v in zip(fields, field_values)}
|
||||
|
||||
def norm_data(self, data):
|
||||
norm_data = {}
|
||||
for k, v in data.items():
|
||||
|
||||
if isinstance(v, Path):
|
||||
v = v.as_posix()
|
||||
|
||||
norm_data[k] = v
|
||||
|
||||
return norm_data
|
||||
|
||||
def format(self, data=None, **kargs):
|
||||
|
||||
data = {**(data or {}), **kargs}
|
||||
|
||||
try:
|
||||
path = self.raw.format(**data)
|
||||
#print('FORMAT', self.raw, data)
|
||||
path = self.formatter.format(self.raw, **self.norm_data(data))
|
||||
except KeyError as e:
|
||||
print(f'Cannot format {self.raw} with {data}, field {e} is missing')
|
||||
return
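# Usage sketch (hypothetical field names):
# t = Template('{catalog}/{asset_name}/{asset_name}.blend')
# t.format(catalog='props', asset_name='chair')   # fields resolve to 'props/chair/chair.blend'
# t.format(catalog='props')                        # missing field: prints the message above and returns None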
|
||||
|
@ -88,7 +121,7 @@ class Template:
|
|||
if paths:
|
||||
return Path(paths[0])
|
||||
|
||||
return pattern
|
||||
#return pattern
|
||||
|
||||
def __repr__(self):
|
||||
return f'Template({self.raw})'
|
|
@ -1,4 +1,5 @@
|
|||
from pathlib import Path
|
||||
import bpy
|
||||
|
||||
|
||||
DATA_TYPE_ITEMS = [
|
||||
|
@ -12,6 +13,14 @@ ICONS = {identifier: icon for identifier, name, description, icon, number in DAT
|
|||
ASSETLIB_FILENAME = "blender_assets.libs.json"
|
||||
MODULE_DIR = Path(__file__).parent
|
||||
RESOURCES_DIR = MODULE_DIR / 'resources'
|
||||
|
||||
LIBRARY_TYPE_DIR = MODULE_DIR / 'library_types'
|
||||
LIBRARY_TYPES = []
|
||||
|
||||
ADAPTER_DIR = MODULE_DIR / 'adapters'
|
||||
ADAPTERS = []
|
||||
|
||||
PREVIEW_ASSETS_SCRIPT = MODULE_DIR / 'common' / 'preview_assets.py'
|
||||
|
||||
#ADD_ASSET_DICT = {}
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ command, write_catalog)
|
|||
|
||||
|
||||
@command
|
||||
def bundle_library(source_directory, bundle_directory, template_description, thumbnail_template,
|
||||
def bundle_library(source_directory, bundle_directory, template_info, thumbnail_template,
|
||||
template=None, data_file=None):
|
||||
|
||||
field_pattern = r'{(\w+)}'
|
||||
|
@ -38,9 +38,9 @@ def bundle_library(source_directory, bundle_directory, template_description, thu
|
|||
|
||||
name = field_data.get('name', f.stem)
|
||||
thumbnail = (f / thumbnail_template.format(name=name)).resolve()
|
||||
asset_data = (f / template_description.format(name=name)).resolve()
|
||||
asset_data = (f / template_info.format(name=name)).resolve()
|
||||
|
||||
catalogs = sorted([v for k,v in sorted(field_data.items()) if k.isdigit()])
|
||||
catalogs = sorted([v for k,v in sorted(field_data.items()) if re.findall('cat[0-9]+', k)])
|
||||
catalogs = [c.replace('_', ' ').title() for c in catalogs]
|
||||
|
||||
if not thumbnail.exists():
|
||||
|
@ -163,7 +163,7 @@ if __name__ == '__main__' :
|
|||
bundle_library(
|
||||
source_directory=args.source_directory,
|
||||
bundle_directory=args.bundle_directory,
|
||||
template_description=args.template_description,
|
||||
template_info=args.template_info,
|
||||
thumbnail_template=args.thumbnail_template,
|
||||
template=args.template,
|
||||
data_file=args.data_file)
|
||||
|
|
|
@ -21,7 +21,7 @@ def draw_context_menu(layout):
|
|||
#asset = context.active_file
|
||||
layout.operator_context = "INVOKE_DEFAULT"
|
||||
lib = get_active_library()
|
||||
filepath = lib.adapter.get_active_asset_path()
|
||||
filepath = lib.library_type.get_active_asset_path()
|
||||
|
||||
layout.operator("assetlib.open_blend_file", text="Open Blend File")#.filepath = asset.asset_data['filepath']
|
||||
op = layout.operator("wm.link", text="Link")
|
||||
|
|
|
@ -37,7 +37,7 @@ class ASSETLIB_OT_open_blend_file(Operator):
|
|||
|
||||
lib = get_active_library()
|
||||
|
||||
filepath = lib.get_active_asset_path()
|
||||
filepath = lib.library_type.get_active_asset_path()
|
||||
|
||||
open_blender_file(filepath)
|
||||
|
||||
|
|
4
gui.py
4
gui.py
|
@ -176,7 +176,7 @@ class ASSETLIB_MT_context_menu(AssetLibraryMenu, Menu):
|
|||
|
||||
def draw(self, context):
|
||||
lib = get_active_library()
|
||||
lib.adapter.draw_context_menu(self.layout)
|
||||
lib.library_type.draw_context_menu(self.layout)
|
||||
|
||||
|
||||
def is_option_region_visible(context, space):
|
||||
|
@ -214,7 +214,7 @@ def draw_assetbrowser_header(self, context):
|
|||
#op.clean = False
|
||||
#op.only_recent = True
|
||||
|
||||
lib.adapter.draw_header(row)
|
||||
lib.library_type.draw_header(row)
|
||||
|
||||
if context.selected_files and context.active_file:
|
||||
row.separator()
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
|
||||
from asset_library.library_types import library_type
|
||||
from asset_library.library_types import copy_folder
|
||||
from asset_library.library_types import scan_folder
|
||||
|
||||
if 'bpy' in locals():
|
||||
import importlib
|
||||
|
||||
importlib.reload(library_type)
|
||||
importlib.reload(copy_folder)
|
||||
importlib.reload(scan_folder)
|
||||
|
||||
import bpy
|
||||
|
||||
LibraryType = library_type.LibraryType
|
||||
CopyFolder = copy_folder.CopyFolder
|
||||
ScanFolder = scan_folder.ScanFolder
|
|
@ -0,0 +1,212 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library of all blender files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.library_types.scan_folder import ScanFolder
|
||||
from asset_library.common.bl_utils import load_datablocks
|
||||
from asset_library.common.template import Template
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import time
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
class Conform(ScanFolder):
|
||||
|
||||
name = "Conform"
|
||||
source_directory : StringProperty(subtype='DIR_PATH')
|
||||
|
||||
target_template_file : StringProperty()
|
||||
target_template_info : StringProperty()
|
||||
target_template_image : StringProperty()
|
||||
target_template_video : StringProperty()
|
||||
|
||||
def draw_prefs(self, layout):
|
||||
layout.prop(self, "source_directory", text="Source : Directory")
|
||||
|
||||
col = layout.column(align=True)
|
||||
col.prop(self, "source_template_file", icon='COPY_ID', text='Template file')
|
||||
col.prop(self, "source_template_image", icon='COPY_ID', text='Template image')
|
||||
col.prop(self, "source_template_video", icon='COPY_ID', text='Template video')
|
||||
col.prop(self, "source_template_info", icon='COPY_ID', text='Template info')
|
||||
|
||||
col = layout.column(align=True)
|
||||
col.prop(self, "target_template_file", icon='COPY_ID', text='Target : Template file')
|
||||
col.prop(self, "target_template_image", icon='COPY_ID', text='Template image')
|
||||
col.prop(self, "target_template_video", icon='COPY_ID', text='Template video')
|
||||
col.prop(self, "target_template_info", icon='COPY_ID', text='Template info')
|
||||
|
||||
def get_asset_bundle_path(self, asset_data):
|
||||
"""Template file are relative"""
|
||||
|
||||
src_directory = Path(self.source_directory).resolve()
|
||||
src_template_file = Template(self.source_template_file)
|
||||
|
||||
asset_path = Path(asset_data['filepath']).as_posix()
|
||||
asset_path = self.format_path(asset_path)
|
||||
|
||||
rel_path = asset_path.relative_to(src_directory).as_posix()
|
||||
field_data = src_template_file.parse(rel_path)
|
||||
#field_data = {f"catalog_{k}": v for k, v in field_data.items()}
|
||||
|
||||
# Change the int in the template by string to allow format
|
||||
#target_template_file = re.sub(r'{(\d+)}', r'{cat\1}', self.target_template_file)
|
||||
|
||||
format_data = self.format_asset_data(asset_data)
|
||||
#format_data['asset_name'] = format_data['asset_name'].lower().replace(' ', '_')
|
||||
|
||||
path = Template(self.target_template_file).format(format_data, **field_data).with_suffix('.blend')
|
||||
path = Path(self.bundle_directory, path).resolve()
|
||||
|
||||
return path
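# Worked example (hypothetical templates and names): with
#   source_template_file = '{cat1}/{asset_name}/{asset_name}.blend'
#   target_template_file = '{cat1}/{asset_name}_bundle'
# an asset named 'chair' stored at '<source_directory>/props/chair/chair.blend'
# parses to field_data = {'cat1': 'props', 'asset_name': 'chair'} and is bundled to
# '<bundle_directory>/props/chair_bundle.blend'.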
|
||||
|
||||
def set_asset_preview(self, asset, asset_data):
|
||||
'''Load an external image as the preview for an asset, using the target template'''
|
||||
|
||||
image_template = self.target_template_image
|
||||
if not image_template:
|
||||
return
|
||||
|
||||
asset_path = self.get_asset_bundle_path(asset_data)
|
||||
image_path = self.find_path(image_template, asset_data, filepath=asset_path)
|
||||
|
||||
if image_path:
|
||||
with bpy.context.temp_override(id=asset):
|
||||
bpy.ops.ed.lib_id_load_custom_preview(
|
||||
filepath=str(image_path)
|
||||
)
|
||||
else:
|
||||
print(f'No image found for {image_template} on {asset.name}')
|
||||
|
||||
if asset.preview:
|
||||
return asset.preview
|
||||
|
||||
def generate_previews(self, cache=None):
|
||||
|
||||
print('Generate previews...')
|
||||
|
||||
if cache in (None, ''):
|
||||
cache = self.fetch()
|
||||
elif isinstance(cache, (Path, str)):
|
||||
cache = self.read_cache(cache)
|
||||
|
||||
|
||||
#TODO Support all multiple data_type
|
||||
for asset_info in cache:
|
||||
|
||||
if asset_info.get('type', self.data_type) == 'FILE':
|
||||
self.generate_blend_preview(asset_info)
|
||||
else:
|
||||
self.generate_asset_preview(asset_info)
|
||||
|
||||
def generate_asset_preview(self, asset_info):
|
||||
"""Only generate preview when conforming a library"""
|
||||
|
||||
#print('\ngenerate_preview', asset_info['filepath'])
|
||||
|
||||
scn = bpy.context.scene
|
||||
vl = bpy.context.view_layer
|
||||
#Creating the preview for collection, object or material
|
||||
#camera = scn.camera
|
||||
|
||||
data_type = self.data_type #asset_info['data_type']
|
||||
asset_path = self.format_path(asset_info['filepath'])
|
||||
|
||||
# Check if a source video exists and if so copy it into the new directory
|
||||
if self.source_template_video and self.target_template_video:
|
||||
for asset_data in asset_info['assets']:
|
||||
asset_data = dict(asset_data, filepath=asset_path)
|
||||
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
dst_video_path = self.format_path(self.target_template_video, asset_data, filepath=dst_asset_path)
|
||||
if dst_video_path.exists():
|
||||
print(f'The dest video {dst_video_path} already exists')
|
||||
continue
|
||||
|
||||
src_video_path = self.find_path(self.source_template_video, asset_data)
|
||||
if src_video_path:
|
||||
print(f'Copy video from {src_video_path} to {dst_video_path}')
|
||||
self.copy_file(src_video_path, dst_video_path)
|
||||
|
||||
# Check if the asset has a preview image or needs one to be generated
|
||||
asset_data_names = {}
|
||||
|
||||
if self.target_template_image:
|
||||
for asset_data in asset_info['assets']:
|
||||
asset_data = dict(asset_data, filepath=asset_path)
|
||||
name = asset_data['name']
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
|
||||
dst_image_path = self.format_path(self.target_template_image, asset_data, filepath=dst_asset_path)
|
||||
if dst_image_path.exists():
|
||||
print(f'The dest image {dst_image_path} already exists')
|
||||
continue
|
||||
|
||||
# Check if a source image exists and if so copy it into the new directory
|
||||
if self.source_template_image:
|
||||
src_image_path = self.find_path(self.source_template_image, asset_data)
|
||||
|
||||
if src_image_path:
|
||||
if src_image_path.suffix == dst_image_path.suffix:
|
||||
self.copy_file(src_image_path, dst_image_path)
|
||||
else:
|
||||
|
||||
print(src_image_path)
|
||||
self.save_image(src_image_path, dst_image_path, remove=True)
|
||||
|
||||
continue
|
||||
|
||||
#Store in a dict all asset_data that does not have preview
|
||||
asset_data_names[name] = dict(asset_data, image_path=dst_image_path)
|
||||
|
||||
|
||||
if not asset_data_names:# No preview to generate
|
||||
return
|
||||
|
||||
print('Making Preview for', list(asset_data_names.keys()))
|
||||
|
||||
asset_names = list(asset_data_names.keys())
|
||||
assets = self.load_datablocks(asset_path, names=asset_names, link=True, type=data_type)
|
||||
|
||||
for asset in assets:
|
||||
if not asset:
|
||||
continue
|
||||
|
||||
asset_data = asset_data_names[asset.name]
|
||||
image_path = asset_data['image_path']
|
||||
|
||||
if asset.preview:
|
||||
print(f'Writing asset preview to {image_path}')
|
||||
self.write_preview(asset.preview, image_path)
|
||||
continue
|
||||
|
||||
if data_type == 'COLLECTION':
|
||||
|
||||
bpy.ops.object.collection_instance_add(name=asset.name)
|
||||
|
||||
bpy.ops.view3d.camera_to_view_selected()
|
||||
instance = vl.objects.active
|
||||
|
||||
#scn.collection.children.link(asset)
|
||||
|
||||
scn.render.filepath = str(image_path)
|
||||
|
||||
print(f'Render asset {asset.name} to {image_path}')
|
||||
bpy.ops.render.render(write_still=True)
|
||||
|
||||
#instance.user_clear()
|
||||
asset.user_clear()
|
||||
|
||||
bpy.data.objects.remove(instance)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
|
@ -4,14 +4,14 @@ Adapter for making an asset library of all blender file found in a folder
|
|||
"""
|
||||
|
||||
|
||||
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||
from asset_library.library_types.library_type import LibraryType
|
||||
from asset_library.common.file_utils import copy_dir
|
||||
from bpy.props import StringProperty
|
||||
from os.path import expandvars
|
||||
import bpy
|
||||
|
||||
|
||||
class CopyFolderLibrary(AssetLibraryAdapter):
|
||||
class CopyFolder(LibraryType):
|
||||
"""Copy library folder from a server to a local disk for better performance"""
|
||||
|
||||
name = "Copy Folder"
|
||||
|
@ -34,13 +34,13 @@ class CopyFolderLibrary(AssetLibraryAdapter):
|
|||
)
|
||||
|
||||
def filter_prop(self, prop):
|
||||
if prop in ('template_description', 'template_video', 'template_image', 'blend_depth'):
|
||||
if prop in ('template_info', 'template_video', 'template_image', 'blend_depth'):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# def draw_prop(self, layout, prop):
|
||||
# if prop in ('template_description', 'template_video', 'template_image', 'blend_depth'):
|
||||
# if prop in ('template_info', 'template_video', 'template_image', 'blend_depth'):
|
||||
# return
|
||||
|
||||
# super().draw_prop(layout)
|
|
@ -0,0 +1,275 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library of all blender files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.library_types.library_type import LibraryType
|
||||
from asset_library.common.template import Template
|
||||
from asset_library.common.file_utils import install_module
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import urllib3
|
||||
import traceback
|
||||
import time
|
||||
|
||||
|
||||
class Kitsu(LibraryType):
|
||||
|
||||
name = "Kitsu"
|
||||
template_name : StringProperty()
|
||||
template_file : StringProperty()
|
||||
source_directory : StringProperty(subtype='DIR_PATH')
|
||||
#blend_depth: IntProperty(default=1)
|
||||
source_template_image : StringProperty()
|
||||
target_template_image : StringProperty()
|
||||
|
||||
url: StringProperty()
|
||||
login: StringProperty()
|
||||
password: StringProperty(subtype='PASSWORD')
|
||||
project_name: StringProperty()
|
||||
|
||||
def connect(self, url=None, login=None, password=None):
|
||||
'''Connect to kitsu api using provided url, login and password'''
|
||||
|
||||
gazu = install_module('gazu')
|
||||
urllib3.disable_warnings()
|
||||
|
||||
if not self.url:
|
||||
print(f'Kitsu Url: {self.url} is empty')
|
||||
return
|
||||
|
||||
url = self.url
|
||||
if not url.endswith('/api'):
|
||||
url += '/api'
|
||||
|
||||
print(f'Info: Setting Host for kitsu {url}')
|
||||
gazu.client.set_host(url)
|
||||
|
||||
if not gazu.client.host_is_up():
|
||||
print('Error: Kitsu Host is down')
|
||||
|
||||
try:
|
||||
print(f'Info: Log in to kitsu as {self.login}')
|
||||
res = gazu.log_in(self.login, self.password)
|
||||
print(f'Info: Successfully logged in to Kitsu as {res["user"]["full_name"]}')
|
||||
return res['user']
|
||||
except Exception as e:
|
||||
print(f'Error: {traceback.format_exc()}')
|
||||
|
||||
def get_asset_path(self, name, catalog, directory=None):
|
||||
directory = directory or self.source_directory
|
||||
return Path(directory, self.get_asset_relative_path(name, catalog))
|
||||
|
||||
def get_asset_info(self, data, asset_path):
|
||||
|
||||
modified = time.time_ns()
|
||||
catalog = data['entity_type_name'].title()
|
||||
asset_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
#asset_name = self.norm_file_name(data['name'])
|
||||
|
||||
asset_info = dict(
|
||||
filepath=asset_path,
|
||||
modified=modified,
|
||||
library_id=self.library.id,
|
||||
assets=[dict(
|
||||
catalog=catalog,
|
||||
metadata=data.get('data', {}),
|
||||
description=data['description'],
|
||||
tags=[],
|
||||
type=self.data_type,
|
||||
#image=self.library.template_image,
|
||||
#video=self.library.template_video,
|
||||
name=data['name'])
|
||||
]
|
||||
)
|
||||
|
||||
return asset_info
|
||||
|
||||
# def bundle(self, cache_diff=None):
|
||||
# """Group all asset in one or multiple blends for the asset browser"""
|
||||
|
||||
# return super().bundle(cache_diff=cache_diff)
|
||||
|
||||
def set_asset_preview(self, asset, asset_data):
|
||||
'''Load an external image as the preview for an asset, using the source template'''
|
||||
|
||||
asset_path = self.format_path(Path(asset_data['filepath']).as_posix())
|
||||
|
||||
image_path = self.find_path(self.target_template_image, asset_data, filepath=asset_path)
|
||||
|
||||
if image_path:
|
||||
with bpy.context.temp_override(id=asset):
|
||||
bpy.ops.ed.lib_id_load_custom_preview(
|
||||
filepath=str(image_path)
|
||||
)
|
||||
else:
|
||||
print(f'No image found for {self.target_template_image} on {asset.name}')
|
||||
|
||||
if asset.preview:
|
||||
return asset.preview
|
||||
|
||||
|
||||
def generate_previews(self, cache=None):
|
||||
|
||||
print('Generate previews...')
|
||||
|
||||
if cache in (None, ''):
|
||||
cache = self.fetch()
|
||||
elif isinstance(cache, (Path, str)):
|
||||
cache = self.read_cache(cache)
|
||||
|
||||
#TODO Support all multiple data_type
|
||||
for asset_info in cache:
|
||||
|
||||
if asset_info.get('type', self.data_type) == 'FILE':
|
||||
self.generate_blend_preview(asset_info)
|
||||
else:
|
||||
self.generate_asset_preview(asset_info)
|
||||
|
||||
def generate_asset_preview(self, asset_info):
|
||||
|
||||
data_type = self.data_type
|
||||
scn = bpy.context.scene
|
||||
vl = bpy.context.view_layer
|
||||
|
||||
asset_path = self.format_path(asset_info['filepath'])
|
||||
|
||||
lens = 85
|
||||
|
||||
if not asset_path.exists():
|
||||
print(f'Blend file {asset_path} does not exist')
|
||||
return
|
||||
|
||||
|
||||
asset_data_names = {}
|
||||
|
||||
# First check which assets need a preview
|
||||
for asset_data in asset_info['assets']:
|
||||
name = asset_data['name']
|
||||
image_path = self.format_path(self.target_template_image, asset_data, filepath=asset_path)
|
||||
|
||||
if image_path.exists():
|
||||
continue
|
||||
|
||||
#Store in a dict all asset_data that does not have preview
|
||||
asset_data_names[name] = dict(asset_data, image_path=image_path)
|
||||
|
||||
if not asset_data_names:
|
||||
print(f'All previews already exist for {asset_path}')
|
||||
return
|
||||
|
||||
#asset_names = [a['name'] for a in asset_info['assets']]
|
||||
asset_names = list(asset_data_names.keys())
|
||||
assets = self.load_datablocks(asset_path, names=asset_names, link=True, type=data_type)
|
||||
|
||||
print(asset_names)
|
||||
print(assets)
|
||||
|
||||
for asset in assets:
|
||||
if not asset:
|
||||
continue
|
||||
|
||||
print(f'Generate Preview for asset {asset.name}')
|
||||
|
||||
asset_data = asset_data_names[asset.name]
|
||||
|
||||
#print(self.target_template_image, asset_path)
|
||||
image_path = self.format_path(self.target_template_image, asset_data, filepath=asset_path)
|
||||
|
||||
# Force redo preview
|
||||
# if asset.preview:
|
||||
# print(f'Writing asset preview to {image_path}')
|
||||
# self.write_preview(asset.preview, image_path)
|
||||
# continue
|
||||
|
||||
if data_type == 'COLLECTION':
|
||||
|
||||
bpy.ops.object.collection_instance_add(name=asset.name)
|
||||
|
||||
scn.camera.data.lens = lens
|
||||
bpy.ops.view3d.camera_to_view_selected()
|
||||
scn.camera.data.lens -= 5
|
||||
|
||||
instance = vl.objects.active
|
||||
|
||||
#scn.collection.children.link(asset)
|
||||
|
||||
scn.render.filepath = str(image_path)
|
||||
scn.render.image_settings.file_format = self.format_from_ext(image_path.suffix)
|
||||
scn.render.image_settings.color_mode = 'RGBA'
|
||||
scn.render.image_settings.quality = 90
|
||||
|
||||
|
||||
print(f'Render asset {asset.name} to {image_path}')
|
||||
bpy.ops.render.render(write_still=True)
|
||||
|
||||
#instance.user_clear()
|
||||
asset.user_clear()
|
||||
|
||||
bpy.data.objects.remove(instance)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
|
||||
def fetch(self):
|
||||
"""Gather in a list all assets found in the folder"""
|
||||
|
||||
print(f'Fetch Assets for {self.library.name}')
|
||||
|
||||
gazu = install_module('gazu')
|
||||
self.connect()
|
||||
|
||||
template_file = Template(self.template_file)
|
||||
template_name = Template(self.template_name)
|
||||
|
||||
project = gazu.client.fetch_first('projects', {'name': self.project_name})
|
||||
entity_types = gazu.client.fetch_all('entity-types')
|
||||
entity_types_ids = {e['id']: e['name'] for e in entity_types}
|
||||
|
||||
asset_infos = []
|
||||
for asset_data in gazu.asset.all_assets_for_project(project):
|
||||
asset_data['entity_type_name'] = entity_types_ids[asset_data.pop('entity_type_id')]
|
||||
asset_name = asset_data['name']
|
||||
|
||||
asset_field_data = dict(asset_name=asset_name, type=asset_data['entity_type_name'], source_directory=self.source_directory)
|
||||
|
||||
try:
|
||||
asset_field_data.update(template_name.parse(asset_name))
|
||||
except Exception:
|
||||
print(f'Warning: Could not parse {asset_name} with template {template_name}')
|
||||
|
||||
asset_path = template_file.find(asset_field_data)
|
||||
if not asset_path:
|
||||
print(f'Warning: Could not find file for {template_file.format(asset_field_data)}')
|
||||
continue
|
||||
|
||||
#print(asset_path)
|
||||
|
||||
# TODO group when multiple assets are stored in the same blend
|
||||
asset_infos.append(self.get_asset_info(asset_data, asset_path))
|
||||
|
||||
#asset = load_datablocks(asset_path, data_type='collections', names=asset_data['name'], link=True)
|
||||
#if not asset:
|
||||
# print(f"Asset {asset_name} not found in {asset_path}")
|
||||
|
||||
|
||||
#asset_info = self.get_asset_info(asset)
|
||||
|
||||
#asset_infos.append(asset_info)
|
||||
|
||||
#print(assets)
|
||||
# for k, v in assets[0].items():
|
||||
# print(f'- {k} {v}')
|
||||
|
||||
#print('+++++++++++++')
|
||||
#print(asset_infos)
|
||||
|
||||
return asset_infos
|
|
@ -0,0 +1,799 @@
|
|||
|
||||
#from asset_library.common.functions import (norm_asset_datas,)
|
||||
from asset_library.common.bl_utils import get_addon_prefs, load_datablocks
|
||||
from asset_library.common.file_utils import read_file, write_file
|
||||
from asset_library.common.template import Template
|
||||
from asset_library.constants import (MODULE_DIR, RESOURCES_DIR)
|
||||
|
||||
from asset_library import (action, collection, file)
|
||||
|
||||
from bpy.types import PropertyGroup
|
||||
from bpy.props import StringProperty
|
||||
import bpy
|
||||
|
||||
from itertools import groupby
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import os
|
||||
import json
|
||||
import uuid
|
||||
import time
|
||||
from functools import partial
|
||||
import subprocess
|
||||
from glob import glob
|
||||
from copy import deepcopy
|
||||
|
||||
|
||||
class LibraryType(PropertyGroup):
|
||||
|
||||
#def __init__(self):
|
||||
name = "Base Adapter"
|
||||
#library = None
|
||||
|
||||
@property
|
||||
def library(self):
|
||||
prefs = self.addon_prefs
|
||||
for lib in prefs.libraries:
|
||||
if lib.library_type == self:
|
||||
return lib
|
||||
|
||||
@property
|
||||
def bundle_directory(self):
|
||||
return self.library.library_path
|
||||
|
||||
@property
|
||||
def data_type(self):
|
||||
return self.library.data_type
|
||||
|
||||
@property
|
||||
def data_types(self):
|
||||
return self.library.data_types
|
||||
|
||||
def get_catalog_path(self, directory=None):
|
||||
directory = directory or self.bundle_directory
|
||||
return Path(directory, 'blender_assets.cats.txt')
|
||||
|
||||
@property
|
||||
def cache_file(self):
|
||||
return Path(self.bundle_directory) / f"blender_assets.{self.library.id}.json"
|
||||
|
||||
@property
|
||||
def tmp_cache_file(self):
|
||||
return Path(bpy.app.tempdir) / f"blender_assets.{self.library.id}.json"
|
||||
|
||||
@property
|
||||
def diff_file(self):
|
||||
return Path(bpy.app.tempdir, 'diff.json')
|
||||
|
||||
@property
|
||||
def preview_blend(self):
|
||||
return MODULE_DIR / self.data_type.lower() / "preview.blend"
|
||||
|
||||
@property
|
||||
def preview_assets_file(self):
|
||||
return Path(bpy.app.tempdir, "preview_assets_file.json")
|
||||
|
||||
@property
|
||||
def addon_prefs(self):
|
||||
return get_addon_prefs()
|
||||
|
||||
@property
|
||||
def module_type(self):
|
||||
lib_type = self.library.data_type
|
||||
if lib_type == 'ACTION':
|
||||
return action
|
||||
elif lib_type == 'FILE':
|
||||
return file
|
||||
elif lib_type == 'COLLECTION':
|
||||
return collection
|
||||
|
||||
def to_dict(self):
|
||||
return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
|
||||
|
||||
@property
|
||||
def format_data(self):
|
||||
"""Dict for formating template"""
|
||||
return dict(self.to_dict(), bundle_dir=self.library.bundle_dir, parent=self.library.parent)
|
||||
|
||||
def fetch(self):
|
||||
raise Exception('This method needs to be defined in the library_type')
|
||||
|
||||
def norm_file_name(self, name):
|
||||
return name.replace(' ', '_')
|
||||
|
||||
def read_file(self, file):
|
||||
return read_file(file)
|
||||
|
||||
def write_file(self, file, data):
|
||||
return write_file(file, data)
|
||||
|
||||
def copy_file(self, source, destination):
|
||||
src = Path(source)
|
||||
dst = Path(destination)
|
||||
|
||||
if not src.exists():
|
||||
print(f'Cannot copy file {src}: file does not exist')
|
||||
return
|
||||
|
||||
dst.parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
if src == dst:
|
||||
print(f'Cannot copy file {src}: source and destination are the same')
|
||||
return
|
||||
|
||||
print(f'Copy file from {src} to {dst}')
|
||||
shutil.copy2(str(src), str(dst))
|
||||
|
||||
def load_datablocks(self, src, names=None, type='objects', link=True, expr=None, assets_only=False):
|
||||
"""Link or append a datablock from a blendfile"""
|
||||
|
||||
if type.isupper():
|
||||
type = f'{type.lower()}s'
|
||||
|
||||
return load_datablocks(src, names=names, type=type, link=link, expr=expr, assets_only=assets_only)
|
||||
|
||||
def get_asset_data(self, asset):
|
||||
"""Extract asset information on a datablock"""
|
||||
|
||||
return dict(
|
||||
name=asset.name,
|
||||
author=asset.asset_data.author,
|
||||
tags=list(asset.asset_data.tags.keys()),
|
||||
metadata=dict(asset.asset_data),
|
||||
description=asset.asset_data.description,
|
||||
)
|
||||
|
||||
def get_asset_relative_path(self, name, catalog):
|
||||
'''Get a relative path for the asset'''
|
||||
name = self.norm_file_name(name)
|
||||
return Path(catalog, name, name).with_suffix('.blend')
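# Example (hypothetical names):
# get_asset_relative_path('Big Chair', 'props/furniture') -> Path('props/furniture/Big_Chair/Big_Chair.blend')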
|
||||
|
||||
def get_active_asset_library(self):
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
prefs = get_addon_prefs()
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
|
||||
if not asset_handle:
|
||||
return self
|
||||
|
||||
lib = None
|
||||
if '.library_id' in asset_handle.asset_data:
|
||||
lib_id = asset_handle.asset_data['.library_id']
|
||||
lib = next((l for l in prefs.libraries if l.id == lib_id), None)
|
||||
|
||||
if not lib:
|
||||
print(f"No library found for id {lib_id}")
|
||||
|
||||
if not lib:
|
||||
lib = self
|
||||
|
||||
return lib
|
||||
|
||||
def get_active_asset_path(self):
|
||||
'''Get the full path of the active asset_handle from the asset browser'''
|
||||
prefs = get_addon_prefs()
|
||||
asset_handle = bpy.context.asset_file_handle
|
||||
|
||||
lib = self.get_active_asset_library()
|
||||
|
||||
if 'filepath' in asset_handle.asset_data:
|
||||
asset_path = asset_handle.asset_data['filepath']
|
||||
asset_path = lib.library_type.format_path(asset_path)
|
||||
else:
|
||||
asset_path = bpy.types.AssetHandle.get_full_library_path(
|
||||
asset_handle, bpy.context.asset_library_ref
|
||||
)
|
||||
|
||||
return asset_path
|
||||
|
||||
def generate_previews(self):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def get_image_path(self, name, catalog, filepath):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def get_video_path(self, name, catalog, filepath):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def new_asset(self, asset, asset_data):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def remove_asset(self, asset, asset_data):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def set_asset_preview(self, asset, asset_data):
|
||||
raise Exception('Need to be defined in the library_type')
|
||||
|
||||
def format_asset_data(self, data):
|
||||
"""Get a dict for use in template fields"""
|
||||
return {
|
||||
'asset_name': data['name'],
|
||||
'asset_path': Path(data['filepath']),
|
||||
'catalog': data['catalog'],
|
||||
'catalog_name': data['catalog'].replace('/', '_'),
|
||||
}
|
||||
|
||||
def format_path(self, template, data={}, **kargs):
|
||||
if not template:
|
||||
return None
|
||||
|
||||
if data:
|
||||
data = self.format_asset_data(dict(data, **kargs))
|
||||
else:
|
||||
data = kargs
|
||||
|
||||
if template.startswith('.'): #the template is relative
|
||||
template = Path(data['asset_path'], template).as_posix()
|
||||
|
||||
params = dict(
|
||||
**data,
|
||||
**self.format_data,
|
||||
)
|
||||
|
||||
return Template(template).format(params).resolve()
|
||||
|
||||
def find_path(self, template, data, **kargs):
|
||||
path = self.format_path(template, data, **kargs)
|
||||
paths = glob(str(path))
|
||||
if paths:
|
||||
return Path(paths[0])
|
||||
|
||||
def read_asset_info_file(self, asset_path) -> dict:
|
||||
"""Read the description file of the asset"""
|
||||
|
||||
description_path = self.get_description_path(asset_path)
|
||||
return self.read_file(description_path)
|
||||
|
||||
def write_description_file(self, asset_data, asset_path) -> None:
|
||||
description_path = self.get_description_path(asset_path)
|
||||
return write_file(description_path, asset_data)
|
||||
|
||||
def write_asset(self, asset, asset_path):
|
||||
|
||||
Path(asset_path).parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
bpy.data.libraries.write(
|
||||
str(asset_path),
|
||||
{asset},
|
||||
path_remap="NONE",
|
||||
fake_user=True,
|
||||
compress=True
|
||||
)
|
||||
|
||||
def read_catalog(self, directory=None):
|
||||
"""Read the catalog file of the library target directory or of the specified directory"""
|
||||
catalog_path = self.get_catalog_path(directory)
|
||||
|
||||
if not catalog_path.exists():
|
||||
return {}
|
||||
|
||||
cat_data = {}
|
||||
|
||||
for line in catalog_path.read_text(encoding="utf-8").split('\n'):
|
||||
if line.startswith(('VERSION', '#')) or not line:
|
||||
continue
|
||||
|
||||
cat_id, cat_path, cat_name = line.split(':')
|
||||
cat_data[cat_path] = {'id':cat_id, 'name':cat_name}
|
||||
|
||||
return cat_data
|
||||
|
||||
def write_catalog(self, catalog_data, directory=None):
|
||||
"""Write the catalog file in the library target directory or of the specified directory"""
|
||||
|
||||
catalog_path = self.get_catalog_path(directory)
|
||||
|
||||
lines = ['VERSION 1', '']
|
||||
|
||||
# Add missing parent catalogs
|
||||
norm_data = {}
|
||||
for cat_path, cat_data in catalog_data.items():
|
||||
norm_data[cat_path] = cat_data
|
||||
for p in Path(cat_path).parents[:-1]:
|
||||
if p in cat_data or p in norm_data:
|
||||
continue
|
||||
|
||||
norm_data[p.as_posix()] = {'id': str(uuid.uuid4()), 'name': '-'.join(p.parts)}
|
||||
|
||||
for cat_path, cat_data in sorted(norm_data.items()):
|
||||
cat_name = cat_data['name'].replace('/', '-')
|
||||
lines.append(f"{cat_data['id']}:{cat_path}:{cat_name}")
|
||||
|
||||
print(f'Catalog written at: {catalog_path}')
|
||||
catalog_path.write_text('\n'.join(lines), encoding="utf-8")
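# Example of the resulting blender_assets.cats.txt (made-up ids):
#   VERSION 1
#
#   0f2b1a6c-4d7e-4e62-9f3a-1c2d3e4f5a6b:characters:characters
#   7a8b9c0d-1e2f-4a3b-8c4d-5e6f7a8b9c0d:characters/main:characters-main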
|
||||
|
||||
def read_cache(self, cache_path=None):
|
||||
cache_path = cache_path or self.cache_file
|
||||
print(f'Read cache from {cache_path}')
|
||||
return self.read_file(cache_path)
|
||||
|
||||
def write_cache(self, asset_infos, cache_path=None):
|
||||
cache_path = cache_path or self.cache_file
|
||||
print(f'Cache file written to {cache_path}')
|
||||
return write_file(cache_path, list(asset_infos))
|
||||
|
||||
def prop_rel_path(self, path, prop):
|
||||
'''Get a filepath relative to a property of the library_type'''
|
||||
field_prop = '{%s}/'%prop
|
||||
|
||||
prop_value = getattr(self, prop)
|
||||
prop_value = Path(os.path.expandvars(prop_value)).resolve()
|
||||
|
||||
rel_path = Path(path).resolve().relative_to(prop_value).as_posix()
|
||||
|
||||
return field_prop + rel_path
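# Example (hypothetical paths): with self.source_directory = '/projects/lib',
# prop_rel_path('/projects/lib/props/chair/chair.blend', 'source_directory')
# returns '{source_directory}/props/chair/chair.blend'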
|
||||
|
||||
def format_from_ext(self, ext):
|
||||
if ext.startswith('.'):
|
||||
ext = ext[1:]
|
||||
|
||||
file_format = ext.upper()
|
||||
|
||||
if file_format == 'JPG':
|
||||
file_format = 'JPEG'
|
||||
elif file_format == 'EXR':
|
||||
file_format = 'OPEN_EXR'
|
||||
|
||||
return file_format
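# e.g. format_from_ext('.jpg') -> 'JPEG', format_from_ext('.exr') -> 'OPEN_EXR', format_from_ext('png') -> 'PNG'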
|
||||
|
||||
def save_image(self, image, filepath, remove=False):
|
||||
filepath = Path(filepath)
|
||||
|
||||
if isinstance(image, (str, Path)):
|
||||
image = bpy.data.images.load(str(image))
|
||||
image.update()
|
||||
|
||||
image.filepath_raw = str(filepath)
|
||||
file_format = self.format_from_ext(filepath.suffix)
|
||||
|
||||
image.file_format = file_format
|
||||
image.save()
|
||||
|
||||
if remove:
|
||||
bpy.data.images.remove(image)
|
||||
else:
|
||||
return image
|
||||
|
||||
def write_preview(self, preview, filepath):
|
||||
if not preview or not filepath:
|
||||
return
|
||||
|
||||
filepath = Path(filepath)
|
||||
filepath.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
img_size = preview.image_size
|
||||
|
||||
px = [0] * img_size[0] * img_size[1] * 4
|
||||
preview.image_pixels_float.foreach_get(px)
|
||||
img = bpy.data.images.new(name=filepath.name, width=img_size[0], height=img_size[1], is_data=True, alpha=True)
|
||||
img.pixels.foreach_set(px)
|
||||
|
||||
self.save_image(img, filepath, remove=True)
|
||||
|
||||
|
||||
def draw_header(self, layout):
|
||||
"""Draw the header of the Asset Browser Window"""
|
||||
#layout.separator()
|
||||
|
||||
self.module_type.gui.draw_header(layout)
|
||||
|
||||
def draw_context_menu(self, layout):
|
||||
"""Draw the context menu of the Asset Browser Window"""
|
||||
self.module_type.gui.draw_context_menu(layout)
|
||||
|
||||
def generate_blend_preview(self, asset_info):
|
||||
asset_name = asset_info['name']
|
||||
catalog = asset_info['catalog']
|
||||
|
||||
asset_path = self.format_path(asset_info['filepath'])
|
||||
dst_image_path = self.get_image_path(asset_name, asset_path, catalog)
|
||||
|
||||
if dst_image_path.exists():
|
||||
return
|
||||
|
||||
# Check if a source image exists and if so copy it into the new directory
|
||||
src_image_path = asset_info.get('image')
|
||||
if src_image_path:
|
||||
src_image_path = self.get_template_path(src_image_path, asset_name, asset_path, catalog)
|
||||
if src_image_path and src_image_path.exists():
|
||||
self.copy_file(src_image_path, dst_image_path)
|
||||
return
|
||||
|
||||
print(f'Thumbnailing {asset_path} to {dst_image_path}')
|
||||
blender_thumbnailer = Path(bpy.app.binary_path).parent / 'blender-thumbnailer'
|
||||
|
||||
dst_image_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
subprocess.call([blender_thumbnailer, str(asset_path), str(dst_image_path)])
|
||||
|
||||
success = dst_image_path.exists()
|
||||
|
||||
if not success:
|
||||
empty_preview = RESOURCES_DIR / 'empty_preview.png'
|
||||
self.copy_file(str(empty_preview), str(dst_image_path))
|
||||
|
||||
return success
|
||||
|
||||
'''
|
||||
def generate_asset_preview(self, asset_info):
|
||||
"""Only generate preview when conforming a library"""
|
||||
|
||||
#print('\ngenerate_preview', asset_info['filepath'])
|
||||
|
||||
scn = bpy.context.scene
|
||||
#Creating the preview for collection, object or material
|
||||
camera = scn.camera
|
||||
vl = bpy.context.view_layer
|
||||
|
||||
data_type = self.data_type #asset_info['data_type']
|
||||
asset_path = self.format_path(asset_info['filepath'])
|
||||
|
||||
# Check if a source video exists and if so copying it in the new directory
|
||||
if self.library.template_video:
|
||||
for asset_data in asset_info['assets']:
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
dst_video_path = self.format_path(self.library.template_video, asset_data, filepath=dst_asset_path) #Template(src_video_path).find(asset_data, asset_path=dst_asset_path, **self.format_data)
|
||||
|
||||
if dst_video_path.exists():
|
||||
print(f'The dest video {dst_video_path} already exist')
|
||||
continue
|
||||
|
||||
src_video_template = asset_data.get('video')
|
||||
if not src_video_template:
|
||||
continue
|
||||
|
||||
src_video_path = self.find_path(src_video_template, asset_data, filepath=asset_path)#Template(src_video_path).find(asset_data, asset_path=dst_asset_path, **self.format_data)
|
||||
if src_video_path:
|
||||
print(f'Copy video from {src_video_path} to {dst_video_path}')
|
||||
self.copy_file(src_video_path, dst_video_path)
|
||||
|
||||
# Check if asset as a preview image or need it to be generated
|
||||
asset_data_names = {}
|
||||
|
||||
if self.library.template_image:
|
||||
for asset_data in asset_info['assets']:
|
||||
name = asset_data['name']
|
||||
dst_asset_path = self.get_asset_bundle_path(asset_data)
|
||||
|
||||
dst_image_path = self.format_path(self.library.template_image, asset_data, filepath=dst_asset_path)
|
||||
if dst_image_path.exists():
|
||||
print(f'The dest image {dst_image_path} already exist')
|
||||
continue
|
||||
|
||||
# Check if a source image exists and if so copying it in the new directory
|
||||
src_image_template = asset_data.get('image')
|
||||
if src_image_template:
|
||||
src_image_path = self.find_path(src_image_template, asset_data, filepath=asset_path)
|
||||
|
||||
if src_image_path:
|
||||
if src_image_path.suffix == dst_image_path.suffix:
|
||||
self.copy_file(src_image_path, dst_image_path)
|
||||
else:
|
||||
#img = bpy.data.images.load(str(src_image_path))
|
||||
self.save_image(src_image_path, dst_image_path, remove=True)
|
||||
|
||||
return
|
||||
|
||||
#Store in a dict all asset_data that does not have preview
|
||||
asset_data_names[name] = dict(asset_data, image_path=dst_image_path)
|
||||
|
||||
|
||||
if not asset_data_names:
|
||||
# No preview to generate
|
||||
return
|
||||
|
||||
print('Making Preview for', asset_data_names)
|
||||
|
||||
asset_names = list(asset_data_names.keys())
|
||||
assets = self.load_datablocks(asset_path, names=asset_names, link=True, type=data_type)
|
||||
|
||||
for asset in assets:
|
||||
if not asset:
|
||||
continue
|
||||
|
||||
asset_data = asset_data_names[asset.name]
|
||||
image_path = asset_data['image_path']
|
||||
|
||||
if asset.preview:
|
||||
print(f'Writing asset preview to {image_path}')
|
||||
self.write_preview(asset.preview, image_path)
|
||||
continue
|
||||
|
||||
if data_type == 'COLLECTION':
|
||||
|
||||
bpy.ops.object.collection_instance_add(name=asset.name)
|
||||
|
||||
bpy.ops.view3d.camera_to_view_selected()
|
||||
instance = vl.objects.active
|
||||
|
||||
#scn.collection.children.link(asset)
|
||||
|
||||
scn.render.filepath = str(image_path)
|
||||
|
||||
print(f'Render asset {asset.name} to {image_path}')
|
||||
bpy.ops.render.render(write_still=True)
|
||||
|
||||
#instance.user_clear()
|
||||
asset.user_clear()
|
||||
|
||||
bpy.data.objects.remove(instance)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
'''
|
||||
|
||||
def set_asset_catalog(self, asset, asset_data, catalog_data):
|
||||
"""Find the catalog if already exist or create it"""
|
||||
catalog_name = asset_data['catalog']
|
||||
catalog = catalog_data.get(catalog_name)
|
||||
if not catalog:
|
||||
catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
|
||||
catalog_data[catalog_name] = catalog
|
||||
|
||||
asset.asset_data.catalog_id = catalog['id']
|
||||
|
||||
def set_asset_metadata(self, asset, asset_data):
|
||||
"""Create custom prop to an asset base on provided data"""
|
||||
metadata = asset_data.get('metadata', {})
|
||||
|
||||
library_id = self.library.id
|
||||
if 'library_id' in asset_data:
|
||||
library_id = asset_data['library_id']
|
||||
|
||||
metadata['.library_id'] = library_id
|
||||
metadata['filepath'] = asset_data['filepath']
|
||||
for k, v in metadata.items():
|
||||
asset.asset_data[k] = v
|
||||
|
||||
def set_asset_tags(self, asset, asset_data):
|
||||
"""Create asset tags base on provided data"""
|
||||
|
||||
if 'tags' in asset_data:
|
||||
for tag in asset.asset_data.tags[:]:
|
||||
asset.asset_data.tags.remove(tag)
|
||||
|
||||
for tag in asset_data['tags']:
|
||||
if not tag:
|
||||
continue
|
||||
asset.asset_data.tags.new(tag, skip_if_exists=True)
|
||||
|
||||
def set_asset_info(self, asset, asset_data):
|
||||
"""Set asset description base on provided data"""
|
||||
|
||||
for key in ('author', 'description'):
|
||||
if key in asset_data:
|
||||
setattr(asset.asset_data, key, asset_data.get(key) or '')
|
||||
|
||||
def get_asset_bundle_path(self, asset_data):
|
||||
|
||||
catalog_parts = asset_data['catalog'].split('/') + [asset_data['name']]
|
||||
|
||||
sub_path = catalog_parts[:self.library.blend_depth]
|
||||
|
||||
blend_name = sub_path[-1].replace(' ', '_').lower()
|
||||
return Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
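# Worked example (hypothetical values): with blend_depth = 2 and an asset named 'Rex'
# in catalog 'chars/main', catalog_parts = ['chars', 'main', 'Rex'],
# sub_path = ['chars', 'main'] and the bundle path is
# '<bundle_directory>/chars/main/main.blend'.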
|
||||
|
||||
def bundle(self, cache_diff=None):
|
||||
"""Group all new assets in one or multiple blends for the asset browser"""
|
||||
|
||||
supported_types = ('FILE', 'ACTION', 'COLLECTION')
|
||||
supported_operations = ('ADD', 'REMOVE', 'MODIFY')
|
||||
|
||||
if self.data_type not in supported_types:
|
||||
print(f'{self.data_type} is not supported yet, supported types are {supported_types}')
|
||||
return
|
||||
|
||||
catalog_data = self.read_catalog() #TODO remove unused catalog
|
||||
|
||||
write_cache = False
|
||||
if not cache_diff:
|
||||
# Get list of all modifications
|
||||
asset_infos = self.fetch()
|
||||
|
||||
|
||||
cache, cache_diff = self.diff(asset_infos)
|
||||
|
||||
# Only write complete cache at the end
|
||||
write_cache = True
|
||||
|
||||
#self.generate_previews(asset_infos)
|
||||
self.write_cache(asset_infos, self.tmp_cache_file)
|
||||
bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(self.tmp_cache_file))
|
||||
|
||||
#print()
|
||||
#print(cache)
|
||||
#raise Exception()
|
||||
|
||||
elif isinstance(cache_diff, (Path, str)):
|
||||
cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))
|
||||
|
||||
|
||||
if self.library.blend_depth == 0:
|
||||
raise Exception('Blend depth must be at least 1')
|
||||
#groups = [(cache_diff)]
|
||||
else:
|
||||
cache_diff.sort(key=self.get_asset_bundle_path)
|
||||
groups = groupby(cache_diff, key=self.get_asset_bundle_path)
|
||||
|
||||
total_assets = len(cache_diff)
|
||||
print(f'total_assets={total_assets}')
|
||||
|
||||
if total_assets == 0:
|
||||
print('No assets found')
|
||||
return
|
||||
|
||||
#data_types = self.data_types
|
||||
#if self.data_types == 'FILE'
|
||||
|
||||
i = 0
|
||||
#assets_to_preview = []
|
||||
for blend_path, asset_datas in groups:
|
||||
#blend_name = sub_path[-1].replace(' ', '_').lower()
|
||||
#blend_path = Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
|
||||
|
||||
if blend_path.exists():
|
||||
print(f'Opening existing bundle blend: {blend_path}')
|
||||
bpy.ops.wm.open_mainfile(filepath=str(blend_path))
|
||||
else:
|
||||
print(f'Create new bundle blend to: {blend_path}')
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
|
||||
for asset_data in asset_datas:
|
||||
if total_assets <= 100 or i % int(total_assets / 10) == 0:
|
||||
print(f'Progress: {int(i / total_assets * 100)+1}')
|
||||
|
||||
operation = asset_data.get('operation', 'ADD')
|
||||
asset = getattr(bpy.data, self.data_types).get(asset_data['name'])
|
||||
|
||||
if operation not in supported_operations:
|
||||
print(f'operation {operation} not supported, supported operations are {supported_operations}')
|
||||
continue
|
||||
|
||||
if operation == 'REMOVE':
|
||||
if asset:
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
else:
|
||||
print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
|
||||
continue
|
||||
|
||||
elif operation == 'MODIFY':
|
||||
if not asset:
|
||||
print(f'WARNING: Modify Asset: {asset_data["name"]} not found in {blend_path}, it will be created')
|
||||
|
||||
if operation == 'ADD' or not asset:
|
||||
if asset:
|
||||
#raise Exception(f"Asset {asset_data['name']} Already in Blend")
|
||||
print(f"Asset {asset_data['name']} Already in Blend")
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
|
||||
#print(f"INFO: Add new asset: {asset_data['name']}")
|
||||
asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
|
||||
|
||||
|
||||
asset.asset_mark()
|
||||
|
||||
self.set_asset_preview(asset, asset_data)
|
||||
|
||||
#if not asset_preview:
|
||||
# assets_to_preview.append((asset_data['filepath'], asset_data['name'], asset_data['data_type']))
|
||||
#if self.externalize_data:
|
||||
# self.write_preview(preview, filepath)
|
||||
|
||||
self.set_asset_catalog(asset, asset_data, catalog_data)
|
||||
self.set_asset_metadata(asset, asset_data)
|
||||
self.set_asset_tags(asset, asset_data)
|
||||
self.set_asset_info(asset, asset_data)
|
||||
|
||||
|
||||
i += 1
|
||||
|
||||
#self.write_asset_preview_file()
|
||||
|
||||
print(f'Saving Blend to {blend_path}')
|
||||
|
||||
blend_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)
|
||||
|
||||
if write_cache:
|
||||
self.write_cache(asset_infos)
|
||||
|
||||
self.write_catalog(catalog_data)
|
||||
|
||||
|
||||
bpy.ops.wm.quit_blender()
|
||||
|
||||
def unflatten_cache(self, cache):
|
||||
""" Return a new unflattten list of asset data
|
||||
grouped by filepath"""
|
||||
|
||||
new_cache = []
|
||||
|
||||
cache = deepcopy(cache)
|
||||
|
||||
cache.sort(key=lambda x : x['filepath'])
|
||||
groups = groupby(cache, key=lambda x : x['filepath'])
|
||||
|
||||
keys = ['filepath', 'modified', 'library_id']
|
||||
|
||||
for _, asset_datas in groups:
|
||||
asset_datas = list(asset_datas)
|
||||
|
||||
#print(asset_datas[0])
|
||||
|
||||
asset_info = {k:asset_datas[0][k] for k in keys}
|
||||
asset_info['assets'] = [{k:v for k, v in a.items() if k not in keys+['operation']} for a in asset_datas]
|
||||
|
||||
new_cache.append(asset_info)
|
||||
|
||||
return new_cache
|
||||
|
||||
def flatten_cache(self, cache):
|
||||
""" Return a new flat list of asset data
|
||||
the filepath keys are merged with the assets keys"""
|
||||
|
||||
# If the cache has a wrong format
|
||||
if not cache or not isinstance(cache[0], dict):
|
||||
return []
|
||||
|
||||
new_cache = []
|
||||
|
||||
for asset_info in cache:
|
||||
asset_info = asset_info.copy()
|
||||
if 'assets' in asset_info:
|
||||
|
||||
assets = asset_info.pop('assets')
|
||||
for asset_data in assets:
|
||||
new_cache.append({**asset_info, **asset_data})
|
||||
else:
|
||||
new_cache.append(asset_info)
|
||||
|
||||
return new_cache
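# Example (made-up data): flatten_cache merges the per-file keys into each asset entry.
# cache = [{'filepath': '{source_directory}/chair.blend', 'modified': 1, 'library_id': 'lib01',
#           'assets': [{'name': 'chair', 'catalog': 'props'}]}]
# flatten_cache(cache) -> [{'filepath': '{source_directory}/chair.blend', 'modified': 1,
#                           'library_id': 'lib01', 'name': 'chair', 'catalog': 'props'}]
# unflatten_cache(...) groups such flat entries back under an 'assets' list per filepath.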
|
||||
|
||||
def diff(self, asset_infos=None):
|
||||
"""Compare the library cache with it current state and return the new cache and the difference"""
|
||||
|
||||
cache = self.read_cache()
|
||||
|
||||
if cache is None:
|
||||
print(f'Fetching the library {self.library.name} for the first time, this might take a while...')
|
||||
cache = []
|
||||
|
||||
asset_infos = asset_infos or self.fetch()
|
||||
|
||||
cache = {f"{a['filepath']}/{a['name']}": a for a in self.flatten_cache(cache)}
|
||||
new_cache = {f"{a['filepath']}/{a['name']}" : a for a in self.flatten_cache(asset_infos)}
|
||||
|
||||
# print('\n-------------------------')
|
||||
# print([v for k,v in cache.items() if 'WIP_Test' in k])
|
||||
# print()
|
||||
|
||||
# print([v for k,v in new_cache.items() if 'WIP_Test' in k])
|
||||
# print()
|
||||
|
||||
assets_added = [v for k, v in new_cache.items() if k not in cache]
|
||||
assets_removed = [v for k, v in cache.items() if k not in new_cache]
|
||||
assets_modified = [v for k, v in cache.items() if v not in assets_removed and v!= new_cache[k]]
|
||||
|
||||
if assets_added:
|
||||
print(f'{len(assets_added)} Assets Added \n{tuple(a["name"] for a in assets_added[:10])}\n')
|
||||
if assets_removed:
|
||||
print(f'{len(assets_removed)} Assets Removed \n{tuple(a["name"] for a in assets_removed[:10])}\n')
|
||||
if assets_modified:
|
||||
print(f'{len(assets_modified)} Assets Modified \n{tuple(a["name"] for a in assets_modified[:10])}\n')
|
||||
|
||||
assets_added = [dict(a, operation='ADD') for a in assets_added]
|
||||
assets_removed = [dict(a, operation='REMOVE') for a in assets_removed]
|
||||
assets_modified = [dict(a, operation='MODIFY') for a in assets_modified]
|
||||
|
||||
cache_diff = assets_added + assets_removed + assets_modified
|
||||
if not cache_diff:
|
||||
print('No change in the library')
|
||||
|
||||
return list(new_cache.values()), cache_diff
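# Example: entries are keyed by '<filepath>/<name>'. A key only present in the new fetch
# yields {'operation': 'ADD', ...}, a key only present in the stored cache yields
# {'operation': 'REMOVE', ...}, and a key whose asset data changed yields
# {'operation': 'MODIFY', ...}; bundle() then applies these operations per blend file.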
|
||||
|
||||
def draw_prefs(self, layout):
|
||||
"""Draw the options in the addon preference for this library_type"""
|
||||
|
||||
annotations = self.__class__.__annotations__
|
||||
for k, v in annotations.items():
|
||||
layout.prop(self, k, text=bpy.path.display_name(k))
|
||||
|
|
@ -0,0 +1,141 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library of all blender files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.library_types.library_type import LibraryType
|
||||
from asset_library.common.template import Template
|
||||
from asset_library.common.file_utils import install_module
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty, EnumProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import requests
|
||||
import urllib3
|
||||
import traceback
|
||||
import time
|
||||
|
||||
from pprint import pprint as pp
|
||||
|
||||
REQ_HEADERS = requests.utils.default_headers()
|
||||
REQ_HEADERS.update({"User-Agent": "Blender: PH Assets"})
|
||||
|
||||
class PolyHaven(LibraryType):
|
||||
|
||||
name = "Poly Haven"
|
||||
# template_name : StringProperty()
|
||||
# template_file : StringProperty()
|
||||
directory : StringProperty(subtype='DIR_PATH')
|
||||
asset_type : EnumProperty(items=[(i.replace(' ', '_').upper(), i, '') for i in ('HDRIs', 'Models', 'Textures')], default='HDRIS')
|
||||
main_category : StringProperty(
|
||||
default='artificial light, natural light, nature, studio, skies, urban'
|
||||
)
|
||||
secondary_category : StringProperty(
|
||||
default='high contrast, low contrast, medium contrast, midday, morning-afternoon, night, sunrise-sunset'
|
||||
)
|
||||
|
||||
#blend_depth: IntProperty(default=1)
|
||||
|
||||
# url: StringProperty()
|
||||
# login: StringProperty()
|
||||
# password: StringProperty(subtype='PASSWORD')
|
||||
# project_name: StringProperty()
|
||||
|
||||
def get_asset_path(self, name, catalog, directory=None):
|
||||
# path: Source, Asset_type, asset_name / asset_name.blend -> PolyHaven/HDRIs/test/test.blend
|
||||
directory = directory or self.directory
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
|
||||
return Path(directory, self.get_asset_relative_path(name, catalog))
|
||||
|
||||
# def bundle(self, cache_diff=None):
|
||||
# """Group all asset in one or multiple blends for the asset browser"""
|
||||
|
||||
# return super().bundle(cache_diff=cache_diff)
|
||||
|
||||
def format_asset_info(self, asset_info, asset_path):
|
||||
# prend un asset info et output un asset description
|
||||
|
||||
asset_path = self.prop_rel_path(asset_path, 'directory')
|
||||
modified = asset_info.get('modified', time.time_ns())
|
||||
|
||||
return dict(
|
||||
filepath=asset_path,
|
||||
modified=modified,
|
||||
library_id=self.library.id,
|
||||
assets=[dict(
|
||||
catalog=asset_data.get('catalog', asset_info['catalog']),
|
||||
author=asset_data.get('author'),
|
||||
metadata=asset_data.get('metadata', {}),
|
||||
description=asset_data.get('description', ''),
|
||||
tags=asset_data.get('tags', []),
|
||||
type=self.data_type,
|
||||
image=self.template_image,
|
||||
video=self.template_video,
|
||||
name=asset_data['name']) for asset_data in asset_info['assets']
|
||||
]
|
||||
)
|
||||
|
||||
def fetch(self):
|
||||
"""Gather in a list all assets found in the folder"""
|
||||
|
||||
print(f'Fetch Assets for {self.library.name}')
|
||||
|
||||
print('self.asset_type: ', self.asset_type)
|
||||
url = f"https://api.polyhaven.com/assets?t={self.asset_type.lower()}"
|
||||
# url2 = f"https://polyhaven.com/{self.asset_type.lower()}"
|
||||
# url += "&future=true" if early_access else ""
|
||||
# verify_ssl = not bpy.context.preferences.addons["polyhavenassets"].preferences.disable_ssl_verify
|
||||
|
||||
verify_ssl = False
|
||||
try:
|
||||
res = requests.get(url, headers=REQ_HEADERS, verify=verify_ssl)
|
||||
# res2 = requests.get(url2, headers=REQ_HEADERS, verify=verify_ssl)
|
||||
except Exception as e:
|
||||
msg = f"[{type(e).__name__}] Error retrieving {url}"
|
||||
print(msg)
|
||||
# return (msg, None)
|
||||
|
||||
if res.status_code != 200:
|
||||
error = f"Error retrieving asset list, status code: {res.status_code}"
|
||||
print(error)
|
||||
# return (error, None)
|
||||
|
||||
catalog = None
|
||||
|
||||
# return (None, res.json())
|
||||
for asset_info in res.json().values():
|
||||
main_category = None
|
||||
secondary_category = None
|
||||
for category in asset_info['categories']:
|
||||
if category in self.main_category and not main_category:
|
||||
main_category = category
|
||||
if category in self.secondary_category and not secondary_category:
|
||||
secondary_category = category
|
||||
|
||||
if main_category and secondary_category:
|
||||
catalog = f'{main_category}_{secondary_category}'
|
||||
|
||||
if not catalog:
|
||||
return
|
||||
|
||||
asset_path = self.get_asset_path(asset_info['name'], catalog)
|
||||
print('asset_path: ', asset_path)
|
||||
asset_info = self.format_asset_info(asset_info, asset_path)
|
||||
print('asset_info: ', asset_info)
|
||||
|
||||
# return self.format_asset_info([asset['name'], self.get_asset_path(asset['name'], catalog) for asset, asset_infos in res.json().items()])
|
||||
# pp(res.json())
|
||||
# pp(res2.json())
|
||||
# print(res2)
|
||||
|
||||
|
||||
# return asset_infos
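For reference, a standalone sketch of the same lookup outside Blender: query the public Poly Haven listing endpoint and derive a catalog from the first main/secondary categories that match. The endpoint and the category strings mirror the properties above; the helper name and the exact-match comparison (instead of the substring test used by the operator) are illustrative.

import requests

MAIN_CATEGORIES = [c.strip() for c in 'artificial light, natural light, nature, studio, skies, urban'.split(',')]
SECONDARY_CATEGORIES = [c.strip() for c in 'high contrast, low contrast, medium contrast, midday, morning-afternoon, night, sunrise-sunset'.split(',')]

def fetch_polyhaven_catalogs(asset_type='hdris'):
    """Return {asset_slug: 'main_secondary'} for assets matching both category lists."""
    res = requests.get(f'https://api.polyhaven.com/assets?t={asset_type}', timeout=30)
    res.raise_for_status()

    catalogs = {}
    for slug, info in res.json().items():
        categories = info.get('categories', [])
        main = next((c for c in categories if c in MAIN_CATEGORIES), None)
        secondary = next((c for c in categories if c in SECONDARY_CATEGORIES), None)
        if main and secondary:
            catalogs[slug] = f'{main}_{secondary}'
    return catalogs

if __name__ == '__main__':
    for slug, catalog in list(fetch_polyhaven_catalogs().items())[:5]:
        print(slug, '->', catalog)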
|
|
@ -0,0 +1,347 @@
|
|||
|
||||
"""
|
||||
Plugin for making an asset library from all blend files found in a folder
|
||||
"""
|
||||
|
||||
|
||||
from asset_library.library_types.library_type import LibraryType
|
||||
from asset_library.common.bl_utils import load_datablocks
|
||||
from asset_library.common.template import Template
|
||||
|
||||
import bpy
|
||||
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
||||
import re
|
||||
from pathlib import Path
|
||||
from itertools import groupby
|
||||
import uuid
|
||||
import os
|
||||
import shutil
|
||||
import json
|
||||
import time
|
||||
|
||||
|
||||
class ScanFolder(LibraryType):
|
||||
|
||||
name = "Scan Folder"
|
||||
source_directory : StringProperty(subtype='DIR_PATH')
|
||||
|
||||
source_template_file : StringProperty()
|
||||
source_template_image : StringProperty()
|
||||
source_template_video : StringProperty()
|
||||
source_template_info : StringProperty()
|
||||
|
||||
def draw_prefs(self, layout):
|
||||
layout.prop(self, "source_directory", text="Source: Directory")
|
||||
|
||||
col = layout.column(align=True)
|
||||
col.prop(self, "source_template_file", icon='COPY_ID', text='Template file')
|
||||
col.prop(self, "source_template_image", icon='COPY_ID', text='Template image')
|
||||
col.prop(self, "source_template_video", icon='COPY_ID', text='Template video')
|
||||
col.prop(self, "source_template_info", icon='COPY_ID', text='Template info')
|
||||
|
||||
def get_asset_path(self, name, catalog, directory=None):
|
||||
directory = directory or self.source_directory
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
|
||||
return Path(directory, self.get_asset_relative_path(name, catalog))
|
||||
|
||||
def get_image_path(self, name, catalog, filepath):
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
return self.format_path(self.source_template_image, dict(name=name, catalog=catalog, filepath=filepath))
|
||||
|
||||
def get_video_path(self, name, catalog, filepath):
|
||||
catalog = self.norm_file_name(catalog)
|
||||
name = self.norm_file_name(name)
|
||||
return self.format_path(self.source_template_video, dict(name=name, catalog=catalog, filepath=filepath))
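The image and video templates look like format-style patterns resolved relative to the asset file (the '../{name}_image.png' hints in the property descriptions). A purely hypothetical expansion helper, only to illustrate how `format_path` might behave with such a template:

from pathlib import Path

def expand_template(template: str, name: str, catalog: str, filepath: str) -> Path:
    # Hypothetical: substitute the placeholders, then resolve relative to the asset file
    relative = template.format(name=name, catalog=catalog)
    return (Path(filepath).parent / relative).resolve()

# expand_template('../{name}_image.png', 'lamp', 'props', '/lib/props/lamp/lamp.blend')
# -> /lib/props/lamp_image.png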
|
||||
|
||||
def new_asset(self, asset, asset_data):
|
||||
raise NotImplementedError('new_asset() must be implemented by the library type subclass')
|
||||
|
||||
def remove_asset(self, asset, asset_data):
|
||||
raise NotImplementedError('remove_asset() must be implemented by the library type subclass')
|
||||
|
||||
def format_asset_info(self, asset_datas, asset_path, modified=None):
|
||||
|
||||
asset_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
modified = modified or time.time_ns()
|
||||
library_id = self.library.id
|
||||
|
||||
# if self.data_type == 'FILE':
|
||||
# return dict(
|
||||
# filepath=asset_path,
|
||||
# author=asset_info.get('author'),
|
||||
# modified=modified,
|
||||
# library_id=library_id,
|
||||
# catalog=asset_info['catalog'],
|
||||
# tags=[],
|
||||
# description=asset_info.get('description', ''),
|
||||
# type=self.data_type,
|
||||
# #image=self.source_template_image,
|
||||
# name=asset_info['name']
|
||||
# )
|
||||
|
||||
return dict(
|
||||
filepath=asset_path,
|
||||
modified=modified,
|
||||
library_id=library_id,
|
||||
assets=[dict(
|
||||
catalog=asset_data['catalog'],
|
||||
author=asset_data.get('author', ''),
|
||||
metadata=asset_data.get('metadata', {}),
|
||||
description=asset_data.get('description', ''),
|
||||
tags=asset_data.get('tags', []),
|
||||
type=self.data_type,
|
||||
name=asset_data['name']) for asset_data in asset_datas
|
||||
]
|
||||
)
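For orientation, the dict this builds for a blend containing a single collection asset might look like the following (all values invented):

asset_info = {
    'filepath': 'props/lighting/lamp/lamp.blend',    # relative to source_directory
    'modified': 1700000000000000000,                 # time.time_ns() timestamp
    'library_id': 'studio_props',
    'assets': [
        {
            'catalog': 'props/lighting',
            'author': 'jane',
            'metadata': {},
            'description': 'Desk lamp',
            'tags': ['prop', 'light'],
            'type': 'COLLECTION',
            'name': 'lamp',
        },
    ],
}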
|
||||
|
||||
def set_asset_preview(self, asset, asset_data):
|
||||
'''Load an externalize image as preview for an asset using the source template'''
|
||||
|
||||
asset_path = self.format_path(asset_data['filepath'])
|
||||
|
||||
image_template = self.source_template_image
|
||||
if not image_template:
|
||||
return
|
||||
|
||||
image_path = self.find_path(image_template, asset_data, filepath=asset_path)
|
||||
|
||||
if image_path:
|
||||
with bpy.context.temp_override(id=asset):
|
||||
bpy.ops.ed.lib_id_load_custom_preview(
|
||||
filepath=str(image_path)
|
||||
)
|
||||
else:
|
||||
print(f'No image found for {image_template} on {asset.name}')
|
||||
|
||||
if asset.preview:
|
||||
return asset.preview
|
||||
|
||||
def bundle(self, cache_diff=None):
|
||||
"""Group all new assets in one or multiple blends for the asset browser"""
|
||||
|
||||
if self.data_type not in ('FILE', 'ACTION', 'COLLECTION'):
|
||||
print(f'{self.data_type} is not supported yet')
|
||||
return
|
||||
|
||||
catalog_data = self.read_catalog() #TODO remove unused catalog
|
||||
|
||||
write_cache = False
|
||||
if not cache_diff:
|
||||
# Get list of all modifications
|
||||
asset_infos = self.fetch()
|
||||
|
||||
#print(asset_infos[:2])
|
||||
|
||||
flat_cache, cache_diff = self.diff(asset_infos)
|
||||
|
||||
catalogs = [a['catalog'] for a in flat_cache]
|
||||
catalog_data = {k:v for k, v in catalog_data.items() if k in catalogs}
|
||||
|
||||
|
||||
print('cache_diff', cache_diff)
|
||||
|
||||
# Only write complete cache at the end
|
||||
write_cache = True
|
||||
|
||||
#self.generate_previews(asset_infos)
|
||||
self.write_cache(asset_infos, self.tmp_cache_file)
|
||||
bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(self.tmp_cache_file))
|
||||
|
||||
elif isinstance(cache_diff, (Path, str)):
|
||||
cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))
|
||||
|
||||
if self.library.blend_depth == 0:
|
||||
raise Exception('blend_depth must be at least 1')
|
||||
#groups = [(cache_diff)]
|
||||
else:
|
||||
cache_diff.sort(key=self.get_asset_bundle_path)
|
||||
groups = groupby(cache_diff, key=self.get_asset_bundle_path)
|
||||
|
||||
total_assets = len(cache_diff)
|
||||
print(f'total_assets={total_assets}')
|
||||
|
||||
if total_assets == 0:
|
||||
print('No assets found')
|
||||
return
|
||||
|
||||
#data_types = self.data_types
|
||||
#if self.data_types == 'FILE'
|
||||
|
||||
i = 0
|
||||
#assets_to_preview = []
|
||||
for blend_path, asset_datas in groups:
|
||||
#blend_name = sub_path[-1].replace(' ', '_').lower()
|
||||
#blend_path = Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
|
||||
|
||||
if blend_path.exists():
|
||||
print(f'Opening existing bundle blend: {blend_path}')
|
||||
bpy.ops.wm.open_mainfile(filepath=str(blend_path))
|
||||
else:
|
||||
print(f'Create new bundle blend to: {blend_path}')
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
|
||||
for asset_data in asset_datas:
|
||||
if total_assets <= 100 or i % int(total_assets / 10) == 0:
|
||||
print(f'Progress: {int(i / total_assets * 100)+1}')
|
||||
|
||||
operation = asset_data.get('operation', 'ADD')
|
||||
asset = getattr(bpy.data, self.data_types).get(asset_data['name'])
|
||||
|
||||
if operation == 'REMOVE':
|
||||
if asset:
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
else:
|
||||
print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
|
||||
continue
|
||||
|
||||
if operation == 'MODIFY' and not asset:
|
||||
print(f'WARNING: Modify Asset: {asset_data["name"]} not found in {blend_path}, it will be created')
|
||||
|
||||
if operation == 'ADD' or not asset:
|
||||
if asset:
|
||||
#raise Exception(f"Asset {asset_data['name']} Already in Blend")
|
||||
print(f"Asset {asset_data['name']} Already in Blend")
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
|
||||
#print(f"INFO: Add new asset: {asset_data['name']}")
|
||||
asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
|
||||
else:
|
||||
print(f'Operation {operation} not supported, must be one of (ADD, REMOVE, MODIFY)')
|
||||
continue
|
||||
|
||||
asset.asset_mark()
|
||||
|
||||
self.set_asset_preview(asset, asset_data)
|
||||
|
||||
#if not asset_preview:
|
||||
# assets_to_preview.append((asset_data['filepath'], asset_data['name'], asset_data['data_type']))
|
||||
#if self.externalize_data:
|
||||
# self.write_preview(preview, filepath)
|
||||
|
||||
self.set_asset_catalog(asset, asset_data, catalog_data)
|
||||
self.set_asset_metadata(asset, asset_data)
|
||||
self.set_asset_tags(asset, asset_data)
|
||||
self.set_asset_info(asset, asset_data)
|
||||
|
||||
i += 1
|
||||
|
||||
#self.write_asset_preview_file()
|
||||
|
||||
print(f'Saving Blend to {blend_path}')
|
||||
|
||||
blend_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)
|
||||
|
||||
if write_cache:
|
||||
self.write_cache(asset_infos)
|
||||
else:
|
||||
cache = self.read_cache()
|
||||
|
||||
# Update the cache with the modification
|
||||
if not cache:
|
||||
cache = []
|
||||
|
||||
flat_cache = {f"{a['filepath']}/{a['name']}": a for a in self.flatten_cache(cache)}
|
||||
flat_cache_diff = {f"{a['filepath']}/{a['name']}": a for a in cache_diff}
|
||||
|
||||
#Update the cache with the operations
|
||||
for k, v in flat_cache_diff.items():
|
||||
if v['operation'] == 'REMOVE':
|
||||
if k in flat_cache:
|
||||
del flat_cache[k]
|
||||
elif v['operation'] in ('MODIFY', 'ADD'):
|
||||
flat_cache[k] = v
|
||||
|
||||
new_cache = self.unflatten_cache(list(flat_cache.values()))
|
||||
self.write_cache(new_cache)
|
||||
|
||||
|
||||
self.write_catalog(catalog_data)
|
||||
|
||||
|
||||
bpy.ops.wm.quit_blender()
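The write-out above hinges on sorting the diff by its destination blend and then walking it with `itertools.groupby`, so each bundle file is opened only once. A minimal standalone sketch of that pattern, with a stand-in for `get_asset_bundle_path`:

from itertools import groupby

def bundle_path(entry):
    # Stand-in key: the first catalog level decides which blend the asset lands in
    return f"{entry['catalog'].split('/')[0]}.blend"

cache_diff = [
    {'name': 'lamp',  'catalog': 'props/lighting',   'operation': 'ADD'},
    {'name': 'walk',  'catalog': 'anims/locomotion', 'operation': 'ADD'},
    {'name': 'chair', 'catalog': 'props/furniture',  'operation': 'MODIFY'},
]

# groupby only merges consecutive items, so sort by the same key first
cache_diff.sort(key=bundle_path)
for blend, entries in groupby(cache_diff, key=bundle_path):
    print(blend, [e['name'] for e in entries])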
|
||||
|
||||
def fetch(self):
|
||||
"""Gather in a list all assets found in the folder"""
|
||||
|
||||
print(f'Fetch Assets for {self.library.name}')
|
||||
|
||||
source_directory = Path(self.source_directory)
|
||||
template_file = Template(self.source_template_file)
|
||||
catalog_data = self.read_catalog(directory=source_directory)
|
||||
catalog_ids = {v['id']: k for k, v in catalog_data.items()}
|
||||
|
||||
cache = self.read_cache() or []
|
||||
|
||||
print(f'Search for blend using glob template: {template_file.glob_pattern}')
|
||||
print(f'Scanning Folder {source_directory}...')
|
||||
|
||||
new_cache = []
|
||||
|
||||
for asset_path in template_file.glob(source_directory):#sorted(blend_files):
|
||||
|
||||
source_rel_path = self.prop_rel_path(asset_path, 'source_directory')
|
||||
modified = asset_path.stat().st_mtime_ns
|
||||
|
||||
# Check if the asset description has already been cached
|
||||
asset_info = next((a for a in cache if a['filepath'] == source_rel_path), None)
|
||||
|
||||
if asset_info and asset_info['modified'] >= modified:
|
||||
#print(asset_path, 'is skipped because not modified')
|
||||
new_cache.append(asset_info)
|
||||
continue
|
||||
|
||||
rel_path = asset_path.relative_to(source_directory).as_posix()
|
||||
field_data = template_file.parse(rel_path)
|
||||
|
||||
catalogs = [v for k,v in sorted(field_data.items()) if re.findall('cat[0-9]+', k)]
|
||||
#catalogs = [c.replace('_', ' ').title() for c in catalogs]
|
||||
|
||||
asset_name = field_data.get('asset_name', asset_path.stem)
|
||||
|
||||
if self.data_type == 'FILE':
|
||||
asset_datas = [{"name": asset_name, "catalog": '/'.join(catalogs)}]
|
||||
asset_info = self.format_asset_info(asset_datas, asset_path, modified=modified)
|
||||
new_cache.append(asset_info)
|
||||
continue
|
||||
|
||||
# Now check if there is an asset info file next to the blend
|
||||
asset_info_path = self.find_path(self.source_template_info, asset_info, filepath=asset_path)
|
||||
if asset_info_path:
|
||||
new_cache.append(self.read_file(asset_info_path))
|
||||
continue
|
||||
|
||||
# Scan the blend file for assets inside and write a custom asset description for info found
|
||||
print(f'Scanning blendfile {asset_path}...')
|
||||
assets = self.load_datablocks(asset_path, type=self.data_types, link=True, assets_only=True)
|
||||
print(f'Found {len(assets)} {self.data_types} inside')
|
||||
|
||||
asset_datas = []
|
||||
for asset in assets:
|
||||
#catalog_path = catalog_ids.get(asset.asset_data.catalog_id)
|
||||
|
||||
#if not catalog_path:
|
||||
# print(f'No catalog found for asset {asset.name}')
|
||||
#catalog_path = asset_info['catalog']#asset_path.relative_to(self.source_directory).as_posix()
|
||||
|
||||
# For now the catalog used is the one extract from the template file
|
||||
asset_data = self.get_asset_data(asset)
|
||||
asset_data['catalog'] = '/'.join(catalogs)
|
||||
|
||||
asset_datas.append(asset_data)
|
||||
|
||||
getattr(bpy.data, self.data_types).remove(asset)
|
||||
|
||||
|
||||
asset_info = self.format_asset_info(asset_datas, asset_path, modified=modified)
|
||||
|
||||
new_cache.append(asset_info)
|
||||
|
||||
|
||||
new_cache.sort(key=lambda x:x['filepath'])
|
||||
|
||||
return new_cache#[:5]
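The incremental part of this scan is the mtime comparison: a blend whose on-disk modification time is not newer than the cached `modified` value is reused as-is. The check in isolation (cache layout simplified to a dict for illustration):

from pathlib import Path

def needs_rescan(path: Path, cache: dict) -> bool:
    """True when the file is new or changed since it was cached (keyed by relative path)."""
    entry = cache.get(path.as_posix())
    if entry is None:
        return True
    return path.stat().st_mtime_ns > entry['modified']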
|
||||
|
290
operators.py
290
operators.py
|
@ -20,8 +20,10 @@ from bpy.props import (
|
|||
#from asset_library.constants import (DATA_TYPES, DATA_TYPE_ITEMS, MODULE_DIR)
|
||||
import asset_library
|
||||
from asset_library.common.bl_utils import (
|
||||
attr_set,
|
||||
get_addon_prefs,
|
||||
get_bl_cmd,
|
||||
get_view3d_persp,
|
||||
#suitable_areas,
|
||||
refresh_asset_browsers,
|
||||
load_datablocks)
|
||||
|
@ -31,13 +33,17 @@ from asset_library.common.functions import get_active_library, asset_warning_cal
|
|||
|
||||
from textwrap import dedent
|
||||
from tempfile import gettempdir
|
||||
import gpu
|
||||
from gpu_extras.batch import batch_for_shader
|
||||
import blf
|
||||
import bgl
|
||||
|
||||
|
||||
class ASSETLIB_OT_clear_asset(Operator):
|
||||
bl_idname = "assetlib.clear_asset"
|
||||
class ASSETLIB_OT_remove_assets(Operator):
|
||||
bl_idname = "assetlib.remove_assets"
|
||||
bl_options = {"REGISTER", "UNDO", "INTERNAL"}
|
||||
bl_label = 'Clear Asset'
|
||||
bl_description = 'Clear Selected Assets'
|
||||
bl_label = 'Remove Assets'
|
||||
bl_description = 'Remove Selected Assets'
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
|
@ -54,20 +60,32 @@ class ASSETLIB_OT_clear_asset(Operator):
|
|||
asset = context.active_file
|
||||
|
||||
lib = get_active_library()
|
||||
lib_type = lib.library_type
|
||||
|
||||
filepath = lib.adapter.format_path(asset.asset_data['filepath'])
|
||||
asset_image = lib.adapter.get_path('image', asset.name, filepath)
|
||||
asset_video = lib.adapter.get_path('video', asset.name, filepath)
|
||||
asset_handle = context.asset_file_handle
|
||||
|
||||
if filepath:
|
||||
if filepath.exists():
|
||||
filepath.unlink()
|
||||
if asset_image:
|
||||
asset_image.unlink()
|
||||
if asset_video:
|
||||
asset_video.unlink()
|
||||
catalog_file = lib.library_type.read_catalog()
|
||||
catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_file.items()}
|
||||
catalog = catalog_ids[asset_handle.asset_data.catalog_id]['path']
|
||||
|
||||
asset_path = lib_type.format_path(asset.asset_data['filepath'])
|
||||
|
||||
img_path = lib_type.get_image_path(name=asset_handle.name, catalog=catalog, filepath=asset_path)
|
||||
video_path = lib_type.get_video_path(name=asset_handle.name, catalog=catalog, filepath=asset_path)
|
||||
|
||||
if asset_path and asset_path.exists():
|
||||
asset_path.unlink()
|
||||
if img_path and img_path.exists():
|
||||
img_path.unlink()
|
||||
if video_path and video_path.exists():
|
||||
video_path.unlink()
|
||||
#open_blender_file(filepath)
|
||||
|
||||
try:
|
||||
asset_path.parent.rmdir()
|
||||
except OSError: # directory not empty
|
||||
pass
|
||||
|
||||
bpy.ops.assetlib.bundle(name=lib.name, blocking=True)
|
||||
|
||||
return {'FINISHED'}
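The deletions above can be written a little more defensively with pathlib: `unlink(missing_ok=True)` replaces the explicit existence checks and the empty-directory cleanup stays best-effort. A sketch, assuming the same asset/image/video paths as above:

from pathlib import Path

def remove_asset_files(*paths):
    """Delete the given files if they exist, then prune now-empty parent directories."""
    for p in filter(None, paths):
        Path(p).unlink(missing_ok=True)
    for p in filter(None, paths):
        try:
            Path(p).parent.rmdir()   # only succeeds when the directory is empty
        except OSError:
            pass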
|
||||
|
@ -99,29 +117,29 @@ class ASSETLIB_OT_edit_data(Operator):
|
|||
if lib.merge_libraries:
|
||||
lib = prefs.libraries[lib.store_library]
|
||||
|
||||
new_name = lib.adapter.norm_file_name(self.name)
|
||||
new_asset_path = lib.adapter.get_asset_path(name=new_name, catalog=self.catalog)
|
||||
new_name = lib.library_type.norm_file_name(self.name)
|
||||
new_asset_path = lib.library_type.get_asset_path(name=new_name, catalog=self.catalog)
|
||||
|
||||
#asset_data = lib.adapter.get_asset_data(self.asset)
|
||||
#asset_data = lib.library_type.get_asset_data(self.asset)
|
||||
asset_data = dict(
|
||||
tags=[t.strip() for t in self.tags.split(',') if t],
|
||||
description=self.description,
|
||||
)
|
||||
|
||||
#lib.adapter.set_asset_catalog(asset, asset_data, catalog_data)
|
||||
#lib.library_type.set_asset_catalog(asset, asset_data, catalog_data)
|
||||
self.asset.name = self.name
|
||||
lib.adapter.set_asset_tags(self.asset, asset_data)
|
||||
lib.adapter.set_asset_info(self.asset, asset_data)
|
||||
lib.library_type.set_asset_tags(self.asset, asset_data)
|
||||
lib.library_type.set_asset_info(self.asset, asset_data)
|
||||
|
||||
self.old_asset_path.unlink()
|
||||
lib.adapter.write_asset(asset=self.asset, asset_path=new_asset_path)
|
||||
lib.library_type.write_asset(asset=self.asset, asset_path=new_asset_path)
|
||||
|
||||
if self.old_image_path.exists():
|
||||
new_img_path = lib.adapter.get_image_path(new_name, self.catalog, new_asset_path)
|
||||
new_img_path = lib.library_type.get_image_path(new_name, self.catalog, new_asset_path)
|
||||
self.old_image_path.rename(new_img_path)
|
||||
|
||||
if self.old_video_path.exists():
|
||||
new_video_path = lib.adapter.get_video_path(new_name, self.catalog, new_asset_path)
|
||||
new_video_path = lib.library_type.get_video_path(new_name, self.catalog, new_asset_path)
|
||||
self.old_video_path.rename(new_video_path)
|
||||
|
||||
#if self.old_description_path.exists():
|
||||
|
@ -135,7 +153,7 @@ class ASSETLIB_OT_edit_data(Operator):
|
|||
diff_path = Path(bpy.app.tempdir, 'diff.json')
|
||||
diff = [dict(name=self.old_asset_name, catalog=self.old_catalog, filepath=str(self.old_asset_path), operation='REMOVE')]
|
||||
|
||||
asset_data = lib.adapter.get_asset_data(self.asset)
|
||||
asset_data = lib.library_type.get_asset_data(self.asset)
|
||||
diff += [dict(asset_data,
|
||||
image=str(new_img_path),
|
||||
filepath=str(new_asset_path),
|
||||
|
@ -186,18 +204,18 @@ class ASSETLIB_OT_edit_data(Operator):
|
|||
|
||||
lib = get_active_library()
|
||||
|
||||
active_lib = lib.adapter.get_active_asset_library()
|
||||
active_lib = lib.library_type.get_active_asset_library()
|
||||
|
||||
lib.store_library = active_lib.name
|
||||
|
||||
asset_handle = context.asset_file_handle
|
||||
|
||||
catalog_file = lib.adapter.read_catalog()
|
||||
catalog_file = lib.library_type.read_catalog()
|
||||
catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_file.items()}
|
||||
|
||||
#asset_handle = context.asset_file_handle
|
||||
self.old_asset_name = asset_handle.name
|
||||
self.old_asset_path = lib.adapter.get_active_asset_path()
|
||||
self.old_asset_path = lib.library_type.get_active_asset_path()
|
||||
|
||||
self.asset = load_datablocks(self.old_asset_path, self.old_asset_name, type=lib.data_types)
|
||||
|
||||
|
@ -213,13 +231,13 @@ class ASSETLIB_OT_edit_data(Operator):
|
|||
self.old_catalog = catalog_ids[asset_handle.asset_data.catalog_id]['path']
|
||||
self.catalog = self.old_catalog
|
||||
|
||||
self.old_image_path = lib.adapter.get_image_path(name=self.name, catalog=self.catalog, filepath=self.old_asset_path)
|
||||
self.old_video_path = lib.adapter.get_video_path(name=self.name, catalog=self.catalog, filepath=self.old_asset_path)
|
||||
self.old_image_path = lib.library_type.get_image_path(name=self.name, catalog=self.catalog, filepath=self.old_asset_path)
|
||||
self.old_video_path = lib.library_type.get_video_path(name=self.name, catalog=self.catalog, filepath=self.old_asset_path)
|
||||
|
||||
#self.old_description_path = lib.adapter.get_description_path(self.old_asset_path)
|
||||
#self.old_description_path = lib.library_type.get_description_path(self.old_asset_path)
|
||||
|
||||
#self.old_asset_description = lib.adapter.read_asset_description_file(self.old_asset_path)
|
||||
#self.old_asset_description = lib.adapter.norm_asset_datas([self.old_asset_description])[0]
|
||||
#self.old_asset_info = lib.library_type.read_asset_info_file(self.old_asset_path)
|
||||
#self.old_asset_info = lib.library_type.norm_asset_datas([self.old_asset_info])[0]
|
||||
|
||||
|
||||
|
||||
|
@ -231,7 +249,7 @@ class ASSETLIB_OT_edit_data(Operator):
|
|||
print('Cancel Edit Data, removing the asset')
|
||||
|
||||
lib = get_active_library()
|
||||
active_lib = lib.adapter.get_active_asset_library()
|
||||
active_lib = lib.library_type.get_active_asset_library()
|
||||
|
||||
getattr(bpy.data, active_lib.data_types).remove(self.asset)
|
||||
|
||||
|
@ -281,9 +299,9 @@ class ASSETLIB_OT_open_blend(Operator):
|
|||
|
||||
lib = get_active_library()
|
||||
|
||||
#filepath = lib.adapter.format_path(asset.asset_data['filepath'])
|
||||
#filepath = lib.library_type.format_path(asset.asset_data['filepath'])
|
||||
|
||||
filepath = lib.adapter.get_active_asset_path()
|
||||
filepath = lib.library_type.get_active_asset_path()
|
||||
|
||||
open_blender_file(filepath)
|
||||
|
||||
|
@ -357,7 +375,7 @@ class ASSETLIB_OT_bundle_library(Operator):
|
|||
for lib_data in {lib_datas}:
|
||||
lib = prefs.env_libraries.add()
|
||||
lib.set_dict(lib_data)
|
||||
lib.adapter.bundle(cache_diff='{self.diff}')
|
||||
lib.library_type.bundle(cache_diff='{self.diff}')
|
||||
|
||||
bpy.ops.wm.quit_blender()
|
||||
""")
|
||||
|
@ -411,11 +429,7 @@ class ASSETLIB_OT_diff(Operator):
|
|||
prefs = get_addon_prefs()
|
||||
|
||||
lib = prefs.libraries.get(self.name)
|
||||
|
||||
if self.conform:
|
||||
lib.conform.adapter.diff()
|
||||
else:
|
||||
lib.adapter.diff()
|
||||
lib.library_type.diff()
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
@ -435,7 +449,7 @@ class ASSETLIB_OT_conform_library(Operator):
|
|||
prefs = get_addon_prefs()
|
||||
|
||||
lib = prefs.libraries.get(self.name)
|
||||
#lib.adapter.conform(self.directory)
|
||||
#lib.library_type.conform(self.directory)
|
||||
|
||||
templates = {}
|
||||
if self.template_image:
|
||||
|
@ -450,7 +464,7 @@ class ASSETLIB_OT_conform_library(Operator):
|
|||
prefs = bpy.context.preferences.addons["asset_library"].preferences
|
||||
lib = prefs.env_libraries.add()
|
||||
lib.set_dict({lib.to_dict()})
|
||||
lib.adapter.conform(directory='{self.directory}', templates={templates})
|
||||
lib.library_type.conform(directory='{self.directory}', templates={templates})
|
||||
""")
|
||||
|
||||
script_path.write_text(script_code)
|
||||
|
@ -466,6 +480,179 @@ class ASSETLIB_OT_conform_library(Operator):
|
|||
return {'RUNNING_MODAL'}
|
||||
'''
|
||||
|
||||
class ASSETLIB_OT_make_custom_preview(Operator):
|
||||
bl_idname = "assetlib.make_custom_preview"
|
||||
bl_label = "Custom Preview"
|
||||
bl_description = "Set a camera to preview an asset"
|
||||
|
||||
image_size : IntProperty(default=512)
|
||||
use_modal : BoolProperty(default=False) # a property named "modal" would be shadowed by the modal() method below
|
||||
|
||||
def modal(self, context, event):
|
||||
if event.type in {'ESC'}: # Cancel
|
||||
self.restore()
|
||||
return {'CANCELLED'}
|
||||
|
||||
elif event.type in {'RET', 'NUMPAD_ENTER'}: # Cancel
|
||||
return self.execute(context)
|
||||
#return {'FINISHED'}
|
||||
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
prefs = get_addon_prefs()
|
||||
bpy.ops.render.opengl(write_still=True)
|
||||
|
||||
img_path = context.scene.render.filepath
|
||||
|
||||
#print('Load Image to previews')
|
||||
prefs.previews.load(Path(img_path).stem, img_path, 'IMAGE')
|
||||
#img = bpy.data.images.load(context.scene.render.filepath)
|
||||
#img.update()
|
||||
#img.preview_ensure()
|
||||
|
||||
|
||||
#Copy the image with a new name
|
||||
# render = bpy.data.images['Render Result']
|
||||
|
||||
# render_pixels = [0] * self.image_size * self.image_size * 4
|
||||
# render.pixels.foreach_get(render_pixels)
|
||||
# img = bpy.data.images.new(name=img_name, width=self.image_size, height=self.image_size, is_data=True, alpha=True)
|
||||
# img.pixels.foreach_set(render_pixels)
|
||||
|
||||
#img.scale(128, 128)
|
||||
#img.preview_ensure()
|
||||
|
||||
# preview_size = render.size
|
||||
|
||||
# pixels = [0] * preview_size[0] * preview_size[1] * 4
|
||||
# render.pixels.foreach_get(pixels)
|
||||
|
||||
# image.preview.image_size = preview_size
|
||||
# image.preview.image_pixels_float.foreach_set(pixels)
|
||||
|
||||
|
||||
|
||||
self.restore()
|
||||
|
||||
#self.is_running = False
|
||||
prefs.preview_modal = False
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
def restore(self):
|
||||
print('RESTORE')
|
||||
try:
|
||||
bpy.types.SpaceView3D.draw_handler_remove(self._handle, 'WINDOW')
|
||||
except:
|
||||
print('Failed remove handler')
|
||||
pass
|
||||
|
||||
bpy.data.objects.remove(self.camera)
|
||||
self.attr_changed.restore()
|
||||
|
||||
def draw_callback_px(self, context):
|
||||
if context.space_data != self._space_data:
|
||||
return
|
||||
|
||||
dpi = context.preferences.system.dpi
|
||||
|
||||
bg_color = (0.8, 0.1, 0.1, 0.5)
|
||||
font_color = (1, 1, 1, 1)
|
||||
text = 'Escape: Cancel    Enter: Make Preview'
|
||||
font_id = 0
|
||||
dim = blf.dimensions(font_id, text)
|
||||
|
||||
#gpu.state.line_width_set(100)
|
||||
# bgl.glLineWidth(100)
|
||||
# self.shader_2d.bind()
|
||||
# self.shader_2d.uniform_float("color", bg_color)
|
||||
# self.screen_framing.draw(self.shader_2d)
|
||||
|
||||
# # Reset
|
||||
# gpu.state.line_width_set(1)
|
||||
|
||||
# -dim[0]/2, +dim[1]/2 + 5
|
||||
|
||||
# Display Text
|
||||
blf.color(font_id, *font_color) # unpack color
|
||||
blf.position(font_id, context.region.width/2 -dim[0]/2, dim[1]/2 + 5, 0)
|
||||
blf.size(font_id, 12, dpi)
|
||||
blf.draw(font_id, text)
|
||||
|
||||
def get_image_name(self):
|
||||
prefs = get_addon_prefs()
|
||||
preview_names = [p for p in prefs.previews.keys()]
|
||||
preview_names.sort()
|
||||
|
||||
index = 0
|
||||
if preview_names:
|
||||
index = int(preview_names[-1][-3:]) + 1
|
||||
|
||||
return f'preview_{index:03d}'
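A slightly more defensive version of the same naming scheme, parsing the numeric suffix with a regex instead of slicing the trailing digits (illustrative helper, not part of the add-on):

import re

def next_preview_name(existing_names, prefix='preview_'):
    """Return the next free name in the series preview_000, preview_001, ..."""
    indices = [
        int(m.group(1))
        for name in existing_names
        if (m := re.fullmatch(rf'{re.escape(prefix)}(\d+)', name))
    ]
    return f'{prefix}{(max(indices) + 1 if indices else 0):03d}'

# next_preview_name(['preview_000', 'preview_012'])  ->  'preview_013'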
|
||||
|
||||
def invoke(self, context, event):
|
||||
prefs = get_addon_prefs()
|
||||
cam_data = bpy.data.cameras.new(name='Preview Camera')
|
||||
self.camera = bpy.data.objects.new(name='Preview Camera', object_data=cam_data)
|
||||
|
||||
#view_3d = get_view3d_persp()
|
||||
|
||||
scn = context.scene
|
||||
space = context.space_data
|
||||
|
||||
matrix = space.region_3d.view_matrix.inverted()
|
||||
if space.region_3d.view_perspective == 'CAMERA':
|
||||
matrix = scn.camera.matrix_world
|
||||
|
||||
self.camera.matrix_world = matrix
|
||||
|
||||
img_name = self.get_image_name()
|
||||
img_path = Path(bpy.app.tempdir, img_name).with_suffix('.webp')
|
||||
|
||||
self.attr_changed = attr_set([
|
||||
(space.overlay, 'show_overlays', False),
|
||||
(space.region_3d, 'view_perspective', 'CAMERA'),
|
||||
(space.region_3d, 'view_camera_offset'),
|
||||
(space.region_3d, 'view_camera_zoom'),
|
||||
(space, 'lock_camera', True),
|
||||
(space, 'show_region_ui', False),
|
||||
(scn, 'camera', self.camera),
|
||||
(scn.render, 'resolution_percentage', 100),
|
||||
(scn.render, 'resolution_x', self.image_size),
|
||||
(scn.render, 'resolution_y', self.image_size),
|
||||
(scn.render, 'film_transparent', True),
|
||||
(scn.render.image_settings, 'file_format', 'WEBP'),
|
||||
(scn.render.image_settings, 'color_mode', 'RGBA'),
|
||||
#(scn.render.image_settings, 'color_depth', '8'),
|
||||
(scn.render, 'use_overwrite', True),
|
||||
(scn.render, 'filepath', str(img_path)),
|
||||
])
|
||||
|
||||
bpy.ops.view3d.view_center_camera()
|
||||
space.region_3d.view_camera_zoom -= 6
|
||||
space.region_3d.view_camera_offset[1] += 0.03
|
||||
|
||||
w, h = (context.region.width, context.region.height)
|
||||
|
||||
self._space_data = context.space_data
|
||||
|
||||
if self.use_modal:
|
||||
prefs.preview_modal = True
|
||||
|
||||
self.shader_2d = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
|
||||
self.screen_framing = batch_for_shader(
|
||||
self.shader_2d, 'LINE_LOOP', {"pos": [(0,0), (0,h), (w,h), (w,0)]})
|
||||
|
||||
self._handle = bpy.types.SpaceView3D.draw_handler_add(self.draw_callback_px, (context,), 'WINDOW', 'POST_PIXEL')
|
||||
context.window_manager.modal_handler_add(self)
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
else:
|
||||
return self.execute(context)
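`attr_set` (imported from `common.bl_utils`) is what lets the operator override render and viewport settings for the capture and put everything back afterwards. Its real implementation is not shown here; a minimal sketch of a helper with the same calling shape, kept as an assumption:

class TempAttrs:
    """Apply (object, attribute[, value]) overrides, remember old values, restore on demand."""
    def __init__(self, items):
        self._old = []
        for entry in items:
            obj, attr = entry[0], entry[1]
            self._old.append((obj, attr, getattr(obj, attr)))
            if len(entry) > 2:           # 2-tuples are only remembered, not changed
                setattr(obj, attr, entry[2])

    def restore(self):
        for obj, attr, value in reversed(self._old):
            setattr(obj, attr, value)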
|
||||
|
||||
|
||||
class ASSETLIB_OT_generate_previews(Operator):
|
||||
bl_idname = "assetlib.generate_previews"
|
||||
bl_options = {"REGISTER", "UNDO"}
|
||||
|
@ -493,7 +680,7 @@ class ASSETLIB_OT_generate_previews(Operator):
|
|||
# '--preview-assets-file', str(self.preview_assets_file)
|
||||
# ]
|
||||
# subprocess.call(cmd)
|
||||
preview_blend = self.preview_blend or lib.adapter.preview_blend
|
||||
preview_blend = self.preview_blend or lib.library_type.preview_blend
|
||||
|
||||
if not preview_blend or not Path(preview_blend).exists():
|
||||
preview_blend = MODULE_DIR / 'common' / 'preview.blend'
|
||||
|
@ -506,7 +693,7 @@ class ASSETLIB_OT_generate_previews(Operator):
|
|||
lib.set_dict({lib.to_dict()})
|
||||
|
||||
bpy.ops.wm.open_mainfile(filepath='{preview_blend}', load_ui=True)
|
||||
lib.adapter.generate_previews(cache='{self.cache}')
|
||||
lib.library_type.generate_previews(cache='{self.cache}')
|
||||
""")
|
||||
|
||||
script_path.write_text(script_code)
|
||||
|
@ -548,11 +735,11 @@ class ASSETLIB_OT_play_preview(Operator):
|
|||
|
||||
lib = get_active_library()
|
||||
|
||||
#filepath = lib.adapter.format_path(asset.asset_data['filepath'])
|
||||
asset_path = lib.adapter.get_active_asset_path()
|
||||
#filepath = lib.library_type.format_path(asset.asset_data['filepath'])
|
||||
asset_path = lib.library_type.get_active_asset_path()
|
||||
|
||||
asset_image = lib.adapter.get_image(asset.name, asset_path)
|
||||
asset_video = lib.adapter.get_video(asset.name, asset_path)
|
||||
asset_image = lib.library_type.get_image(asset.name, asset_path)
|
||||
asset_video = lib.library_type.get_video(asset.name, asset_path)
|
||||
|
||||
if not asset_image and not asset_video:
|
||||
self.report({'ERROR'}, f'Preview for {asset.name} not found.')
|
||||
|
@ -629,10 +816,11 @@ classes = (
|
|||
ASSETLIB_OT_diff,
|
||||
ASSETLIB_OT_generate_previews,
|
||||
ASSETLIB_OT_bundle_library,
|
||||
ASSETLIB_OT_clear_asset,
|
||||
ASSETLIB_OT_remove_assets,
|
||||
ASSETLIB_OT_edit_data,
|
||||
#ASSETLIB_OT_conform_library,
|
||||
ASSETLIB_OT_reload_addon
|
||||
ASSETLIB_OT_reload_addon,
|
||||
ASSETLIB_OT_make_custom_preview
|
||||
)
|
||||
|
||||
def register():
|
||||
|
|
|
@ -8,7 +8,7 @@ from bpy.props import (BoolProperty, StringProperty, CollectionProperty,
|
|||
EnumProperty, IntProperty)
|
||||
|
||||
from asset_library.constants import (DATA_TYPES, DATA_TYPE_ITEMS,
|
||||
ICONS, RESOURCES_DIR, ADAPTER_DIR, ADAPTERS)
|
||||
ICONS, RESOURCES_DIR, LIBRARY_TYPE_DIR, LIBRARY_TYPES, ADAPTERS)
|
||||
|
||||
from asset_library.common.file_utils import import_module_from_path, norm_str
|
||||
from asset_library.common.bl_utils import get_addon_prefs
|
||||
|
@ -54,13 +54,21 @@ def update_all_library_path(self, context):
|
|||
update_library_path(lib, context)
|
||||
#lib.set_library_path()
|
||||
|
||||
def get_adapter_items(self, context):
|
||||
def get_library_type_items(self, context):
|
||||
#prefs = get_addon_prefs()
|
||||
|
||||
items = [('NONE', 'None', '', 0)]
|
||||
items += [(norm_str(a.name, format=str.upper), a.name, "", i+1) for i, a in enumerate(LIBRARY_TYPES)]
|
||||
return items
|
||||
|
||||
def get_adapters_items(self, context):
|
||||
#prefs = get_addon_prefs()
|
||||
|
||||
items = [('NONE', 'None', '', 0)]
|
||||
items += [(norm_str(a.name, format=str.upper), a.name, "", i+1) for i, a in enumerate(ADAPTERS)]
|
||||
return items
|
||||
|
||||
|
||||
def get_library_items(self, context):
|
||||
prefs = get_addon_prefs()
|
||||
|
||||
|
@ -77,47 +85,15 @@ def get_store_library_items(self, context):
|
|||
return [(l.name, l.name, "", i) for i, l in enumerate([self] + self.merge_libraries)]
|
||||
|
||||
|
||||
class AssetLibraryAdapters(PropertyGroup):
|
||||
parent = None
|
||||
|
||||
class LibraryTypes(PropertyGroup):
|
||||
def __iter__(self):
|
||||
return (getattr(self, p) for p in self.bl_rna.properties.keys() if p not in ('rna_type', 'name'))
|
||||
|
||||
'''
|
||||
class ConformAssetLibrary(PropertyGroup):
|
||||
adapters : bpy.props.PointerProperty(type=AssetLibraryAdapters)
|
||||
adapter_name : EnumProperty(items=get_adapter_items)
|
||||
directory : StringProperty(
|
||||
name="Target Directory",
|
||||
subtype='DIR_PATH',
|
||||
default=''
|
||||
)
|
||||
|
||||
template_image : StringProperty(default='', description='../{name}_image.png')
|
||||
template_video : StringProperty(default='', description='../{name}_video.mov')
|
||||
template_description : StringProperty(default='', description='../{name}_asset_description.json')
|
||||
|
||||
#externalize_data: BoolProperty(default=False, name='Externalize Data')
|
||||
blend_depth: IntProperty(default=1, name='Blend Depth')
|
||||
class Adapters(PropertyGroup):
|
||||
def __iter__(self):
|
||||
return (getattr(self, p) for p in self.bl_rna.properties.keys() if p not in ('rna_type', 'name'))
|
||||
|
||||
@property
|
||||
def adapter(self):
|
||||
name = norm_str(self.adapter_name)
|
||||
if not hasattr(self.adapters, name):
|
||||
return
|
||||
|
||||
return getattr(self.adapters, name)
|
||||
|
||||
def to_dict(self):
|
||||
data = {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
|
||||
|
||||
data['adapter'] = self.adapter.to_dict()
|
||||
data['adapter']['name'] = data.pop('adapter_name')
|
||||
|
||||
del data['adapters']
|
||||
|
||||
return data
|
||||
'''
|
||||
|
||||
class AssetLibrary(PropertyGroup):
|
||||
name : StringProperty(name='Name', default='Action Library', update=update_library_path)
|
||||
|
@ -127,9 +103,10 @@ class AssetLibrary(PropertyGroup):
|
|||
use : BoolProperty(name='Use', default=True, update=update_library_path)
|
||||
data_type : EnumProperty(name='Type', items=DATA_TYPE_ITEMS, default='COLLECTION')
|
||||
|
||||
template_image : StringProperty(default='', description='../{name}_image.png')
|
||||
template_video : StringProperty(default='', description='../{name}_video.mov')
|
||||
template_description : StringProperty(default='', description='../{name}_asset_description.json')
|
||||
|
||||
#template_image : StringProperty(default='', description='../{name}_image.png')
|
||||
#template_video : StringProperty(default='', description='../{name}_video.mov')
|
||||
#template_info : StringProperty(default='', description='../{name}_asset_info.json')
|
||||
|
||||
bundle_directory : StringProperty(
|
||||
name="Bundle Directory",
|
||||
|
@ -166,10 +143,14 @@ class AssetLibrary(PropertyGroup):
|
|||
# )
|
||||
|
||||
|
||||
#adapter : EnumProperty(items=adapter_ITEMS)
|
||||
adapters : bpy.props.PointerProperty(type=AssetLibraryAdapters)
|
||||
adapter_name : EnumProperty(items=get_adapter_items)
|
||||
parent : StringProperty()
|
||||
#library_type : EnumProperty(items=library_type_ITEMS)
|
||||
library_types : bpy.props.PointerProperty(type=LibraryTypes)
|
||||
library_type_name : EnumProperty(items=get_library_type_items)
|
||||
|
||||
adapters : bpy.props.PointerProperty(type=Adapters)
|
||||
adapter_name : EnumProperty(items=get_adapters_items)
|
||||
|
||||
parent_name : StringProperty()
|
||||
|
||||
# data_file_path : StringProperty(
|
||||
# name="Path",
|
||||
|
@ -178,7 +159,13 @@ class AssetLibrary(PropertyGroup):
|
|||
# )
|
||||
|
||||
#def __init__(self):
|
||||
# self.adapters.parent = self
|
||||
# self.library_types.parent = self
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
prefs = get_addon_prefs()
|
||||
if self.parent_name:
|
||||
return prefs.libraries[self.parent_name]
|
||||
|
||||
@property
|
||||
def merge_libraries(self):
|
||||
|
@ -188,7 +175,7 @@ class AssetLibrary(PropertyGroup):
|
|||
@property
|
||||
def child_libraries(self):
|
||||
prefs = get_addon_prefs()
|
||||
return [l for l in prefs.libraries if l != self and (l.parent == self.name)]
|
||||
return [l for l in prefs.libraries if l != self and (l.parent == self)]
|
||||
|
||||
@property
|
||||
def data_types(self):
|
||||
|
@ -197,6 +184,14 @@ class AssetLibrary(PropertyGroup):
|
|||
data_type = 'COLLECTION'
|
||||
return f'{data_type.lower()}s'
|
||||
|
||||
@property
|
||||
def library_type(self):
|
||||
name = norm_str(self.library_type_name)
|
||||
if not hasattr(self.library_types, name):
|
||||
return
|
||||
|
||||
return getattr(self.library_types, name)
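The enum identifier, the PointerProperty attribute and the plugin class are tied together purely by a normalized form of the plugin name. An illustrative stand-in for `norm_str` (from `common.file_utils`; the real helper may differ) that makes the convention explicit:

def norm_str(value, format=str.lower):
    # Illustrative: 'Scan Folder' -> 'scan_folder' (attribute) or 'SCAN_FOLDER' (enum id)
    return format(value.strip().replace(' ', '_'))

# The property above then resolves to:
#   getattr(self.library_types, norm_str(self.library_type_name))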
|
||||
|
||||
@property
|
||||
def adapter(self):
|
||||
name = norm_str(self.adapter_name)
|
||||
|
@ -230,6 +225,10 @@ class AssetLibrary(PropertyGroup):
|
|||
library_name = norm_str(library_name)
|
||||
return Path(prefs.bundle_directory, library_name).resolve()
|
||||
|
||||
@property
|
||||
def bundle_dir(self):
|
||||
return self.library_path.as_posix()
|
||||
|
||||
@property
|
||||
def library_name(self):
|
||||
if self.use_custom_bundle_name:
|
||||
|
@ -306,16 +305,16 @@ class AssetLibrary(PropertyGroup):
|
|||
if not self.custom_bundle_name:
|
||||
self['custom_bundle_name'] = self.name
|
||||
|
||||
# self.adapter_name = data['adapter']
|
||||
# if not self.adapter:
|
||||
# print(f"No adapter named {data['adapter']}")
|
||||
# self.library_type_name = data['library_type']
|
||||
# if not self.library_type:
|
||||
# print(f"No library_type named {data['library_type']}")
|
||||
# return
|
||||
|
||||
|
||||
# for key, value in data.items():
|
||||
# if key == 'options':
|
||||
# for k, v in data['options'].items():
|
||||
# setattr(self.adapter, k, v)
|
||||
# setattr(self.library_type, k, v)
|
||||
# elif key in self.bl_rna.properties.keys():
|
||||
# if key == 'id':
|
||||
# value = str(value)
|
||||
|
@ -328,12 +327,16 @@ class AssetLibrary(PropertyGroup):
|
|||
|
||||
def to_dict(self):
|
||||
data = {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
|
||||
data['adapter'] = self.adapter.to_dict()
|
||||
#data['adapter'] = data.pop('adapter_name')
|
||||
data['adapter']['name'] = data.pop('adapter_name')
|
||||
del data['adapters']
|
||||
|
||||
#data['conform'] = self.conform.to_dict()
|
||||
if self.library_type:
|
||||
data['library_type'] = self.library_type.to_dict()
|
||||
data['library_type']['name'] = data.pop('library_type_name')
|
||||
del data['library_types']
|
||||
|
||||
if self.adapter:
|
||||
data['adapter'] = self.adapter.to_dict()
|
||||
data['adapter']['name'] = data.pop('adapter_name')
|
||||
del data['adapters']
|
||||
|
||||
return data
|
||||
|
||||
|
@ -341,32 +344,32 @@ class AssetLibrary(PropertyGroup):
|
|||
'''Update the Blender Preference Filepaths tab with the addon libraries'''
|
||||
|
||||
prefs = bpy.context.preferences
|
||||
|
||||
name = self.library_name
|
||||
prev_name = self.get('asset_library') or name
|
||||
|
||||
lib = prefs.filepaths.asset_libraries.get(prev_name)
|
||||
lib_path = self.library_path
|
||||
|
||||
#print('name', name)
|
||||
#print('lib', lib)
|
||||
#print('lib_path', lib_path)
|
||||
#print('self.merge_library ', self.merge_library)
|
||||
#print('prev_name', prev_name)
|
||||
#print('\nset_library_path')
|
||||
#print(f'{self.name=}, {prev_name=}, {lib_path=}, {self.use}')
|
||||
self.clear_library_path()
|
||||
|
||||
if not lib_path:
|
||||
self.clear_library_path()
|
||||
return
|
||||
|
||||
if not self.use:
|
||||
if all(not l.use for l in self.merge_libraries):
|
||||
self.clear_library_path()
|
||||
if not self.use or not lib_path:
|
||||
# if all(not l.use for l in self.merge_libraries):
|
||||
# self.clear_library_path()
|
||||
return
|
||||
|
||||
# lib = None
|
||||
# if self.get('asset_library'):
|
||||
# #print('old_name', self['asset_library'])
|
||||
# lib = prefs.filepaths.asset_libraries.get(self['asset_library'])
|
||||
|
||||
# if not lib:
|
||||
# #print('keys', prefs.filepaths.asset_libraries.keys())
|
||||
# #print('name', name)
|
||||
# #print(prefs.filepaths.asset_libraries.get(name))
|
||||
# lib = prefs.filepaths.asset_libraries.get(name)
|
||||
|
||||
# Create the Asset Library Path
|
||||
lib = prefs.filepaths.asset_libraries.get(name)
|
||||
if not lib:
|
||||
#print(f'Creating the lib {name}')
|
||||
try:
|
||||
bpy.ops.preferences.asset_library_add(directory=str(lib_path))
|
||||
except AttributeError:
|
||||
|
@ -392,7 +395,7 @@ class AssetLibrary(PropertyGroup):
|
|||
|
||||
def add_row(self, layout, data=None, prop=None, label='',
|
||||
boolean=None, factor=0.39):
|
||||
'''Act like the use_property_split but with much more control'''
|
||||
'''Act like the use_property_split but with more control'''
|
||||
|
||||
enabled = True
|
||||
split = layout.split(factor=factor, align=True)
|
||||
|
@ -423,7 +426,7 @@ class AssetLibrary(PropertyGroup):
|
|||
def draw_operators(self, layout):
|
||||
row = layout.row(align=True)
|
||||
row.alignment = 'RIGHT'
|
||||
row.prop(self, 'adapter_name', text='')
|
||||
row.prop(self, 'library_type_name', text='')
|
||||
row.prop(self, 'auto_bundle', text='', icon='UV_SYNC_SELECT')
|
||||
|
||||
row.operator("assetlib.diff", text='', icon='FILE_REFRESH').name = self.name
|
||||
|
@ -433,65 +436,6 @@ class AssetLibrary(PropertyGroup):
|
|||
|
||||
layout.separator(factor=3)
|
||||
|
||||
"""
|
||||
def draw_extra(self, layout):
|
||||
#box = layout.box()
|
||||
|
||||
col = layout.column(align=False)
|
||||
|
||||
row = col.row(align=True)
|
||||
row.use_property_split = False
|
||||
#row.alignment = 'LEFT'
|
||||
icon = "DISCLOSURE_TRI_DOWN" if self.expand_extra else "DISCLOSURE_TRI_RIGHT"
|
||||
row.label(icon='BLANK1')
|
||||
subrow = row.row(align=True)
|
||||
subrow.alignment = 'LEFT'
|
||||
subrow.prop(self, 'expand_extra', icon=icon, emboss=False, text="Conform Options")
|
||||
#row.prop(self, 'expand_extra', text='', icon="OPTIONS", emboss=False)
|
||||
#row.prop(self, 'expand_extra', emboss=False, text='Options')
|
||||
#row.label(text='Conform Options')
|
||||
subrow = row.row(align=True)
|
||||
subrow.alignment = 'RIGHT'
|
||||
subrow.prop(self.conform, "adapter_name", text='')
|
||||
|
||||
op = subrow.operator('assetlib.diff', text='', icon='FILE_REFRESH')#, icon='MOD_BUILD'
|
||||
op.name = self.name
|
||||
op.conform = True
|
||||
|
||||
op = subrow.operator('assetlib.generate_previews', text='', icon='SEQ_PREVIEW')#, icon='MOD_BUILD'
|
||||
op.name = self.name
|
||||
#op.conform = True
|
||||
|
||||
op = subrow.operator('assetlib.bundle', text='', icon='MOD_BUILD')#, icon='MOD_BUILD'
|
||||
op.name = self.name
|
||||
op.directory = self.conform.directory
|
||||
op.conform = True
|
||||
|
||||
subrow.label(icon='BLANK1')
|
||||
#subrow.separator(factor=3)
|
||||
|
||||
if self.expand_extra and self.conform.adapter:
|
||||
col.separator()
|
||||
self.conform.adapter.draw_prefs(col)
|
||||
|
||||
col.separator()
|
||||
col.separator()
|
||||
#row = layout.row(align=True)
|
||||
#row.label(text='Conform Library')
|
||||
col.prop(self.conform, "directory")
|
||||
col.prop(self.conform, "blend_depth")
|
||||
#col.prop(self.conform, "externalize_data")
|
||||
subcol = col.column(align=True)
|
||||
subcol.prop(self.conform, "template_description", text='Template Description', icon='COPY_ID')
|
||||
subcol.prop(self.conform, "template_image", text='Template Image', icon='COPY_ID')
|
||||
subcol.prop(self.conform, "template_video", text='Template Video', icon='COPY_ID')
|
||||
|
||||
|
||||
|
||||
col.separator()
|
||||
"""
|
||||
|
||||
|
||||
def draw(self, layout):
|
||||
prefs = get_addon_prefs()
|
||||
#box = layout.box()
|
||||
|
@ -510,7 +454,7 @@ class AssetLibrary(PropertyGroup):
|
|||
|
||||
self.draw_operators(row)
|
||||
|
||||
index = prefs.user_libraries.index(self)
|
||||
index = list(prefs.user_libraries).index(self)
|
||||
row.operator("assetlib.remove_user_library", icon="X", text='', emboss=False).index = index
|
||||
|
||||
else:
|
||||
|
@ -551,15 +495,14 @@ class AssetLibrary(PropertyGroup):
|
|||
|
||||
col.prop(self, "blend_depth")
|
||||
|
||||
subcol = col.column(align=True)
|
||||
subcol.prop(self, "template_description", text='Template Description', icon='COPY_ID')
|
||||
subcol.prop(self, "template_image", text='Template Image', icon='COPY_ID')
|
||||
subcol.prop(self, "template_video", text='Template Video', icon='COPY_ID')
|
||||
#subcol = col.column(align=True)
|
||||
#subcol.prop(self, "template_info", text='Template Info', icon='COPY_ID')
|
||||
#subcol.prop(self, "template_image", text='Template Image', icon='COPY_ID')
|
||||
#subcol.prop(self, "template_video", text='Template Video', icon='COPY_ID')
|
||||
|
||||
|
||||
if self.adapter:
|
||||
if self.library_type:
|
||||
col.separator()
|
||||
self.adapter.draw_prefs(col)
|
||||
self.library_type.draw_prefs(col)
|
||||
|
||||
for lib in self.child_libraries:
|
||||
lib.draw(layout)
|
||||
|
@ -634,19 +577,21 @@ class AssetLibraryPrefs(AddonPreferences):
|
|||
bl_idname = __package__
|
||||
|
||||
adapters = []
|
||||
library_types = []
|
||||
previews = bpy.utils.previews.new()
|
||||
preview_modal = False
|
||||
add_asset_dict = {}
|
||||
|
||||
#action : bpy.props.PointerProperty(type=AssetLibraryPath)
|
||||
#asset : bpy.props.PointerProperty(type=AssetLibraryPath)
|
||||
#adapters = {}
|
||||
#library_types = {}
|
||||
author: StringProperty(default=os.getlogin())
|
||||
|
||||
image_player: StringProperty(default='')
|
||||
video_player: StringProperty(default='')
|
||||
|
||||
adapter_directory : StringProperty(
|
||||
name="Adapter Directory",
|
||||
subtype='DIR_PATH'
|
||||
)
|
||||
library_type_directory : StringProperty(name="Library Type Directory", subtype='DIR_PATH')
|
||||
adapter_directory : StringProperty(name="Adapter Directory", subtype='DIR_PATH')
|
||||
|
||||
env_libraries : CollectionProperty(type=AssetLibrary)
|
||||
user_libraries : CollectionProperty(type=AssetLibrary)
|
||||
|
@ -658,8 +603,6 @@ class AssetLibraryPrefs(AddonPreferences):
|
|||
update=update_all_library_path
|
||||
)
|
||||
|
||||
|
||||
|
||||
config_directory : StringProperty(
|
||||
name="Config Path",
|
||||
subtype='FILE_PATH',
|
||||
|
@ -667,50 +610,52 @@ class AssetLibraryPrefs(AddonPreferences):
|
|||
update=update_library_config
|
||||
)
|
||||
|
||||
def load_adapters(self):
|
||||
from asset_library.adapters.adapter import AssetLibraryAdapter
|
||||
def load_library_types(self):
|
||||
from asset_library.library_types.library_type import LibraryType
|
||||
|
||||
#global ADAPTERS
|
||||
|
||||
print('\n------Load Adapters')
|
||||
print('Asset Library: Load Library Types')
|
||||
|
||||
ADAPTERS.clear()
|
||||
LIBRARY_TYPES.clear()
|
||||
|
||||
adapter_files = list(ADAPTER_DIR.glob('*.py'))
|
||||
if self.adapter_directory:
|
||||
user_adapter_dir = Path(self.adapter_directory)
|
||||
if user_adapter_dir.exists():
|
||||
adapter_files += list(user_adapter_dir.glob('*.py'))
|
||||
library_type_files = list(LIBRARY_TYPE_DIR.glob('*.py'))
|
||||
if self.library_type_directory:
|
||||
user_library_type_dir = Path(self.library_type_directory)
if user_library_type_dir.exists():
library_type_files += list(user_library_type_dir.glob('*.py'))
|
||||
|
||||
for adapter_file in adapter_files:
|
||||
mod = import_module_from_path(adapter_file)
|
||||
|
||||
if adapter_file.stem.startswith('_'):
|
||||
for library_type_file in library_type_files:
|
||||
if library_type_file.stem.startswith('_'):
|
||||
continue
|
||||
|
||||
mod = import_module_from_path(library_type_file)
|
||||
|
||||
|
||||
#print(adapter_file)
|
||||
#print(library_type_file)
|
||||
for name, obj in inspect.getmembers(mod):
|
||||
|
||||
if not inspect.isclass(obj):
|
||||
continue
|
||||
|
||||
#print(obj.__bases__)
|
||||
if not AssetLibraryAdapter in obj.__mro__:
|
||||
if not LibraryType in obj.__mro__:
|
||||
continue
|
||||
|
||||
# Non registering base adapter
|
||||
if obj is AssetLibraryAdapter or obj.name in (a.name for a in ADAPTERS):
|
||||
# Skip the base LibraryType class and already registered plugins
|
||||
if obj is LibraryType or obj.name in (a.name for a in LIBRARY_TYPES):
|
||||
continue
|
||||
|
||||
try:
|
||||
print(f'Register Plugin {name}')
|
||||
bpy.utils.register_class(obj)
|
||||
setattr(AssetLibraryAdapters, norm_str(obj.name), bpy.props.PointerProperty(type=obj))
|
||||
ADAPTERS.append(obj)
|
||||
setattr(LibraryTypes, norm_str(obj.name), bpy.props.PointerProperty(type=obj))
|
||||
LIBRARY_TYPES.append(obj)
|
||||
|
||||
except Exception as e:
|
||||
print(f'Could not register adapter {name}')
|
||||
print(f'Could not register library_type {name}')
|
||||
print(e)
|
||||
|
||||
def load_adapters(self):
|
||||
return
|
||||
|
||||
@property
|
||||
def libraries(self):
|
||||
|
@ -744,12 +689,12 @@ class AssetLibraryPrefs(AddonPreferences):
|
|||
|
||||
col.separator()
|
||||
|
||||
col.prop(self, 'adapter_directory')
|
||||
col.prop(self, 'library_type_directory')
|
||||
col.prop(self, 'config_directory')
|
||||
|
||||
col.separator()
|
||||
|
||||
#col.prop(self, 'template_description', text='Asset Description Template', icon='COPY_ID')
|
||||
#col.prop(self, 'template_info', text='Asset Description Template', icon='COPY_ID')
|
||||
|
||||
#col.separator()
|
||||
|
||||
|
@ -778,7 +723,8 @@ class AssetLibraryPrefs(AddonPreferences):
|
|||
|
||||
|
||||
classes = [
|
||||
AssetLibraryAdapters,
|
||||
LibraryTypes,
|
||||
Adapters,
|
||||
#ConformAssetLibrary,
|
||||
AssetLibrary,
|
||||
AssetLibraryPrefs,
|
||||
|
@ -799,14 +745,19 @@ def register():
|
|||
if config_dir:
|
||||
prefs['config_directory'] = os.path.expandvars(config_dir)
|
||||
|
||||
adapter_dir = os.getenv('ASSETLIB_ADAPTER_DIR')
|
||||
if adapter_dir:
|
||||
prefs['adapter_directory'] = os.path.expandvars(adapter_dir)
|
||||
library_type_dir = os.getenv('ASSETLIB_LIBRARY_TYPE_DIR')
if library_type_dir:
prefs['library_type_directory'] = os.path.expandvars(library_type_dir)

adapter_dir = os.getenv('ASSETLIB_ADAPTER_DIR')
if adapter_dir:
prefs['adapter_directory'] = os.path.expandvars(adapter_dir)
|
||||
|
||||
prefs.load_library_types()
|
||||
prefs.load_adapters()
|
||||
|
||||
def unregister():
|
||||
for cls in reversed(classes + ADAPTERS):
|
||||
for cls in reversed(classes + LIBRARY_TYPES):
|
||||
bpy.utils.unregister_class(cls)
|
||||
|
||||
ADAPTERS.clear()
|
||||
LIBRARY_TYPES.clear()
|