2022-12-24 15:30:32 +01:00
|
|
|
|
|
|
|
"""
|
|
|
|
Plugin for making an asset library of all blender files found in a folder
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
from asset_library.adapters.adapter import AssetLibraryAdapter
|
|
|
|
from asset_library.common.bl_utils import load_datablocks
|
|
|
|
from asset_library.common.template import Template
|
|
|
|
|
|
|
|
import bpy
|
|
|
|
from bpy.props import (StringProperty, IntProperty, BoolProperty)
|
|
|
|
import re
|
|
|
|
from pathlib import Path
|
|
|
|
from itertools import groupby
|
|
|
|
import uuid
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import json
|
|
|
|
|
|
|
|
|
|
|
|
class ScanFolderLibrary(AssetLibraryAdapter):
    """Library adapter that scans a folder for .blend files matching a path
    template and bundles the assets found into an asset-browser library."""

    name = "Scan Folder"

    # Root folder scanned for source .blend files (may contain environment
    # variables, expanded with os.path.expandvars before use).
    source_directory : StringProperty(subtype='DIR_PATH')
    # Path template (see common.template.Template) used to locate blend
    # files below source_directory and to parse catalog/name fields.
    template : StringProperty()
    # Number of catalog levels grouped into a single bundled .blend
    # (0 = bundle everything into one blend).
    blend_depth : IntProperty()
    #externalize_preview : BoolProperty(default=True)

    #def draw_header(self, layout):
    #    '''Draw the header of the Asset Browser Window'''
    #    layout.separator()
    #    layout.operator("actionlib.store_anim_pose", text='Add Action', icon='FILE_NEW')

    #def update(self):
    #
|
|
|
|
def get_asset_path(self, name, catalog, directory=None):
    """Return the absolute path of an asset inside *directory*
    (defaults to the adapter's source directory)."""
    base_dir = directory if directory else self.source_directory
    relative = self.get_asset_relative_path(name, catalog)
    return Path(base_dir, relative)
|
|
|
|
|
|
|
|
def get_asset_description(self, asset, catalog, modified):
    """Build the description dict for a single asset datablock.

    The 'filepath' keeps a '{source_directory}' token so it can be
    re-anchored later; image/video paths come from the adapter templates.
    """
    rel_path = self.get_asset_relative_path(name=asset.name, catalog=catalog)
    file_name = self.norm_file_name(asset.name)

    entry = {
        'catalog': catalog,
        'metadata': dict(asset.asset_data),
        'tags': asset.asset_data.tags.keys(),
        'type': self.data_type,
        'image': str(self.template_image.format(name=file_name)),
        'video': str(self.template_video.format(name=file_name)),
        'name': asset.name,
    }

    return {
        'filepath': '{source_directory}/' + rel_path.as_posix(),
        'modified': modified,
        'library_id': self.library.id,
        'assets': [entry],
    }
|
|
|
|
|
|
|
|
def _find_blend_files(self):
    '''Get a sorted list of all blender files found matching the template'''
    template = Template(self.template)
    print(f'Search for blend using glob template: {template.glob_pattern}')

    root = Path(os.path.expandvars(self.source_directory))
    print(f'Scanning Folder {root}...')

    # sorted() materializes and orders the generator in one step.
    return sorted(root.glob(template.glob_pattern))
|
|
|
|
|
|
|
|
def _group_key(self, asset_data):
    """Group assets inside one blend"""
    parts = [*asset_data['catalog'].split('/'), asset_data['name']]
    return parts[:self.blend_depth]
|
|
|
|
|
|
|
|
def bundle(self, cache_diff=None):
    """Group all assets into one or multiple blend files for the asset browser.

    Args:
        cache_diff: None (a fresh diff is computed and the cache written),
            a list of asset-data dicts, or a str/Path to a JSON file holding
            that list. Each entry may carry an 'operation' key
            ('ADD', 'REMOVE' or 'MODIFY'; defaults to 'ADD').

    Only 'FILE' and 'ACTION' data types are supported.
    Quits Blender when done.
    """

    if self.data_type not in ('FILE', 'ACTION'):
        print(f'{self.data_type} is not supported yet')
        return

    lib_path = self.library_path
    catalog_data = self.read_catalog()  # TODO remove unused catalog

    if not cache_diff:
        # No diff given: compute the list of all modifications and store it.
        cache, cache_diff = self.diff()
        self.write_cache(cache)
    elif isinstance(cache_diff, (Path, str)):
        # A path was given: load the diff from the JSON file.
        cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))

    if self.blend_depth == 0:
        # Bug fix: previously this was `[(cache_diff)]`, which crashed on the
        # 2-value unpacking below; bundle everything into a single group
        # named after the library root instead.
        groups = [([Path(lib_path).stem], cache_diff)]
    else:
        # groupby() requires its input sorted by the same key.
        cache_diff.sort(key=self._group_key)
        groups = groupby(cache_diff, key=self._group_key)

    total_assets = len(cache_diff)
    print(f'total_assets={total_assets}')

    if total_assets == 0:
        print('No assets found')
        return

    i = 0
    for sub_path, asset_datas in groups:
        # One bundled blend per group, stored under the catalog sub-path.
        blend_name = sub_path[-1].replace(' ', '_').lower()
        blend_path = Path(lib_path, *sub_path, blend_name).with_suffix('.blend')

        if blend_path.exists():
            print(f'Opening existing bundle blend: {blend_path}')
            bpy.ops.wm.open_mainfile(filepath=str(blend_path))
        else:
            print(f'Create new bundle blend to: {blend_path}')
            bpy.ops.wm.read_homefile(use_empty=True)

        for asset_data in asset_datas:
            # Progress report roughly every 10% (always when few assets).
            if total_assets <= 100 or i % int(total_assets / 10) == 0:
                print(f'Progress: {int(i / total_assets * 100)+1}')

            operation = asset_data.get('operation', 'ADD')
            asset = getattr(bpy.data, self.data_types).get(asset_data['name'])

            if operation == 'REMOVE':
                if asset:
                    getattr(bpy.data, self.data_types).remove(asset)
                else:
                    print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
                continue
            elif operation == 'MODIFY':
                if not asset:
                    print(f'WARNING: Modifiy Asset: {asset_data["name"]} not found in {blend_path} it will be created')
                    # Bug fix: actually create the missing datablock as the
                    # warning promises (previously `asset` stayed None and
                    # asset.asset_mark() below raised AttributeError).
                    asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
            elif operation == 'ADD' or not asset:
                if asset:
                    # Replace any datablock already present under that name.
                    getattr(bpy.data, self.data_types).remove(asset)
                asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
            else:
                print(f'operation {operation} not supported should be in (ADD, REMOVE, MODIFIED)')
                continue

            asset.asset_mark()

            # Load an external preview image if one exists; relative paths
            # are resolved against the asset's source filepath.
            image_path = Path(asset_data['image'])
            if not image_path.is_absolute():
                image_path = Path(asset_data['filepath'], image_path)
            image_path = self.format_path(image_path.as_posix())

            if image_path and image_path.exists():
                with bpy.context.temp_override(id=asset):
                    bpy.ops.ed.lib_id_load_custom_preview(
                        filepath=str(image_path)
                    )

            asset.asset_data.description = asset_data.get('description', '')

            # Ensure the catalog exists, creating a fresh UUID if needed.
            catalog_name = asset_data['catalog']
            catalog = catalog_data.get(catalog_name)
            if not catalog:
                catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
                catalog_data[catalog_name] = catalog
            asset.asset_data.catalog_id = catalog['id']

            # Store custom metadata (library id + source filepath included)
            # as ID properties on the asset.
            metadata = asset_data.get('metadata', {})
            library_id = self.library.id
            if 'library_id' in asset_data:
                library_id = asset_data['library_id']
            metadata['.library_id'] = library_id
            metadata['filepath'] = asset_data['filepath']
            for k, v in metadata.items():
                asset.asset_data[k] = v

            # Replace the tags when the asset description provides some.
            tags = asset_data.get('tags', [])
            if tags:
                for tag in asset.asset_data.tags[:]:
                    asset.asset_data.tags.remove(tag)
                for tag in tags:
                    if not tag:
                        continue
                    asset.asset_data.tags.new(tag, skip_if_exists=True)

            i += 1

        print(f'Saving Blend to {blend_path}')
        blend_path.parent.mkdir(exist_ok=True, parents=True)
        bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

    self.write_catalog(catalog_data)

    bpy.ops.wm.quit_blender()
|
|
|
|
|
|
|
|
def get_preview(self, asset_data):
    """Return the path of the preview image for *asset_data*, generating it
    from the blend file when it does not exist yet.

    Bug fix: the previous version referenced undefined locals ``f`` and
    ``template_image`` (NameError on every call); the blend path now comes
    from the asset description and the template from the adapter settings.
    """
    name = asset_data['name']
    blend_file = Path(os.path.expandvars(asset_data['filepath']))

    preview = (blend_file / self.template_image.format(name=name)).resolve()
    if not preview.exists():
        # NOTE(review): preview_blend_file is expected to come from
        # asset_library.common.bl_utils — confirm it is imported at
        # module level.
        preview_blend_file(blend_file, preview)

    return preview
|
|
|
|
|
|
|
|
|
|
|
|
def conform(self, directory, templates):
    """Split each asset into its own blend under *directory* and externalize
    previews/videos.

    Args:
        directory: Destination folder for the conformed library.
        templates: Dict that may override the 'image' and 'video' path
            templates; falls back to the adapter's own templates.

    Only 'FILE' and 'ACTION' data types are supported.
    """

    print(f'Conforming {self.library.name} to {directory}')

    if self.data_type not in ('FILE', 'ACTION'):
        print(f'{self.data_type} is not supported yet')
        return

    #lib_path = self.library_path
    # Expand env vars before touching the filesystem.
    source_directory = Path(os.path.expandvars(self.source_directory))
    catalog_data = self.read_catalog(filepath=source_directory)
    # Reverse lookup: catalog UUID -> {'path': catalog path, 'name': label}.
    catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_data.items()}
    directory = Path(directory).resolve()

    # Caller-supplied templates win over the adapter defaults.
    template_image = templates.get('image') or self.template_image
    template_video = templates.get('video') or self.template_video

    for blend_file in self._find_blend_files():
        modified = blend_file.stat().st_mtime_ns

        # Link (not append) every asset datablock out of the blend file.
        print(f'Scanning blendfile {blend_file}...')
        with bpy.data.libraries.load(str(blend_file), link=True, assets_only=True) as (data_from, data_to):
            asset_names = getattr(data_from, self.data_types)
            print(f'Found {len(asset_names)} {self.data_types} inside')

            setattr(data_to, self.data_types, asset_names)

        assets = getattr(data_to, self.data_types)

        for asset in assets:
            #TODO options for choose beetween asset catalog and filepath directory
            asset_catalog_data = catalog_ids.get(asset.asset_data.catalog_id)

            if not asset_catalog_data:
                # No catalog entry: fall back to the blend's folder layout.
                print(f'No catalog found for asset {asset.name}')
                asset_catalog_data = {"path": blend_file.parent.relative_to(source_directory).as_posix()}

            catalog_path = asset_catalog_data['path']

            asset_path = self.get_asset_path(name=asset.name, catalog=catalog_path, directory=directory)
            asset_description = self.get_asset_description(asset, catalog=catalog_path, modified=modified)

            self.write_description_file(asset_description, asset_path)

            # Write a blend file containing only this one asset.
            self.write_asset(asset=asset, asset_path=asset_path)

            # Copy the source image if found, else render the asset preview.
            src_image_path = self.get_path('image', name=asset.name, asset_path=blend_file, template=template_image)
            dst_image_path = self.get_path('image', name=asset.name, asset_path=asset_path)

            if src_image_path.exists():
                self.copy_file(src_image_path, dst_image_path)
            else:
                self.write_preview(asset.preview, dst_image_path)

            # Copy the source video only if one exists.
            src_video_path = self.get_path('video', name=asset.name, asset_path=blend_file, template=template_video)

            if src_video_path.exists():
                dst_video_path = self.get_path('video', name=asset.name, asset_path=asset_path)
                self.copy_file(src_video_path, dst_video_path)

    self.write_catalog(catalog_data, filepath=directory)
|
|
|
|
|
|
|
|
def fetch(self):
    """Gather in a list all assets found in the source folder.

    Returns:
        A list of asset-description dicts sorted by 'filepath'. Blend files
        unchanged since the cache was written are carried over as-is; FILE
        libraries are described from the template fields alone, other types
        are scanned by linking their asset datablocks.

    Raises:
        Exception: when a blend path does not match the library template.
    """

    print(f'Fetch Assets for {self.library.name}')

    source_directory = Path(os.path.expandvars(self.source_directory))
    template = Template(self.template)
    catalog_data = self.read_catalog(filepath=source_directory)
    # Reverse lookup: catalog UUID -> {'path': catalog path, 'name': label}.
    catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k, v in catalog_data.items()}

    cache = self.read_cache() or []

    print(f'Search for blend using glob template: {template.glob_pattern}')
    print(f'Scanning Folder {source_directory}...')

    new_cache = []

    for blend_file in template.glob(source_directory):
        source_rel_path = self.prop_rel_path(blend_file, 'source_directory')
        modified = blend_file.stat().st_mtime_ns

        # Reuse the cached description when the file was not modified since.
        asset_description = next((a for a in cache if a['filepath'] == source_rel_path), None)
        if asset_description and asset_description['modified'] >= modified:
            print(blend_file, 'is skipped because not modified')
            new_cache.append(asset_description)
            continue

        rel_path = blend_file.relative_to(source_directory).as_posix()
        field_data = template.parse(rel_path)

        if not field_data:
            # Bug fix: raise with a message instead of a bare Exception().
            raise Exception(f'Could not parse fields from {rel_path} with template {self.template}')

        # Positional (digit-named) template fields become the catalog path.
        catalogs = [v for k, v in sorted(field_data.items()) if k.isdigit()]
        catalogs = [c.replace('_', ' ').title() for c in catalogs]

        if self.data_type == 'FILE':
            # FILE libraries: one asset per blend file, no need to open it.
            name = field_data.get('name', blend_file.stem)
            image = self.get_path('image', name=name, asset_path=blend_file)

            asset_description = dict(
                filepath=source_rel_path,
                modified=modified,
                catalog='/'.join(catalogs),
                tags=[],
                type=self.data_type,
                image=self.prop_rel_path(image, 'source_directory'),
                name=name
            )
            new_cache.append(asset_description)
            continue

        # First check if there is an asset description .json for this blend.
        asset_description = self.read_asset_description_file(blend_file)

        if not asset_description:
            # No description file: scan the blend for the assets inside and
            # build a description from what is found.
            print(f'Scanning blendfile {blend_file}...')
            with bpy.data.libraries.load(str(blend_file), link=True, assets_only=True) as (data_from, data_to):
                asset_names = getattr(data_from, self.data_types)
                print(f'Found {len(asset_names)} {self.data_types} inside')
                setattr(data_to, self.data_types, asset_names)
            assets = getattr(data_to, self.data_types)

            asset_description = dict(
                filepath=source_rel_path,
                modified=modified,
                assets=[]
            )

            for asset in assets:
                asset_catalog_data = catalog_ids.get(asset.asset_data.catalog_id)

                if not asset_catalog_data:
                    print(f'No catalog found for asset {asset.name}')
                    # Bug fix: use the env-var-expanded source_directory;
                    # self.source_directory may still contain variables and
                    # made relative_to() fail (conform() already does this).
                    asset_catalog_data = {"path": blend_file.relative_to(source_directory).as_posix()}

                catalog_path = asset_catalog_data['path']

                image_path = self.get_path('image', asset.name, catalog_path)
                image = self.prop_rel_path(image_path, 'source_directory')

                # Write a preview image only if no source image was found.
                if not image_path.exists():
                    image_path = self.get_cache_image_path(asset.name, catalog_path)
                    image = self.prop_rel_path(image_path, 'library_path')
                    self.write_preview(asset.preview, image_path)

                video_path = self.get_path('video', asset.name, catalog_path)
                video = self.prop_rel_path(video_path, 'source_directory')

                asset_data = dict(
                    filepath=self.prop_rel_path(blend_file, 'source_directory'),
                    modified=modified,
                    catalog=catalog_path,
                    tags=asset.asset_data.tags.keys(),
                    type=self.data_type,
                    image=image,
                    video=video,
                    name=asset.name
                )
                asset_description['assets'].append(asset_data)

                # Unlink the linked datablock again to keep the session clean.
                getattr(bpy.data, self.data_types).remove(asset)

        new_cache.append(asset_description)

    new_cache.sort(key=lambda x: x['filepath'])

    return new_cache
|