"""
|
|
|
|
Plugin for making an asset library of all blender file found in a folder
|
|
|
|
"""

import re
import uuid
import os
import shutil
import json
import time

from pathlib import Path
from itertools import groupby

import bpy
from bpy.props import (StringProperty, IntProperty, BoolProperty)

from asset_library.plugins.library_plugin import LibraryPlugin
from asset_library.core.bl_utils import load_datablocks
from asset_library.core.template import Template


class ScanFolder(LibraryPlugin):

    name = "Scan Folder"

    source_directory : StringProperty(subtype='DIR_PATH')

    source_template_file : StringProperty()
    source_template_image : StringProperty()
    source_template_video : StringProperty()
    source_template_info : StringProperty()
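
    # Hypothetical example value for the file template above (the exact token
    # syntax is defined by asset_library.core.template.Template, not here):
    #   source_template_file: '{cat0}/{cat1}/{asset_name}.blend'
    # fetch() below expects this template to expose 'cat0', 'cat1', ... and
    # 'asset_name' fields, while the image and video templates are formatted
    # with the 'name', 'catalog' and 'filepath' keys (see get_image_path).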

    def draw_prefs(self, layout):
        """Draw the plugin settings in the given layout"""
        layout.prop(self, "source_directory", text="Source: Directory")

        col = layout.column(align=True)
        col.prop(self, "source_template_file", icon='COPY_ID', text='Template file')
        col.prop(self, "source_template_image", icon='COPY_ID', text='Template image')
        col.prop(self, "source_template_video", icon='COPY_ID', text='Template video')
        col.prop(self, "source_template_info", icon='COPY_ID', text='Template info')

    def get_asset_path(self, name, catalog, directory=None):
        """Return the path of an asset inside the source directory"""
        directory = directory or self.source_directory
        catalog = self.norm_file_name(catalog)
        name = self.norm_file_name(name)

        return Path(directory, self.get_asset_relative_path(name, catalog))

    def get_image_path(self, name, catalog, filepath):
        """Resolve the preview image path of an asset from the image template"""
        catalog = self.norm_file_name(catalog)
        name = self.norm_file_name(name)
        return self.format_path(self.source_template_image, dict(name=name, catalog=catalog, filepath=filepath))

    def get_video_path(self, name, catalog, filepath):
        """Resolve the preview video path of an asset from the video template"""
        catalog = self.norm_file_name(catalog)
        name = self.norm_file_name(name)
        return self.format_path(self.source_template_video, dict(name=name, catalog=catalog, filepath=filepath))

    def new_asset(self, asset, asset_data):
        raise Exception('Needs to be defined in the plugin')

    def remove_asset(self, asset, asset_data):
        raise Exception('Needs to be defined in the plugin')

    '''
    def format_asset_info(self, asset_datas, asset_path, modified=None):

        asset_path = self.prop_rel_path(asset_path, 'source_directory')
        modified = modified or time.time_ns()
        library_id = self.library.id

        # if self.data_type == 'FILE':
        #     return dict(
        #         filepath=asset_path,
        #         author=asset_info.get('author'),
        #         modified=modified,
        #         library_id=library_id,
        #         catalog=asset_info['catalog'],
        #         tags=[],
        #         description=asset_info.get('description', ''),
        #         type=self.data_type,
        #         #image=self.source_template_image,
        #         name=asset_info['name']
        #     )

        return dict(
            filepath=asset_path,
            modified=modified,
            library_id=library_id,
            assets=[dict(
                catalog=asset_data['catalog'],
                author=asset_data.get('author', ''),
                metadata=asset_data.get('metadata', {}),
                description=asset_data.get('description', ''),
                tags=asset_data.get('tags', []),
                type=self.data_type,
                name=asset_data['name']) for asset_data in asset_datas
            ]
        )
    '''

    def set_asset_preview(self, asset, asset_cache):
        '''Load an external image as preview for an asset, using the source template'''

        asset_path = self.format_path(asset_cache.filepath)

        image_template = self.source_template_image
        if not image_template:
            return

        image_path = self.find_path(image_template, asset_cache.to_dict(), filepath=asset_path)

        if image_path:
            with bpy.context.temp_override(id=asset):
                bpy.ops.ed.lib_id_load_custom_preview(
                    filepath=str(image_path)
                )
        else:
            print(f'No image found for {image_template} on {asset.name}')

        if asset.preview:
            return asset.preview

    def bundle(self, cache_diff=None):
        """Group all new assets in one or multiple blends for the asset browser"""

        if self.data_type not in ('FILE', 'ACTION', 'COLLECTION'):
            print(f'{self.data_type} is not supported yet')
            return

        #catalog_data = self.read_catalog()

        catalog = self.read_catalog()
        cache = None

        if not cache_diff:
            # Get the list of all modifications
            cache = self.fetch()
            cache_diff = cache.diff()

            # Write the cache in a temporary file for the generate preview script
            tmp_cache_file = cache.write(tmp=True)
            bpy.ops.assetlibrary.generate_previews(name=self.library.name, cache=str(tmp_cache_file))

        elif isinstance(cache_diff, (Path, str)):
            cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))

        if self.library.blend_depth == 0:
            raise Exception('blend_depth must be at least 1')

        total_assets = len(cache_diff)
        print(f'total_assets={total_assets}')

        if total_assets == 0:
            print('No assets found')
            return

        i = 0
        for blend_path, asset_cache_diffs in cache_diff.group_by(key=self.get_asset_bundle_path):
            if blend_path.exists():
                print(f'Opening existing bundle blend: {blend_path}')
                bpy.ops.wm.open_mainfile(filepath=str(blend_path))
            else:
                print(f'Creating new bundle blend at: {blend_path}')
                bpy.ops.wm.read_homefile(use_empty=True)

            for asset_cache_diff in asset_cache_diffs:
                if total_assets <= 100 or i % int(total_assets / 10) == 0:
                    print(f'Progress: {int(i / total_assets * 100)+1}')

                operation = asset_cache_diff.operation
                asset_cache = asset_cache_diff.asset_cache
                asset_name = asset_cache.name
                asset = getattr(bpy.data, self.data_types).get(asset_name)

                if operation == 'REMOVE':
                    if asset:
                        getattr(bpy.data, self.data_types).remove(asset)
                    else:
                        print(f'ERROR: Remove Asset: {asset_name} not found in {blend_path}')
                    continue

                if asset_cache_diff.operation == 'MODIFY' and not asset:
                    print(f'WARNING: Modify Asset: {asset_name} not found in {blend_path}, it will be created')

                if operation == 'ADD' or not asset:
                    if asset:
                        #raise Exception(f"Asset {asset_name} Already in Blend")
                        print(f"Asset {asset_name} already in blend")
                        getattr(bpy.data, self.data_types).remove(asset)

                    #print(f"INFO: Add new asset: {asset_name}")
                    asset = getattr(bpy.data, self.data_types).new(name=asset_name)
                elif operation != 'MODIFY':
                    print(f'operation {operation} not supported, should be in (ADD, REMOVE, MODIFY)')
                    continue
                # A MODIFY operation with an existing datablock falls through and only
                # refreshes its metadata below.

                asset.asset_mark()
                asset.asset_data.catalog_id = catalog.add(asset_cache_diff.catalog).id

                self.set_asset_preview(asset, asset_cache)
                self.set_asset_metadata(asset, asset_cache)
                self.set_asset_tags(asset, asset_cache)
                self.set_asset_info(asset, asset_cache)

                i += 1

            print(f'Saving Blend to {blend_path}')

            blend_path.parent.mkdir(exist_ok=True, parents=True)
            bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

        # If the variable cache_diff was given we need to update the cache with the diff
        if cache is None:
            cache = self.read_cache()
            cache.update(cache_diff)

        cache.write()

        catalog.update(cache.catalogs)
        catalog.write()

        bpy.ops.wm.quit_blender()

    def fetch(self):
        """Gather in a list all assets found in the folder"""

        print(f'Fetch Assets for {self.library.name}')

        source_directory = Path(self.source_directory)
        template_file = Template(self.source_template_file)

        #catalog_data = self.read_catalog(directory=source_directory)
        #catalog_ids = {v['id']: k for k, v in catalog_data.items()}

        #self.catalog.read()

        cache = self.read_cache()

        print(f'Search for blend using glob template: {template_file.glob_pattern}')
        print(f'Scanning folder {source_directory}...')

        #new_cache = LibraryCache()

        for asset_path in template_file.glob(source_directory):

            source_rel_path = self.prop_rel_path(asset_path, 'source_directory')
            modified = asset_path.stat().st_mtime_ns

            # Check if the asset description has already been cached
            file_cache = next((a for a in cache if a.filepath == source_rel_path), None)

            if file_cache:
                if file_cache.modified >= modified: #print(asset_path, 'is skipped because not modified')
                    continue
            else:
                file_cache = cache.add(filepath=source_rel_path)

            rel_path = asset_path.relative_to(source_directory).as_posix()
            field_data = template_file.parse(rel_path)

            # Create the catalog path from the actual path of the asset
            catalog = [v for k, v in sorted(field_data.items()) if re.findall('cat[0-9]+', k)]
            #catalogs = [c.replace('_', ' ').title() for c in catalogs]

            asset_name = field_data.get('asset_name', asset_path.stem)

            if self.data_type == 'FILE':
                file_cache.set_data(
                    name=asset_name,
                    type='FILE',
                    catalog=catalog,
                    modified=modified
                )
                continue

            # Now check if there is an asset description file (commented for now, probably not useful)
            #asset_info_path = self.find_path(self.source_template_info, asset_info, filepath=asset_path)
            #if asset_info_path:
            #    new_cache.append(self.read_file(asset_info_path))
            #    continue

            # Scan the blend file for the assets inside
            print(f'Scanning blendfile {asset_path}...')
            assets = self.load_datablocks(asset_path, type=self.data_types, link=True, assets_only=True)
            print(f'Found {len(assets)} {self.data_types} inside')

            for asset in assets:
                #catalog_path = catalog_ids.get(asset.asset_data.catalog_id)

                #if not catalog_path:
                #    print(f'No catalog found for asset {asset.name}')
                #catalog_path = asset_info['catalog']  #asset_path.relative_to(self.source_directory).as_posix()

                # For now the catalog used is the one extracted from the template file
                file_cache.assets.add(self.get_asset_data(asset), catalog=catalog)
                getattr(bpy.data, self.data_types).remove(asset)

        return cache
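
# Rough usage sketch (an assumption for illustration; the real orchestration is
# handled by LibraryPlugin and the add-on operators, not by this module):
#
#   plugin = library.plugin          # a configured ScanFolder instance (hypothetical)
#   cache = plugin.fetch()           # scan source_directory for blend files
#   cache_diff = cache.diff()        # compute ADD / REMOVE / MODIFY entries
#   plugin.bundle(cache_diff)        # write the bundle blends, previews and catalog,
#                                    # then quit the (background) Blender instance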