refacto
parent d1c17581ff
commit 25fa9c3a3c
@@ -979,8 +979,11 @@ class ACTIONLIB_OT_store_anim_pose(Operator):
 
         #print(self, self.library_items)
+        catalog_item = lib.catalog.active_item
 
+        if catalog_item:
+            self.catalog = catalog_item.path #get_active_catalog()
 
-        self.catalog = get_active_catalog()
         self.set_action_type()
 
         if self.clear_previews:
@@ -1,26 +1,18 @@
 
-#from asset_library.bundle_blend import bundle_blend, bundle_library
-#from file_utils import (norm_str, norm_value,
-#    norm_arg, get_bl_cmd, copy_file, copy_dir)
-#from asset_library.functions import
-
-#from asset_library.common import bundle_blend
 from asset_library.common import file_utils
 from asset_library.common import functions
 from asset_library.common import synchronize
 from asset_library.common import template
+from asset_library.common import catalog
 
 if 'bpy' in locals():
     import importlib
 
-    #importlib.reload(bundle_blend)
     importlib.reload(file_utils)
     importlib.reload(functions)
     importlib.reload(synchronize)
     importlib.reload(template)
+    importlib.reload(catalog)
 
 import bpy
 
@@ -456,10 +456,12 @@ def get_preview(asset_path='', asset_name=''):
     return next((f for f in asset_preview_dir.rglob('*') if f.stem.lower().endswith(name)), None)
 
 def get_object_libraries(ob):
-    if not ob :
+    if ob is None:
         return []
 
-    libraries = [ob.library, ob.data.library]
+    libraries = [ob.library]
+    if ob.data:
+        libraries += [ob.data.library]
 
     if ob.type in ('MESH', 'CURVE'):
         libraries += [m.library for m in ob.data.materials if m]
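Not part of the commit: a minimal usage sketch of the reworked get_object_libraries(); the import path is assumed and the object lookup is only illustrative.

```python
# Hypothetical sketch: gather the external .blend libraries the active object
# depends on (object data, plus materials for meshes/curves) and print them.
import bpy
from asset_library.common.bl_utils import get_object_libraries  # assumed module path

ob = bpy.context.object
for lib in get_object_libraries(ob):
    if lib:  # local datablocks have library == None
        print(lib.filepath)
```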
@@ -0,0 +1,200 @@
+
+from pathlib import Path
+import uuid
+import bpy
+
+
+class CatalogItem:
+    """Represent a single item of a catalog"""
+    def __init__(self, catalog, path=None, name=None, id=None):
+
+        self.catalog = catalog
+
+        self.path = path
+        self.name = name
+        self.id = str(id or uuid.uuid4())
+
+        if isinstance(self.path, Path):
+            self.path = self.path.as_posix()
+
+        if self.path and not self.name:
+            self.name = self.norm_name(self.path)
+
+    def parts(self):
+        return Path(self.name).parts
+
+    def norm_name(self, name):
+        """Get a norm name from a catalog_path entry"""
+        return name.replace('/', '-')
+
+    def __repr__(self):
+        return f'CatalogItem(name={self.name}, path={self.path}, id={self.id})'
+
+
+class CatalogContext:
+    """Utility class to get catalog relative to the current context asset browser area"""
+
+    @staticmethod
+    def poll():
+        return asset_utils.SpaceAssetInfo.is_asset_browser(bpy.context.space_data)
+
+    @property
+    def id(self):
+        if not self.poll():
+            return
+
+        return bpy.context.space_data.params.catalog_id
+
+    @property
+    def item(self):
+        if not self.poll():
+            return
+
+        return self.get(id=self.active_id)
+
+    @property
+    def path(self):
+        if not self.poll():
+            return
+
+        if self.active_item:
+            return self.active_item.path
+
+        return ''
+
+
+class Catalog:
+    """Represent the catalog of the blender asset browser library"""
+    def __init__(self, directory=None):
+
+        self.directory = None
+        self._data = {}
+
+        if directory:
+            self.directory = Path(directory)
+
+        self.context = CatalogContext()
+
+    @property
+    def filepath(self):
+        """Get the filepath of the catalog text file relative to the directory"""
+        if self.directory:
+            return self.directory /'blender_assets.cats.txt'
+
+    def read(self):
+        """Read the catalog file of the library target directory or of the specified directory"""
+
+        if not self.filepath or not self.filepath.exists():
+            return {}
+
+        self._data.clear()
+
+        print(f'Read catalog from {self.filepath}')
+        for line in self.filepath.read_text(encoding="utf-8").split('\n'):
+            if line.startswith(('VERSION', '#')) or not line:
+                continue
+
+            cat_id, cat_path, cat_name = line.split(':')
+            self._data[cat_id] = CatalogItem(self, name=cat_name, id=cat_id, path=cat_path)
+
+        return self
+
+    def write(self, sort=True):
+        """Write the catalog file in the library target directory or of the specified directory"""
+
+        if not self.filepath:
+            raise Exception(f'Cannot write catalog {self} no filepath setted')
+
+        lines = ['VERSION 1', '']
+
+        catalog_items = list(self)
+        if sort:
+            catalog_items.sort(key=lambda x : x.path)
+
+        for catalog_item in catalog_items:
+            lines.append(f"{catalog_item.id}:{catalog_item.path}:{catalog_item.name}")
+
+        print(f'Write Catalog at: {self.filepath}')
+        self.filepath.write_text('\n'.join(lines), encoding="utf-8")
+
+    def get(self, path=None, id=None, fallback=None):
+        """Found a catalog item by is path or id"""
+        if isinstance(path, Path):
+            path = path.as_posix()
+
+        if id:
+            return self._data.get(id)
+
+        for catalog_item in self:
+            if catalog_item.path == path:
+                return catalog_item
+
+        return fallback
+
+    def remove(self, catalog_item):
+        """Get a CatalogItem with is path and removing it if found"""
+
+        if not isinstance(catalog_item, CatalogItem):
+            catalog_item = self.get(catalog_item)
+
+        if catalog_item:
+            return self._data.pop(catalog_item.id)
+
+        print(f'Warning: {catalog_item} cannot be remove, not in {self}')
+        return None
+
+    def add(self, catalog_path):
+        """Adding a CatalogItem with the missing parents"""
+
+        # Add missing parents catalog
+        for parent in Path(catalog_path).parents[:-1]:
+            print(parent, self.get(parent))
+            if self.get(parent):
+                continue
+
+            cat_item = CatalogItem(self, path=parent)
+            self._data[cat_item.id] = cat_item
+
+        cat_item = self.get(catalog_path)
+        if not cat_item:
+            cat_item = CatalogItem(self, path=catalog_path)
+            self._data[cat_item.id] = cat_item
+
+        return cat_item
+
+    def update(self, catalogs):
+        'Add or remove catalog entries if on the list given or not'
+
+        catalogs = set(catalogs) # Remove doubles
+
+        added = [c for c in catalogs if not self.get(path=c)]
+        removed = [c.path for c in self if c.path not in catalogs]
+
+        if added:
+            print(f'{len(added)} Catalog Entry Added \n{tuple(c.name for c in added[:10])}...\n')
+        if removed:
+            print(f'{len(removed)} Catalog Entry Removed \n{tuple(c.name for c in removed[:10])}...\n')
+
+        for catalog_item in removed:
+            self.remove(catalog_item)
+
+        for catalog_item in added:
+            self.add(catalog_item)
+
+    def __iter__(self):
+        return self._data.values().__iter__()
+
+    def __getitem__(self, key):
+        if isinstance(key, int):
+            return self._data.values()[key]
+
+        return self._data[key]
+
+    def __contains__(self, item):
+        if isinstance(item, str): # item is the id
+            return item in self._data
+        else:
+            return item in self
+
+    def __repr__(self):
+        return f'Catalog(filepath={self.filepath})'
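Not part of the commit: a minimal sketch of how the new Catalog / CatalogItem classes above might be used; the library directory path is a placeholder.

```python
# Hypothetical usage sketch for the new Catalog class (directory is a placeholder).
from asset_library.common.catalog import Catalog

catalog = Catalog('/path/to/bundle_directory')  # folder holding blender_assets.cats.txt
catalog.read()                                  # parse existing entries into CatalogItem objects
item = catalog.add('Chars/Props')               # also creates the missing 'Chars' parent entry
print(item.id, item.path, item.name)            # uuid, 'Chars/Props', 'Chars-Props'
catalog.write(sort=True)                        # rewrite the catalog file sorted by path
```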
@@ -0,0 +1,381 @@
+import bpy
+from pathlib import Path
+from asset_library.common.file_utils import read_file, write_file
+from copy import deepcopy
+import time
+from itertools import groupby
+
+
+class AssetCache:
+    def __init__(self, file_cache, data=None):
+
+        self.file_cache = file_cache
+
+        self.catalog = None
+        self.author = None
+        self.description = None
+        self.tags = None
+        self.type = None
+        self.name = None
+        self._metadata = None
+
+        if data:
+            self.set_data(data)
+
+    @property
+    def filepath(self):
+        return self.file_cache.filepath
+
+    @property
+    def library_id(self):
+        return self.file_cache.library_id
+
+    @property
+    def metadata(self):
+        metadata = {
+            '.library_id': self.library_id,
+            '.filepath': self.filepath
+        }
+
+        metadata.update(self._metadata)
+
+        return metadata
+
+    @property
+    def norm_name(self):
+        return self.name.replace(' ', '_').lower()
+
+    def unique_name(self):
+        return (self.filepath / self.name).as_posix()
+
+    def set_data(self, data):
+        catalog = data['catalog']
+        if isinstance(catalog, (list, tuple)):
+            catalog = '/'.join(catalog)
+
+        self.catalog = catalog
+        self.author = data.get('author', '')
+        self.description = data.get('description', '')
+        self.tags = data.get('tags', [])
+        self.type = data.get('type')
+        self.name = data['name']
+        self._metadata = data.get('metadata', {})
+
+    def to_dict(self):
+        return dict(
+            catalog=self.catalog,
+            author=self.author,
+            metadata=self.metadata,
+            description=self.description,
+            tags=self.tags,
+            type=self.type,
+            name=self.name
+        )
+
+    def __repr__(self):
+        return f'AssetCache(name={self.name}, catalog={self.catalog})'
+
+    def __eq__(self, other):
+        return self.to_dict() == other.to_dict()
+
+
+class AssetsCache:
+    def __init__(self, file_cache):
+
+        self.file_cache = file_cache
+        self._data = []
+
+    def add(self, asset_cache_data, **kargs):
+        asset_cache = AssetCache(self.file_cache, {**asset_cache_data, **kargs})
+        self._data.append(asset_cache)
+
+        return asset_cache
+
+    def remove(self, asset_cache):
+        if isinstance(asset_cache, str):
+            asset_cache = self.get(asset_cache)
+
+    def __iter__(self):
+        return self._data.__iter__()
+
+    def __getitem__(self, key):
+        if isinstance(key, str):
+            return self.to_dict()[key]
+        else:
+            return self._data[key]
+
+    def to_dict(self):
+        return {a.name: a for a in self}
+
+    def get(self, name):
+        return next((a for a in self if a.name == name), None)
+
+    def __repr__(self):
+        return f'AssetsCache({list(self)})'
+
+
+class FileCache:
+    def __init__(self, library_cache, data=None):
+
+        self.library_cache = library_cache
+
+        self.filepath = None
+        self.modified = None
+        self.assets = AssetsCache(self)
+
+        if data:
+            self.set_data(data)
+
+    @property
+    def library_id(self):
+        return self.library_cache.library_id
+
+    def set_data(self, data):
+
+        if 'filepath' in data:
+            self.filepath = Path(data['filepath'])
+
+        self.modified = data.get('modified', time.time_ns())
+
+        if data.get('type') == 'FILE':
+            self.assets.add(data)
+
+        for asset_cache_data in data.get('assets', []):
+            self.assets.add(asset_cache_data)
+
+    def to_dict(self):
+        return dict(
+            filepath=self.filepath.as_posix(),
+            modified=self.modified,
+            library_id=self.library_id,
+            assets=[asset_cache.to_dict() for asset_cache in self]
+        )
+
+    def __iter__(self):
+        return self.assets.__iter__()
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __repr__(self):
+        return f'FileCache(filepath={self.filepath})'
+
+
+class AssetCacheDiff:
+    def __init__(self, library_cache, asset_cache, operation):
+
+        self.library_cache = library_cache
+        #self.filepath = data['filepath']
+        self.operation = operation
+        self.asset_cache = asset_cache
+
+
+class LibraryCacheDiff:
+    def __init__(self, old_cache=None, new_cache=None, filepath=None):
+
+        self.filepath = filepath
+        self._data = []
+
+        self.compare(old_cache, new_cache)
+
+    def add(self, asset_cache_datas, operation):
+        if not isinstance(asset_cache_datas, (list, tuple)):
+            asset_cache_datas = [asset_cache_datas]
+
+        new_asset_diffs = []
+        for cache_data in asset_cache_datas:
+            new_asset_diffs.append(AssetCacheDiff(self, cache_data, operation))
+
+        self._data += new_asset_diffs
+
+        return new_asset_diffs
+
+    def compare(self, old_cache, new_cache):
+        if old_cache is None or new_cache is None:
+            print('Cannot Compare cache with None')
+
+        cache_dict = {a.unique_name : a for a in old_cache.asset_caches}
+        new_cache_dict = {a.unique_name : a for a in new_cache.asset_caches}
+
+        assets_added = self.add([v for k, v in new_cache_dict.items() if k not in cache_dict], 'ADD')
+        assets_removed = self.add([v for k, v in cache_dict.items() if k not in new_cache_dict], 'REMOVED')
+        assets_modified = self.add([v for k, v in cache_dict.items() if v not in assets_removed and v!= new_cache_dict[k]], 'MODIFIED')
+
+        if assets_added:
+            print(f'{len(assets_added)} Assets Added \n{tuple(a.name for a in assets_added[:10])}...\n')
+        if assets_removed:
+            print(f'{len(assets_removed)} Assets Removed \n{tuple(a.name for a in assets_removed[:10])}...\n')
+        if assets_modified:
+            print(f'{len(assets_modified)} Assets Modified \n{tuple(a.name for a in assets_modified[:10])}...\n')
+
+        if len(self) == 0:
+            print('No change in the library')
+
+        return self
+
+    def group_by(self, key):
+        '''Return groups of file cache diff using the key provided'''
+        data = list(self).sort(key=key)
+        return groupby(data, key=key)
+
+    def __iter__(self):
+        return iter(self._data)
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __len__(self):
+        return len(self._data)
+
+    def __repr__(self):
+        return f'LibraryCacheDiff(operations={[o for o in self][:2]}...)'
+
+
+class LibraryCache:
+    def __init__(self, filepath):
+
+        self.filepath = Path(filepath)
+        self._data = []
+
+    @classmethod
+    def from_library(cls, library):
+        filepath = library.library_path / f"blender_assets.{library.id}.json"
+        return cls(filepath)
+
+    @property
+    def filename(self):
+        return self.filepath.name
+
+    @property
+    def library_id(self):
+        return self.filepath.stem.split('.')[-1]
+
+    #@property
+    #def filepath(self):
+    #    """Get the filepath of the library json file relative to the library"""
+    #    return self.directory / self.filename
+
+    def catalogs(self):
+        return set(a.catalog for a in self.asset_caches)
+
+    @property
+    def asset_caches(self):
+        '''Return an iterator to get all asset caches'''
+        return (asset_cache for file_cache in self for asset_cache in file_cache)
+
+    @property
+    def tmp_filepath(self):
+        return Path(bpy.app.tempdir) / self.filename
+
+    def read(self):
+        print(f'Read cache from {self.filepath}')
+
+        for file_cache_data in read_file(self.filepath):
+            self.add(file_cache_data)
+
+        return self
+
+    def write(self, tmp=False):
+        filepath = self.filepath
+        if tmp:
+            filepath = self.tmp_filepath
+
+        print(f'Write cache file to {filepath}')
+        write_file(filepath, self._data)
+        return filepath
+
+    def add(self, file_cache_data=None):
+        file_cache = FileCache(self, file_cache_data)
+
+        self._data.append(file_cache)
+
+        return file_cache
+
+    def add_asset_cache(self, asset_cache_data, filepath=None):
+        if filepath is None:
+            filepath = asset_cache_data['filepath']
+
+        file_cache = self.get(filepath)
+        if not file_cache:
+            file_cache = self.add()
+
+        file_cache.assets.add(asset_cache_data)
+
+    # def unflatten_cache(self, cache):
+    #     """ Return a new unflattten list of asset data
+    #     grouped by filepath"""
+
+    #     new_cache = []
+
+    #     cache = deepcopy(cache)
+
+    #     cache.sort(key=lambda x : x['filepath'])
+    #     groups = groupby(cache, key=lambda x : x['filepath'])
+
+    #     keys = ['filepath', 'modified', 'library_id']
+
+    #     for _, asset_datas in groups:
+    #         asset_datas = list(asset_datas)
+
+    #         #print(asset_datas[0])
+
+    #         asset_info = {k:asset_datas[0][k] for k in keys}
+    #         asset_info['assets'] = [{k:v for k, v in a.items() if k not in keys+['operation']} for a in asset_datas]
+
+    #         new_cache.append(asset_info)
+
+    #     return new_cache
+
+    def diff(self, new_cache=None):
+        """Compare the library cache with it current state and return the cache differential"""
+
+        old_cache = self.read()
+
+        if new_cache is None:
+            new_cache = self
+
+        return LibraryCacheDiff(old_cache, new_cache)
+
+    def update(self, cache_diff):
+        #Update the cache with the operations
+        for asset_cache_diff in cache_diff:
+            file_cache = self.get(asset_cache_diff.filepath)
+            if not asset_cache:
+                print(f'Filepath {asset_cache_diff.filepath} not in {self}' )
+                continue
+
+            asset_cache = file_cache.get(asset_cache_diff.name)
+
+            if not asset_cache:
+                print(f'Asset {asset_cache_diff.name} not in file_cache {file_cache}' )
+                continue
+
+            if asset_cache_diff.operation == 'REMOVE':
+                file_cache.assets.remove(asset_cache_diff.name)
+
+            elif asset_cache_diff.operation in ('MODIFY', 'ADD'):
+                asset_cache.set_data(asset_cache_diff.asset_cache.to_dict())
+
+        return self
+
+    def __len__(self):
+        return len(self._data)
+
+    def __iter__(self):
+        return iter(self._data)
+
+    def __getitem__(self, key):
+        if isinstance(key, str):
+            return self.to_dict()[key]
+        else:
+            return self._data[key]
+
+    def to_dict(self):
+        return {a.filepath: a for a in self}
+
+    def get(self, filepath):
+        return next((a for a in self if a.filepath == filepath), None)
+
+    def __repr__(self):
+        return f'LibraryCache(library_id={self.library_id})'
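Not part of the commit: a short sketch of the intended read-then-compare flow of the cache classes above; the JSON file paths are placeholders.

```python
# Hypothetical usage sketch for the cache classes (paths are placeholders).
from asset_library.common.library_cache import LibraryCache, LibraryCacheDiff

old_cache = LibraryCache('/path/to/library/blender_assets.my_lib.json').read()
new_cache = LibraryCache('/tmp/blender_assets.my_lib.json').read()  # e.g. a freshly fetched state
cache_diff = LibraryCacheDiff(old_cache, new_cache)   # collects ADD / REMOVED / MODIFIED entries
for asset_diff in cache_diff:
    print(asset_diff.operation, asset_diff.asset_cache.name)
```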
@@ -10,8 +10,7 @@ from itertools import groupby
 
 from asset_library.constants import ASSETLIB_FILENAME, MODULE_DIR
 from asset_library.common.bl_utils import thumbnail_blend_file
-from asset_library.common.functions import (read_catalog, get_catalog_path,
-    command, write_catalog)
+from asset_library.common.functions import command
 
 
@@ -91,14 +91,18 @@ class Conform(ScanFolder):
         if asset.preview:
             return asset.preview
 
-    def generate_previews(self, cache=None):
+    def generate_previews(self, cache_diff):
 
         print('Generate previews...')
 
-        if cache in (None, ''):
-            cache = self.fetch()
-        elif isinstance(cache, (Path, str)):
-            cache = self.read_cache(cache)
+        # if cache in (None, ''):
+        #     cache = self.fetch()
+        # elif isinstance(cache, (Path, str)):
+        #     cache = self.read_cache(cache)
+
+        if isinstance(cache, (Path, str)):
+            cache_diff = LibraryCacheDiff(cache_diff)
 
         #TODO Support all multiple data_type
@@ -234,7 +234,8 @@ class Kitsu(LibraryType):
         entity_types = gazu.client.fetch_all('entity-types')
         entity_types_ids = {e['id']: e['name'] for e in entity_types}
 
-        asset_infos = []
+        cache = self.read_cache()
+
         for asset_data in gazu.asset.all_assets_for_project(project):
             asset_data['entity_type_name'] = entity_types_ids[asset_data.pop('entity_type_id')]
             asset_name = asset_data['name']
@@ -251,25 +252,18 @@ class Kitsu(LibraryType):
                 print(f'Warning: Could not find file for {template_file.format(asset_field_data)}')
                 continue
 
-            #print(asset_path)
+            asset_path = self.prop_rel_path(asset_path, 'source_directory')
+            asset_cache_data = dict(
+                catalog=asset_data['entity_type_name'].title(),
+                metadata=asset_data.get('data', {}),
+                description=asset_data['description'],
+                tags=[],
+                type=self.data_type,
+                name=asset_data['name']
+            )
 
-            # TODO group when multiple asset are store in the same blend
-            asset_infos.append(self.get_asset_info(asset_data, asset_path))
+            cache.add_asset_cache(asset_cache_data, filepath=asset_path)
 
-            #asset = load_datablocks(asset_path, data_type='collections', names=asset_data['name'], link=True)
-            #if not asset:
-            #    print(f"Asset {asset_name} not found in {asset_path}")
-
-            #asset_info = self.get_asset_info(asset)
-            #asset_infos.append(asset_info)
-
-            #print(assets)
-            # for k, v in assets[0].items():
-            #     print(f'- {k} {v}')
-
-            #print('+++++++++++++')
-            #print(asset_infos)
-
-        return asset_infos
+        return cache
@@ -6,10 +6,12 @@ from asset_library.common.template import Template
 from asset_library.constants import (MODULE_DIR, RESOURCES_DIR)
 
 from asset_library import (action, collection, file)
+from asset_library.common.library_cache import LibraryCacheDiff
 
 from bpy.types import PropertyGroup
 from bpy.props import StringProperty
 import bpy
+from bpy_extras import asset_utils
 
 from itertools import groupby
 from pathlib import Path
@@ -36,7 +38,7 @@ class LibraryType(PropertyGroup):
         for lib in prefs.libraries:
             if lib.library_type == self:
                 return lib
 
     @property
     def bundle_directory(self):
         return self.library.library_path
@@ -49,21 +51,21 @@ class LibraryType(PropertyGroup):
     def data_types(self):
         return self.library.data_types
 
-    def get_catalog_path(self, directory=None):
-        directory = directory or self.bundle_directory
-        return Path(directory, 'blender_assets.cats.txt')
+    # def get_catalog_path(self, directory=None):
+    #     directory = directory or self.bundle_directory
+    #     return Path(directory, 'blender_assets.cats.txt')
 
-    @property
-    def cache_file(self):
-        return Path(self.bundle_directory) / f"blender_assets.{self.library.id}.json"
+    # @property
+    # def cache_file(self):
+    #     return Path(self.bundle_directory) / f"blender_assets.{self.library.id}.json"
 
-    @property
-    def tmp_cache_file(self):
-        return Path(bpy.app.tempdir) / f"blender_assets.{self.library.id}.json"
+    # @property
+    # def tmp_cache_file(self):
+    #     return Path(bpy.app.tempdir) / f"blender_assets.{self.library.id}.json"
 
-    @property
-    def diff_file(self):
-        return Path(bpy.app.tempdir, 'diff.json')
+    # @property
+    # def diff_file(self):
+    #     return Path(bpy.app.tempdir, 'diff.json')
 
     @property
     def preview_blend(self):
@@ -87,14 +89,20 @@ class LibraryType(PropertyGroup):
         elif lib_type == 'COLLECTION':
             return collection
 
-    def to_dict(self):
-        return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
-
     @property
     def format_data(self):
         """Dict for formating template"""
         return dict(self.to_dict(), bundle_dir=self.library.bundle_dir, parent=self.library.parent)
 
+    def to_dict(self):
+        return {p: getattr(self, p) for p in self.bl_rna.properties.keys() if p !='rna_type'}
+
+    def read_catalog(self):
+        return self.library.read_catalog()
+
+    def read_cache(self, filepath=None):
+        return self.library.read_cache(filepath=filepath)
+
     def fetch(self):
         raise Exception('This method need to be define in the library_type')
@@ -137,6 +145,7 @@ class LibraryType(PropertyGroup):
 
         return dict(
             name=asset.name,
+            type=asset.bl_rna.name.upper(),
             author=asset.asset_data.author,
             tags=list(asset.asset_data.tags.keys()),
             metadata=dict(asset.asset_data),
@@ -149,7 +158,6 @@ class LibraryType(PropertyGroup):
         return Path(catalog, name, name).with_suffix('.blend')
 
     def get_active_asset_library(self):
-        asset_handle = bpy.context.asset_file_handle
         prefs = get_addon_prefs()
         asset_handle = bpy.context.asset_file_handle
 
@@ -195,13 +203,13 @@ class LibraryType(PropertyGroup):
     def get_video_path(self, name, catalog, filepath):
         raise Exception('Need to be defined in the library_type')
 
-    def new_asset(self, asset, asset_data):
+    def new_asset(self, asset, asset_cache):
         raise Exception('Need to be defined in the library_type')
 
-    def remove_asset(self, asset, asset_data):
+    def remove_asset(self, asset, asset_cache):
         raise Exception('Need to be defined in the library_type')
 
-    def set_asset_preview(asset, asset_data):
+    def set_asset_preview(self, asset, asset_cache):
         raise Exception('Need to be defined in the library_type')
 
     def format_asset_data(self, data):
@@ -238,15 +246,15 @@ class LibraryType(PropertyGroup):
         if paths:
             return Path(paths[0])
 
-    def read_asset_info_file(self, asset_path) -> dict:
-        """Read the description file of the asset"""
-
-        description_path = self.get_description_path(asset_path)
-        return self.read_file(description_path)
+    # def read_asset_info_file(self, asset_path) -> dict:
+    #     """Read the description file of the asset"""
+
+    #     description_path = self.get_description_path(asset_path)
+    #     return self.read_file(description_path)
 
-    def write_description_file(self, asset_data, asset_path) -> None:
-        description_path = self.get_description_path(asset_path)
-        return write_file(description_path, asset_data)
+    # def write_description_file(self, asset_data, asset_path) -> None:
+    #     description_path = self.get_description_path(asset_path)
+    #     return write_file(description_path, asset_data)
 
     def write_asset(self, asset, asset_path):
 
@@ -260,57 +268,57 @@ class LibraryType(PropertyGroup):
             compress=True
         )
 
-    def read_catalog(self, directory=None):
-        """Read the catalog file of the library target directory or of the specified directory"""
-        catalog_path = self.get_catalog_path(directory)
-
-        if not catalog_path.exists():
-            return {}
-
-        cat_data = {}
-
-        for line in catalog_path.read_text(encoding="utf-8").split('\n'):
-            if line.startswith(('VERSION', '#')) or not line:
-                continue
-
-            cat_id, cat_path, cat_name = line.split(':')
-            cat_data[cat_path] = {'id':cat_id, 'name':cat_name}
-
-        return cat_data
-
-    def write_catalog(self, catalog_data, directory=None):
-        """Write the catalog file in the library target directory or of the specified directory"""
-
-        catalog_path = self.get_catalog_path(directory)
-
-        lines = ['VERSION 1', '']
-
-        # Add missing parents catalog
-        norm_data = {}
-        for cat_path, cat_data in catalog_data.items():
-            norm_data[cat_path] = cat_data
-            for p in Path(cat_path).parents[:-1]:
-                if p in cat_data or p in norm_data:
-                    continue
-
-                norm_data[p.as_posix()] = {'id': str(uuid.uuid4()), 'name': '-'.join(p.parts)}
-
-        for cat_path, cat_data in sorted(norm_data.items()):
-            cat_name = cat_data['name'].replace('/', '-')
-            lines.append(f"{cat_data['id']}:{cat_path}:{cat_name}")
-
-        print(f'Catalog writen at: {catalog_path}')
-        catalog_path.write_text('\n'.join(lines), encoding="utf-8")
-
-    def read_cache(self, cache_path=None):
-        cache_path = cache_path or self.cache_file
-        print(f'Read cache from {cache_path}')
-        return self.read_file(cache_path)
-
-    def write_cache(self, asset_infos, cache_path=None):
-        cache_path = cache_path or self.cache_file
-        print(f'cache file writen to {cache_path}')
-        return write_file(cache_path, list(asset_infos))
+    # def read_catalog(self, directory=None):
+    #     """Read the catalog file of the library target directory or of the specified directory"""
+    #     catalog_path = self.get_catalog_path(directory)
+
+    #     if not catalog_path.exists():
+    #         return {}
+
+    #     cat_data = {}
+
+    #     for line in catalog_path.read_text(encoding="utf-8").split('\n'):
+    #         if line.startswith(('VERSION', '#')) or not line:
+    #             continue
+
+    #         cat_id, cat_path, cat_name = line.split(':')
+    #         cat_data[cat_path] = {'id':cat_id, 'name':cat_name}
+
+    #     return cat_data
+
+    # def write_catalog(self, catalog_data, directory=None):
+    #     """Write the catalog file in the library target directory or of the specified directory"""
+
+    #     catalog_path = self.get_catalog_path(directory)
+
+    #     lines = ['VERSION 1', '']
+
+    #     # Add missing parents catalog
+    #     norm_data = {}
+    #     for cat_path, cat_data in catalog_data.items():
+    #         norm_data[cat_path] = cat_data
+    #         for p in Path(cat_path).parents[:-1]:
+    #             if p in cat_data or p in norm_data:
+    #                 continue
+
+    #             norm_data[p.as_posix()] = {'id': str(uuid.uuid4()), 'name': '-'.join(p.parts)}
+
+    #     for cat_path, cat_data in sorted(norm_data.items()):
+    #         cat_name = cat_data['name'].replace('/', '-')
+    #         lines.append(f"{cat_data['id']}:{cat_path}:{cat_name}")
+
+    #     print(f'Catalog writen at: {catalog_path}')
+    #     catalog_path.write_text('\n'.join(lines), encoding="utf-8")
+
+    # def read_cache(self, cache_path=None):
+    #     cache_path = cache_path or self.cache_file
+    #     print(f'Read cache from {cache_path}')
+    #     return self.read_file(cache_path)
+
+    # def write_cache(self, asset_infos, cache_path=None):
+    #     cache_path = cache_path or self.cache_file
+    #     print(f'cache file writen to {cache_path}')
+    #     return write_file(cache_path, list(asset_infos))
 
     def prop_rel_path(self, path, prop):
         '''Get a filepath relative to a property of the library_type'''
@@ -521,56 +529,50 @@ class LibraryType(PropertyGroup):
         bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
         '''
 
-    def set_asset_catalog(self, asset, asset_data, catalog_data):
-        """Find the catalog if already exist or create it"""
-        catalog_name = asset_data['catalog']
-        catalog = catalog_data.get(catalog_name)
-        if not catalog:
-            catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
-            catalog_data[catalog_name] = catalog
-
-        asset.asset_data.catalog_id = catalog['id']
-
-    def set_asset_metadata(self, asset, asset_data):
-        """Create custom prop to an asset base on provided data"""
-        metadata = asset_data.get('metadata', {})
-
-        library_id = self.library.id
-        if 'library_id' in asset_data:
-            library_id = asset_data['library_id']
-
-        metadata['.library_id'] = library_id
-        metadata['filepath'] = asset_data['filepath']
-        for k, v in metadata.items():
+    # def set_asset_catalog(self, asset, asset_data, catalog_data):
+    #     """Find the catalog if already exist or create it"""
+    #     catalog_name = asset_data['catalog']
+    #     catalog = catalog_data.get(catalog_name)
+
+    #     catalog_item = self.catalog.add(asset_data['catalog'])
+    #     asset.asset_data.catalog_id = catalog_item.id
+
+    #     if not catalog:
+    #         catalog = {'id': str(uuid.uuid4()), 'name': catalog_name}
+    #         catalog_data[catalog_name] = catalog
+
+    #     asset.asset_data.catalog_id = catalog['id']
+
+    def set_asset_metadata(self, asset, asset_cache):
+        """Create custom prop to an asset base on provided data"""
+        for k, v in asset_cache.metadata.items():
             asset.asset_data[k] = v
 
-    def set_asset_tags(self, asset, asset_data):
+    def set_asset_tags(self, asset, asset_cache):
         """Create asset tags base on provided data"""
 
-        if 'tags' in asset_data:
-            for tag in asset.asset_data.tags[:]:
+        if asset_cache.tags is not None:
+            for tag in list(asset.asset_data.tags):
                 asset.asset_data.tags.remove(tag)
 
-            for tag in asset_data['tags']:
-                if not tag:
-                    continue
+            for tag in asset_cache.tags:
                 asset.asset_data.tags.new(tag, skip_if_exists=True)
 
-    def set_asset_info(self, asset, asset_data):
+    def set_asset_info(self, asset, asset_cache):
         """Set asset description base on provided data"""
+        asset.asset_data.author = asset_cache.author
+        asset.asset_data.description = asset_cache.description
 
-        for key in ('author', 'description'):
-            if key in asset_data:
-                setattr(asset.asset_data, key, asset_data.get(key) or '')
-
-    def get_asset_bundle_path(self, asset_data):
-
-        catalog_parts = asset_data['catalog'].split('/') + [asset_data['name']]
-
-        sub_path = catalog_parts[:self.library.blend_depth]
-
-        blend_name = sub_path[-1].replace(' ', '_').lower()
-        return Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
+    def get_asset_bundle_path(self, asset_cache):
+        """Get the bundle path for that asset"""
+        catalog_parts = asset_cache.catalog_item.parts
+        blend_name = asset_cache.norm_name
+        path_parts = catalog_parts[:self.library.blend_depth]
+
+        return Path(self.bundle_directory, *path_parts, blend_name, blend_name).with_suffix('.blend')
 
     def bundle(self, cache_diff=None):
         """Group all new assets in one or multiple blends for the asset browser"""
@@ -582,213 +584,184 @@ class LibraryType(PropertyGroup):
             print(f'{self.data_type} is not supported yet supported types are {supported_types}')
             return
 
-        catalog_data = self.read_catalog() #TODO remove unused catalog
+        catalog = self.read_catalog()
+        cache = None
 
         write_cache = False
         if not cache_diff:
             # Get list of all modifications
-            asset_infos = self.fetch()
-
-            cache, cache_diff = self.diff(asset_infos)
-
-            # Only write complete cache at the end
-            write_cache = True
-
-            #self.generate_previews(asset_infos)
-            self.write_cache(asset_infos, self.tmp_cache_file)
-            bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(self.tmp_cache_file))
-
-            #print()
-            #print(cache)
-            #raise Exception()
+            cache = self.fetch()
+            cache_diff = cache.diff()
+
+            # Write the cache in a temporary file for the generate preview script
+            tmp_cache_file = cache.write(tmp=True)
+            bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(tmp_cache_file))
 
         elif isinstance(cache_diff, (Path, str)):
-            cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))
+            cache_diff = LibraryCacheDiff(cache_diff).read()#json.loads(Path(cache_diff).read_text(encoding='utf-8'))
 
-        if self.library.blend_depth == 0:
-            raise Exception('Blender depth must be 1 at min')
-            #groups = [(cache_diff)]
-        else:
-            cache_diff.sort(key=self.get_asset_bundle_path)
-            groups = groupby(cache_diff, key=self.get_asset_bundle_path)
-
-        total_assets = len(cache_diff)
-        print(f'total_assets={total_assets}')
-
-        if total_assets == 0:
+        total_diffs = len(cache_diff)
+        print(f'Total Diffs={total_diffs}')
+
+        if total_diffs == 0:
             print('No assets found')
             return
 
-        #data_types = self.data_types
-        #if self.data_types == 'FILE'
-
         i = 0
-        #assets_to_preview = []
-        for blend_path, asset_datas in groups:
-            #blend_name = sub_path[-1].replace(' ', '_').lower()
-            #blend_path = Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
-
-            if blend_path.exists():
-                print(f'Opening existing bundle blend: {blend_path}')
-                bpy.ops.wm.open_mainfile(filepath=str(blend_path))
+        for bundle_path, asset_diffs in cache_diff.group_by(self.get_asset_bundle_path):
+            if bundle_path.exists():
+                print(f'Opening existing bundle blend: {bundle_path}')
+                bpy.ops.wm.open_mainfile(filepath=str(bundle_path))
             else:
-                print(f'Create new bundle blend to: {blend_path}')
+                print(f'Create new bundle blend to: {bundle_path}')
                 bpy.ops.wm.read_homefile(use_empty=True)
 
-            for asset_data in asset_datas:
-                if total_assets <= 100 or i % int(total_assets / 10) == 0:
-                    print(f'Progress: {int(i / total_assets * 100)+1}')
-
-                operation = asset_data.get('operation', 'ADD')
-                asset = getattr(bpy.data, self.data_types).get(asset_data['name'])
-
-                if operation not in supported_operations:
-                    print(f'operation {operation} not supported, supported operations are {supported_operations}')
-                    continue
+            for asset_diff in asset_diffs:
+                if total_diffs <= 100 or i % int(total_diffs / 10) == 0:
+                    print(f'Progress: {int(i / total_diffs * 100)+1}')
+
+                operation = asset_diff.operation
+                asset_cache = asset_diff.asset_cache
+                asset = getattr(bpy.data, self.data_types).get(asset_cache.name)
 
                 if operation == 'REMOVE':
                     if asset:
                         getattr(bpy.data, self.data_types).remove(asset)
                     else:
-                        print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
+                        print(f'ERROR : Remove Asset: {asset_cache.name} not found in {bundle_path}')
                     continue
 
                 elif operation == 'MODIFY':
                     if not asset:
-                        print(f'WARNING: Modifiy Asset: {asset_data["name"]} not found in {blend_path} it will be created')
+                        print(f'WARNING: Modifiy Asset: {asset_cache.name} not found in {bundle_path} it will be created')
 
                 if operation == 'ADD' or not asset:
                     if asset:
                         #raise Exception(f"Asset {asset_data['name']} Already in Blend")
-                        print(f"Asset {asset_data['name']} Already in Blend")
+                        print(f"Asset {asset_cache.name} Already in Blend")
                         getattr(bpy.data, self.data_types).remove(asset)
 
                     #print(f"INFO: Add new asset: {asset_data['name']}")
-                    asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
+                    asset = getattr(bpy.data, self.data_types).new(name=asset_cache.name)
 
                 asset.asset_mark()
 
-                self.set_asset_preview(asset, asset_data)
+                self.set_asset_preview(asset, asset_cache)
 
                 #if not asset_preview:
                 #    assets_to_preview.append((asset_data['filepath'], asset_data['name'], asset_data['data_type']))
                 #if self.externalize_data:
                 #    self.write_preview(preview, filepath)
 
-                self.set_asset_catalog(asset, asset_data, catalog_data)
-                self.set_asset_metadata(asset, asset_data)
-                self.set_asset_tags(asset, asset_data)
-                self.set_asset_info(asset, asset_data)
+                #self.set_asset_catalog(asset, asset_data['catalog'])
+                asset.asset_data.catalog_id = catalog.add(asset_cache.catalog).id
+
+                self.set_asset_metadata(asset, asset_cache)
+                self.set_asset_tags(asset, asset_cache)
+                self.set_asset_info(asset, asset_cache)
 
                 i += 1
 
             #self.write_asset_preview_file()
 
-            print(f'Saving Blend to {blend_path}')
-
-            blend_path.parent.mkdir(exist_ok=True, parents=True)
-            bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)
+            print(f'Saving Blend to {bundle_path}')
+
+            bundle_path.parent.mkdir(exist_ok=True, parents=True)
+            bpy.ops.wm.save_as_mainfile(filepath=str(bundle_path), compress=True)
 
         if write_cache:
-            self.write_cache(asset_infos)
+            cache.write()
 
-        self.write_catalog(catalog_data)
+        #self.write_catalog(catalog_data)
+        catalog.write()
 
         bpy.ops.wm.quit_blender()
 
-    def unflatten_cache(self, cache):
-        """ Return a new unflattten list of asset data
-        grouped by filepath"""
-
-        new_cache = []
-
-        cache = deepcopy(cache)
-
-        cache.sort(key=lambda x : x['filepath'])
-        groups = groupby(cache, key=lambda x : x['filepath'])
-
-        keys = ['filepath', 'modified', 'library_id']
-
-        for _, asset_datas in groups:
-            asset_datas = list(asset_datas)
-
-            #print(asset_datas[0])
-
-            asset_info = {k:asset_datas[0][k] for k in keys}
-            asset_info['assets'] = [{k:v for k, v in a.items() if k not in keys+['operation']} for a in asset_datas]
-
-            new_cache.append(asset_info)
-
-        return new_cache
-
-    def flatten_cache(self, cache):
-        """ Return a new flat list of asset data
-        the filepath keys are merge with the assets keys"""
-
-        # If the cache has a wrong format
-        if not cache or not isinstance(cache[0], dict):
-            return []
-
-        new_cache = []
-
-        for asset_info in cache:
-            asset_info = asset_info.copy()
-            if 'assets' in asset_info:
-
-                assets = asset_info.pop('assets')
-                for asset_data in assets:
-                    new_cache.append({**asset_info, **asset_data})
-            else:
-                new_cache.append(asset_info)
-
-        return new_cache
-
-    def diff(self, asset_infos=None):
-        """Compare the library cache with it current state and return the new cache and the difference"""
-
-        cache = self.read_cache()
-
-        if cache is None:
-            print(f'Fetch The library {self.library.name} for the first time, might be long...')
-            cache = []
-
-        asset_infos = asset_infos or self.fetch()
-
-        cache = {f"{a['filepath']}/{a['name']}": a for a in self.flatten_cache(cache)}
-        new_cache = {f"{a['filepath']}/{a['name']}" : a for a in self.flatten_cache(asset_infos)}
-
-        # print('\n-------------------------')
-        # print([v for k,v in cache.items() if 'WIP_Test' in k])
-        # print()
-
-        # print([v for k,v in new_cache.items() if 'WIP_Test' in k])
-        # print()
-
-        assets_added = [v for k, v in new_cache.items() if k not in cache]
-        assets_removed = [v for k, v in cache.items() if k not in new_cache]
-        assets_modified = [v for k, v in cache.items() if v not in assets_removed and v!= new_cache[k]]
-
-        if assets_added:
-            print(f'{len(assets_added)} Assets Added \n{tuple(a["name"] for a in assets_added[:10])}\n')
-        if assets_removed:
-            print(f'{len(assets_removed)} Assets Removed \n{tuple(a["name"] for a in assets_removed[:10])}\n')
-        if assets_modified:
-            print(f'{len(assets_modified)} Assets Modified \n{tuple(a["name"] for a in assets_modified[:10])}\n')
-
-        assets_added = [dict(a, operation='ADD') for a in assets_added]
-        assets_removed = [dict(a, operation='REMOVE') for a in assets_removed]
-        assets_modified = [dict(a, operation='MODIFY') for a in assets_modified]
-
-        cache_diff = assets_added + assets_removed + assets_modified
-        if not cache_diff:
-            print('No change in the library')
-
-        return list(new_cache.values()), cache_diff
+    # def unflatten_cache(self, cache):
+    #     """ Return a new unflattten list of asset data
+    #     grouped by filepath"""
+
+    #     new_cache = []
+
+    #     cache = deepcopy(cache)
+
+    #     cache.sort(key=lambda x : x['filepath'])
+    #     groups = groupby(cache, key=lambda x : x['filepath'])
+
+    #     keys = ['filepath', 'modified', 'library_id']
+
+    #     for _, asset_datas in groups:
+    #         asset_datas = list(asset_datas)
+
+    #         #print(asset_datas[0])
+
+    #         asset_info = {k:asset_datas[0][k] for k in keys}
+    #         asset_info['assets'] = [{k:v for k, v in a.items() if k not in keys+['operation']} for a in asset_datas]
+
+    #         new_cache.append(asset_info)
+
+    #     return new_cache
+
+    # def flatten_cache(self, cache):
+    #     """ Return a new flat list of asset data
+    #     the filepath keys are merge with the assets keys"""
+
+    #     # If the cache has a wrong format
+    #     if not cache or not isinstance(cache[0], dict):
+    #         return []
+
+    #     new_cache = []
+
+    #     for asset_info in cache:
+    #         asset_info = asset_info.copy()
+    #         if 'assets' in asset_info:
+
+    #             assets = asset_info.pop('assets')
+    #             for asset_data in assets:
+    #                 new_cache.append({**asset_info, **asset_data})
+    #         else:
+    #             new_cache.append(asset_info)
+
+    #     return new_cache
+
+    # def diff(self, asset_infos=None):
+    #     """Compare the library cache with it current state and return the new cache and the difference"""
+
+    #     cache = self.read_cache()
+
+    #     if cache is None:
+    #         print(f'Fetch The library {self.library.name} for the first time, might be long...')
+    #         cache = []
+
+    #     asset_infos = asset_infos or self.fetch()
+
+    #     cache = {f"{a['filepath']}/{a['name']}": a for a in self.flatten_cache(cache)}
+    #     new_cache = {f"{a['filepath']}/{a['name']}" : a for a in self.flatten_cache(asset_infos)}
+
+    #     assets_added = [v for k, v in new_cache.items() if k not in cache]
+    #     assets_removed = [v for k, v in cache.items() if k not in new_cache]
+    #     assets_modified = [v for k, v in cache.items() if v not in assets_removed and v!= new_cache[k]]
+
+    #     if assets_added:
+    #         print(f'{len(assets_added)} Assets Added \n{tuple(a["name"] for a in assets_added[:10])}\n')
+    #     if assets_removed:
+    #         print(f'{len(assets_removed)} Assets Removed \n{tuple(a["name"] for a in assets_removed[:10])}\n')
+    #     if assets_modified:
+    #         print(f'{len(assets_modified)} Assets Modified \n{tuple(a["name"] for a in assets_modified[:10])}\n')
+
+    #     assets_added = [dict(a, operation='ADD') for a in assets_added]
+    #     assets_removed = [dict(a, operation='REMOVE') for a in assets_removed]
+    #     assets_modified = [dict(a, operation='MODIFY') for a in assets_modified]
+
+    #     cache_diff = assets_added + assets_removed + assets_modified
+    #     if not cache_diff:
+    #         print('No change in the library')
+
+    #     return list(new_cache.values()), cache_diff
 
     def draw_prefs(self, layout):
         """Draw the options in the addon preference for this library_type"""
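Not part of the commit: a sketch of how a library type could drive the reworked bundle(); the preferences lookup and library name are placeholders.

```python
# Hypothetical call sites for the reworked bundle() (names are placeholders).
prefs = get_addon_prefs()                                   # helper already used by the addon
library_type = prefs.libraries['my_library'].library_type  # assumed lookup by library name
library_type.bundle()                             # fetch, diff, generate previews, then bundle
library_type.bundle(cache_diff='/tmp/diff.json')  # or replay a previously written diff file
```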
@@ -62,6 +62,7 @@ class ScanFolder(LibraryType):
     def remove_asset(self, asset, asset_data):
         raise Exception('Need to be defined in the library_type')
 
+    '''
     def format_asset_info(self, asset_datas, asset_path, modified=None):
 
         asset_path = self.prop_rel_path(asset_path, 'source_directory')
@ -96,17 +97,18 @@ class ScanFolder(LibraryType):
|
||||||
name=asset_data['name']) for asset_data in asset_datas
|
name=asset_data['name']) for asset_data in asset_datas
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
'''
|
||||||
|
|
||||||
def set_asset_preview(self, asset, asset_data):
|
def set_asset_preview(self, asset, asset_cache):
|
||||||
'''Load an externalize image as preview for an asset using the source template'''
|
'''Load an externalize image as preview for an asset using the source template'''
|
||||||
|
|
||||||
asset_path = self.format_path(asset_data['filepath'])
|
asset_path = self.format_path(asset_cache.filepath)
|
||||||
|
|
||||||
image_template = self.source_template_image
|
image_template = self.source_template_image
|
||||||
if not image_template:
|
if not image_template:
|
||||||
return
|
return
|
||||||
|
|
||||||
image_path = self.find_path(image_template, asset_data, filepath=asset_path)
|
image_path = self.find_path(image_template, asset_cache.to_dict(), filepath=asset_path)
|
||||||
|
|
||||||
if image_path:
|
if image_path:
|
||||||
with bpy.context.temp_override(id=asset):
|
with bpy.context.temp_override(id=asset):
|
||||||
|
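The hunk stops inside the temp_override block, so the actual preview-loading call is not visible here. For readers unfamiliar with the idiom, a minimal standalone sketch of how an external image is typically attached as a custom asset preview in Blender 3.x; the object name and image path below are placeholders, not values from this addon:

import bpy

asset = bpy.data.objects.get("Chair")      # any datablock already marked as an asset
image_path = "/tmp/previews/chair.png"     # placeholder path to an existing image

if asset is not None and asset.asset_data:
    # The operator reads the target ID from the context, hence the override.
    with bpy.context.temp_override(id=asset):
        bpy.ops.ed.lib_id_load_custom_preview(filepath=image_path)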
@@ -126,39 +128,25 @@ class ScanFolder(LibraryType):
             print(f'{self.data_type} is not supported yet')
             return

-        catalog_data = self.read_catalog() #TODO remove unused catalog
+        #catalog_data = self.read_catalog()
+        catalog = self.read_catalog()
+        cache = None

-        write_cache = False
         if not cache_diff:
             # Get list of all modifications
-            asset_infos = self.fetch()
+            cache = self.fetch()
+            cache_diff = cache.diff()

-            #print(asset_infos[:2])
-            flat_cache, cache_diff = self.diff(asset_infos)
+            # Write the cache in a temporary file for the generate preview script
+            tmp_cache_file = cache.write(tmp=True)
+            bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(tmp_cache_file))

-            catalogs = [a['catalog'] for a in flat_cache]
-            catalog_data = {k:v for k, v in catalog_data.items() if k in catalogs}

-            print('cache_diff', cache_diff)

-            # Only write complete cache at the end
-            write_cache = True

-            #self.generate_previews(asset_infos)
-            self.write_cache(asset_infos, self.tmp_cache_file)
-            bpy.ops.assetlib.generate_previews(name=self.library.name, cache=str(self.tmp_cache_file))

         elif isinstance(cache_diff, (Path, str)):
             cache_diff = json.loads(Path(cache_diff).read_text(encoding='utf-8'))

         if self.library.blend_depth == 0:
             raise Exception('Blender depth must be 1 at min')
-            #groups = [(cache_diff)]
-        else:
-            cache_diff.sort(key=self.get_asset_bundle_path)
-            groups = groupby(cache_diff, key=self.get_asset_bundle_path)

         total_assets = len(cache_diff)
         print(f'total_assets={total_assets}')
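Grouping the diff per bundle blend relies on itertools.groupby, which only merges consecutive items, so the entries have to be sorted by the same key first. A small self-contained illustration with made-up entries and a hypothetical stand-in for get_asset_bundle_path():

from itertools import groupby
from pathlib import Path

def bundle_path(entry):
    # hypothetical stand-in: one bundle blend per top-level catalog folder
    return Path('/bundles', entry['catalog'].split('/')[0]).with_suffix('.blend')

entries = [
    {'name': 'chair', 'catalog': 'props/furniture', 'operation': 'ADD'},
    {'name': 'oak',   'catalog': 'trees/broadleaf', 'operation': 'MODIFY'},
    {'name': 'table', 'catalog': 'props/furniture', 'operation': 'ADD'},
]

entries.sort(key=bundle_path)
for blend_path, group in groupby(entries, key=bundle_path):
    print(blend_path, [e['name'] for e in group])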
@@ -167,15 +155,8 @@ class ScanFolder(LibraryType):
             print('No assets found')
             return

-        #data_types = self.data_types
-        #if self.data_types == 'FILE'

         i = 0
-        #assets_to_preview = []
-        for blend_path, asset_datas in groups:
-            #blend_name = sub_path[-1].replace(' ', '_').lower()
-            #blend_path = Path(self.bundle_directory, *sub_path, blend_name).with_suffix('.blend')
+        for blend_path, asset_cache_diffs in cache_diff.group_by(key=self.get_asset_bundle_path):

             if blend_path.exists():
                 print(f'Opening existing bundle blend: {blend_path}')
                 bpy.ops.wm.open_mainfile(filepath=str(blend_path))
@@ -183,84 +164,62 @@ class ScanFolder(LibraryType):
                 print(f'Create new bundle blend to: {blend_path}')
                 bpy.ops.wm.read_homefile(use_empty=True)

-            for asset_data in asset_datas:
+            for asset_cache_diff in asset_cache_diffs:
                 if total_assets <= 100 or i % int(total_assets / 10) == 0:
                     print(f'Progress: {int(i / total_assets * 100)+1}')

-                operation = asset_data.get('operation', 'ADD')
-                asset = getattr(bpy.data, self.data_types).get(asset_data['name'])
+                operation = asset_cache_diff.operation
+                asset_cache = asset_cache_diff.asset_cache
+                asset_name = asset_cache.name
+                asset = getattr(bpy.data, self.data_types).get(asset_name)

                 if operation == 'REMOVE':
                     if asset:
                         getattr(bpy.data, self.data_types).remove(asset)
                     else:
-                        print(f'ERROR : Remove Asset: {asset_data["name"]} not found in {blend_path}')
+                        print(f'ERROR : Remove Asset: {asset_name} not found in {blend_path}')
                     continue

-                if operation == 'MODIFY' and not asset:
-                    print(f'WARNING: Modifiy Asset: {asset_data["name"]} not found in {blend_path} it will be created')
+                if asset_cache_diff.operation == 'MODIFY' and not asset:
+                    print(f'WARNING: Modifiy Asset: {asset_name} not found in {blend_path} it will be created')

                 if operation == 'ADD' or not asset:
                     if asset:
-                        #raise Exception(f"Asset {asset_data['name']} Already in Blend")
-                        print(f"Asset {asset_data['name']} Already in Blend")
+                        #raise Exception(f"Asset {asset_name} Already in Blend")
+                        print(f"Asset {asset_name} Already in Blend")
                         getattr(bpy.data, self.data_types).remove(asset)

-                    #print(f"INFO: Add new asset: {asset_data['name']}")
-                    asset = getattr(bpy.data, self.data_types).new(name=asset_data['name'])
+                    #print(f"INFO: Add new asset: {asset_name}")
+                    asset = getattr(bpy.data, self.data_types).new(name=asset_name)
                 else:
                     print(f'operation {operation} not supported should be in (ADD, REMOVE, MODIFY)')
                     continue

                 asset.asset_mark()
+                asset.asset_data.catalog_id = catalog.add(asset_cache_diff.catalog).id

-                self.set_asset_preview(asset, asset_data)
-                #if not asset_preview:
-                # assets_to_preview.append((asset_data['filepath'], asset_data['name'], asset_data['data_type']))
-                #if self.externalize_data:
-                # self.write_preview(preview, filepath)

-                self.set_asset_catalog(asset, asset_data, catalog_data)
-                self.set_asset_metadata(asset, asset_data)
-                self.set_asset_tags(asset, asset_data)
-                self.set_asset_info(asset, asset_data)
+                self.set_asset_preview(asset, asset_cache)
+                self.set_asset_metadata(asset, asset_cache)
+                self.set_asset_tags(asset, asset_cache)
+                self.set_asset_info(asset, asset_cache)

                 i += 1

-            #self.write_asset_preview_file()

             print(f'Saving Blend to {blend_path}')

             blend_path.parent.mkdir(exist_ok=True, parents=True)
             bpy.ops.wm.save_as_mainfile(filepath=str(blend_path), compress=True)

-        if write_cache:
-            self.write_cache(asset_infos)
-        else:
+        # If the variable cache_diff was given we need to update the cache with the diff
+        if cache is None:
             cache = self.read_cache()
+            cache.update(cache_diff)

-            # Update the cache with the modification
-            if not cache:
-                cache = []
+        cache.write()

-            flat_cache = {f"{a['filepath']}/{a['name']}": a for a in self.flatten_cache(cache)}
-            flat_cache_diff = {f"{a['filepath']}/{a['name']}": a for a in cache_diff}

-            #Update the cache with the operations
-            for k, v in flat_cache_diff.items():
-                if v['operation'] == 'REMOVE':
-                    if k in flat_cache:
-                        flat_cache.remove(k)
-                elif v['operation'] in ('MODIFY', 'ADD'):
-                    flat_cache[k] = v

-            new_cache = self.unflatten_cache(list(flat_cache.values()))
-            self.write_cache(new_cache)

-        self.write_catalog(catalog_data)
+        catalog.update(cache.catalogs)
+        catalog.write()

         bpy.ops.wm.quit_blender()
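For reference, the per-asset calls in the loop above map onto Blender's regular asset API. A minimal standalone example of marking a datablock and filling the same fields; the catalog UUID, tag and author values are placeholders:

import bpy

action = bpy.data.actions.new("walk_cycle")
action.asset_mark()                            # turn the datablock into an asset
action.asset_data.catalog_id = "f47ac10b-58cc-4372-a567-0e02b2c3d479"  # placeholder UUID
action.asset_data.tags.new("locomotion")
action.asset_data.author = "someone"
action.asset_data.description = "Example walk cycle"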
@@ -271,55 +230,62 @@ class ScanFolder(LibraryType):

         source_directory = Path(self.source_directory)
         template_file = Template(self.source_template_file)
-        catalog_data = self.read_catalog(directory=source_directory)
-        catalog_ids = {v['id']: k for k, v in catalog_data.items()}
+        #catalog_data = self.read_catalog(directory=source_directory)
+        #catalog_ids = {v['id']: k for k, v in catalog_data.items()}

-        cache = self.read_cache() or []
+        #self.catalog.read()

+        cache = self.read_cache()

         print(f'Search for blend using glob template: {template_file.glob_pattern}')
         print(f'Scanning Folder {source_directory}...')

-        new_cache = []
+        #new_cache = LibraryCache()

-        for asset_path in template_file.glob(source_directory):#sorted(blend_files):
+        for asset_path in template_file.glob(source_directory):

             source_rel_path = self.prop_rel_path(asset_path, 'source_directory')
             modified = asset_path.stat().st_mtime_ns

             # Check if the asset description as already been cached
-            asset_info = next((a for a in cache if a['filepath'] == source_rel_path), None)
+            file_cache = next((a for a in cache if a.filepath == source_rel_path), None)

-            if asset_info and asset_info['modified'] >= modified:
-                #print(asset_path, 'is skipped because not modified')
-                new_cache.append(asset_info)
-                continue
+            if file_cache:
+                if file_cache.modified >= modified: #print(asset_path, 'is skipped because not modified')
+                    continue
+            else:
+                file_cache = cache.add(filepath=source_rel_path)

             rel_path = asset_path.relative_to(source_directory).as_posix()
             field_data = template_file.parse(rel_path)

-            catalogs = [v for k,v in sorted(field_data.items()) if re.findall('cat[0-9]+', k)]
+            # Create the catalog path from the actual path of the asset
+            catalog = [v for k,v in sorted(field_data.items()) if re.findall('cat[0-9]+', k)]
             #catalogs = [c.replace('_', ' ').title() for c in catalogs]

             asset_name = field_data.get('asset_name', asset_path.stem)

             if self.data_type == 'FILE':
-                asset_datas = [{"name": asset_name, "catalog": '/'.join(catalogs)}]
-                asset_info = self.format_asset_info(asset_datas, asset_path, modified=modified)
-                new_cache.append(asset_info)
+                file_cache.set_data(
+                    name=asset_name,
+                    type='FILE',
+                    catalog=catalog,
+                    modified=modified
+                )

                 continue

-            # Now check if there is a asset description file
-            asset_info_path = self.find_path(self.source_template_info, asset_info, filepath=asset_path)
-            if asset_info_path:
-                new_cache.append(self.read_file(asset_info_path))
-                continue
+            # Now check if there is a asset description file (Commented for now propably not usefull)
+            #asset_info_path = self.find_path(self.source_template_info, asset_info, filepath=asset_path)
+            #if asset_info_path:
+            #    new_cache.append(self.read_file(asset_info_path))
+            #    continue

-            # Scan the blend file for assets inside and write a custom asset description for info found
+            # Scan the blend file for assets inside
             print(f'Scanning blendfile {asset_path}...')
             assets = self.load_datablocks(asset_path, type=self.data_types, link=True, assets_only=True)
             print(f'Found {len(assets)} {self.data_types} inside')

-            asset_datas = []
             for asset in assets:
                 #catalog_path = catalog_ids.get(asset.asset_data.catalog_id)

@@ -328,20 +294,8 @@ class ScanFolder(LibraryType):
                 #catalog_path = asset_info['catalog']#asset_path.relative_to(self.source_directory).as_posix()

                 # For now the catalog used is the one extract from the template file
-                asset_data = self.get_asset_data(asset)
-                asset_data['catalog'] = '/'.join(catalogs)

-                asset_datas.append(asset_data)
+                file_cache.assets.add(self.get_asset_data(asset), catalog=catalog)

                 getattr(bpy.data, self.data_types).remove(asset)

+        return cache

-            asset_info = self.format_asset_info(asset_datas, asset_path, modified=modified)

-            new_cache.append(asset_info)

-        new_cache.sort(key=lambda x:x['filepath'])

-        return new_cache#[:5]

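The incremental part of fetch() hinges on comparing the stored modification time with st_mtime_ns. A self-contained sketch of that check, using an illustrative path-to-timestamp mapping instead of the addon's FileCache objects:

from pathlib import Path

cached = {'props/furniture.blend': 1700000000000000000}   # filepath -> mtime_ns from a previous scan
source_directory = Path('/path/to/library')               # placeholder directory

for blend_file in source_directory.rglob('*.blend'):
    rel_path = blend_file.relative_to(source_directory).as_posix()
    if cached.get(rel_path, 0) >= blend_file.stat().st_mtime_ns:
        continue   # unchanged since the last scan, keep the cached description
    print(f'Scanning blendfile {blend_file}...')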
operators.py
@@ -62,16 +62,18 @@ class ASSETLIB_OT_remove_assets(Operator):
         lib = get_active_library()
         lib_type = lib.library_type

-        asset_handle = context.asset_file_handle
+        catalog = lib.read_catalog()

-        catalog_file = lib.library_type.read_catalog()
-        catalog_ids = {v['id']: {'path': k, 'name': v['name']} for k,v in catalog_file.items()}
-        catalog = catalog_ids[asset_handle.asset_data.catalog_id]['path']
+        if not catalog.context.item:
+            self.report({'ERROR'}, 'The active asset is not in the catalog')
+            return {'CANCELLED'}

+        asset_name = context.asset_file_handle.name
         asset_path = lib_type.format_path(asset.asset_data['filepath'])
+        asset_catalog = catalog.context.path

-        img_path = lib_type.get_image_path(name=asset_handle.name, catalog=catalog, filepath=asset_path)
-        video_path = lib_type.get_video_path(name=asset_handle.name, catalog=catalog, filepath=asset_path)
+        img_path = lib_type.get_image_path(name=asset_name, catalog=asset_catalog, filepath=asset_path)
+        video_path = lib_type.get_video_path(name=asset_name, catalog=asset_catalog, filepath=asset_path)

         if asset_path and asset_path.exists():
             asset_path.unlink()
@@ -708,6 +710,7 @@ class ASSETLIB_OT_generate_previews(Operator):

         return {'FINISHED'}


 class ASSETLIB_OT_play_preview(Operator):
     bl_idname = "assetlib.play_preview"
     bl_options = {"REGISTER", "UNDO", "INTERNAL"}

@@ -12,6 +12,8 @@ from asset_library.constants import (DATA_TYPES, DATA_TYPE_ITEMS,

 from asset_library.common.file_utils import import_module_from_path, norm_str
 from asset_library.common.bl_utils import get_addon_prefs
+from asset_library.common.library_cache import LibraryCache
+from asset_library.common.catalog import Catalog
 #from asset_library.common.functions import get_catalog_path

 from pathlib import Path

@@ -235,6 +237,15 @@ class AssetLibrary(PropertyGroup):
             return self.custom_bundle_name

         return self.name

+    def read_catalog(self):
+        return Catalog(self.library_path).read()
+
+    def read_cache(self, filepath=None):
+        if filepath:
+            return LibraryCache(filepath).read()
+
+        return LibraryCache.from_library(self).read()
+
     def clear_library_path(self):
         #print('Clear Library Path', self.name)

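The new read_catalog() helper delegates to the Catalog class imported above from asset_library.common.catalog. For context, Blender stores catalog definitions in a plain-text blender_assets.cats.txt file with one "UUID:catalog/path:simple name" entry per line; a rough standalone parser for that format (not the addon's implementation) could look like this:

from pathlib import Path

def read_catalog_file(directory):
    """Parse blender_assets.cats.txt into {catalog_path: {'id': ..., 'name': ...}}."""
    catalog_file = Path(directory) / 'blender_assets.cats.txt'
    catalogs = {}
    if not catalog_file.exists():
        return catalogs
    for line in catalog_file.read_text(encoding='utf-8').splitlines():
        if not line.strip() or line.startswith(('#', 'VERSION')):
            continue
        catalog_id, path, name = line.split(':', 2)
        catalogs[path] = {'id': catalog_id, 'name': name}
    return catalogs

print(read_catalog_file('/path/to/library'))   # placeholder directory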
@@ -0,0 +1,270 @@
+
+import bpy
+from pathlib import Path
+from asset_library.common.file_utils import read_file, write_file
+from copy import deepcopy
+import time
+from itertools import groupby
+
+
+class AssetCache:
+    """Description of a single asset stored in the library cache."""
+
+    def __init__(self, file_cache, data):
+
+        self.file_cache = file_cache
+
+        self._data = data
+
+        self.catalog = data['catalog']
+        self.author = data.get('author', '')
+        self.description = data.get('description', '')
+        self.tags = data.get('tags', [])
+        self.type = data.get('type')
+        self.name = data['name']
+        self._metadata = data.get('metadata', {})
+
+    @property
+    def filepath(self):
+        return self.file_cache.filepath
+
+    @property
+    def metadata(self):
+        metadata = {
+            '.library_id': self.file_cache.library_cache.id,
+            '.filepath': self.filepath
+        }
+
+        metadata.update(self._metadata)
+
+        return metadata
+
+    @property
+    def norm_name(self):
+        return self.name.replace(' ', '_').lower()
+
+    def to_dict(self):
+        return dict(
+            catalog=self.catalog,
+            author=self.author,
+            metadata=self.metadata,
+            description=self.description,
+            tags=self.tags,
+            type=self.type,
+            name=self.name
+        )
+
+    def __str__(self):
+        return f'AssetCache(name={self.name}, type={self.type}, catalog={self.catalog})'
+
+
+class FileCache:
+    """All asset caches contained in a single scanned file."""
+
+    def __init__(self, library_cache, data):
+
+        self.library_cache = library_cache
+        self.filepath = data['filepath']
+        self.modified = data.get('modified', time.time_ns())
+
+        self._data = []
+
+        for asset_cache_data in data.get('assets', []):
+            self.add(asset_cache_data)
+
+    def add(self, asset_cache_data):
+        asset_cache = AssetCache(self, asset_cache_data)
+        self._data.append(asset_cache)
+
+    def to_dict(self):
+        return dict(
+            filepath=Path(self.filepath).as_posix(),
+            modified=self.modified,
+            library_id=self.library_cache.id,
+            assets=[asset_cache.to_dict() for asset_cache in self]
+        )
+
+    def __iter__(self):
+        return self._data.__iter__()
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __str__(self):
+        return f'FileCache(filepath={self.filepath})'
+
+
+class AssetCacheDiff:
+    """A single ADD/REMOVE/MODIFY operation on one asset cache."""
+
+    def __init__(self, library_diff, asset_cache, operation=None):
+
+        self.library_diff = library_diff
+        self.asset_cache = asset_cache
+        self.operation = operation
+
+
+class LibraryCacheDiff:
+    def __init__(self, filepath=None):
+
+        self.filepath = filepath
+        self._data = []
+
+    def add(self, asset_diff):
+        if not isinstance(asset_diff, AssetCacheDiff):
+            asset_diff = AssetCacheDiff(self, asset_diff)
+        self._data.append(asset_diff)
+
+    def set(self, asset_diffs):
+        for asset_diff in asset_diffs:
+            self.add(asset_diff)
+
+    def read(self):
+        print(f'Read cache from {self.filepath}')
+
+        for asset_diff_data in read_file(self.filepath):
+            self.add(asset_diff_data)
+
+        return self
+
+    def group_by(self, key):
+        '''Return groups of asset cache diffs using the key provided'''
+        data = sorted(self, key=key)
+        return groupby(data, key=key)
+
+    def __iter__(self):
+        return iter(self._data)
+
+    def __len__(self):
+        return len(self._data)
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+
+class LibraryCache:
+
+    def __init__(self, directory, id):
+
+        self.directory = directory
+        self.id = id
+        self._data = []
+
+    @classmethod
+    def from_library(cls, library):
+        return cls(library.library_path, library.id)
+
+    @property
+    def filename(self):
+        return f"blender_assets.{self.id}.json"
+
+    @property
+    def filepath(self):
+        """Get the filepath of the library json file relative to the library"""
+        return self.directory / self.filename
+
+    @property
+    def asset_caches(self):
+        '''Return an iterator over all asset caches of all file caches'''
+        return (asset_cache for file_cache in self for asset_cache in file_cache)
+
+    @property
+    def tmp_filepath(self):
+        return Path(bpy.app.tempdir) / self.filename
+
+    def read(self):
+        print(f'Read cache from {self.filepath}')
+
+        for file_cache_data in read_file(self.filepath):
+            self.add(file_cache_data)
+
+        return self
+
+    def write(self, tmp=False):
+        filepath = self.tmp_filepath if tmp else self.filepath
+
+        print(f'Write cache file to {filepath}')
+        write_file(filepath, [file_cache.to_dict() for file_cache in self])
+        return filepath
+
+    def add(self, file_cache_data):
+        file_cache = FileCache(self, file_cache_data)
+
+        self._data.append(file_cache)
+
+    def unflatten_cache(self, cache):
+        """Return a new unflattened list of asset data grouped by filepath"""
+
+        new_cache = []
+
+        cache = deepcopy(cache)
+
+        cache.sort(key=lambda x: x['filepath'])
+        groups = groupby(cache, key=lambda x: x['filepath'])
+
+        keys = ['filepath', 'modified', 'library_id']
+
+        for _, asset_datas in groups:
+            asset_datas = list(asset_datas)
+
+            asset_info = {k: asset_datas[0][k] for k in keys}
+            asset_info['assets'] = [{k: v for k, v in a.items() if k not in keys + ['operation']} for a in asset_datas]
+
+            new_cache.append(asset_info)
+
+        return new_cache
+
+    def diff(self, new_cache):
+        """Compare the library cache with its current state and return the cache differential"""
+
+        cache = self.read()
+
+        cache_dict = {f"{a.filepath}/{a.name}": a for a in cache.asset_caches}
+        new_cache_dict = {f"{a.filepath}/{a.name}": a for a in new_cache.asset_caches}
+
+        assets_added = [AssetCacheDiff(None, v, 'ADD') for k, v in new_cache_dict.items() if k not in cache_dict]
+        assets_removed = [AssetCacheDiff(None, v, 'REMOVE') for k, v in cache_dict.items() if k not in new_cache_dict]
+        assets_modified = [AssetCacheDiff(None, v, 'MODIFY') for k, v in cache_dict.items()
+                           if k in new_cache_dict and v.to_dict() != new_cache_dict[k].to_dict()]
+
+        if assets_added:
+            print(f'{len(assets_added)} Assets Added \n{tuple(a.asset_cache.name for a in assets_added[:10])}...\n')
+        if assets_removed:
+            print(f'{len(assets_removed)} Assets Removed \n{tuple(a.asset_cache.name for a in assets_removed[:10])}...\n')
+        if assets_modified:
+            print(f'{len(assets_modified)} Assets Modified \n{tuple(a.asset_cache.name for a in assets_modified[:10])}...\n')
+
+        cache_diff = LibraryCacheDiff()
+        cache_diff.set(assets_added + assets_removed + assets_modified)
+
+        if not len(cache_diff):
+            print('No change in the library')
+
+        return cache_diff
+
+    def __len__(self):
+        return len(self._data)
+
+    def __iter__(self):
+        return iter(self._data)
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __str__(self):
+        return f'LibraryCache(directory={self.directory}, id={self.id})'
+
+
+if __name__ == '__main__':
+    # Manual test: read the cache of the first environment library and compare it to a fresh scan.
+    prefs = bpy.context.preferences.addons['asset_library'].preferences
+
+    library = prefs.env_libraries[0]
+    library_cache = LibraryCache.from_library(library).read()
+
+    data = library.library_type.fetch()
+    print(data)
+
+    print(library_cache[0][0])
+
+    #library_cache.diff(library.library_type.fetch())
+
+    #print(library_cache[0])
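For reference, an assumed example of the document this cache serializes to, following the field names used by FileCache.to_dict() and AssetCache.to_dict() above; the values are illustrative only:

example_cache = [
    {
        "filepath": "props/furniture.blend",
        "modified": 1700000000000000000,
        "library_id": "env_library",
        "assets": [
            {
                "name": "chair",
                "type": "objects",
                "catalog": "props/furniture",
                "author": "",
                "description": "",
                "tags": ["wood"],
                "metadata": {},
            }
        ],
    }
]

import json
print(json.dumps(example_cache, indent=2))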