import argparse
import bpy
import sys
import json
from pathlib import Path
from time import time, sleep
from itertools import groupby
from asset_library.common.bl_utils import load_datablocks, col_as_asset
from asset_library.constants import ASSETLIB_FILENAME

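# Assumptions based on how the project helpers are used below: load_datablocks is
# expected to link the named datablocks of the given type from a blend file and
# return them in order (with None entries for names that were not found), and
# col_as_asset is expected to wrap a collection in a new collection marked as an
# asset and return that marked collection.
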
""" blender_assets.libs.json data Structure
|
|
[
|
|
{
|
|
'name': 'chars/main',
|
|
'id': '013562-56315-4563156-123',
|
|
'children':
|
|
[
|
|
{
|
|
'filepath' : '/z/...',
|
|
'name' : 'collection name',
|
|
'tags' : ['variation', 'machin', 'chose'],
|
|
'metadata' : {'filepath': '$PROJECT/...', 'version' : 'mushable'}
|
|
},
|
|
{
|
|
'filepath' : '/z/...',
|
|
},
|
|
],
|
|
},
|
|
]
|
|
"""
|
|
|
|
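# Note: of the fields documented above, this script reads each category's 'name'
# and 'id', and each child's 'filepath' and 'name', plus the optional
# 'description' and 'metadata' keys; 'tags' is part of the structure but is not
# consumed here.
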
def build_collection_blends(path, categories=None, clean=True):

    t0 = time()
    scn = bpy.context.scene
    scn.render.resolution_x = scn.render.resolution_y = 1000

    json_path = Path(path) / ASSETLIB_FILENAME
    if not json_path.exists():
        return

    # _col_datas = json.loads(json_path.read())[category]
    category_datas = json.loads(json_path.read_text())

    for category_data in category_datas:
        if categories and category_data['name'] not in categories:
            continue

        bpy.ops.wm.read_homefile(use_empty=True)

        #category_data = next(c for c in category_datas if c['name'] == category)
        #_col_datas = category_data['children']

        cat_name = category_data['name']
        build_path = Path(path) / cat_name / f'{cat_name}.blend'

        ## iterate over children grouped by source filepath
        col_datas = sorted(category_data['children'], key=lambda x: x['filepath'])
        for filepath, col_data_groups in groupby(col_datas, key=lambda x: x['filepath']):
            #f = Path(f)
            if not Path(filepath).exists():
                print(f'Does not exist: {filepath}')
                continue

            col_data_groups = list(col_data_groups)

            col_names = [a['name'] for a in col_data_groups]
            linked_cols = load_datablocks(filepath, col_names, link=True, type='collections')

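            # linked_cols is assumed to come back in the same order as col_names,
            # so index i maps each linked collection to its asset_data entry below.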
            for i, col in enumerate(linked_cols):
                # iterate over the linked collections and their associated data
                if not col:
                    continue
                asset_data = col_data_groups[i]

                ## asset_data -> {'filepath': str, 'tags': list, 'metadata': dict}

                ## Directly link as collection inside a marked collection with same name
                marked_col = col_as_asset(col, verbose=True)
                marked_col.asset_data.description = asset_data.get('description', '')
                marked_col.asset_data.catalog_id = category_data['id']  # assign catalog

                for k, v in asset_data.get('metadata', {}).items():
                    marked_col.asset_data[k] = v

                ## exclude collections and generate preview
                bpy.ops.ed.lib_id_generate_preview({"id": marked_col})  # preview gen
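                # Note: the positional context-override dict above is the pre-3.2
                # Blender operator call style; on Blender 3.2+ the equivalent call
                # would be (untested sketch):
                #   with bpy.context.temp_override(id=marked_col):
                #       bpy.ops.ed.lib_id_generate_preview()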
                vcol = bpy.context.view_layer.layer_collection.children[marked_col.name]
                vcol.exclude = True

        sleep(1.0)

        ## clear all linked libraries (can be very slow with a lot of objects...):
        if clean:
            print('Removing links...')
            for lib in reversed(bpy.data.libraries):
                bpy.data.libraries.remove(lib)

        # Create the intermediate directories
        build_path.parent.mkdir(parents=True, exist_ok=True)
        print('Saving to', build_path)
        bpy.ops.wm.save_as_mainfile(filepath=str(build_path), compress=False)

        print("build time:", f'{time() - t0:.1f}s')

    bpy.ops.wm.quit_blender()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='build_collection_blends',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('-path')  # Find/create the assetlib.json in a subfolder of libdir
    parser.add_argument('--category')  # Read the category from the json and link everything into the blend

    if '--' in sys.argv:
        index = sys.argv.index('--')
        sys.argv = [sys.argv[index-1], *sys.argv[index+1:]]

    args = parser.parse_args()
    # build_collection_blends() expects a list of category names, while argparse
    # provides a single 'category' value: wrap it so the keyword matches.
    categories = [args.category] if args.category else None
    build_collection_blends(args.path, categories=categories)
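    # Example headless invocation (hypothetical paths; everything after '--' is
    # forwarded to this script):
    #   blender -b --python build_collection_blends.py -- -path /z/asset_library --category chars/main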