Added block expansion

The expansion process follows pointers and library links to construct
the full set of actually-used data blocks. This set consists of all data
blocks in the initial blend file, and all *actually linked-to* data
blocks in linked blend files.

I've also removed non-recursive dependency listing.
This commit is contained in:
Sybren A. Stüvel 2018-03-02 13:47:24 +01:00
parent 677d388a15
commit 8009ff1e47
10 changed files with 471 additions and 117 deletions

View File

@ -300,7 +300,7 @@ class BlendFile:
try: try:
return self.block_from_addr[address] return self.block_from_addr[address]
except KeyError: except KeyError:
raise exceptions.SegmentationFault('address does not exist', address) raise exceptions.SegmentationFault('address does not exist', address) from None
class BlendFileBlock: class BlendFileBlock:
@ -375,6 +375,10 @@ class BlendFileBlock:
self.addr_old == other.addr_old and self.addr_old == other.addr_old and
self.bfile.filepath == other.bfile.filepath) self.bfile.filepath == other.bfile.filepath)
def __bool__(self) -> bool:
"""Data blocks are always True."""
return True
@property @property
def dna_type(self) -> dna.Struct: def dna_type(self) -> dna.Struct:
return self.bfile.structs[self.sdna_index] return self.bfile.structs[self.sdna_index]
@ -581,6 +585,8 @@ class BlendFileBlock:
fileobj = self.bfile.fileobj fileobj = self.bfile.fileobj
fileobj.seek(file_offset + ps * i, os.SEEK_SET) fileobj.seek(file_offset + ps * i, os.SEEK_SET)
address = endian.read_pointer(fileobj, ps) address = endian.read_pointer(fileobj, ps)
if address == 0:
continue
yield self.bfile.dereference_pointer(address) yield self.bfile.dereference_pointer(address)
def iter_fixed_array_of_pointers(self, path: dna.FieldPath) \ def iter_fixed_array_of_pointers(self, path: dna.FieldPath) \

View File

@ -1,10 +1,39 @@
import typing
from blender_asset_tracer import cdefs
from . import BlendFileBlock from . import BlendFileBlock
from .dna import FieldPath from .dna import FieldPath
def listbase(block: BlendFileBlock, next_path: FieldPath=b'next') -> BlendFileBlock: def listbase(block: BlendFileBlock, next_path: FieldPath = b'next') \
-> typing.Iterator[BlendFileBlock]:
"""Generator, yields all blocks in the ListBase linked list.""" """Generator, yields all blocks in the ListBase linked list."""
while block: while block:
yield block yield block
next_ptr = block[next_path] next_ptr = block[next_path]
block = block.bfile.find_block_from_address(next_ptr) block = block.bfile.find_block_from_address(next_ptr)
def sequencer_strips(sequence_editor: BlendFileBlock) \
        -> typing.Iterator[typing.Tuple[BlendFileBlock, int]]:
    """Generator, yield all sequencer strip blocks with their type number.

    Recurses into meta strips, yielding both the meta strip itself and the
    strips contained within it.

    See blender_asset_tracer.cdefs.SEQ_TYPE_xxx for the type numbers.
    """

    def iter_seqbase(seqbase) -> typing.Iterator[typing.Tuple[BlendFileBlock, int]]:
        # Walk one ListBase of strips; meta strips are both yielded and
        # descended into, so callers see every strip exactly once.
        for seq in listbase(seqbase):
            seq.refine_type(b'Sequence')
            seq_type = seq[b'type']
            yield seq, seq_type

            if seq_type == cdefs.SEQ_TYPE_META:
                # Recurse into this meta-sequence.
                subseq = seq.get_pointer((b'seqbase', b'first'))
                yield from iter_seqbase(subseq)

    sbase = sequence_editor.get_pointer((b'seqbase', b'first'))
    yield from iter_seqbase(sbase)

View File

@ -55,4 +55,5 @@ PTCACHE_EXT = b'.bphys'
PTCACHE_PATH = b'blendcache_' PTCACHE_PATH = b'blendcache_'
# BKE_node.h # BKE_node.h
SH_NODE_TEX_IMAGE = 143
CMP_NODE_R_LAYERS = 221 CMP_NODE_R_LAYERS = 221

View File

@ -4,8 +4,8 @@ import logging
import pathlib import pathlib
import sys import sys
from . import common
from blender_asset_tracer import tracer from blender_asset_tracer import tracer
from . import common
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -16,8 +16,6 @@ def add_parser(subparsers):
parser = subparsers.add_parser('list', help=__doc__) parser = subparsers.add_parser('list', help=__doc__)
parser.set_defaults(func=cli_list) parser.set_defaults(func=cli_list)
parser.add_argument('blendfile', type=pathlib.Path) parser.add_argument('blendfile', type=pathlib.Path)
common.add_flag(parser, 'nonrecursive',
help='Limit to direct dependencies of the named blend file')
common.add_flag(parser, 'json', help='Output as JSON instead of human-readable text') common.add_flag(parser, 'json', help='Output as JSON instead of human-readable text')
@ -27,14 +25,13 @@ def cli_list(args):
log.fatal('File %s does not exist', args.blendfile) log.fatal('File %s does not exist', args.blendfile)
return 3 return 3
recursive = not args.nonrecursive
if args.json: if args.json:
report_json(bpath, recursive) report_json(bpath)
else: else:
report_text(bpath, recursive) report_text(bpath)
def report_text(bpath, recursive): def report_text(bpath):
reported_assets = set() reported_assets = set()
last_reported_bfile = None last_reported_bfile = None
cwd = pathlib.Path.cwd() cwd = pathlib.Path.cwd()
@ -46,7 +43,7 @@ def report_text(bpath, recursive):
except ValueError: except ValueError:
return somepath return somepath
for usage in tracer.deps(bpath, recursive=recursive): for usage in tracer.deps(bpath):
filepath = usage.block.bfile.filepath.absolute() filepath = usage.block.bfile.filepath.absolute()
if filepath != last_reported_bfile: if filepath != last_reported_bfile:
print(shorten(filepath)) print(shorten(filepath))
@ -71,13 +68,13 @@ class JSONSerialiser(json.JSONEncoder):
return super().default(o) return super().default(o)
def report_json(bpath, recursive): def report_json(bpath):
import collections import collections
# Mapping from blend file to its dependencies. # Mapping from blend file to its dependencies.
report = collections.defaultdict(set) report = collections.defaultdict(set)
for usage in tracer.deps(bpath, recursive=recursive): for usage in tracer.deps(bpath):
filepath = usage.block.bfile.filepath.absolute() filepath = usage.block.bfile.filepath.absolute()
for assetpath in usage.files(): for assetpath in usage.files():
assetpath = assetpath.resolve() assetpath = assetpath.resolve()

View File

@ -3,7 +3,7 @@ import pathlib
import typing import typing
from blender_asset_tracer import blendfile, bpathlib from blender_asset_tracer import blendfile, bpathlib
from . import result, blocks2assets from . import result, blocks2assets, file2blocks
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -17,77 +17,21 @@ codes_to_skip = {
} }
class _Tracer: def deps(bfilepath: pathlib.Path) -> typing.Iterator[result.BlockUsage]:
"""Trace dependencies with protection against infinite loops.
Don't use this directly, use the function deps(...) instead.
"""
def __init__(self):
self.seen_files = set()
def deps(self, bfilepath: pathlib.Path, recursive=False) -> typing.Iterator[result.BlockUsage]:
"""Open the blend file and report its dependencies.
:param bfilepath: File to open.
:param recursive: Also report dependencies inside linked blend files.
"""
log.info('Tracing %s', bfilepath)
bfilepath = bfilepath.absolute().resolve()
self.seen_files.add(bfilepath)
recurse_into = []
with blendfile.BlendFile(bfilepath) as bfile:
for block in asset_holding_blocks(bfile):
yield from blocks2assets.iter_assets(block)
if recursive and block.code == b'LI':
recurse_into.append(block)
# Deal with recursion after we've handled all dependencies of the
# current file, so that file access isn't interleaved and all deps
# of one file are reported before moving to the next.
for block in recurse_into:
yield from self._recurse_deps(block)
def _recurse_deps(self, lib_block: blendfile.BlendFileBlock) \
-> typing.Iterator[result.BlockUsage]:
"""Call deps() on the file linked from the library block."""
if lib_block.code != b'LI':
raise ValueError('Expected LI block, not %r' % lib_block)
relpath = bpathlib.BlendPath(lib_block[b'name'])
abspath = lib_block.bfile.abspath(relpath)
# Convert bytes to pathlib.Path object so we have a nice interface to work with.
path = abspath.to_path()
try:
path = path.resolve()
except FileNotFoundError:
log.warning('Linked blend file %s (%s) does not exist; skipping.', relpath, path)
return
# Avoid infinite recursion.
if path in self.seen_files:
log.debug('ignoring file, already seen %s', path)
return
yield from self.deps(path, recursive=True)
def deps(bfilepath: pathlib.Path, recursive=False) -> typing.Iterator[result.BlockUsage]:
"""Open the blend file and report its dependencies. """Open the blend file and report its dependencies.
:param bfilepath: File to open. :param bfilepath: File to open.
:param recursive: Also report dependencies inside linked blend files.
""" """
tracer = _Tracer() with blendfile.BlendFile(bfilepath) as bfile:
yield from tracer.deps(bfilepath, recursive=recursive) for block in asset_holding_blocks(file2blocks.iter_blocks(bfile)):
yield from blocks2assets.iter_assets(block)
def asset_holding_blocks(bfile: blendfile.BlendFile) -> typing.Iterator[blendfile.BlendFileBlock]: def asset_holding_blocks(blocks: typing.Iterable[blendfile.BlendFileBlock]) \
-> typing.Iterator[blendfile.BlendFileBlock]:
"""Generator, yield data blocks that could reference external assets.""" """Generator, yield data blocks that could reference external assets."""
for block in bfile.blocks: for block in blocks:
assert isinstance(block, blendfile.BlendFileBlock) assert isinstance(block, blendfile.BlendFileBlock)
code = block.code code = block.code

View File

@ -140,41 +140,26 @@ def scene(block: blendfile.BlendFileBlock) -> typing.Iterator[result.BlockUsage]
single_asset_types = {cdefs.SEQ_TYPE_MOVIE, cdefs.SEQ_TYPE_SOUND_RAM, cdefs.SEQ_TYPE_SOUND_HD} single_asset_types = {cdefs.SEQ_TYPE_MOVIE, cdefs.SEQ_TYPE_SOUND_RAM, cdefs.SEQ_TYPE_SOUND_HD}
asset_types = single_asset_types.union({cdefs.SEQ_TYPE_IMAGE}) asset_types = single_asset_types.union({cdefs.SEQ_TYPE_IMAGE})
def iter_seqbase(seqbase) -> typing.Iterator[result.BlockUsage]: for seq, seq_type in iterators.sequencer_strips(block_ed):
"""Generate results from a ListBase of sequencer strips.""" if seq_type not in asset_types:
continue
for seq in iterators.listbase(seqbase): seq_strip = seq.get_pointer(b'strip')
seq.refine_type(b'Sequence') if seq_strip is None:
seq_type = seq[b'type'] continue
seq_stripdata = seq_strip.get_pointer(b'stripdata')
if seq_stripdata is None:
continue
if seq_type == cdefs.SEQ_TYPE_META: dirname, dn_field = seq_strip.get(b'dir', return_field=True)
# Recurse into this meta-sequence. basename, bn_field = seq_stripdata.get(b'name', return_field=True)
subseq = seq.get_pointer((b'seqbase', b'first')) asset_path = bpathlib.BlendPath(dirname) / basename
yield from iter_seqbase(subseq)
continue
if seq_type not in asset_types: is_sequence = seq_type not in single_asset_types
continue yield result.BlockUsage(seq, asset_path,
is_sequence=is_sequence,
seq_strip = seq.get_pointer(b'strip') path_dir_field=dn_field,
if seq_strip is None: path_base_field=bn_field)
continue
seq_stripdata = seq_strip.get_pointer(b'stripdata')
if seq_stripdata is None:
continue
dirname, dn_field = seq_strip.get(b'dir', return_field=True)
basename, bn_field = seq_stripdata.get(b'name', return_field=True)
asset_path = bpathlib.BlendPath(dirname) / basename
is_sequence = seq_type not in single_asset_types
yield result.BlockUsage(seq, asset_path,
is_sequence=is_sequence,
path_dir_field=dn_field,
path_base_field=bn_field)
sbase = block_ed.get_pointer((b'seqbase', b'first'))
yield from iter_seqbase(sbase)
@dna_code('SO') @dna_code('SO')

View File

@ -0,0 +1,230 @@
"""Low-level functions called by file2block.
Those can expand data blocks and yield their dependencies (e.g. other data
blocks necessary to render/display/work with the given data block).
"""
import logging
import typing
from blender_asset_tracer import blendfile, cdefs
from blender_asset_tracer.blendfile import iterators
_warned_about_types = set()
_funcs_for_code = {}
log = logging.getLogger(__name__)
def expand_block(block: blendfile.BlendFileBlock) -> typing.Iterator[blendfile.BlendFileBlock]:
    """Generator, yield the data blocks used by this data block.

    Dispatches to the expander registered (via @dna_code) for this block's
    two-letter code; blocks without an expander yield nothing.
    """
    try:
        expander = _funcs_for_code[block.code]
    except KeyError:
        # Warn only once per unknown block type, to keep the log readable.
        if block.code not in _warned_about_types:
            log.info('No expander implemented for block type %r', block.code.decode())
            _warned_about_types.add(block.code)
        return

    log.debug('Expanding block %r', block)
    # Filter out falsy blocks, i.e. None values.
    # Allowing expanders to yield None makes them more concise.
    yield from filter(None, expander(block))
def dna_code(block_code: str):
    """Decorator, marks decorated func as expander for that DNA code.

    The registered function will be looked up by the two-letter block code
    (encoded to bytes) when a block of that type is expanded.
    """
    assert isinstance(block_code, str)
    encoded = block_code.encode()

    def decorator(func):
        _funcs_for_code[encoded] = func
        return func

    return decorator
def _expand_generic_material(block: blendfile.BlendFileBlock):
    """Yield the materials in this block's material array ('mat'/'totcol')."""
    material_count = block.get(b'totcol')
    yield from block.iter_array_of_pointers(b'mat', material_count)
def _expand_generic_mtex(block: blendfile.BlendFileBlock):
    """Yield the texture and object referenced by each of the block's MTex slots."""
    for mtex in block.iter_fixed_array_of_pointers(b'mtex'):
        yield mtex.get_pointer(b'tex')
        yield mtex.get_pointer(b'object')
def _expand_generic_nodetree(block: blendfile.BlendFileBlock):
    """Yield each node of a node tree, plus the data block the node uses."""
    assert block.dna_type.dna_type_id == b'bNodeTree'

    nodes = block.get_pointer((b'nodes', b'first'))
    for node in iterators.listbase(nodes):
        # Render-layer nodes are deliberately skipped entirely.
        # NOTE(review): presumably because their 'id' points at a Scene,
        # which should not be pulled in as a dependency — confirm.
        if node[b'type'] == cdefs.CMP_NODE_R_LAYERS:
            continue
        yield node
        # The 'id' property points to whatever is used by the node
        # (like the image in an image texture node).
        yield node.get_pointer(b'id')
def _expand_generic_nodetree_id(block: blendfile.BlendFileBlock):
    """Expand the node tree this block points to via 'nodetree', if any."""
    ntree = block.get_pointer(b'nodetree', None)
    if ntree is None:
        return
    yield from _expand_generic_nodetree(ntree)
def _expand_generic_animdata(block: blendfile.BlendFileBlock):
    """Yield the Action used by this block's animation data ('adt'), if set."""
    block_adt = block.get_pointer(b'adt')
    if block_adt:
        yield block_adt.get_pointer(b'action')
    # TODO, NLA
@dna_code('AR')
def _expand_armature(block: blendfile.BlendFileBlock):
    """Expand an Armature data block: animation data only."""
    yield from _expand_generic_animdata(block)
@dna_code('CU')
def _expand_curve(block: blendfile.BlendFileBlock):
    """Expand a Curve data block: animdata, materials, fonts and helper objects."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_material(block)

    # Fonts (regular, bold, italic, bold-italic) plus the objects used for
    # bevel, taper, and text-on-curve.
    for fieldname in (b'vfont', b'vfontb', b'vfonti', b'vfontbi',
                      b'bevobj', b'taperobj', b'textoncurve'):
        yield block.get_pointer(fieldname)
@dna_code('GR')
def _expand_group(block: blendfile.BlendFileBlock):
    """Expand a Group data block: yield each object in the group."""
    first_gobject = block.get_pointer((b'gobject', b'first'))
    for gobject in iterators.listbase(first_gobject):
        yield gobject.get_pointer(b'ob')
@dna_code('LA')
def _expand_lamp(block: blendfile.BlendFileBlock):
    """Expand a Lamp data block: animdata, node tree and texture slots."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree_id(block)
    yield from _expand_generic_mtex(block)
@dna_code('MA')
def _expand_material(block: blendfile.BlendFileBlock):
    """Expand a Material: animdata, node tree, texture slots and its group."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree_id(block)
    yield from _expand_generic_mtex(block)

    yield block.get_pointer(b'group')
@dna_code('MB')
def _expand_metaball(block: blendfile.BlendFileBlock):
    """Expand a MetaBall data block: animdata and materials."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_material(block)
@dna_code('ME')
def _expand_mesh(block: blendfile.BlendFileBlock):
    """Expand a Mesh data block: animdata, materials and texture-space mesh."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_material(block)
    yield block.get_pointer(b'texcomesh')
    # TODO, TexFace? - it will be slow, we could simply ignore :S
@dna_code('NT')
def _expand_node_tree(block: blendfile.BlendFileBlock):
    """Expand a standalone node tree data block."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree(block)
@dna_code('OB')
def _expand_object(block: blendfile.BlendFileBlock):
    """Expand an Object: data, materials, dupli-group, proxies, pose, particles."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_material(block)

    # The object data (mesh/curve/lamp/... depending on the object's type).
    yield block.get_pointer(b'data')

    if block[b'transflag'] & cdefs.OB_DUPLIGROUP:
        yield block.get_pointer(b'dup_group')

    yield block.get_pointer(b'proxy')
    yield block.get_pointer(b'proxy_group')

    # Custom bone shapes: 'ob->pose->chanbase[...].custom'
    block_pose = block.get_pointer(b'pose')
    if block_pose:
        assert block_pose.dna_type.dna_type_id == b'bPose'
        channels = block_pose.get_pointer((b'chanbase', b'first'))
        for pose_chan in iterators.listbase(channels):
            yield pose_chan.get_pointer(b'custom')

    # Expand the object's 'ParticleSettings' via 'ob->particlesystem[...].part'
    psystems = block.get_pointer((b'particlesystem', b'first'))
    for psystem in iterators.listbase(psystems):
        yield psystem.get_pointer(b'part')
@dna_code('PA')
def _expand_particle_settings(block: blendfile.BlendFileBlock):
    """Expand ParticleSettings: animdata, textures, and the dupli group/object."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_mtex(block)

    # Only one of dup_group/dup_ob is relevant, depending on how the
    # particles are rendered ('ren_as').
    block_ren_as = block[b'ren_as']
    if block_ren_as == cdefs.PART_DRAW_GR:
        yield block.get_pointer(b'dup_group')
    elif block_ren_as == cdefs.PART_DRAW_OB:
        yield block.get_pointer(b'dup_ob')
@dna_code('SC')
def _expand_scene(block: blendfile.BlendFileBlock):
    """Expand a Scene: camera, world, set, clip, objects and sequencer strips."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree_id(block)
    yield block.get_pointer(b'camera')
    yield block.get_pointer(b'world')
    yield block.get_pointer(b'set', default=None)
    yield block.get_pointer(b'clip', default=None)

    # All objects in the scene, via its ListBase of Base structs.
    bases = block.get_pointer((b'base', b'first'))
    for base in iterators.listbase(bases):
        yield base.get_pointer(b'object')

    # Sequence Editor
    block_ed = block.get_pointer(b'ed')
    if not block_ed:
        return

    # Which strip field points at the used data block depends on the strip
    # type; strip types not listed here reference no data block.
    strip_type_to_field = {
        cdefs.SEQ_TYPE_SCENE: b'scene',
        cdefs.SEQ_TYPE_MOVIECLIP: b'clip',
        cdefs.SEQ_TYPE_MASK: b'mask',
        cdefs.SEQ_TYPE_SOUND_RAM: b'sound',
    }
    for strip, strip_type in iterators.sequencer_strips(block_ed):
        try:
            field_name = strip_type_to_field[strip_type]
        except KeyError:
            continue
        yield strip.get_pointer(field_name)
@dna_code('TE')
def _expand_texture(block: blendfile.BlendFileBlock):
    """Expand a Texture: animdata, node tree and its image."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree_id(block)
    yield block.get_pointer(b'ima')
@dna_code('WO')
def _expand_world(block: blendfile.BlendFileBlock):
    """Expand a World data block: animdata, node tree and texture slots."""
    yield from _expand_generic_animdata(block)
    yield from _expand_generic_nodetree_id(block)
    yield from _expand_generic_mtex(block)

View File

@ -0,0 +1,122 @@
"""Expand data blocks.
The expansion process follows pointers and library links to construct the full
set of actually-used data blocks. This set consists of all data blocks in the
initial blend file, and all *actually linked-to* data blocks in linked
blend files.
"""
import collections
import logging
import typing
from blender_asset_tracer import blendfile, bpathlib
from . import expanders
_funcs_for_code = {}
log = logging.getLogger(__name__)
class _BlockIterator:
    """Expand blocks with dependencies from other libraries.

    This class exists so that we have some context for the recursive expansion
    without having to pass those variables to each recursive call.
    """

    def __init__(self):
        # Set of (blend file Path, block address) of already-reported blocks.
        self.blocks_yielded = set()

        # Queue of blocks to visit
        self.to_visit = collections.deque()

    def iter_blocks(self,
                    bfile: blendfile.BlendFile,
                    limit_to: typing.Set[blendfile.BlendFileBlock] = frozenset(),
                    ) -> typing.Iterator[blendfile.BlendFileBlock]:
        """Expand blocks with dependencies from other libraries.

        :param bfile: the blend file whose blocks to yield.
        :param limit_to: set of ID blocks naming the blocks to yield; when
            empty, all non-DATA blocks of the file are yielded instead.
        """
        bpath = bfile.filepath
        root_dir = bpathlib.BlendPath(bpath.parent)

        # Mapping from library path to data blocks to expand.
        blocks_per_lib = collections.defaultdict(set)

        if limit_to:
            self._queue_named_blocks(bfile, limit_to)
        else:
            self._queue_all_blocks(bfile)

        while self.to_visit:
            block = self.to_visit.popleft()
            assert isinstance(block, blendfile.BlendFileBlock)
            # Skip blocks we already reported for this file.
            if (bpath, block.addr_old) in self.blocks_yielded:
                continue

            if block.code == b'ID':
                # ID blocks represent linked-in assets. Those are the ones that
                # should be loaded from their own blend file and "expanded" to
                # the entire set of data blocks required to render them. We
                # defer the handling of those so that we can work with one
                # blend file at a time.
                lib = block.get_pointer(b'lib')
                lib_bpath = bpathlib.BlendPath(lib[b'name']).absolute(root_dir)
                blocks_per_lib[lib_bpath].add(block)

                # The library block itself should also be reported, because it
                # represents a blend file that is a dependency as well.
                self.to_visit.append(lib)
                continue

            if limit_to:
                # We're limiting the blocks, so we have to expand them to make
                # sure we don't miss anything. Otherwise we're yielding the
                # entire file anyway, and no expansion is necessary.
                self._queue_dependencies(block)
            self.blocks_yielded.add((bpath, block.addr_old))
            yield block

        # We've gone through all the blocks in this file, now open the libraries
        # and iterate over the blocks referred there.
        for lib_bpath, idblocks in blocks_per_lib.items():
            lib_path = lib_bpath.to_path()
            log.debug('Expanding %d blocks in %s', len(idblocks), lib_path)
            # Recurse with limit_to set to exactly the ID blocks that were
            # linked in from this library.
            with blendfile.BlendFile(lib_path) as libfile:
                yield from self.iter_blocks(libfile, idblocks)

    def _queue_all_blocks(self, bfile: blendfile.BlendFile):
        """Queue every non-DATA block of the file for visiting."""
        log.debug('Queueing all blocks from file %s', bfile.filepath)
        # Don't bother visiting DATA blocks, as we won't know what
        # to do with them anyway.
        self.to_visit.extend(block for block in bfile.blocks
                             if block.code != b'DATA')

    def _queue_named_blocks(self,
                            bfile: blendfile.BlendFile,
                            limit_to: typing.Set[blendfile.BlendFileBlock]):
        """Queue only the blocks referred to in limit_to.

        :param bfile: the blend file to load the named blocks from.
        :param limit_to: set of ID blocks that name the blocks to queue.
            The queued blocks are loaded from the actual blend file, and
            selected by name.
        """
        for to_find in limit_to:
            assert to_find.code == b'ID'
            name_to_find = to_find[b'name']
            # The first two bytes of an ID name are its block code
            # (e.g. b'OB' for objects).
            code = name_to_find[:2]
            log.debug('Finding block %r with code %r', name_to_find, code)
            same_code = bfile.find_blocks_from_code(code)
            for block in same_code:
                if block.id_name == name_to_find:
                    log.debug('Queueing %r from file %s', block, bfile.filepath)
                    self.to_visit.append(block)

    def _queue_dependencies(self, block: blendfile.BlendFileBlock):
        """Queue all blocks this block depends on (via the expanders)."""
        self.to_visit.extend(expanders.expand_block(block))
def iter_blocks(bfile: blendfile.BlendFile) -> typing.Iterator[blendfile.BlendFileBlock]:
    """Generator, yield all blocks in this file + required blocks in libs."""
    return _BlockIterator().iter_blocks(bfile)

View File

@ -29,7 +29,7 @@ class AssetHoldingBlocksTest(AbstractTracerTest):
blocks_seen = 0 blocks_seen = 0
seen_scene = seen_ob = False seen_scene = seen_ob = False
for block in tracer.asset_holding_blocks(self.bf): for block in tracer.asset_holding_blocks(self.bf.blocks):
assert isinstance(block, blendfile.BlendFileBlock) assert isinstance(block, blendfile.BlendFileBlock)
blocks_seen += 1 blocks_seen += 1
@ -70,8 +70,8 @@ class DepsTest(AbstractTracerTest):
return None return None
return field.name.name_full.decode() return field.name.name_full.decode()
def assert_deps(self, blend_fname, expects: dict, recursive=False): def assert_deps(self, blend_fname, expects: dict):
for dep in tracer.deps(self.blendfiles / blend_fname, recursive=recursive): for dep in tracer.deps(self.blendfiles / blend_fname):
actual_type = dep.block.dna_type.dna_type_id.decode() actual_type = dep.block.dna_type.dna_type_id.decode()
actual_full_field = self.field_name(dep.path_full_field) actual_full_field = self.field_name(dep.path_full_field)
actual_dirname = self.field_name(dep.path_dir_field) actual_dirname = self.field_name(dep.path_dir_field)
@ -92,7 +92,7 @@ class DepsTest(AbstractTracerTest):
del expects[dep.block_name] del expects[dep.block_name]
# All expected uses should have been seen. # All expected uses should have been seen.
self.assertEqual({}, expects, 'Expected results were not seen.') self.assertEqual(expects, {}, 'Expected results were not seen.')
def test_no_deps(self): def test_no_deps(self):
self.assert_deps('basic_file.blend', {}) self.assert_deps('basic_file.blend', {})
@ -203,10 +203,14 @@ class DepsTest(AbstractTracerTest):
b'IMbrick_dotted_04-color': Expect( b'IMbrick_dotted_04-color': Expect(
'Image', 'name[1024]', None, None, 'Image', 'name[1024]', None, None,
b'//textures/Bricks/brick_dotted_04-color.jpg', False), b'//textures/Bricks/brick_dotted_04-color.jpg', False),
b'IMbuildings_roof_04-color': Expect(
'Image', 'name[1024]', None, None, # This data block is in the basic_file.blend file, but not used by
b'//textures/Textures/Buildings/buildings_roof_04-color.png', False), # any of the objects linked in from linked_cube.blend or
}, recursive=True) # doubly_linked.blend, hence it should *not* be reported:
# b'IMbuildings_roof_04-color': Expect(
# 'Image', 'name[1024]', None, None,
# b'//textures/Textures/Buildings/buildings_roof_04-color.png', False),
})
def test_sim_data(self): def test_sim_data(self):
self.assert_deps('T53562/bam_pack_bug.blend', { self.assert_deps('T53562/bam_pack_bug.blend', {
@ -222,7 +226,7 @@ class DepsTest(AbstractTracerTest):
try: try:
sys.setrecursionlimit(80) sys.setrecursionlimit(80)
# This should finish without hitting the recursion limit. # This should finish without hitting the recursion limit.
for _ in tracer.deps(infinite_bfile, recursive=True): for _ in tracer.deps(infinite_bfile):
pass pass
finally: finally:
sys.setrecursionlimit(reclim) sys.setrecursionlimit(reclim)

View File

@ -0,0 +1,36 @@
from blender_asset_tracer import blendfile
from blender_asset_tracer.tracer import file2blocks
from test_tracer import AbstractTracerTest
class File2BlocksTest(AbstractTracerTest):
    """Tests for file2blocks.iter_blocks() block expansion."""

    def test_id_blocks(self):
        # Blocks linked from libraries, directly or indirectly, must be found.
        self.bf = blendfile.BlendFile(self.blendfiles / 'doubly_linked.blend')

        foreign_blocks = {}
        for block in file2blocks.iter_blocks(self.bf):
            # Only register blocks from libraries.
            if block.bfile == self.bf:
                continue
            foreign_blocks[block.id_name] = block

        self.assertNotEqual({}, foreign_blocks)

        # It should find directly linked blocks (GRCubes and MABrick) as well
        # as indirectly linked (MECube³).
        self.assertIn(b'GRCubes', foreign_blocks)
        self.assertIn(b'MABrick', foreign_blocks)
        self.assertIn('MECube³'.encode(), foreign_blocks)
        self.assertIn('OBümlaut'.encode(), foreign_blocks)

    def test_circular_files(self):
        # Circularly-linked blend files must terminate, not recurse forever.
        self.bf = blendfile.BlendFile(self.blendfiles / 'recursive_dependency_1.blend')

        blocks = {}
        for block in file2blocks.iter_blocks(self.bf):
            blocks[block.id_name] = block

        self.assertNotEqual({}, blocks)
        self.assertIn(b'MAMaterial', blocks)
        self.assertIn(b'OBCube', blocks)
        self.assertIn(b'MECube', blocks)