Implemented reporting callbacks.

Both the dependency Tracer class and the Packer class now support a
callback object; the Packer's callback class is a subclass of the Tracer's.

For file transfers running in a separate thread, there is a thread-safe
wrapper for progress callbacks. This wrapper can be called from any thread,
and calls the wrapped callback object from the main thread. This way the
callback implementation itself doesn't have to worry about threading
issues.
This commit is contained in:
Sybren A. Stüvel 2018-03-15 16:28:20 +01:00
parent d2e13750e9
commit 889f3abd25
10 changed files with 421 additions and 68 deletions

View File

@ -3,12 +3,13 @@ import enum
import functools import functools
import logging import logging
import pathlib import pathlib
import queue
import tempfile import tempfile
import typing import typing
from blender_asset_tracer import trace, bpathlib, blendfile from blender_asset_tracer import trace, bpathlib, blendfile
from blender_asset_tracer.trace import result from blender_asset_tracer.trace import result
from . import filesystem, transfer from . import filesystem, transfer, progress
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -51,6 +52,7 @@ class AssetAction:
""" """
class Packer: class Packer:
def __init__(self, def __init__(self,
blendfile: pathlib.Path, blendfile: pathlib.Path,
@ -62,6 +64,11 @@ class Packer:
self.target = target self.target = target
self.noop = noop self.noop = noop
# Set this to a custom Callback() subclass instance before calling
# strategise() to receive progress reports.
self._progress_cb = progress.Callback()
self._tscb = progress.ThreadSafeCallback(self._progress_cb)
self._exclude_globs = set() # type: typing.Set[str] self._exclude_globs = set() # type: typing.Set[str]
from blender_asset_tracer.cli import common from blender_asset_tracer.cli import common
@ -74,6 +81,7 @@ class Packer:
self._actions = collections.defaultdict(AssetAction) \ self._actions = collections.defaultdict(AssetAction) \
# type: typing.DefaultDict[pathlib.Path, AssetAction] # type: typing.DefaultDict[pathlib.Path, AssetAction]
self.missing_files = set() # type: typing.Set[pathlib.Path] self.missing_files = set() # type: typing.Set[pathlib.Path]
self._new_location_paths = set() # type: typing.Set[pathlib.Path]
self._output_path = None # type: pathlib.Path self._output_path = None # type: pathlib.Path
# Number of files we would copy, if not for --noop # Number of files we would copy, if not for --noop
@ -97,6 +105,16 @@ class Packer:
"""The path of the packed blend file in the target directory.""" """The path of the packed blend file in the target directory."""
return self._output_path return self._output_path
@property
def progress_cb(self) -> progress.Callback:
return self._progress_cb
@progress_cb.setter
def progress_cb(self, new_progress_cb: progress.Callback):
self._tscb.flush()
self._progress_cb = new_progress_cb
self._tscb = progress.ThreadSafeCallback(self._progress_cb)
def exclude(self, *globs: str): def exclude(self, *globs: str):
"""Register glob-compatible patterns of files that should be ignored.""" """Register glob-compatible patterns of files that should be ignored."""
self._exclude_globs.update(globs) self._exclude_globs.update(globs)
@ -115,51 +133,63 @@ class Packer:
bfile_pp = self.target / bfile_path.relative_to(self.project) bfile_pp = self.target / bfile_path.relative_to(self.project)
self._output_path = bfile_pp self._output_path = bfile_pp
self._progress_cb.pack_start()
act = self._actions[bfile_path] act = self._actions[bfile_path]
act.path_action = PathAction.KEEP_PATH act.path_action = PathAction.KEEP_PATH
act.new_path = bfile_pp act.new_path = bfile_pp
new_location_paths = set() self._new_location_paths = set()
for usage in trace.deps(self.blendfile): for usage in trace.deps(self.blendfile, self._progress_cb):
asset_path = usage.abspath asset_path = usage.abspath
if any(asset_path.match(glob) for glob in self._exclude_globs): if any(asset_path.match(glob) for glob in self._exclude_globs):
log.info('Excluding file: %s', asset_path) log.info('Excluding file: %s', asset_path)
continue continue
if not asset_path.exists(): if not asset_path.exists():
log.info('Missing file: %s', asset_path) log.warning('Missing file: %s', asset_path)
self.missing_files.add(asset_path) self.missing_files.add(asset_path)
self._progress_cb.missing_file(asset_path)
continue continue
bfile_path = usage.block.bfile.filepath.absolute() self._visit_asset(asset_path, usage)
# Needing rewriting is not a per-asset thing, but a per-asset-per- self._find_new_paths()
# blendfile thing, since different blendfiles can refer to it in
# different ways (for example with relative and absolute paths).
path_in_project = self._path_in_project(asset_path)
use_as_is = usage.asset_path.is_blendfile_relative() and path_in_project
needs_rewriting = not use_as_is
act = self._actions[asset_path]
assert isinstance(act, AssetAction)
act.usages.append(usage)
if needs_rewriting:
log.info('%s needs rewritten path to %s', bfile_path, usage.asset_path)
act.path_action = PathAction.FIND_NEW_LOCATION
new_location_paths.add(asset_path)
else:
log.debug('%s can keep using %s', bfile_path, usage.asset_path)
asset_pp = self.target / asset_path.relative_to(self.project)
act.new_path = asset_pp
self._find_new_paths(new_location_paths)
self._group_rewrites() self._group_rewrites()
def _find_new_paths(self, asset_paths: typing.Set[pathlib.Path]): def _visit_asset(self, asset_path: pathlib.Path, usage: result.BlockUsage):
"""Determine what to do with this asset.
Determines where this asset will be packed, whether it needs rewriting,
and records the blend file data block referring to it.
"""
bfile_path = usage.block.bfile.filepath.absolute()
self._progress_cb.trace_asset(asset_path)
# Needing rewriting is not a per-asset thing, but a per-asset-per-
# blendfile thing, since different blendfiles can refer to it in
# different ways (for example with relative and absolute paths).
path_in_project = self._path_in_project(asset_path)
use_as_is = usage.asset_path.is_blendfile_relative() and path_in_project
needs_rewriting = not use_as_is
act = self._actions[asset_path]
assert isinstance(act, AssetAction)
act.usages.append(usage)
if needs_rewriting:
log.info('%s needs rewritten path to %s', bfile_path, usage.asset_path)
act.path_action = PathAction.FIND_NEW_LOCATION
self._new_location_paths.add(asset_path)
else:
log.debug('%s can keep using %s', bfile_path, usage.asset_path)
asset_pp = self.target / asset_path.relative_to(self.project)
act.new_path = asset_pp
def _find_new_paths(self):
"""Find new locations in the BAT Pack for the given assets.""" """Find new locations in the BAT Pack for the given assets."""
for path in asset_paths: for path in self._new_location_paths:
act = self._actions[path] act = self._actions[path]
assert isinstance(act, AssetAction) assert isinstance(act, AssetAction)
# Like a join, but ignoring the fact that 'path' is absolute. # Like a join, but ignoring the fact that 'path' is absolute.
@ -198,6 +228,8 @@ class Packer:
self._rewrite_paths() self._rewrite_paths()
self._copy_files_to_target() self._copy_files_to_target()
self._progress_cb.pack_done(self.output_path, self.missing_files)
def _create_file_transferer(self) -> transfer.FileTransferer: def _create_file_transferer(self) -> transfer.FileTransferer:
"""Create a FileCopier(), can be overridden in a subclass.""" """Create a FileCopier(), can be overridden in a subclass."""
return filesystem.FileCopier() return filesystem.FileCopier()
@ -210,6 +242,7 @@ class Packer:
log.debug('Executing %d copy actions', len(self._actions)) log.debug('Executing %d copy actions', len(self._actions))
ft = self._create_file_transferer() ft = self._create_file_transferer()
ft.progress_cb = self._tscb
if not self.noop: if not self.noop:
ft.start() ft.start()
@ -225,6 +258,8 @@ class Packer:
log.info('File transfer interrupted with Ctrl+C, aborting.') log.info('File transfer interrupted with Ctrl+C, aborting.')
ft.abort_and_join() ft.abort_and_join()
raise raise
finally:
self._tscb.flush()
def _rewrite_paths(self) -> None: def _rewrite_paths(self) -> None:
"""Rewrite paths to the new location of the assets. """Rewrite paths to the new location of the assets.
@ -293,6 +328,8 @@ class Packer:
# Make sure we close the file, otherwise changes may not be # Make sure we close the file, otherwise changes may not be
# flushed before it gets copied. # flushed before it gets copied.
if bfile.is_modified:
self._progress_cb.rewrite_blendfile(bfile_path)
bfile.close() bfile.close()
def _copy_asset_and_deps(self, asset_path: pathlib.Path, action: AssetAction, def _copy_asset_and_deps(self, asset_path: pathlib.Path, action: AssetAction,
@ -336,6 +373,7 @@ class Packer:
verb = 'move' if may_move else 'copy' verb = 'move' if may_move else 'copy'
log.debug('Queueing %s of %s', verb, asset_path) log.debug('Queueing %s of %s', verb, asset_path)
self._tscb.flush()
if may_move: if may_move:
ft.queue_move(asset_path, target) ft.queue_move(asset_path, target)
else: else:

View File

@ -7,17 +7,9 @@ from . import transfer
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class FileCopier(threading.Thread, transfer.FileTransferer): class FileCopier(transfer.FileTransferer):
"""Copies or moves files in source directory order.""" """Copies or moves files in source directory order."""
def __init__(self) -> None:
# Stupid Thread.__init__ doesn't call super().__init__(),
# so it doesn't get chained to transfer.FileTransferer.__init__().
# However, I want to have Thread as first subclass so that its
# start() and join() methods Just Work™.
threading.Thread.__init__(self)
transfer.FileTransferer.__init__(self)
def run(self) -> None: def run(self) -> None:
files_transferred = 0 files_transferred = 0
files_skipped = 0 files_skipped = 0
@ -29,8 +21,8 @@ class FileCopier(threading.Thread, transfer.FileTransferer):
for src, dst, act in self.iter_queue(): for src, dst, act in self.iter_queue():
try: try:
st_src = src.stat() # must exist, or it wouldn't be queued.
if dst.exists(): if dst.exists():
st_src = src.stat()
st_dst = dst.stat() st_dst = dst.stat()
if st_dst.st_size == st_src.st_size and st_dst.st_mtime >= st_src.st_mtime: if st_dst.st_size == st_src.st_size and st_dst.st_mtime >= st_src.st_mtime:
log.info('SKIP %s; already exists', src) log.info('SKIP %s; already exists', src)
@ -46,6 +38,7 @@ class FileCopier(threading.Thread, transfer.FileTransferer):
# TODO(Sybren): when we target Py 3.6+, remove the str() calls. # TODO(Sybren): when we target Py 3.6+, remove the str() calls.
tfunc = transfer_funcs[act] tfunc = transfer_funcs[act]
tfunc(str(src), str(dst)) # type: ignore tfunc(str(src), str(dst)) # type: ignore
self.report_transferred(st_src.st_size)
files_transferred += 1 files_transferred += 1
except Exception: except Exception:

View File

@ -0,0 +1,123 @@
"""Callback class definition for BAT Pack progress reporting."""
import threading
import functools
import logging
import pathlib
import queue
import typing
import blender_asset_tracer.trace.progress
log = logging.getLogger(__name__)
class Callback(blender_asset_tracer.trace.progress.Callback):
    """BAT Pack progress reporting.

    Every method body is a no-op, so subclasses only need to override the
    events they are interested in.
    """

    def pack_start(self) -> None:
        """Called when packing starts."""

    def pack_done(self,
                  output_blendfile: pathlib.Path,
                  missing_files: typing.Set[pathlib.Path]) -> None:
        """Called when packing is done.

        :param output_blendfile: Path of the packed blend file in the target.
        :param missing_files: The assets that could not be found on disk.
        """

    def trace_blendfile(self, filename: pathlib.Path) -> None:
        """Called for every blendfile opened when tracing dependencies."""

    def trace_asset(self, filename: pathlib.Path) -> None:
        """Called for every asset found when tracing dependencies.

        Note that this can also be a blend file.
        """

    def rewrite_blendfile(self, orig_filename: pathlib.Path) -> None:
        """Called for every rewritten blendfile."""

    def transfer_file(self, src: pathlib.Path, dst: pathlib.Path) -> None:
        """Called when a file transfer starts."""

    def transfer_file_skipped(self, src: pathlib.Path, dst: pathlib.Path) -> None:
        """Called when a file is skipped because it already exists."""

    def transfer_progress(self, total_bytes: int, transferred_bytes: int) -> None:
        """Called during file transfer, with per-pack info (not per file).

        :param total_bytes: The total amount of bytes to be transferred for
            the current packing operation. This can increase while transfer
            is happening, when more files are discovered (because transfer
            starts in a separate thread before all files are found).
        :param transferred_bytes: The total amount of bytes transferred for
            the current packing operation.
        """

    def missing_file(self, filename: pathlib.Path) -> None:
        """Called for every asset that does not exist on the filesystem."""
class ThreadSafeCallback(Callback):
    """Thread-safe wrapper for Callback instances.

    Calls made on the thread that constructed this wrapper are forwarded to
    the wrapped callback immediately; calls made on any other thread are
    queued until flush() is called. The queued calls are called in the same
    thread as the one calling flush().
    """

    def __init__(self, wrapped: Callback) -> None:
        super().__init__()
        self.log = log.getChild('ThreadSafeCallback')
        self.wrapped = wrapped

        # Thread-safe queue for passing progress reports on the main thread.
        self._reporting_queue = queue.Queue()  # type: queue.Queue[typing.Callable]
        self._main_thread_id = threading.get_ident()

    def _queue(self, func: typing.Callable, *args, **kwargs):
        """Forward the call directly on the main thread, else queue it."""
        partial = functools.partial(func, *args, **kwargs)

        if self._main_thread_id == threading.get_ident():
            partial()
        else:
            self._reporting_queue.put(partial)

    def pack_start(self) -> None:
        self._queue(self.wrapped.pack_start)

    def pack_done(self,
                  output_blendfile: pathlib.Path,
                  missing_files: typing.Set[pathlib.Path]) -> None:
        self._queue(self.wrapped.pack_done, output_blendfile, missing_files)

    def trace_blendfile(self, filename: pathlib.Path) -> None:
        self._queue(self.wrapped.trace_blendfile, filename)

    def trace_asset(self, filename: pathlib.Path) -> None:
        self._queue(self.wrapped.trace_asset, filename)

    def rewrite_blendfile(self, orig_filename: pathlib.Path) -> None:
        # Without this override the event would be silently dropped by the
        # no-op implementation inherited from Callback.
        self._queue(self.wrapped.rewrite_blendfile, orig_filename)

    def transfer_file(self, src: pathlib.Path, dst: pathlib.Path) -> None:
        self._queue(self.wrapped.transfer_file, src, dst)

    def transfer_file_skipped(self, src: pathlib.Path, dst: pathlib.Path) -> None:
        self._queue(self.wrapped.transfer_file_skipped, src, dst)

    def transfer_progress(self, total_bytes: int, transferred_bytes: int) -> None:
        self._queue(self.wrapped.transfer_progress, total_bytes, transferred_bytes)

    def missing_file(self, filename: pathlib.Path) -> None:
        self._queue(self.wrapped.missing_file, filename)

    def flush(self, timeout: typing.Optional[float] = None) -> None:
        """Call the queued calls, call this in the main thread.

        :param timeout: When given, block up to this many seconds waiting for
            each queued call; otherwise drain the queue without blocking.
        """
        while not self._reporting_queue.empty():
            try:
                call = self._reporting_queue.get(block=timeout is not None,
                                                 timeout=timeout)
            except queue.Empty:
                return

            try:
                call()
            except Exception:
                # Don't let the handling of one callback call
                # block the entire flush process.
                self.log.exception('Error calling %s', call)

View File

@ -52,20 +52,14 @@ class S3Packer(Packer):
return S3Transferrer(self.client) return S3Transferrer(self.client)
class S3Transferrer(threading.Thread, transfer.FileTransferer): class S3Transferrer(transfer.FileTransferer):
"""Copies or moves files in source directory order.""" """Copies or moves files in source directory order."""
class AbortUpload(Exception): class AbortUpload(Exception):
"""Raised from the upload callback to abort an upload.""" """Raised from the upload callback to abort an upload."""
def __init__(self, botoclient) -> None: def __init__(self, botoclient) -> None:
# Stupid Thread.__init__ doesn't call super().__init__(), super().__init__()
# so it doesn't get chained to transfer.FileTransferer.__init__().
# However, I want to have Thread as first subclass so that its
# start() and join() methods Just Work™.
threading.Thread.__init__(self)
transfer.FileTransferer.__init__(self)
self.client = botoclient self.client = botoclient
def run(self) -> None: def run(self) -> None:

View File

@ -4,8 +4,11 @@ import logging
import pathlib import pathlib
import queue import queue
import threading import threading
import time
import typing import typing
from . import progress
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -25,11 +28,16 @@ class Action(enum.Enum):
QueueItem = typing.Tuple[pathlib.Path, pathlib.Path, Action] QueueItem = typing.Tuple[pathlib.Path, pathlib.Path, Action]
class FileTransferer(metaclass=abc.ABCMeta): class FileTransferer(threading.Thread, metaclass=abc.ABCMeta):
"""Interface for file transfer classes.""" """Abstract superclass for file transfer classes.
Implement a run() function in a subclass that performs the actual file
transfer.
"""
def __init__(self) -> None: def __init__(self) -> None:
super().__init__() super().__init__()
self.log = log.getChild('FileTransferer')
# For copying in a different process. By using a priority queue the files # For copying in a different process. By using a priority queue the files
# are automatically sorted alphabetically, which means we go through all files # are automatically sorted alphabetically, which means we go through all files
@ -44,17 +52,35 @@ class FileTransferer(metaclass=abc.ABCMeta):
self.done = threading.Event() self.done = threading.Event()
self.abort = threading.Event() self.abort = threading.Event()
# Instantiate a dummy progress callback so that we can call it
# without checking for None all the time.
self.progress_cb = progress.ThreadSafeCallback(progress.Callback())
self.total_queued_bytes = 0
self.total_transferred_bytes = 0
@abc.abstractmethod
def run(self):
"""Perform actual file transfer in a thread."""
def queue_copy(self, src: pathlib.Path, dst: pathlib.Path): def queue_copy(self, src: pathlib.Path, dst: pathlib.Path):
"""Queue a copy action from 'src' to 'dst'.""" """Queue a copy action from 'src' to 'dst'."""
assert not self.done.is_set(), 'Queueing not allowed after done_and_join() was called' assert not self.done.is_set(), 'Queueing not allowed after done_and_join() was called'
assert not self.abort.is_set(), 'Queueing not allowed after abort_and_join() was called' assert not self.abort.is_set(), 'Queueing not allowed after abort_and_join() was called'
self.queue.put((src, dst, Action.COPY)) self.queue.put((src, dst, Action.COPY))
self.total_queued_bytes += src.stat().st_size
def queue_move(self, src: pathlib.Path, dst: pathlib.Path): def queue_move(self, src: pathlib.Path, dst: pathlib.Path):
"""Queue a move action from 'src' to 'dst'.""" """Queue a move action from 'src' to 'dst'."""
assert not self.done.is_set(), 'Queueing not allowed after done_and_join() was called' assert not self.done.is_set(), 'Queueing not allowed after done_and_join() was called'
assert not self.abort.is_set(), 'Queueing not allowed after abort_and_join() was called' assert not self.abort.is_set(), 'Queueing not allowed after abort_and_join() was called'
self.queue.put((src, dst, Action.MOVE)) self.queue.put((src, dst, Action.MOVE))
self.total_queued_bytes += src.stat().st_size
def report_transferred(self, block_size: int):
"""Report transfer of `block_size` bytes."""
self.total_transferred_bytes += block_size
self.progress_cb.transfer_progress(self.total_queued_bytes, self.total_transferred_bytes)
def done_and_join(self) -> None: def done_and_join(self) -> None:
"""Indicate all files have been queued, and wait until done. """Indicate all files have been queued, and wait until done.
@ -105,21 +131,29 @@ class FileTransferer(metaclass=abc.ABCMeta):
return return
try: try:
yield self.queue.get(timeout=0.1) src, dst, action = self.queue.get(timeout=0.1)
self.progress_cb.transfer_file(src, dst)
yield src, dst, action
except queue.Empty: except queue.Empty:
if self.done.is_set(): if self.done.is_set():
return return
@abc.abstractmethod def join(self, timeout: float = None) -> None:
def start(self) -> None: """Wait for the transfer to finish/stop."""
"""Starts the file transfer thread/process.
This could spin up a separate thread to perform the actual file if timeout:
transfer. After start() is called, implementations should still accept run_until = time.time() + timeout
calls to the queue_xxx() methods. In other words, this is not to be else:
used as queue-and-then-start, but as start-and-then-queue. run_until = float('inf')
"""
@abc.abstractmethod # We can't simply block the thread, we have to keep watching the
def join(self, timeout=None): # progress queue.
"""Wait for the thread/process to stop.""" while self.is_alive():
if time.time() > run_until:
self.log.warning('Timeout while waiting for transfer to finish')
return
self.progress_cb.flush(timeout=0.1)
# Since Thread.join() neither returns anything nor raises any exception
# when timing out, we don't even have to call it any more.

View File

@ -3,7 +3,7 @@ import pathlib
import typing import typing
from blender_asset_tracer import blendfile from blender_asset_tracer import blendfile
from . import result, blocks2assets, file2blocks from . import result, blocks2assets, file2blocks, progress
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -17,18 +17,24 @@ codes_to_skip = {
} }
def deps(bfilepath: pathlib.Path) -> typing.Iterator[result.BlockUsage]: def deps(bfilepath: pathlib.Path, progress_cb: typing.Optional[progress.Callback] = None) \
-> typing.Iterator[result.BlockUsage]:
"""Open the blend file and report its dependencies. """Open the blend file and report its dependencies.
:param bfilepath: File to open. :param bfilepath: File to open.
:param progress_cb: Progress callback object.
""" """
log.info('opening: %s', bfilepath) log.info('opening: %s', bfilepath)
bfile = blendfile.open_cached(bfilepath) bfile = blendfile.open_cached(bfilepath)
bi = file2blocks.BlockIterator()
if progress_cb:
bi.progress_cb = progress_cb
ahb = asset_holding_blocks(bi.iter_blocks(bfile))
# Sort the asset-holding blocks so that we can iterate over them # Sort the asset-holding blocks so that we can iterate over them
# in disk order, which is slightly faster than random order. # in disk order, which is slightly faster than random order.
ahb = asset_holding_blocks(file2blocks.iter_blocks(bfile))
for block in sorted(ahb): for block in sorted(ahb):
yield from blocks2assets.iter_assets(block) yield from blocks2assets.iter_assets(block)

View File

@ -11,13 +11,13 @@ import pathlib
import typing import typing
from blender_asset_tracer import blendfile, bpathlib from blender_asset_tracer import blendfile, bpathlib
from . import expanders from . import expanders, progress
_funcs_for_code = {} # type: typing.Dict[bytes, typing.Callable] _funcs_for_code = {} # type: typing.Dict[bytes, typing.Callable]
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class _BlockIterator: class BlockIterator:
"""Expand blocks with dependencies from other libraries. """Expand blocks with dependencies from other libraries.
This class exists so that we have some context for the recursive expansion This class exists so that we have some context for the recursive expansion
@ -31,12 +31,15 @@ class _BlockIterator:
# Queue of blocks to visit # Queue of blocks to visit
self.to_visit = collections.deque() # type: typing.Deque[blendfile.BlendFileBlock] self.to_visit = collections.deque() # type: typing.Deque[blendfile.BlendFileBlock]
self.progress_cb = progress.Callback()
def iter_blocks(self, def iter_blocks(self,
bfile: blendfile.BlendFile, bfile: blendfile.BlendFile,
limit_to: typing.Set[blendfile.BlendFileBlock] = set(), limit_to: typing.Set[blendfile.BlendFileBlock] = set(),
) -> typing.Iterator[blendfile.BlendFileBlock]: ) -> typing.Iterator[blendfile.BlendFileBlock]:
"""Expand blocks with dependencies from other libraries.""" """Expand blocks with dependencies from other libraries."""
self.progress_cb.trace_blendfile(bfile.filepath)
log.info('inspecting: %s', bfile.filepath) log.info('inspecting: %s', bfile.filepath)
if limit_to: if limit_to:
self._queue_named_blocks(bfile, limit_to) self._queue_named_blocks(bfile, limit_to)
@ -148,5 +151,5 @@ class _BlockIterator:
def iter_blocks(bfile: blendfile.BlendFile) -> typing.Iterator[blendfile.BlendFileBlock]: def iter_blocks(bfile: blendfile.BlendFile) -> typing.Iterator[blendfile.BlendFileBlock]:
"""Generator, yield all blocks in this file + required blocks in libs.""" """Generator, yield all blocks in this file + required blocks in libs."""
bi = _BlockIterator() bi = BlockIterator()
yield from bi.iter_blocks(bfile) yield from bi.iter_blocks(bfile)

View File

@ -0,0 +1,12 @@
"""Callback class definition for BAT Tracer progress reporting.
Mostly used to forward events to pack.progress.Callback.
"""
import pathlib
class Callback:
    """BAT Tracer progress reporting.

    The default implementation is a no-op; subclass and override to be
    notified of tracing events.
    """

    def trace_blendfile(self, filename: pathlib.Path) -> None:
        """Called for every blendfile opened when tracing dependencies."""

View File

@ -1,8 +1,12 @@
import logging import logging
import pathlib import pathlib
import typing
import tempfile import tempfile
from unittest import mock
from blender_asset_tracer import blendfile, pack, bpathlib from blender_asset_tracer import blendfile, pack, bpathlib
from blender_asset_tracer.pack import progress
from abstract_test import AbstractBlendFileTest from abstract_test import AbstractBlendFileTest
@ -31,6 +35,12 @@ class AbstractPackTest(AbstractBlendFileTest):
for path, action in packer._actions.items() for path, action in packer._actions.items()
if action.rewrites} if action.rewrites}
def outside_project(self) -> pathlib.Path:
"""Return the '_outside_project' path for files in self.blendfiles."""
# /tmp/target + /workspace/bat/tests/blendfiles → /tmp/target/workspace/bat/tests/blendfiles
extpath = pathlib.Path(self.tpath, '_outside_project', *self.blendfiles.parts[1:])
return extpath
class PackTest(AbstractPackTest): class PackTest(AbstractPackTest):
def test_strategise_no_rewrite_required(self): def test_strategise_no_rewrite_required(self):
@ -69,8 +79,7 @@ class PackTest(AbstractPackTest):
'textures/Bricks/brick_dotted_04-bump.jpg', 'textures/Bricks/brick_dotted_04-bump.jpg',
'textures/Bricks/brick_dotted_04-color.jpg', 'textures/Bricks/brick_dotted_04-color.jpg',
) )
# /tmp/target + /workspace/bat/tests/blendfiles → /tmp/target/workspace/bat/tests/blendfiles extpath = self.outside_project()
extpath = pathlib.Path(self.tpath, '_outside_project', *self.blendfiles.parts[1:])
act = packer._actions[ppath / 'doubly_linked_up.blend'] act = packer._actions[ppath / 'doubly_linked_up.blend']
self.assertEqual(pack.PathAction.KEEP_PATH, act.path_action, 'for doubly_linked_up.blend') self.assertEqual(pack.PathAction.KEEP_PATH, act.path_action, 'for doubly_linked_up.blend')
@ -245,3 +254,102 @@ class PackTest(AbstractPackTest):
self.tpath / self.blendfiles.name / infile.name, self.tpath / self.blendfiles.name / infile.name,
packer.output_path packer.output_path
) )
class ProgressTest(AbstractPackTest):
    """Tests for progress callback reporting during strategise() and execute()."""

    def test_strategise(self):
        """strategise() should report tracing progress but no transfers yet."""
        cb = mock.Mock(progress.Callback)
        infile = self.blendfiles / 'subdir/doubly_linked_up.blend'
        with pack.Packer(infile, self.blendfiles, self.tpath) as packer:
            packer.progress_cb = cb
            packer.strategise()

        self.assertEqual(1, cb.pack_start.call_count)
        self.assertEqual(0, cb.pack_done.call_count)

        # Every opened blendfile should be reported exactly once.
        expected_calls = [
            mock.call(self.blendfiles / 'subdir/doubly_linked_up.blend'),
            mock.call(self.blendfiles / 'linked_cube.blend'),
            mock.call(self.blendfiles / 'basic_file.blend'),
            mock.call(self.blendfiles / 'material_textures.blend'),
        ]
        cb.trace_blendfile.assert_has_calls(expected_calls, any_order=True)
        self.assertEqual(len(expected_calls), cb.trace_blendfile.call_count)

        # Every found asset (including linked blend files) should be reported.
        expected_calls = [
            mock.call(self.blendfiles / 'linked_cube.blend'),
            mock.call(self.blendfiles / 'basic_file.blend'),
            mock.call(self.blendfiles / 'material_textures.blend'),
            mock.call(self.blendfiles / 'textures/Bricks/brick_dotted_04-color.jpg'),
            mock.call(self.blendfiles / 'textures/Bricks/brick_dotted_04-bump.jpg'),
        ]
        cb.trace_asset.assert_has_calls(expected_calls, any_order=True)
        self.assertEqual(len(expected_calls), cb.trace_asset.call_count)

        # No rewriting or file transfer happens until execute() is called.
        self.assertEqual(0, cb.rewrite_blendfile.call_count)
        self.assertEqual(0, cb.transfer_file.call_count)
        self.assertEqual(0, cb.transfer_file_skipped.call_count)
        self.assertEqual(0, cb.transfer_progress.call_count)
        self.assertEqual(0, cb.missing_file.call_count)

    def test_execute_with_rewrite(self):
        """execute() should report rewrites and transfers for each asset."""
        cb = mock.Mock(progress.Callback)
        infile = self.blendfiles / 'subdir/doubly_linked_up.blend'
        with pack.Packer(infile, infile.parent, self.tpath) as packer:
            packer.progress_cb = cb
            packer.strategise()
            packer.execute()

        self.assertEqual(1, cb.pack_start.call_count)
        self.assertEqual(1, cb.pack_done.call_count)

        # rewrite_blendfile should only be called when paths in a blendfile
        # are actually rewritten.
        cb.rewrite_blendfile.assert_called_with(self.blendfiles / 'subdir/doubly_linked_up.blend')
        self.assertEqual(1, cb.rewrite_blendfile.call_count)

        # mock.ANY is used for temporary files in temporary paths, because they
        # are hard to predict.
        extpath = self.outside_project()
        expected_calls = [
            mock.call(mock.ANY, self.tpath / 'doubly_linked_up.blend'),
            mock.call(mock.ANY, extpath / 'linked_cube.blend'),
            mock.call(mock.ANY, extpath / 'basic_file.blend'),
            mock.call(mock.ANY, extpath / 'material_textures.blend'),
            mock.call(self.blendfiles / 'textures/Bricks/brick_dotted_04-color.jpg',
                      extpath / 'textures/Bricks/brick_dotted_04-color.jpg'),
            mock.call(self.blendfiles / 'textures/Bricks/brick_dotted_04-bump.jpg',
                      extpath / 'textures/Bricks/brick_dotted_04-bump.jpg'),
        ]
        cb.transfer_file.assert_has_calls(expected_calls, any_order=True)
        self.assertEqual(len(expected_calls), cb.transfer_file.call_count)

        self.assertEqual(0, cb.transfer_file_skipped.call_count)
        self.assertGreaterEqual(cb.transfer_progress.call_count, 6,
                                'transfer_progress() should be called at least once per asset')
        self.assertEqual(0, cb.missing_file.call_count)

    def test_missing_files(self):
        """Missing assets should be reported via missing_file()."""
        cb = mock.Mock(progress.Callback)
        infile = self.blendfiles / 'missing_textures.blend'
        with pack.Packer(infile, self.blendfiles, self.tpath) as packer:
            packer.progress_cb = cb
            packer.strategise()
            packer.execute()

        self.assertEqual(1, cb.pack_start.call_count)
        self.assertEqual(1, cb.pack_done.call_count)
        cb.rewrite_blendfile.assert_not_called()
        cb.transfer_file.assert_called_with(infile, self.tpath / 'missing_textures.blend')
        self.assertEqual(0, cb.transfer_file_skipped.call_count)
        self.assertGreaterEqual(cb.transfer_progress.call_count, 1,
                                'transfer_progress() should be called at least once per asset')

        expected_calls = [
            mock.call(self.blendfiles / 'textures/HDRI/Myanmar/Golden Palace 2, Old Bagan-1k.exr'),
            mock.call(self.blendfiles / 'textures/Textures/Marble/marble_decoration-color.png'),
        ]
        cb.missing_file.assert_has_calls(expected_calls, any_order=True)
        self.assertEqual(len(expected_calls), cb.missing_file.call_count)

View File

@ -0,0 +1,42 @@
import threading
import time
import typing
import unittest
from pathlib import Path
from unittest import mock
from blender_asset_tracer.pack import progress
class ThreadedProgressTest(unittest.TestCase):
    """Tests that ThreadSafeCallback queues calls made from other threads."""

    def test_threaded_progress(self):
        cb = mock.Mock(progress.Callback)
        tscb = progress.ThreadSafeCallback(typing.cast(progress.Callback, cb))

        # Flushing an empty queue should be fast.
        before = time.time()
        tscb.flush(timeout=1)
        duration = time.time() - before
        self.assertLess(duration, 1)

        def thread():
            # These run on a non-main thread, so the wrapper must queue them
            # instead of calling the wrapped callback directly.
            tscb.pack_start()
            tscb.pack_done(Path('one'), {Path('two'), Path('three')})
            tscb.trace_asset(Path('four'))
            tscb.transfer_file(Path('five'), Path('six'))
            tscb.transfer_file_skipped(Path('seven'), Path('eight'))
            tscb.transfer_progress(327, 47)
            tscb.missing_file(Path('nine'))

        t = threading.Thread(target=thread)
        t.start()
        t.join(timeout=3)
        # Thread.join() times out silently; fail here with a clear message
        # instead of letting the assertions below fail with confusing
        # 'not called' errors.
        self.assertFalse(t.is_alive(), 'progress-reporting thread did not finish in time')

        tscb.flush(timeout=3)

        cb.pack_start.assert_called_with()
        cb.pack_done.assert_called_with(Path('one'), {Path('two'), Path('three')})
        cb.trace_asset.assert_called_with(Path('four'))
        cb.transfer_file.assert_called_with(Path('five'), Path('six'))
        cb.transfer_file_skipped.assert_called_with(Path('seven'), Path('eight'))
        cb.transfer_progress.assert_called_with(327, 47)
        cb.missing_file.assert_called_with(Path('nine'))