Pack: always pack files, explode directories to a list of files
When an asset is represented as a directory in Blender (for example fluid simulation caches), that directory is traversed and each contained file is considered an asset. This makes things considerably easier for Shaman clients, as they need to compute the SHA256 checksum of each file. The logic to transform a directory path into a list of the contained files is now in BAT itself.
This commit is contained in:
parent
f9bc6f2d08
commit
606377180c
@ -554,9 +554,15 @@ class Packer:
|
||||
bfile.close()
|
||||
|
||||
def _copy_asset_and_deps(self, asset_path: pathlib.Path, action: AssetAction):
|
||||
asset_path_is_dir = asset_path.is_dir()
|
||||
|
||||
# Copy the asset itself, but only if it's not a sequence (sequences are
|
||||
# handled below in the for-loop).
|
||||
if "*" not in str(asset_path) and "<UDIM>" not in asset_path.name:
|
||||
if (
|
||||
"*" not in str(asset_path)
|
||||
and "<UDIM>" not in asset_path.name
|
||||
and not asset_path_is_dir
|
||||
):
|
||||
packed_path = action.new_path
|
||||
assert packed_path is not None
|
||||
read_path = action.read_from or asset_path
|
||||
@ -564,6 +570,11 @@ class Packer:
|
||||
read_path, packed_path, may_move=action.read_from is not None
|
||||
)
|
||||
|
||||
if asset_path_is_dir: # like 'some/directory':
|
||||
asset_base_path = asset_path
|
||||
else: # like 'some/directory/prefix_*.bphys':
|
||||
asset_base_path = asset_path.parent
|
||||
|
||||
# Copy its sequence dependencies.
|
||||
for usage in action.usages:
|
||||
if not usage.is_sequence:
|
||||
@ -571,14 +582,24 @@ class Packer:
|
||||
|
||||
first_pp = self._actions[usage.abspath].new_path
|
||||
assert first_pp is not None
|
||||
log.info(f"first_pp = {first_pp}")
|
||||
|
||||
# In case of globbing, we only support globbing by filename,
|
||||
# and not by directory.
|
||||
assert "*" not in str(first_pp) or "*" in first_pp.name
|
||||
|
||||
if asset_path_is_dir:
|
||||
packed_base_dir = first_pp
|
||||
else:
|
||||
packed_base_dir = first_pp.parent
|
||||
|
||||
for file_path in usage.files():
|
||||
packed_path = packed_base_dir / file_path.name
|
||||
# Compute the relative path, to support cases where asset_path
|
||||
# is `some/directory` and the to-be-copied file is in
|
||||
# `some/directory/subdir/filename.txt`.
|
||||
relpath = file_path.relative_to(asset_base_path)
|
||||
packed_path = packed_base_dir / relpath
|
||||
|
||||
# Assumption: assets in a sequence are never blend files.
|
||||
self._send_to_target(file_path, packed_path)
|
||||
|
||||
|
||||
@ -90,6 +90,8 @@ class FileTransferer(threading.Thread, metaclass=abc.ABCMeta):
|
||||
|
||||
def queue_copy(self, src: pathlib.Path, dst: pathlib.PurePath):
|
||||
"""Queue a copy action from 'src' to 'dst'."""
|
||||
if src.is_dir():
|
||||
raise TypeError(f"only files can be copied, not directories: {src}")
|
||||
assert (
|
||||
not self.done.is_set()
|
||||
), "Queueing not allowed after done_and_join() was called"
|
||||
@ -103,6 +105,8 @@ class FileTransferer(threading.Thread, metaclass=abc.ABCMeta):
|
||||
|
||||
def queue_move(self, src: pathlib.Path, dst: pathlib.PurePath):
|
||||
"""Queue a move action from 'src' to 'dst'."""
|
||||
if src.is_dir():
|
||||
raise TypeError(f"only files can be moved, not directories: {src}")
|
||||
assert (
|
||||
not self.done.is_set()
|
||||
), "Queueing not allowed after done_and_join() was called"
|
||||
|
||||
@ -56,7 +56,10 @@ def expand_sequence(path: pathlib.Path) -> typing.Iterator[pathlib.Path]:
|
||||
raise DoesNotExist(path)
|
||||
|
||||
if path.is_dir():
|
||||
yield path
|
||||
# Explode directory paths into separate files.
|
||||
for subpath in path.rglob("*"):
|
||||
if subpath.is_file():
|
||||
yield subpath
|
||||
return
|
||||
|
||||
log.debug("expanding file sequence %s", path)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user