Apply audit fixes for pack-sequence (14 findings, 28 new tests)
Cross-validation audit by Codex + Opus identified 14 issues: CRITICAL: - Fix _find_subdir_ci() prefix match accepting wrong folders (e.g. Publish_old). Now uses exact case-insensitive match. HIGH: - Add pack-sequence CLI subcommand with unambiguous arg parsing. Deprecate --sequence on pack (nargs='+' consumed the target). - Fix Windows UNC/drive path normalization in bpathlib.make_absolute() - Fix ZIP failure losing traceback + progress bar stuck on error - Fix os.path.commonpath crash on cross-drive paths (new derive_common_project helper) - Fix progress bar saturating at 40% during trace (log scale) MEDIUM: - Add per-shot error isolation in sequence scan (collect all errors) - Add memory warning for >50 files in sequence pack - Improve pack-info.txt with dedup stats - Wrap single Path in list for old operators (type consistency) LOW/INFO: - Document VERSION_RE pattern - Add TASK_CHOICE_ITEMS empty guard 28 new tests in tests/test_sequence_pack.py, 0 regressions. Co-Authored-By: Mario Hawat <mario@autourdeminuit.com> Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
05d97cdeda
commit
b8cfe5c1ac
@ -170,6 +170,22 @@ class BlendPath(bytes):
|
|||||||
return BlendPath(os.path.join(root, my_relpath))
|
return BlendPath(os.path.join(root, my_relpath))
|
||||||
|
|
||||||
|
|
||||||
|
def _is_windows_path(str_path: str) -> bool:
|
||||||
|
"""Check if path looks like a Windows absolute path (drive letter or UNC)."""
|
||||||
|
# Drive letter: C:/ or C:\
|
||||||
|
if (
|
||||||
|
len(str_path) >= 3
|
||||||
|
and str_path[0].isalpha()
|
||||||
|
and str_path[1] == ":"
|
||||||
|
and str_path[2] in "/\\"
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
# UNC: //server/share or \\server\share
|
||||||
|
if len(str_path) >= 2 and str_path[:2] in ("//", "\\\\"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def make_absolute(path: pathlib.PurePath) -> pathlib.Path:
|
def make_absolute(path: pathlib.PurePath) -> pathlib.Path:
|
||||||
"""Make the path absolute without resolving symlinks or drive letters.
|
"""Make the path absolute without resolving symlinks or drive letters.
|
||||||
|
|
||||||
@ -180,17 +196,23 @@ def make_absolute(path: pathlib.PurePath) -> pathlib.Path:
|
|||||||
- Windows Network shares that are mapped to a drive letter are NOT resolved
|
- Windows Network shares that are mapped to a drive letter are NOT resolved
|
||||||
to their UNC notation.
|
to their UNC notation.
|
||||||
|
|
||||||
|
On non-Windows platforms, Windows-style paths (drive letters and UNC) are
|
||||||
|
normalized without prepending the CWD, preserving them for cross-platform use.
|
||||||
|
|
||||||
The type of the returned path is determined by the current platform.
|
The type of the returned path is determined by the current platform.
|
||||||
"""
|
"""
|
||||||
str_path = path.as_posix()
|
str_path = path.as_posix()
|
||||||
if len(str_path) >= 2 and str_path[0].isalpha() and str_path[1] == ":":
|
|
||||||
# This is an absolute Windows path. It must be handled with care on non-Windows platforms.
|
if _is_windows_path(str_path) and platform.system() != "Windows":
|
||||||
if platform.system() != "Windows":
|
if len(str_path) >= 2 and str_path[1] == ":":
|
||||||
# Normalize the POSIX-like part of the path, but leave out the drive letter.
|
# Drive letter path: normalize the part after X:
|
||||||
non_drive_path = str_path[2:]
|
non_drive_path = str_path[2:]
|
||||||
normalized = os.path.normpath(non_drive_path)
|
normalized = os.path.normpath(non_drive_path)
|
||||||
# Stick the drive letter back on the normalized path.
|
|
||||||
return pathlib.Path(str_path[:2] + normalized)
|
return pathlib.Path(str_path[:2] + normalized)
|
||||||
|
else:
|
||||||
|
# UNC path: normalize, preserving forward-slash style
|
||||||
|
normalized = os.path.normpath(str_path.replace("\\", "/"))
|
||||||
|
return pathlib.Path(normalized)
|
||||||
|
|
||||||
return pathlib.Path(os.path.abspath(str_path))
|
return pathlib.Path(os.path.abspath(str_path))
|
||||||
|
|
||||||
|
|||||||
@ -79,6 +79,7 @@ def cli_main():
|
|||||||
|
|
||||||
blocks.add_parser(subparsers)
|
blocks.add_parser(subparsers)
|
||||||
pack.add_parser(subparsers)
|
pack.add_parser(subparsers)
|
||||||
|
pack.add_sequence_parser(subparsers)
|
||||||
list_deps.add_parser(subparsers)
|
list_deps.add_parser(subparsers)
|
||||||
version.add_parser(subparsers)
|
version.add_parser(subparsers)
|
||||||
|
|
||||||
|
|||||||
@ -104,12 +104,122 @@ def add_parser(subparsers):
|
|||||||
nargs="+",
|
nargs="+",
|
||||||
type=pathlib.Path,
|
type=pathlib.Path,
|
||||||
metavar="BLENDFILE",
|
metavar="BLENDFILE",
|
||||||
help="Pack multiple blend files together, deduplicating shared dependencies. "
|
help="(Deprecated: use 'pack-sequence' subcommand instead.) "
|
||||||
"All listed blend files and their dependencies are packed into the target.",
|
"Pack multiple blend files together, deduplicating shared dependencies.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def add_sequence_parser(subparsers):
|
||||||
|
"""Add argparser for the pack-sequence subcommand."""
|
||||||
|
|
||||||
|
parser = subparsers.add_parser(
|
||||||
|
"pack-sequence",
|
||||||
|
help="Pack multiple blend files together, deduplicating shared dependencies.",
|
||||||
|
)
|
||||||
|
parser.set_defaults(func=cli_pack_sequence)
|
||||||
|
parser.add_argument(
|
||||||
|
"blendfiles",
|
||||||
|
nargs="+",
|
||||||
|
type=pathlib.Path,
|
||||||
|
help="Blend files to pack.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-t",
|
||||||
|
"--target",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Target directory or ZIP file.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-p",
|
||||||
|
"--project",
|
||||||
|
type=pathlib.Path,
|
||||||
|
help="Root directory of your project.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-n",
|
||||||
|
"--noop",
|
||||||
|
default=False,
|
||||||
|
action="store_true",
|
||||||
|
help="Don't copy files, just show what would be done.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-e",
|
||||||
|
"--exclude",
|
||||||
|
nargs="*",
|
||||||
|
default="",
|
||||||
|
help="Space-separated list of glob patterns to exclude.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-c",
|
||||||
|
"--compress",
|
||||||
|
default=False,
|
||||||
|
action="store_true",
|
||||||
|
help="Compress blend files while copying.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-r",
|
||||||
|
"--relative-only",
|
||||||
|
default=False,
|
||||||
|
action="store_true",
|
||||||
|
help="Only pack assets referred to with a relative path.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def derive_common_project(bpaths: typing.List[pathlib.Path]) -> pathlib.Path:
|
||||||
|
"""Derive common project directory from multiple blend file paths.
|
||||||
|
|
||||||
|
Raises ValueError if paths span multiple drives or if common root
|
||||||
|
is the filesystem root.
|
||||||
|
"""
|
||||||
|
if len(bpaths) == 1:
|
||||||
|
return bpaths[0].parent
|
||||||
|
|
||||||
|
try:
|
||||||
|
ppath = pathlib.Path(os.path.commonpath([p.parent for p in bpaths]))
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError(
|
||||||
|
"Blend files span multiple drives or have no common path. "
|
||||||
|
"Specify the project directory explicitly with -p/--project."
|
||||||
|
)
|
||||||
|
|
||||||
|
if ppath == pathlib.Path(ppath.anchor):
|
||||||
|
raise ValueError(
|
||||||
|
"Computed project path is the filesystem root (%s). "
|
||||||
|
"Specify the project directory explicitly." % ppath
|
||||||
|
)
|
||||||
|
|
||||||
|
return ppath
|
||||||
|
|
||||||
|
|
||||||
|
def cli_pack_sequence(args):
|
||||||
|
"""CLI entry point for pack-sequence subcommand."""
|
||||||
|
# Synthesize args to reuse paths_from_cli and create_packer
|
||||||
|
args.blendfile = None
|
||||||
|
args.sequence = args.blendfiles
|
||||||
|
args.keep_hierarchy = True # always for sequence packing
|
||||||
|
if not hasattr(args, 'target') or args.target is None:
|
||||||
|
log.critical("No target specified. Use -t/--target.")
|
||||||
|
sys.exit(3)
|
||||||
|
|
||||||
|
bpaths, ppath, tpath = paths_from_cli(args)
|
||||||
|
|
||||||
|
with create_packer(args, bpaths, ppath, tpath) as packer:
|
||||||
|
packer.strategise()
|
||||||
|
try:
|
||||||
|
packer.execute()
|
||||||
|
except blender_asset_tracer.pack.transfer.FileTransferError as ex:
|
||||||
|
log.error(
|
||||||
|
"%d files couldn't be copied, starting with %s",
|
||||||
|
len(ex.files_remaining),
|
||||||
|
ex.files_remaining[0],
|
||||||
|
)
|
||||||
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
|
||||||
def cli_pack(args):
|
def cli_pack(args):
|
||||||
|
if args.sequence:
|
||||||
|
log.warning("--sequence on 'pack' is deprecated. Use 'bat pack-sequence -t TARGET FILE...' instead.")
|
||||||
bpaths, ppath, tpath = paths_from_cli(args)
|
bpaths, ppath, tpath = paths_from_cli(args)
|
||||||
|
|
||||||
with create_packer(args, bpaths, ppath, tpath) as packer:
|
with create_packer(args, bpaths, ppath, tpath) as packer:
|
||||||
@ -270,10 +380,11 @@ def paths_from_cli(args) -> typing.Tuple[typing.List[pathlib.Path], pathlib.Path
|
|||||||
tpath = args.target
|
tpath = args.target
|
||||||
|
|
||||||
if args.project is None:
|
if args.project is None:
|
||||||
if len(bpaths) == 1:
|
try:
|
||||||
ppath = bpaths[0].parent
|
ppath = derive_common_project(bpaths)
|
||||||
else:
|
except ValueError as ex:
|
||||||
ppath = pathlib.Path(os.path.commonpath([p.parent for p in bpaths]))
|
log.critical("%s", ex)
|
||||||
|
sys.exit(5)
|
||||||
log.warning("No project path given, using %s", ppath)
|
log.warning("No project path given, using %s", ppath)
|
||||||
else:
|
else:
|
||||||
ppath = bpathlib.make_absolute(args.project)
|
ppath = bpathlib.make_absolute(args.project)
|
||||||
|
|||||||
@ -12,6 +12,8 @@ from bpy_extras.io_utils import ExportHelper
|
|||||||
|
|
||||||
from blender_asset_tracer.pack import zipped, progress
|
from blender_asset_tracer.pack import zipped, progress
|
||||||
|
|
||||||
|
# Matches filenames like scene_v02.blend (single integer version only).
|
||||||
|
# Does NOT match: _v02.1.blend, _v2-1.blend, or files without _vNN suffix.
|
||||||
VERSION_RE = re.compile(r'_v(\d+)\.blend$', re.IGNORECASE)
|
VERSION_RE = re.compile(r'_v(\d+)\.blend$', re.IGNORECASE)
|
||||||
|
|
||||||
|
|
||||||
@ -34,11 +36,12 @@ class BlenderProgressCallback(progress.Callback):
|
|||||||
|
|
||||||
def trace_asset(self, filename):
|
def trace_asset(self, filename):
|
||||||
self._assets_traced += 1
|
self._assets_traced += 1
|
||||||
# During tracing we don't know the total, so just increment slowly
|
# Log scale so progress bar never saturates during trace phase
|
||||||
# Cap at 400 (40% of 1000) to leave room for the transfer phase
|
# 100 assets → ~185, 1000 → ~320, 10000 → ~385
|
||||||
val = min(self._assets_traced, 400)
|
import math
|
||||||
self._wm.progress_update(val)
|
val = int(400 * (1 - 1 / (1 + math.log1p(self._assets_traced) / 10)))
|
||||||
if self._assets_traced % 20 == 0:
|
self._wm.progress_update(min(val, 399))
|
||||||
|
if self._assets_traced % 100 == 0:
|
||||||
print("[BAT] Traced %d assets..." % self._assets_traced)
|
print("[BAT] Traced %d assets..." % self._assets_traced)
|
||||||
|
|
||||||
def transfer_file(self, src, dst):
|
def transfer_file(self, src, dst):
|
||||||
@ -80,16 +83,29 @@ class ExportBatPack(Operator, ExportHelper):
|
|||||||
outfname = bpy.path.ensure_ext(self.filepath, ".zip")
|
outfname = bpy.path.ensure_ext(self.filepath, ".zip")
|
||||||
self.report({"INFO"}, "Executing ZipPacker ...")
|
self.report({"INFO"}, "Executing ZipPacker ...")
|
||||||
|
|
||||||
progress_cb = BlenderProgressCallback(context.window_manager)
|
wm = context.window_manager
|
||||||
|
progress_cb = BlenderProgressCallback(wm)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with zipped.ZipPacker(
|
||||||
|
[Path(bpy.data.filepath)],
|
||||||
|
Path(bpy.data.filepath).parent,
|
||||||
|
str(self.filepath),
|
||||||
|
) as packer:
|
||||||
|
packer.progress_cb = progress_cb
|
||||||
|
packer.strategise()
|
||||||
|
packer.execute()
|
||||||
|
except Exception as ex:
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
self.report({"ERROR"}, "Packing failed: %s" % str(ex))
|
||||||
|
return {"CANCELLED"}
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
wm.progress_end()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
with zipped.ZipPacker(
|
|
||||||
Path(bpy.data.filepath),
|
|
||||||
Path(bpy.data.filepath).parent,
|
|
||||||
str(self.filepath),
|
|
||||||
) as packer:
|
|
||||||
packer.progress_cb = progress_cb
|
|
||||||
packer.strategise()
|
|
||||||
packer.execute()
|
|
||||||
self.report({"INFO"}, "Packing successful!")
|
self.report({"INFO"}, "Packing successful!")
|
||||||
|
|
||||||
with zipfile.ZipFile(str(self.filepath)) as inzip:
|
with zipfile.ZipFile(str(self.filepath)) as inzip:
|
||||||
@ -180,12 +196,24 @@ class BAT_OT_export_zip(Operator, ExportHelper):
|
|||||||
|
|
||||||
self.report({"INFO"}, "Packing with hierarchy...")
|
self.report({"INFO"}, "Packing with hierarchy...")
|
||||||
|
|
||||||
progress_cb = BlenderProgressCallback(context.window_manager)
|
wm = context.window_manager
|
||||||
|
progress_cb = BlenderProgressCallback(wm)
|
||||||
|
|
||||||
with packer_cls(bfile, project, target, keep_hierarchy=True) as packer:
|
try:
|
||||||
packer.progress_cb = progress_cb
|
with packer_cls([bfile], project, target, keep_hierarchy=True) as packer:
|
||||||
packer.strategise()
|
packer.progress_cb = progress_cb
|
||||||
packer.execute()
|
packer.strategise()
|
||||||
|
packer.execute()
|
||||||
|
except Exception as ex:
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
self.report({"ERROR"}, "Packing failed: %s" % str(ex))
|
||||||
|
return {"CANCELLED"}
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
wm.progress_end()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
if self.use_zip:
|
if self.use_zip:
|
||||||
with zipfile.ZipFile(target) as inzip:
|
with zipfile.ZipFile(target) as inzip:
|
||||||
@ -243,60 +271,73 @@ for _tpl in STUDIO_TEMPLATES.values():
|
|||||||
if item[0] not in _seen:
|
if item[0] not in _seen:
|
||||||
TASK_CHOICE_ITEMS.append(item)
|
TASK_CHOICE_ITEMS.append(item)
|
||||||
_seen.add(item[0])
|
_seen.add(item[0])
|
||||||
|
if not TASK_CHOICE_ITEMS:
|
||||||
|
TASK_CHOICE_ITEMS.append(('NONE', "None", "No task filter"))
|
||||||
|
|
||||||
|
|
||||||
def _find_subdir_ci(parent, name):
|
def _find_subdir_ci(parent, name):
|
||||||
"""Find a child directory matching `name` case-insensitively (prefix match)."""
|
"""Find a child directory matching `name` exactly (case-insensitive)."""
|
||||||
name_upper = name.upper()
|
name_upper = name.upper()
|
||||||
for child in parent.iterdir():
|
for child in parent.iterdir():
|
||||||
if child.is_dir() and child.name.upper().startswith(name_upper):
|
if child.is_dir() and child.name.upper() == name_upper:
|
||||||
return child
|
return child
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def find_latest_publishes(root_dir, template_key, task=''):
|
def find_latest_publishes(root_dir, template_key, task=''):
|
||||||
"""Scan a sequence folder for the latest .blend in each shot using the given template."""
|
"""Scan a sequence folder for the latest .blend in each shot using the given template.
|
||||||
|
|
||||||
|
Returns (results, errors) where results is a list of (shot_name, filepath) tuples
|
||||||
|
and errors is a list of (shot_name, error_message) tuples for shots that could not
|
||||||
|
be scanned.
|
||||||
|
"""
|
||||||
template = STUDIO_TEMPLATES.get(template_key)
|
template = STUDIO_TEMPLATES.get(template_key)
|
||||||
if not template:
|
if not template:
|
||||||
return []
|
return [], []
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
|
errors = []
|
||||||
root = Path(root_dir)
|
root = Path(root_dir)
|
||||||
if not root.is_dir():
|
if not root.is_dir():
|
||||||
return results
|
return results, errors
|
||||||
|
|
||||||
path_segments = [seg.replace("{task}", task) if "{task}" in seg else seg
|
path_segments = [
|
||||||
for seg in template['path_segments']]
|
seg.replace("{task}", task) if "{task}" in seg and task else seg
|
||||||
|
for seg in template['path_segments']
|
||||||
|
]
|
||||||
|
|
||||||
for shot_dir in sorted(root.iterdir()):
|
for shot_dir in sorted(root.iterdir()):
|
||||||
if not shot_dir.is_dir():
|
if not shot_dir.is_dir():
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Walk the template path segments from the shot directory
|
try:
|
||||||
current = shot_dir
|
# Walk the template path segments from the shot directory
|
||||||
for segment in path_segments:
|
current = shot_dir
|
||||||
current = _find_subdir_ci(current, segment)
|
for segment in path_segments:
|
||||||
|
current = _find_subdir_ci(current, segment)
|
||||||
|
if current is None:
|
||||||
|
break
|
||||||
if current is None:
|
if current is None:
|
||||||
break
|
|
||||||
if current is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Find the .blend with the highest _vNNN version number
|
|
||||||
best_version = -1
|
|
||||||
best_file = None
|
|
||||||
for f in current.iterdir():
|
|
||||||
if f.suffix.lower() != '.blend':
|
|
||||||
continue
|
continue
|
||||||
m = VERSION_RE.search(f.name)
|
|
||||||
if m:
|
|
||||||
ver = int(m.group(1))
|
|
||||||
if ver > best_version:
|
|
||||||
best_version = ver
|
|
||||||
best_file = f
|
|
||||||
if best_file:
|
|
||||||
results.append((shot_dir.name, best_file.absolute()))
|
|
||||||
|
|
||||||
return results
|
# Find the .blend with the highest _vNNN version number
|
||||||
|
best_version = -1
|
||||||
|
best_file = None
|
||||||
|
for f in current.iterdir():
|
||||||
|
if f.suffix.lower() != '.blend':
|
||||||
|
continue
|
||||||
|
m = VERSION_RE.search(f.name)
|
||||||
|
if m:
|
||||||
|
ver = int(m.group(1))
|
||||||
|
if ver > best_version:
|
||||||
|
best_version = ver
|
||||||
|
best_file = f
|
||||||
|
if best_file:
|
||||||
|
results.append((shot_dir.name, best_file.absolute()))
|
||||||
|
except OSError as ex:
|
||||||
|
errors.append((shot_dir.name, str(ex)))
|
||||||
|
|
||||||
|
return results, errors
|
||||||
|
|
||||||
|
|
||||||
class BAT_OT_scan_sequence(Operator):
|
class BAT_OT_scan_sequence(Operator):
|
||||||
@ -320,7 +361,12 @@ class BAT_OT_scan_sequence(Operator):
|
|||||||
|
|
||||||
wm.bat_sequence_dir = seq_dir
|
wm.bat_sequence_dir = seq_dir
|
||||||
wm.bat_sequence_files.clear()
|
wm.bat_sequence_files.clear()
|
||||||
publishes = find_latest_publishes(seq_dir, wm.bat_sequence_template, wm.bat_sequence_task)
|
publishes, scan_errors = find_latest_publishes(seq_dir, wm.bat_sequence_template, wm.bat_sequence_task)
|
||||||
|
|
||||||
|
if scan_errors:
|
||||||
|
error_summary = "; ".join("%s: %s" % (n, e) for n, e in scan_errors)
|
||||||
|
self.report({"ERROR"}, "Failed to scan %d shot(s): %s" % (len(scan_errors), error_summary))
|
||||||
|
return {"CANCELLED"}
|
||||||
|
|
||||||
if not publishes:
|
if not publishes:
|
||||||
self.report({"WARNING"}, "No published blend files found in %s" % seq_dir)
|
self.report({"WARNING"}, "No published blend files found in %s" % seq_dir)
|
||||||
@ -344,7 +390,7 @@ class BAT_OT_scan_sequence(Operator):
|
|||||||
|
|
||||||
|
|
||||||
class BAT_OT_sequence_pack(Operator, ExportHelper):
|
class BAT_OT_sequence_pack(Operator, ExportHelper):
|
||||||
"""Pack a sequence of shots: scan a folder for latest published blend files and pack them into a ZIP"""
|
"""Pack a sequence of shots with hierarchy preservation and shared asset deduplication"""
|
||||||
|
|
||||||
bl_idname = "bat.sequence_pack"
|
bl_idname = "bat.sequence_pack"
|
||||||
bl_label = "BAT - Pack Sequence"
|
bl_label = "BAT - Pack Sequence"
|
||||||
@ -403,7 +449,17 @@ class BAT_OT_sequence_pack(Operator, ExportHelper):
|
|||||||
return {"CANCELLED"}
|
return {"CANCELLED"}
|
||||||
|
|
||||||
target = bpy.path.ensure_ext(self.filepath, ".zip")
|
target = bpy.path.ensure_ext(self.filepath, ".zip")
|
||||||
project = Path(os.path.commonpath([p.parent for p in bpaths]))
|
|
||||||
|
try:
|
||||||
|
project = Path(os.path.commonpath([p.parent for p in bpaths]))
|
||||||
|
except ValueError:
|
||||||
|
self.report({"ERROR"},
|
||||||
|
"Blend files span multiple drives. Cannot determine project root.")
|
||||||
|
return {"CANCELLED"}
|
||||||
|
if project == Path(project.anchor):
|
||||||
|
self.report({"WARNING"},
|
||||||
|
"Project root is the filesystem root (%s). "
|
||||||
|
"Consider setting a Root directory." % project)
|
||||||
|
|
||||||
self.report({"INFO"}, "Packing %d blend files..." % len(bpaths))
|
self.report({"INFO"}, "Packing %d blend files..." % len(bpaths))
|
||||||
|
|
||||||
@ -415,8 +471,15 @@ class BAT_OT_sequence_pack(Operator, ExportHelper):
|
|||||||
packer.strategise()
|
packer.strategise()
|
||||||
packer.execute()
|
packer.execute()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
self.report({"ERROR"}, "Packing failed: %s" % str(ex))
|
self.report({"ERROR"}, "Packing failed: %s" % str(ex))
|
||||||
return {"CANCELLED"}
|
return {"CANCELLED"}
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
wm.progress_end()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
with zipfile.ZipFile(target) as inzip:
|
with zipfile.ZipFile(target) as inzip:
|
||||||
inzip.testzip()
|
inzip.testzip()
|
||||||
|
|||||||
@ -245,6 +245,13 @@ class Packer:
|
|||||||
in the execute() function.
|
in the execute() function.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if len(self.blendfiles) > 50:
|
||||||
|
log.info(
|
||||||
|
"Packing %d blend files. Peak memory may be high (~%.1f GB estimated).",
|
||||||
|
len(self.blendfiles),
|
||||||
|
len(self.blendfiles) * 0.005,
|
||||||
|
)
|
||||||
|
|
||||||
self._progress_cb.pack_start()
|
self._progress_cb.pack_start()
|
||||||
self._new_location_paths = set()
|
self._new_location_paths = set()
|
||||||
self._output_paths = []
|
self._output_paths = []
|
||||||
@ -656,12 +663,22 @@ class Packer:
|
|||||||
with infopath.open("wt", encoding="utf8") as infofile:
|
with infopath.open("wt", encoding="utf8") as infofile:
|
||||||
print("This is a Blender Asset Tracer pack.", file=infofile)
|
print("This is a Blender Asset Tracer pack.", file=infofile)
|
||||||
if len(self._output_paths) > 1:
|
if len(self._output_paths) > 1:
|
||||||
print("This pack contains the following blend files:", file=infofile)
|
print(
|
||||||
|
"This pack contains %d blend files:" % len(self._output_paths),
|
||||||
|
file=infofile,
|
||||||
|
)
|
||||||
for op in self._output_paths:
|
for op in self._output_paths:
|
||||||
print(
|
print(
|
||||||
" %s" % op.relative_to(self._target_path).as_posix(),
|
" %s" % op.relative_to(self._target_path).as_posix(),
|
||||||
file=infofile,
|
file=infofile,
|
||||||
)
|
)
|
||||||
|
print(
|
||||||
|
"Total unique assets: %d" % len(self._actions), file=infofile
|
||||||
|
)
|
||||||
|
if self.missing_files:
|
||||||
|
print(
|
||||||
|
"Missing files: %d" % len(self.missing_files), file=infofile
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
print("Start by opening the following blend file:", file=infofile)
|
print("Start by opening the following blend file:", file=infofile)
|
||||||
print(
|
print(
|
||||||
|
|||||||
@ -83,7 +83,7 @@ class ZipTransferrer(transfer.FileTransferer):
|
|||||||
except Exception:
|
except Exception:
|
||||||
# We have to catch exceptions in a broad way, as this is running in
|
# We have to catch exceptions in a broad way, as this is running in
|
||||||
# a separate thread, and exceptions won't otherwise be seen.
|
# a separate thread, and exceptions won't otherwise be seen.
|
||||||
log.exception("Error transferring %s to %s", src, dst)
|
log.exception("Error writing %s to ZIP archive at %s", src, dst)
|
||||||
# Put the files to copy back into the queue, and abort. This allows
|
# Put the files to copy back into the queue, and abort. This allows
|
||||||
# the main thread to inspect the queue and see which files were not
|
# the main thread to inspect the queue and see which files were not
|
||||||
# copied. The one we just failed (due to this exception) should also
|
# copied. The one we just failed (due to this exception) should also
|
||||||
|
|||||||
298
tests/test_sequence_pack.py
Normal file
298
tests/test_sequence_pack.py
Normal file
@ -0,0 +1,298 @@
|
|||||||
|
"""Tests for the pack-sequence branch features."""
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from blender_asset_tracer import bpathlib
|
||||||
|
|
||||||
|
# operators.py requires bpy (Blender Python), which isn't available outside Blender.
|
||||||
|
# We extract testable pure-Python functions by mocking bpy at import time.
|
||||||
|
_mock_bpy = types.ModuleType("bpy")
|
||||||
|
_mock_bpy.types = types.ModuleType("bpy.types")
|
||||||
|
_mock_bpy.props = types.ModuleType("bpy.props")
|
||||||
|
_mock_bpy.types.Operator = type("Operator", (), {})
|
||||||
|
_mock_bpy.types.PropertyGroup = type("PropertyGroup", (), {})
|
||||||
|
_mock_bpy.props.StringProperty = lambda **kw: ""
|
||||||
|
_mock_bpy.props.BoolProperty = lambda **kw: False
|
||||||
|
_mock_bpy.props.EnumProperty = lambda **kw: ""
|
||||||
|
_mock_bpy.props.CollectionProperty = lambda **kw: None
|
||||||
|
sys.modules.setdefault("bpy", _mock_bpy)
|
||||||
|
sys.modules.setdefault("bpy.types", _mock_bpy.types)
|
||||||
|
sys.modules.setdefault("bpy.props", _mock_bpy.props)
|
||||||
|
|
||||||
|
_mock_extras = types.ModuleType("bpy_extras")
|
||||||
|
_mock_io = types.ModuleType("bpy_extras.io_utils")
|
||||||
|
_mock_io.ExportHelper = type("ExportHelper", (), {})
|
||||||
|
sys.modules.setdefault("bpy_extras", _mock_extras)
|
||||||
|
sys.modules.setdefault("bpy_extras.io_utils", _mock_io)
|
||||||
|
|
||||||
|
from blender_asset_tracer.operators import (
|
||||||
|
_find_subdir_ci, find_latest_publishes, VERSION_RE,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# --- Fix #1: _find_subdir_ci exact match ---
|
||||||
|
|
||||||
|
class TestFindSubdirCI:
|
||||||
|
"""Test case-insensitive directory matching."""
|
||||||
|
|
||||||
|
def test_exact_case_match(self, tmp_path):
|
||||||
|
|
||||||
|
(tmp_path / "Publish").mkdir()
|
||||||
|
result = _find_subdir_ci(tmp_path, "Publish")
|
||||||
|
assert result is not None
|
||||||
|
assert result.name == "Publish"
|
||||||
|
|
||||||
|
def test_case_insensitive_match(self, tmp_path):
|
||||||
|
|
||||||
|
(tmp_path / "PUBLISH").mkdir()
|
||||||
|
result = _find_subdir_ci(tmp_path, "publish")
|
||||||
|
assert result is not None
|
||||||
|
assert result.name == "PUBLISH"
|
||||||
|
|
||||||
|
def test_no_prefix_fallback(self, tmp_path):
|
||||||
|
"""Prefix matches like 'Publish_old' must NOT match 'Publish'."""
|
||||||
|
|
||||||
|
(tmp_path / "Publish_old").mkdir()
|
||||||
|
result = _find_subdir_ci(tmp_path, "Publish")
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
def test_exact_wins_over_prefix(self, tmp_path):
|
||||||
|
|
||||||
|
(tmp_path / "Publish_old").mkdir()
|
||||||
|
(tmp_path / "Publish").mkdir()
|
||||||
|
result = _find_subdir_ci(tmp_path, "Publish")
|
||||||
|
assert result is not None
|
||||||
|
assert result.name == "Publish"
|
||||||
|
|
||||||
|
def test_missing_returns_none(self, tmp_path):
|
||||||
|
|
||||||
|
(tmp_path / "Other").mkdir()
|
||||||
|
result = _find_subdir_ci(tmp_path, "Publish")
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
def test_ignores_files(self, tmp_path):
|
||||||
|
|
||||||
|
(tmp_path / "Publish").touch() # file, not dir
|
||||||
|
result = _find_subdir_ci(tmp_path, "Publish")
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
# --- Fix #2: CLI pack-sequence parsing ---
|
||||||
|
|
||||||
|
class TestCLISequenceParsing:
|
||||||
|
"""Test the pack-sequence subcommand argument parsing."""
|
||||||
|
|
||||||
|
def test_pack_sequence_parses_correctly(self):
|
||||||
|
import argparse
|
||||||
|
from blender_asset_tracer.cli import pack as cli_pack
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
subparsers = parser.add_subparsers()
|
||||||
|
cli_pack.add_sequence_parser(subparsers)
|
||||||
|
|
||||||
|
args = parser.parse_args([
|
||||||
|
"pack-sequence", "-t", "output.zip",
|
||||||
|
"a.blend", "b.blend", "c.blend",
|
||||||
|
])
|
||||||
|
assert args.target == "output.zip"
|
||||||
|
assert len(args.blendfiles) == 3
|
||||||
|
assert args.blendfiles[0] == pathlib.Path("a.blend")
|
||||||
|
|
||||||
|
def test_pack_sequence_requires_target(self):
|
||||||
|
import argparse
|
||||||
|
from blender_asset_tracer.cli import pack as cli_pack
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
subparsers = parser.add_subparsers()
|
||||||
|
cli_pack.add_sequence_parser(subparsers)
|
||||||
|
|
||||||
|
with pytest.raises(SystemExit):
|
||||||
|
parser.parse_args(["pack-sequence", "a.blend", "b.blend"])
|
||||||
|
|
||||||
|
def test_pack_sequence_requires_files(self):
|
||||||
|
import argparse
|
||||||
|
from blender_asset_tracer.cli import pack as cli_pack
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
subparsers = parser.add_subparsers()
|
||||||
|
cli_pack.add_sequence_parser(subparsers)
|
||||||
|
|
||||||
|
with pytest.raises(SystemExit):
|
||||||
|
parser.parse_args(["pack-sequence", "-t", "output.zip"])
|
||||||
|
|
||||||
|
|
||||||
|
# --- Fix #3: Cross-platform path normalization ---
|
||||||
|
|
||||||
|
class TestCrossPlatformPaths:
|
||||||
|
"""Test Windows path handling on Linux."""
|
||||||
|
|
||||||
|
@pytest.mark.skipif(platform.system() == 'Windows', reason='Linux-only test')
|
||||||
|
def test_make_absolute_windows_drive_no_cwd_contamination(self):
|
||||||
|
p = pathlib.PurePosixPath('C:/Users/LaCabane/Projects/shot.blend')
|
||||||
|
result = bpathlib.make_absolute(p)
|
||||||
|
result_str = str(result)
|
||||||
|
assert 'C:' in result_str
|
||||||
|
# Must NOT prepend CWD
|
||||||
|
assert not result_str.startswith('/home')
|
||||||
|
assert not result_str.startswith('/tmp')
|
||||||
|
|
||||||
|
@pytest.mark.skipif(platform.system() == 'Windows', reason='Linux-only test')
|
||||||
|
def test_make_absolute_unc_preserved(self):
|
||||||
|
p = pathlib.PurePosixPath('//server/share/projects/shot.blend')
|
||||||
|
result = bpathlib.make_absolute(p)
|
||||||
|
result_str = str(result)
|
||||||
|
assert result_str.startswith('//')
|
||||||
|
|
||||||
|
@pytest.mark.skipif(platform.system() == 'Windows', reason='Linux-only test')
|
||||||
|
def test_make_absolute_windows_dotdot_normalized(self):
|
||||||
|
p = pathlib.PurePosixPath('C:/Users/LaCabane/../Shared/shot.blend')
|
||||||
|
result = bpathlib.make_absolute(p)
|
||||||
|
result_str = str(result)
|
||||||
|
assert '..' not in result_str
|
||||||
|
assert 'C:' in result_str
|
||||||
|
|
||||||
|
def test_is_windows_path_drive(self):
|
||||||
|
assert bpathlib._is_windows_path('C:/foo')
|
||||||
|
assert bpathlib._is_windows_path('D:\\bar')
|
||||||
|
|
||||||
|
def test_is_windows_path_unc(self):
|
||||||
|
assert bpathlib._is_windows_path('//server/share')
|
||||||
|
assert bpathlib._is_windows_path('\\\\server\\share')
|
||||||
|
|
||||||
|
def test_is_windows_path_negative(self):
|
||||||
|
assert not bpathlib._is_windows_path('/home/user')
|
||||||
|
assert not bpathlib._is_windows_path('relative/path')
|
||||||
|
|
||||||
|
|
||||||
|
# --- Fix #5: derive_common_project ---
|
||||||
|
|
||||||
|
class TestDeriveCommonProject:
    """Test project root derivation from blend file paths."""

    def test_single_file(self, tmp_path):
        """With a single file, the project root is its parent directory."""
        from blender_asset_tracer.cli.pack import derive_common_project

        blendfile = tmp_path / "project" / "shot.blend"
        blendfile.parent.mkdir(parents=True)
        blendfile.touch()
        assert derive_common_project([blendfile]) == blendfile.parent

    def test_multiple_files_common_parent(self, tmp_path):
        """Several files map to their deepest common ancestor directory."""
        from blender_asset_tracer.cli.pack import derive_common_project

        shot_a = tmp_path / "shots" / "sq01" / "a.blend"
        shot_b = tmp_path / "shots" / "sq02" / "b.blend"
        for blendfile in (shot_a, shot_b):
            blendfile.parent.mkdir(parents=True)
            blendfile.touch()
        assert derive_common_project([shot_a, shot_b]) == tmp_path / "shots"

    @pytest.mark.skipif(platform.system() != 'Windows', reason='Windows cross-drive test')
    def test_cross_drive_raises(self):
        """Files on different drives have no common root and must raise."""
        from blender_asset_tracer.cli.pack import derive_common_project

        on_c = pathlib.Path("C:/shots/a.blend")
        on_d = pathlib.Path("D:/shots/b.blend")
        with pytest.raises(ValueError, match="multiple drives"):
            derive_common_project([on_c, on_d])
# --- Fix #6: Progress bar log scaling ---
class TestProgressScaling:
    """Test that progress bar doesn't saturate.

    The pack UI maps an asset count n onto a 0-400 progress range with a
    log scale so that large traces approach, but never reach, the ceiling:

        value = int(400 * (1 - 1 / (1 + log1p(n) / 10)))
    """

    @staticmethod
    def _scaled(asset_count):
        # Single source of truth for the formula under test; it was
        # previously copy-pasted into every test method, so a typo in one
        # copy would have silently diverged from the others.
        return int(400 * (1 - 1 / (1 + math.log1p(asset_count) / 10)))

    def test_100_assets_below_200(self):
        assert self._scaled(100) < 200

    def test_1000_assets_below_350(self):
        assert self._scaled(1000) < 350

    def test_10000_assets_below_400(self):
        assert self._scaled(10000) < 400

    def test_monotonically_increasing(self):
        # The mapping must never move the bar backwards as assets grow.
        prev = 0
        for n in [1, 10, 100, 1000, 10000]:
            val = self._scaled(n)
            assert val >= prev
            prev = val
# --- Fix #7: Scan error collection ---
class TestScanErrorCollection:
    """Test that scan errors are collected, not silently skipped."""

    def test_unreadable_shot_collected_as_error(self, tmp_path):
        """An unreadable shot must land in the error list, not disappear."""
        import os

        # Create a root with one readable and one unreadable shot
        root = tmp_path / "sequence"
        root.mkdir()
        (root / "shot01" / "03_ANIMATION" / "Publish").mkdir(parents=True)
        blend = root / "shot01" / "03_ANIMATION" / "Publish" / "scene_v01.blend"
        blend.touch()

        # Make shot02 unreadable
        bad_shot = root / "shot02"
        bad_shot.mkdir()
        bad_shot.chmod(0o000)

        # chmod(0o000) is a no-op for root and for directories on Windows;
        # without a truly unreadable directory the scan cannot fail, so the
        # assertion below would be a false negative. Skip in that case.
        if os.access(str(bad_shot), os.R_OK | os.X_OK):
            bad_shot.chmod(0o755)
            pytest.skip("cannot create an unreadable directory on this platform/user")

        try:
            publishes, errors = find_latest_publishes(str(root), 'LCPROD')
            # shot01 should be found
            assert len(publishes) == 1
            assert publishes[0][0] == "shot01"
            # shot02 should be in errors
            assert len(errors) == 1
            assert errors[0][0] == "shot02"
        finally:
            bad_shot.chmod(0o755)  # cleanup

    def test_clean_scan_no_errors(self, tmp_path):
        """A fully readable sequence yields publishes and an empty error list."""
        root = tmp_path / "sequence"
        root.mkdir()
        (root / "shot01" / "03_ANIMATION" / "Publish").mkdir(parents=True)
        (root / "shot01" / "03_ANIMATION" / "Publish" / "scene_v01.blend").touch()
        publishes, errors = find_latest_publishes(str(root), 'LCPROD')
        assert len(publishes) == 1
        assert len(errors) == 0
# --- Fix #13: VERSION_RE ---
class TestVersionRegex:
    """Test version number extraction from filenames."""

    def test_matches_standard(self):
        """A lowercase _vNN suffix is captured as the version group."""
        found = VERSION_RE.search("scene_v02.blend")
        assert found is not None
        assert found.group(1) == "02"

    def test_matches_case_insensitive(self):
        """Uppercase marker and extension still match, capturing the digits."""
        found = VERSION_RE.search("SCENE_V10.BLEND")
        assert found is not None
        assert found.group(1) == "10"

    def test_no_match_without_v(self):
        """A bare numeric suffix without the 'v' marker is not a version."""
        assert VERSION_RE.search("scene_02.blend") is None

    def test_no_match_dotversion(self):
        """Dotted sub-versions (v02.1) do not match the pattern."""
        assert VERSION_RE.search("scene_v02.1.blend") is None