def _path_in_project(self, path: pathlib.Path) -> bool:
    """Check whether the given path lies inside the project directory."""
    resolved = bpathlib.make_absolute(path)
    project_root = bpathlib.make_absolute(self.project)

    # relative_to() raises ValueError when `resolved` is not below
    # `project_root`; we use that as the containment test.
    try:
        resolved.relative_to(project_root)
    except ValueError:
        return False
    else:
        return True
def strategise(self) -> None: """Determine what to do with the assets. Places an asset into one of these categories: - Can be copied as-is, nothing smart required. - Blend files referring to this asset need to be rewritten. This function does *not* expand globs. Globs are seen as single assets, and are only evaluated when performing the actual transfer in the execute() function. """ # The blendfile that we pack is generally not its own dependency, so # we have to explicitly add it to the _packed_paths. bfile_path = bpathlib.make_absolute(self.blendfile) # Both paths have to be resolved first, because this also translates # network shares mapped to Windows drive letters back to their UNC # notation. Only resolving one but not the other (which can happen # with the abosolute() call above) can cause errors. bfile_pp = self._target_path / bfile_path.relative_to( bpathlib.make_absolute(self.project)) self._output_path = bfile_pp self._progress_cb.pack_start() act = self._actions[bfile_path] act.path_action = PathAction.KEEP_PATH act.new_path = bfile_pp self._check_aborted() self._new_location_paths = set() for usage in trace.deps(self.blendfile, self._progress_cb): self._check_aborted() asset_path = usage.abspath if any(asset_path.match(glob) for glob in self._exclude_globs): log.info('Excluding file: %s', asset_path) continue if self.relative_only and not usage.asset_path.startswith(b'//'): log.info('Skipping absolute path: %s', usage.asset_path) continue if usage.is_sequence: self._visit_sequence(asset_path, usage) else: self._visit_asset(asset_path, usage) self._find_new_paths() self._group_rewrites()
def _group_rewrites(self) -> None:
    """For each blend file, collect which fields need rewriting.

    This ensures that the execute() step has to visit each blend file
    only once.
    """
    # Work on a snapshot, since the loop below may add new entries
    # to self._actions while iterating.
    pending = set(self._actions.values())

    while pending:
        current = pending.pop()
        for usage in current.usages:
            blendfile_path = bpathlib.make_absolute(
                usage.block.bfile.filepath)

            # Remember whether the lookup below creates a fresh action,
            # so the new action can be scheduled for processing too.
            is_new = blendfile_path not in self._actions
            owning_action = self._actions[blendfile_path]
            owning_action.rewrites.append(usage)
            if is_new:
                pending.add(owning_action)
def pack_scene(tpath: str):
    """Pack the currently-open blend file and its dependencies into `tpath`.

    Reports missing files via Blender's operator report and via notif()
    warnings before performing the actual transfer.

    :param tpath: target path for the pack.
    """
    bpy.ops.file.report_missing_files()

    bpath = pathlib.Path(bpy.data.filepath)
    print("Packing", bpath, "into", tpath)

    bpath = bpathlib.make_absolute(bpath)
    # bpath is absolute already, so its parent can serve as the project
    # directory directly; the original re-resolved it redundantly.
    ppath = bpath.parent

    packer = pack.Packer(bpath, ppath, tpath)
    packer.strategise()
    for missing_file in packer.missing_files:
        notif('Warning - Missing file ' + str(missing_file))
    packer.execute()
def paths_from_cli(args) -> typing.Tuple[pathlib.Path, pathlib.Path, str]:
    """Return paths to blendfile, project, and pack target.

    Calls sys.exit() if anything is wrong.

    :param args: parsed CLI arguments with `blendfile`, `project` and
        `target` attributes.
    :returns: (blendfile path, project path, target) tuple.
    """
    bpath = args.blendfile
    if not bpath.exists():
        log.critical('File %s does not exist', bpath)
        sys.exit(3)
    if bpath.is_dir():
        # BUG FIX: the original format string had no argument, so the
        # '%s' placeholder was never filled in.
        log.critical('%s is a directory, should be a blend file', bpath)
        sys.exit(3)
    bpath = bpathlib.make_absolute(bpath)

    tpath = args.target

    if args.project is None:
        ppath = bpathlib.make_absolute(bpath).parent
        log.warning('No project path given, using %s', ppath)
    else:
        ppath = bpathlib.make_absolute(args.project)

    if not ppath.exists():
        log.critical('Project directory %s does not exist', ppath)
        sys.exit(5)

    if not ppath.is_dir():
        log.warning('Project path %s is not a directory; using the parent %s',
                    ppath, ppath.parent)
        ppath = ppath.parent

    # The blend file must live inside the project directory, otherwise
    # its project-relative path cannot be computed.
    try:
        bpath.relative_to(ppath)
    except ValueError:
        log.critical('Project directory %s does not contain blend file %s',
                     args.project, bpath.absolute())
        sys.exit(5)

    log.info('Blend file to pack: %s', bpath)
    log.info('Project path: %s', ppath)
    log.info('Pack will be created in: %s', tpath)

    return bpath, ppath, tpath
def pack(self):
    """Pack the configured blend file into the target path.

    Progress and errors are reported through the instance's
    __signal_* callbacks; on failure the process exits with status 1.
    """
    print('Packing', self.blend_path, 'into', self.target_path)

    try:
        bpath = bpathlib.make_absolute(pathlib.Path(self.blend_path))
        # bpath is absolute already, so its parent can serve as the
        # project directory directly; the original re-resolved it
        # redundantly.
        project_path = bpath.parent

        packer = pack.Packer(bpath, project_path, self.target_path)
        packer.strategise()
        for missing_file in packer.missing_files:
            self.__signal_missing_file(str(missing_file))

        self.__signal_pack_start()
        packer.execute()
        self.__signal_pack_end(True)
    except Exception as error:
        # Top-level boundary: report the failure to the caller via the
        # error signal before exiting.
        self.__signal_pack_error(str(error))
        self.__signal_pack_end(False)
        sys.exit(1)
def _visit_linked_blocks(self, blocks_per_lib):
    """Open each library blend file and yield the blocks referenced there.

    :param blocks_per_lib: mapping from library blend path to the set of
        ID blocks that should be expanded from that library.
    """
    # We've gone through all the blocks in this file, now open the libraries
    # and iterate over the blocks referred there.
    for lib_bpath, idblocks in blocks_per_lib.items():
        lib_path = bpathlib.make_absolute(lib_bpath.to_path())

        # BUG FIX: the original had `assert lib_path.exists()` right
        # before this check, which made the graceful warn-and-skip
        # branch unreachable (and asserts disappear under `python -O`).
        # A missing library should be reported, not crash the trace.
        if not lib_path.exists():
            log.warning('Library %s does not exist', lib_path)
            continue

        log.debug('Expanding %d blocks in %s', len(idblocks), lib_path)
        libfile = blendfile.open_cached(lib_path)
        yield from self.iter_blocks(libfile, idblocks)
def report_text(bpath, *, include_sha256: bool, show_timing: bool):
    """Print a textual report of the blend file's dependencies.

    :param bpath: path of the blend file to trace.
    :param include_sha256: also compute and print a SHA-256 sum per file.
    :param show_timing: print how long the listing (and SHA summing) took.
    """
    reported_assets = set()  # type: typing.Set[pathlib.Path]
    last_reported_bfile = None
    shorten = functools.partial(common.shorten, pathlib.Path.cwd())
    time_spent_on_shasums = 0.0
    start_time = time.time()

    for usage in trace.deps(bpath):
        filepath = usage.block.bfile.filepath.absolute()
        if filepath != last_reported_bfile:
            # Print each referencing blend file once, as a header line.
            if include_sha256:
                shasum, time_spent = calc_sha_sum(filepath)
                time_spent_on_shasums += time_spent
                print(shorten(filepath), shasum)
            else:
                print(shorten(filepath))
            last_reported_bfile = filepath

        for assetpath in usage.files():
            assetpath = bpathlib.make_absolute(assetpath)
            if assetpath in reported_assets:
                log.debug('Already reported %s', assetpath)
                continue

            if include_sha256:
                shasum, time_spent = calc_sha_sum(assetpath)
                time_spent_on_shasums += time_spent
                print(' ', shorten(assetpath), shasum)
            else:
                print(' ', shorten(assetpath))
            reported_assets.add(assetpath)

    if show_timing:
        duration = time.time() - start_time
        print('Spent %.2f seconds on producing this listing' % duration)
        if include_sha256:
            print('Spent %.2f seconds on calculating SHA sums'
                  % time_spent_on_shasums)
            percentage = time_spent_on_shasums / duration * 100
            # BUG FIX: the original string was missing its closing paren.
            print(' (that is %d%% of the total time)' % percentage)
def _visit_blocks(self, bfile, limit_to):
    """Yield the blocks of `bfile` to visit, collecting linked-in assets.

    Generator: yields BlendFileBlock objects from `self.to_visit`.
    Its `return` value (available as StopIteration.value to the driver)
    is a mapping from library blend path to the set of ID blocks that
    must be expanded from that library in a later pass.

    :param bfile: the opened blend file whose queue of blocks to drain.
    :param limit_to: truthy when only a subset of blocks is wanted, in
        which case each yielded block's dependencies are queued too.
    """
    bpath = bpathlib.make_absolute(bfile.filepath)
    root_dir = bpathlib.BlendPath(bpath.parent)

    # Mapping from library path to data blocks to expand.
    blocks_per_lib = collections.defaultdict(set)

    while not self.to_visit.empty():
        block = self.to_visit.get()
        assert isinstance(block, blendfile.BlendFileBlock)
        # (bpath, addr_old) uniquely identifies a block; skip duplicates.
        if (bpath, block.addr_old) in self.blocks_yielded:
            continue

        if block.code == b'ID':
            # ID blocks represent linked-in assets. Those are the ones that
            # should be loaded from their own blend file and "expanded" to
            # the entire set of data blocks required to render them. We
            # defer the handling of those so that we can work with one
            # blend file at a time.
            lib = block.get_pointer(b'lib')
            lib_bpath = bpathlib.BlendPath(lib[b'name']).absolute(root_dir)
            blocks_per_lib[lib_bpath].add(block)

            # The library block itself should also be reported, because it
            # represents a blend file that is a dependency as well.
            self.to_visit.put(lib)
            continue

        if limit_to:
            # We're limiting the blocks, so we have to expand them to make
            # sure we don't miss anything. Otherwise we're yielding the
            # entire file anyway, and no expansion is necessary.
            self._queue_dependencies(block)

        self.blocks_yielded.add((bpath, block.addr_old))
        yield block

    return blocks_per_lib
def open_cached(path: pathlib.Path, mode='rb',
                assert_cached: typing.Optional[bool] = None) -> 'BlendFile':
    """Open a blend file, ensuring it is only opened once.

    :param path: the blend file to open.
    :param mode: file mode passed to BlendFile on a cache miss.
    :param assert_cached: when not None, raise AssertionError unless the
        file's cached state matches this flag.
    """
    opener_log = log.getChild('open_cached')
    cache_key = bpathlib.make_absolute(path)

    if assert_cached is not None:
        currently_cached = cache_key in _cached_bfiles
        if assert_cached and not currently_cached:
            raise AssertionError('File %s was not cached' % cache_key)
        elif currently_cached and not assert_cached:
            raise AssertionError('File %s was cached' % cache_key)

    # Return the cached instance when present, otherwise open and cache.
    try:
        cached_bfile = _cached_bfiles[cache_key]
    except KeyError:
        pass
    else:
        opener_log.debug('Returning cached %s', path)
        return cached_bfile

    opener_log.debug('Opening non-cached %s', path)
    fresh_bfile = BlendFile(path, mode=mode)
    _cached_bfiles[cache_key] = fresh_bfile
    return fresh_bfile
def __fspath__(self) -> pathlib.Path:
    """Determine the absolute path of the asset on the filesystem.

    The result is computed once and memoised in self._abspath.

    NOTE(review): the os.PathLike protocol expects __fspath__ to return
    str or bytes; returning a pathlib.Path means os.fspath() on this
    object would raise TypeError — confirm callers only invoke this
    method directly before changing it.
    """
    if self._abspath is None:
        bpath = self.block.bfile.abspath(self.asset_path)
        log.info('Resolved %s rel to %s -> %s',
                 self.asset_path, self.block.bfile.filepath, bpath)
        as_path = pathlib.Path(bpath.to_path())

        # Windows cannot make a path that has a glob pattern in it absolute.
        # Since globs are generally only on the filename part, we take that off,
        # make the parent directory absolute, then put the filename back.
        try:
            abs_parent = bpathlib.make_absolute(as_path.parent)
        except FileNotFoundError:
            # Fall back to the unresolved path when the parent is missing.
            self._abspath = as_path
        else:
            self._abspath = abs_parent / as_path.name

        log.info('Resolving %s rel to %s -> %s',
                 self.asset_path, self.block.bfile.filepath, self._abspath)
    else:
        log.info('Reusing abspath %s', self._abspath)

    return self._abspath
def _uncache(path: pathlib.Path):
    """Drop the BlendFile for this path from the cache, if it is there."""
    cache_key = bpathlib.make_absolute(path)
    # pop() with a default so a cache miss is silently ignored.
    _cached_bfiles.pop(cache_key, None)
def _cache(path: pathlib.Path, bfile: 'BlendFile'):
    """Store a BlendFile in the cache, keyed by its absolute path."""
    cache_key = bpathlib.make_absolute(path)
    _cached_bfiles[cache_key] = bfile