@classmethod
def read(cls, json_data):
    _fml = cls()
    _fml.count = len(json_data['FileManifestList'])

    for _fmj in json_data.pop('FileManifestList'):
        _fm = FileManifest()
        _fm.filename = _fmj.pop('Filename', '')
        # the file hash is a 160-bit (20 byte) SHA-1 digest stored as a little-endian blob
        _fm.hash = blob_to_num(_fmj.pop('FileHash')).to_bytes(160 // 8, 'little')
        # pack the boolean attributes into the flags bitfield
        _fm.flags |= int(_fmj.pop('bIsReadOnly', False))
        _fm.flags |= int(_fmj.pop('bIsCompressed', False)) << 1
        _fm.flags |= int(_fmj.pop('bIsUnixExecutable', False)) << 2
        _fm.file_size = 0
        _fm.chunk_parts = []
        _fm.install_tags = _fmj.pop('InstallTags', list())

        for _cpj in _fmj.pop('FileChunkParts'):
            _cp = ChunkPart()
            _cp.guid = guid_from_json(_cpj.pop('Guid'))
            _cp.offset = blob_to_num(_cpj.pop('Offset'))
            _cp.size = blob_to_num(_cpj.pop('Size'))
            # the file size is the sum of the sizes of all its chunk parts
            _fm.file_size += _cp.size
            if _cpj:
                print(f'Non-read ChunkPart keys: {_cpj.keys()}')
            _fm.chunk_parts.append(_cp)

        if _fmj:
            print(f'Non-read FileManifest keys: {_fmj.keys()}')

        _fml.elements.append(_fm)

    return _fml
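# NOTE: `blob_to_num` and `guid_from_json` are referenced above but not defined
# in this section. Below is a minimal sketch of how they could work, assuming
# the JSON manifest stores binary blobs as runs of three-digit decimal bytes
# (little-endian) and GUIDs as 32-character hex strings; this is an assumption
# for illustration, not necessarily the actual implementation.

def blob_to_num(in_str):
    """Decode a blob of concatenated three-digit decimal bytes into an integer."""
    num = 0
    shift = 0
    for i in range(0, len(in_str), 3):
        num += int(in_str[i:i + 3]) << shift  # each 3-digit group is one byte
        shift += 8
    return num


def guid_from_json(in_str):
    """Split a 32-character hex GUID into four 32-bit integers."""
    return [int(in_str[i:i + 8], 16) for i in range(0, 32, 8)]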
def package_savegame(self, input_folder: str, app_name: str = '',
                     epic_id: str = '', cloud_folder: str = '',
                     include_filter: list = None,
                     exclude_filter: list = None,
                     manifest_dt: datetime = None):
    """
    :param input_folder: folder to be packaged into chunks/manifest
    :param app_name: app name for the savegame being stored
    :param epic_id: Epic account ID
    :param cloud_folder: folder the savegame resides in (based on game metadata)
    :param include_filter: list of patterns for files to include (excludes all others)
    :param exclude_filter: list of patterns for files to exclude (includes all others)
    :param manifest_dt: datetime for the manifest name (optional)
    :return:
    """
    m = Manifest()
    m.meta = ManifestMeta()
    m.chunk_data_list = CDL()
    m.file_manifest_list = FML()
    m.custom_fields = CustomFields()

    # create metadata for the savegame
    m.meta.app_name = f'{app_name}{epic_id}'
    if not manifest_dt:
        manifest_dt = datetime.utcnow()
    m.meta.build_version = manifest_dt.strftime('%Y.%m.%d-%H.%M.%S')
    m.custom_fields['CloudSaveFolder'] = cloud_folder

    self.log.info(f'Packing savegame for "{app_name}", input folder: {input_folder}')

    # collect the files to package, honouring the include/exclude filters
    files = []
    for _dir, _, _files in os.walk(input_folder):
        for _file in _files:
            _file_path = os.path.join(_dir, _file)
            _file_path_rel = os.path.relpath(_file_path, input_folder).replace('\\', '/')

            if include_filter and not _filename_matches(_file_path_rel, include_filter):
                self.log.debug(f'Excluding "{_file_path_rel}" (does not match include filter)')
                continue
            elif exclude_filter and _filename_matches(_file_path_rel, exclude_filter):
                self.log.debug(f'Excluding "{_file_path_rel}" (does match exclude filter)')
                continue

            files.append(_file_path)

    if not files:
        if exclude_filter or include_filter:
            self.log.warning('No save files matching the specified filters have been found.')
        return self.files

    chunk_num = 0
    cur_chunk = None
    cur_buffer = None

    # sort case-insensitively for a deterministic chunk layout
    for _file in sorted(files, key=str.casefold):
        s = os.stat(_file)
        f = FileManifest()
        # get relative path for the manifest
        f.filename = os.path.relpath(_file, input_folder).replace('\\', '/')
        self.log.debug(f'Processing file "{f.filename}"')
        f.file_size = s.st_size
        fhash = sha1()

        with open(_file, 'rb') as cf:
            while remaining := s.st_size - cf.tell():
                if not cur_chunk:
                    # create a new chunk
                    cur_chunk = Chunk()
                    if cur_buffer:
                        cur_buffer.close()
                    cur_buffer = BytesIO()
                    chunk_num += 1

                # create a chunk part and write it to the chunk buffer,
                # filling the current 1 MiB chunk as far as possible
                cp = ChunkPart(guid=cur_chunk.guid, offset=cur_buffer.tell(),
                               size=min(remaining, 1024 * 1024 - cur_buffer.tell()))
                _tmp = cf.read(cp.size)
                if not _tmp:
                    self.log.warning(f'Got EOF for "{f.filename}" with {remaining} bytes remaining! '
                                     f'File may have been corrupted/modified.')
                    break

                cur_buffer.write(_tmp)
                fhash.update(_tmp)  # update sha1 hash with new data
                f.chunk_parts.append(cp)

                if cur_buffer.tell() >= 1024 * 1024:
                    # chunk buffer is full, finalize the chunk and add it to the CDL
                    cur_chunk.data = cur_buffer.getvalue()
                    ci = self.finalize_chunk(cur_chunk)
                    self.log.info(f'Chunk #{chunk_num} "{ci.path}" created')
                    m.chunk_data_list.elements.append(ci)
                    cur_chunk = None

        f.hash = fhash.digest()
        m.file_manifest_list.elements.append(f)
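# NOTE: `_filename_matches` is used above but not defined in this section.
# A minimal sketch under the assumption that the filters are shell-style
# patterns, with a fast prefix path for trailing-wildcard patterns:

from fnmatch import fnmatch


def _filename_matches(filename, patterns):
    """Check if a filename matches any of the given shell-style patterns."""
    for pattern in patterns:
        if pattern.endswith('*'):
            # treat "dir/*" as a simple prefix match
            if filename.startswith(pattern[:-1]):
                return True
        elif fnmatch(filename, pattern):
            return True
    return False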