def load_data(self):
    """Deserialize the minibatch metadata and the chunk offset table
    from ``self.file_name``.

    Reads the pickled header (codec id, class lengths, minibatch
    shapes/dtypes, labels mapping), rebuilds ``self.chunk_numbers``,
    then locates and loads the offset table that was appended at the
    end of the file.  Finally runs the normalizer analysis on the
    current minibatch data.

    Raises:
        pickle.UnpicklingError: (re-raised via ``from_none``) if the
            offset table at the end of the file cannot be read.
        AssertionError: if there are no train samples but a
            normalization other than "none" was requested.
    """
    # NOTE(review): opens into self._file_ but reads via self.file —
    # presumably `file` is a property over `_file_`; confirm in the class.
    self._file_ = open(self.file_name, "rb")
    # Header written by the corresponding save routine; field order must
    # match exactly.
    (codec, class_lengths, self.old_max_minibatch_size,
     self.class_chunk_lengths, self.minibatch_data_shape,
     self.minibatch_data_dtype, self.minibatch_labels_shape,
     self.minibatch_labels_dtype, self._labels_mapping) = \
        pickle.load(self.file)
    self.class_lengths[:] = class_lengths
    self._has_labels = self.minibatch_labels_shape is not None
    self._reversed_labels_mapping[:] = sorted(self.labels_mapping)
    # Resolve the decompression function for the codec recorded in the file.
    self.decompress = MinibatchesLoader.CODECS[codec]

    # Number of compressed chunks stored for each class:
    # chunks per minibatch * minibatches per class.
    self.chunk_numbers = []
    for ci, cl in enumerate(self.class_lengths):
        mb_chunks = int(numpy.ceil(self.old_max_minibatch_size /
                                   self.class_chunk_lengths[ci]))
        mb_count = int(numpy.ceil(cl / self.old_max_minibatch_size))
        self.chunk_numbers.append(mb_chunks * mb_count)

    # Helper that measures the pickled size of an object without
    # actually writing it anywhere (duck-typed write-only file).
    class BytesMeasurer(object):
        def __init__(self):
            self.size = 0

        def write(self, data):
            self.size += len(data)

    # The offset table was pickled at the very end of the file.  To find
    # where it starts, pickle a fake table of the same length/type and
    # measure its byte size, then seek back that far from EOF.
    # NOTE(review): this assumes every real offset pickles to the same
    # size as numpy.uint64(i) — confirm against the writer.
    bm = BytesMeasurer()
    fake_table = [numpy.uint64(i) for i in range(sum(self.chunk_numbers))]
    pickle.dump(fake_table, bm, protocol=best_protocol)
    self.file.seek(-bm.size, SEEK_END)
    try:
        self.offset_table = pickle.load(self.file)
    except pickle.UnpicklingError as e:
        self.error("Failed to read the offset table (table offset was %d)",
                   bm.size)
        raise from_none(e)
    # Convert numpy scalars to plain ints for cheap arithmetic below.
    for i, offset in enumerate(self.offset_table):
        self.offset_table[i] = int(offset)
    # Virtual end: sentinel offset marking where the data region ends
    # (current position minus the table itself).
    self.offset_table.append(self.file.tell() - bm.size)
    self.debug("Offsets: %s", self.offset_table)
    if self.class_lengths[TRAIN] == 0:
        assert self.normalization_type == "none", \
            "You specified \"%s\" normalization but there are no train " \
            "samples to analyze." % self.normalization_type
    self.normalizer.analyze(self.minibatch_data.mem)
def _import_fobj(fobj): try: return pickle.load(fobj) except ImportError as e: logging.getLogger("Snapshotter").error( "Are you trying to import snapshot belonging to a different " "workflow?") raise from_none(e)
def _apply_config(self, fname_config): if not self.config_file: self.warning("Configuration path is empty") return def fail(): self.exception("Failed to apply the configuration \"%s\"", fname_config) sys.exit(Main.EXIT_FAILURE) self.info("Applying the configuration from %s...", fname_config) try: runpy.run_path(fname_config) except FileNotFoundError: self.exception("Configuration does not exist: \"%s\"", fname_config) sys.exit(errno.ENOENT) except IsADirectoryError: self.exception("Configuration \"%s\" is a directory", fname_config) sys.exit(errno.EISDIR) except PermissionError: self.exception("Cannot read configuration \"%s\"", fname_config) sys.exit(errno.EACCES) except TypeError as e: self.debug("Filed to import \"%s\": %s -> assumed pickle", fname_config, e) from veles.pickle2 import pickle try: with open(fname_config, "rb") as fin: cfg = pickle.load(fin) except: fail() for subcfg in cfg: root[subcfg].update(cfg[subcfg]) except SyntaxError as e: self.debug("Filed to import \"%s\": %s -> assumed json", fname_config, e) import json try: with open(fname_config, "r") as fin: cfg = json.load(fin) except: fail() for subcfg in cfg: root[subcfg].update(cfg[subcfg]) except: fail()
def load(self, file_name):
    """
    Loads object's current state from the specified file.

    Args:
        file_name: path to a pickle file, or an already-open binary
            file-like object (anything exposing ``read``).
    """
    # Bug fix: pickle.load() requires a file object, but the original
    # passed the bare argument through, which crashes for path strings.
    # Accept both: file-likes pass straight through, paths are opened
    # (and reliably closed) here.
    if hasattr(file_name, "read"):
        data = pickle.load(file_name)
    else:
        with open(file_name, "rb") as fin:
            data = pickle.load(fin)
    self.apply_data_from_master(data)