def __setitem__(self, key, value):
    """Store *value* under *key*, pickling it into the backing byte-keyed dict."""
    if self.writeback:
        # Writeback mode keeps a live reference so in-place mutations of the
        # value can be flushed later (on sync/close).
        self.cache[key] = value
    # Serialize the value and store the raw pickle bytes under the encoded key.
    buffer = BytesIO()
    pickle.Pickler(buffer, self._protocol).dump(value)
    self.dict[key.encode(self.keyencoding)] = buffer.getvalue()
def receive(self, _):
    """Run the pipeline in a remote 'farcel.py' process and forward its output.

    The pipeline is dill-pickled and fed to the child on stdin; the child's
    pickled results are read back from stdout and passed to self.send().
    The child's stderr lines are echoed to this process's stderr.
    """
    # Start the remote process.
    # SECURITY NOTE(review): shell=True with a string-joined command; self.args
    # is not shell-escaped, so this is only safe for trusted, locally-built args.
    command = ' '.join(['sudo'] + self.args + ['farcel.py'])
    self.process = subprocess.Popen(command,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True,
                                    universal_newlines=False)
    # Pickle the pipeline so that it can be sent to the remote process.
    buffer = io.BytesIO()
    pickler = dill.Pickler(buffer)
    pickler.dump(self.pipeline)
    buffer.seek(0)
    # communicate() already waits for the child to exit, so the redundant
    # self.process.wait() that used to follow has been removed.
    stdout, stderr = self.process.communicate(input=buffer.getvalue())
    # Echo the child's stderr, dropping the empty tail produced by a final '\n'.
    stderr_lines = stderr.decode('utf-8').split('\n')
    if len(stderr_lines[-1]) == 0:
        del stderr_lines[-1]
    sys.stdout.flush()
    for line in stderr_lines:
        print(line, file=sys.stderr)
    sys.stderr.flush()
    # Unpickle results from the child's stdout and forward them downstream.
    # (Renamed from 'input', which shadowed the builtin.)
    unpickler = dill.Unpickler(io.BytesIO(stdout))
    try:
        while True:
            self.send(unpickler.load())
    except EOFError:
        self.send_complete()
def save_state(self, save_dir: str) -> None:
    """Save the current training state to disk.

    (Docstring previously said "Load training state", but this method writes
    system.json, model weights, and objects.pkl into *save_dir*.)

    Args:
        save_dir: The directory into which to save the state
    """
    os.makedirs(save_dir, exist_ok=True)
    # Start with the high-level info. We could use pickle for this but having it human readable is nice.
    state = {
        key: value
        for key, value in self.__dict__.items() if is_restorable(value)[0]
    }
    with open(os.path.join(save_dir, 'system.json'), 'w') as fp:
        json.dump(state, fp, indent=4)
    # Save all of the models / optimizer states
    for model in self.network.models:
        save_model(model, save_dir=save_dir, save_optimizer=True)
    # Save everything else.  Objects lacking __getstate__ are recorded as empty
    # dicts so the saved lists stay index-aligned with the live objects when
    # the state is restored.
    objects = {
        'summary': self.summary,
        'custom_graphs': self.custom_graphs,
        'traces': [
            trace.__getstate__() if hasattr(trace, '__getstate__') else {}
            for trace in self.traces
        ],
        'tops': [
            op.__getstate__() if hasattr(op, '__getstate__') else {}
            for op in self.network.ops
        ],
        'pops': [
            op.__getstate__() if hasattr(op, '__getstate__') else {}
            for op in self.network.postprocessing
        ],
        'nops': [
            op.__getstate__() if hasattr(op, '__getstate__') else {}
            for op in self.pipeline.ops
        ],
        'ds': {
            mode: {
                key: value.__getstate__()
                for key, value in ds.items() if hasattr(value, '__getstate__')
            }
            for mode, ds in self.pipeline.data.items()
        }
    }
    with open(os.path.join(save_dir, 'objects.pkl'), 'wb') as file:
        # We need to use a custom pickler here to handle MirroredStrategy, which will show up inside of tf
        # MirroredVariables in multi-gpu systems.
        p = pickle.Pickler(file)
        p.dispatch_table = copyreg.dispatch_table.copy()
        p.dispatch_table[MirroredStrategy] = pickle_mirroredstrategy
        p.dump(objects)
def write(self, fname='.'):
    """Save model and data to disk.

    :param str fname: path to a directory.
    """
    def _pickle_to(path, obj):
        # Serialize *obj* to *path*; the duplicated open/Pickler/dump pattern
        # from the original is factored into this one helper.
        with open(path, 'wb') as f:
            pickle.Pickler(f).dump(obj)

    path = os.path.join(fname, self.dir['surrogate'])
    _pickle_to(path, self.predictor)
    self.logger.debug('Model wrote to {}'.format(path))

    self.space.write(fname, self.dir['space'], doe=False)

    path = os.path.join(fname, self.dir['data'])
    _pickle_to(path, self.data)
    self.logger.debug('Data wrote to {}'.format(path))

    self.logger.info('Model, data and space wrote.')
def write(self, artifact: T, dir: Union[str, PathLike]):
    """Serialize *artifact* to 'data.dill<suffix>' inside *dir* with dill.

    Stream layout: a version marker first, then a boolean flag — True means
    the artifact was an iterator and its items follow one by one; False means
    the artifact itself follows as a single object.
    """
    target = pathlib.Path(dir) / ("data.dill" + _SUFFIXES[self.open])
    with self.open(target, "wb") as stream:
        writer = dill.Pickler(file=stream)
        writer.dump(self.VERSION)
        is_iterator = hasattr(artifact, "__next__")
        writer.dump(is_iterator)
        if is_iterator:
            for element in cast(Iterable, artifact):
                writer.dump(element)
        else:
            writer.dump(artifact)
def copy(x):
    """Return a deep clone of *x* produced by a dill round-trip.

    Best-effort: on any failure the error is reported on the real stderr
    and None is returned implicitly (original behavior preserved).
    """
    try:
        scratch = io.BytesIO()
        dill.Pickler(scratch).dump(x)
        scratch.seek(0)
        return dill.Unpickler(scratch).load()
    except Exception as e:
        sys.stdout.flush()
        print(f'Cloning error: ({type(e)}) {e}', file=sys.__stderr__, flush=True)
def exportPickle(filename, objectToSave):
    """
    To export an object into a binary file

    Parameters
    ----------
    filename : str
        path+file name
    objectToSave : object
        the object pickled into *filename*
    """
    # 'with' guarantees the file handle is closed even if pickling raises,
    # unlike the previous explicit open()/close() pair which leaked on error.
    with open(filename, "wb") as dataFile:
        pickle.Pickler(dataFile).dump(objectToSave)
def test_extend():
    """Round-trip a lambda through Pickler/Unpickler and check it still works."""
    # NOTE(review): 'pickle' and 'StringIO' are presumably module-level aliases
    # for dill and a bytes buffer — confirm against the file's imports.
    obj = lambda: my_fn(34)
    assert obj() == 578
    obj_io = StringIO()
    pickle.Pickler(obj_io).dump(obj)
    obj_str = obj_io.getvalue()
    obj2 = pickle.Unpickler(StringIO(obj_str)).load()
    assert obj2() == 578
def test_isdill():
    """Check is_dill() distinguishes dill picklers from stock/forking picklers."""
    obj_io = StringIO()
    pickler = pickle.Pickler(obj_io)
    assert pickle._dill.is_dill(pickler) is True
    pickler = pickle._dill.StockPickler(obj_io)
    assert pickle._dill.is_dill(pickler) is False
    # The old bare 'except: pass' wrapped the assertions too, so a genuine
    # assertion failure in this branch was silently swallowed. Catch only the
    # optional-dependency ImportError and run the checks in 'else'.
    try:
        import multiprocess as mp
    except ImportError:
        pass
    else:
        pickler = mp.reduction.ForkingPickler(obj_io)
        assert pickle._dill.is_dill(pickler, child=True) is True
        assert pickle._dill.is_dill(pickler, child=False) is False
def receive(self, _):
    """Run the pipeline on a remote host via ssh and forward results downstream.

    The env and pipeline are dill-pickled to the child's stdin; results are
    unpickled from its stdout. Error objects are routed to send_error(),
    everything else to send(). Child stderr is echoed locally.
    """
    # Start the remote process.
    # SECURITY NOTE(review): shell=True with a string-joined command; the
    # host user/addr are not shell-escaped, so this is only safe for trusted
    # configuration values.
    command = ' '.join(
        ['ssh', '-l', self.host.user, self.host.addr, 'farcel.py'])
    self.process = subprocess.Popen(command,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True,
                                    universal_newlines=False)
    # Pickle the (remotified) env and the pipeline for the remote process.
    buffer = io.BytesIO()
    pickler = dill.Pickler(buffer)
    pickler.dump(self.env().remotify())
    pickler.dump(self.pipeline)
    buffer.seek(0)
    try:
        # communicate() waits for the child, so no extra wait() is needed.
        stdout, stderr = self.process.communicate(input=buffer.getvalue())
        # Echo child stderr, dropping the empty tail left by a final '\n'.
        stderr_lines = stderr.decode('utf-8').split('\n')
        if len(stderr_lines[-1]) == 0:
            del stderr_lines[-1]
        sys.stdout.flush()
        for line in stderr_lines:
            print(line, file=sys.stderr)
        sys.stderr.flush()
        # Unpickle results; renamed from 'input' to avoid shadowing the builtin.
        unpickler = dill.Unpickler(io.BytesIO(stdout))
        try:
            while True:
                x = unpickler.load()
                if isinstance(x, marcel.object.error.Error):
                    self.send_error(x)
                else:
                    self.send(x)
        except EOFError:
            # (Dropped the unused 'as e' binding.)
            self.send_complete()
    except BaseException as e:
        # Deliberately broad: report anything (incl. KeyboardInterrupt) rather
        # than dying silently mid-pipeline.
        marcel.util.print_stack()
        print(e)
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2014 California Institute of Technology.
# License: 3-clause BSD.  The full license text is available at:
#  - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/dill/LICENSE

import dill as pickle

try:
    from StringIO import StringIO  # Python 2
except ImportError:
    # Python 3: dill emits bytes, so a BytesIO stands in for StringIO.
    from io import BytesIO as StringIO


def my_fn(x):
    return x * 17


# Round-trip a lambda through dill and verify it still computes the same value.
obj = lambda: my_fn(34)
assert obj() == 578

obj_io = StringIO()
pickle.Pickler(obj_io).dump(obj)
obj_str = obj_io.getvalue()

obj2_io = StringIO(obj_str)
obj2 = pickle.Unpickler(obj2_io).load()
assert obj2() == 578
def __init__(self, env):
    """Initialize via the parent class, then attach a dill pickler bound to
    this process's binary stdout (presumably to stream pickled results to the
    invoking process — confirm against callers).
    """
    super().__init__(env)
    # sys.stdout.buffer: pickle output is bytes, so bypass the text layer.
    self.pickler = dill.Pickler(sys.stdout.buffer)
def save_set(self, Path):
    """Pickle the pair [self.master, self.tracks] into the file at *Path*."""
    print("save set")
    with open(Path, 'wb') as out_file:
        pickle.Pickler(out_file).dump([self.master, self.tracks])