def find_class(self, module, name):
    """Reject every global lookup: this unpickler may load pure data only.

    Raises:
        pickle.UnpicklingError: always, naming the refused global.
    """
    # Forbid everything not builtin
    raise pickle.UnpicklingError(
        "global '%s' is forbidden" % ('%s.%s' % (module, name)))
def UnPickleLock(locked, *args):
    """Reconstruct a threading.Lock from pickled state.

    A fresh lock is created; if the pickled lock was held, the new one is
    acquired (non-blocking) so the restored state matches.

    Raises:
        pickle.UnpicklingError: if the fresh lock cannot be acquired.
    """
    restored = threading.Lock()
    if locked and not restored.acquire(False):
        raise pickle.UnpicklingError('Cannot acquire lock')
    return restored
def persistent_load(pid):
    """Resolve a persistent id; only the b'ctx' tag is supported.

    ``self`` is a free variable captured from the enclosing scope.
    """
    if pid != b'ctx':
        raise pickle.UnpicklingError(
            'unsupported persistent object: %r' % pid)
    return self._ctx
def persistent_load(self, pid):
    """Map the two known persistent ids back to live external objects."""
    if pid == 'dialog':
        return external_dialog
    elif pid == 'client':
        return external_client
    else:
        raise pickle.UnpicklingError("unsupported persistent object")
def find_class(self, module, name):
    """Permit only ``db.User`` to be resolved; refuse everything else."""
    if (module, name) == ("db", "User"):
        return User
    raise pickle.UnpicklingError(f"HACKING DETECTED")
def __setstate__(self, state):
    """Refuse to restore pickled state for this type, unconditionally."""
    message = "You shall not de-serialize me!"
    raise pickle.UnpicklingError(message)
def find_class(self, module, name):
    """Allow only the ``Color`` class to be unpickled; forbid all other globals.

    Bug fix: the original compared the *module* string against the ``Color``
    class object (``module == Color``), which is always False, so even Color
    itself could never be loaded. Compare the requested *name* instead.

    Raises:
        pickle.UnpicklingError: for any global other than Color.
    """
    # Only allow safe classes from builtins.
    if name == "Color":
        return Color
    raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))
def restrictive_find_global(module, clsname):
    """``find_global`` hook permitting only class ``A``.

    Raises:
        pickle.UnpicklingError: carrying the refused module and class name.
    """
    if clsname != 'A':
        raise pickle.UnpicklingError("Cannot load class", module, clsname)
    return A
def enter_loop(self):
    """Run the master-side job-execution loop.

    Spawns the slave processes, then repeatedly pulls job results off a
    multiprocessing queue, records outcome/runtime on each job object,
    unpickles transmitted exceptions and dynamically generated jobs, and
    schedules follow-up jobs until the pipegraph reports no more work.
    Optionally drives an interactive console thread and a SIGINT handler.
    """
    # Constrain rayon-based native code in the slaves to the configured core count.
    os.environ['RAYON_NUM_THREADS'] = "%i" % (self.max_cores_to_use, )
    self.spawn_slaves()
    if sys.version_info[0] == 2 and sys.version_info[1] < 7:  # pragma: no cover
        raise ValueError("pypipegraph needs python >=2.7")
    else:
        self.que = multiprocessing.Queue()
    self.pipegraph.logger.debug("Entering execution loop")
    self.pipegraph.start_jobs()
    if self.interactive:  # pragma: no cover
        from . import interactive
        interactive_thread = threading.Thread(target=interactive.thread_loop)
        interactive_thread.start()
        s = signal.signal(signal.SIGINT, signal_handler)  # ignore ctrl-c; restored on exit
    while True:
        # Whether we timed out or a job finished, reap dead slaves first.
        self.slave.check_for_dead_jobs()
        if self.interactive:  # pragma: no cover
            self.see_if_output_is_requested()
        try:
            start = time.time()
            r = self.que.get(block=True, timeout=self.timeout)
            stop = time.time()
            self.pipegraph.logger.info("Till que.got: %.2f" % (stop - start))
            if r is None and interactive.interpreter.terminated:  # pragma: no cover
                # abort was requested via the interactive console
                self.slave.kill_jobs()
                break
            # r is a result record: slave_id, was_ok, job_id, stdout, stderr,
            # exception, trace, new_jobs, runtime.
            self.pipegraph.logger.debug(
                "Job returned: %s, was_ok: %s" % (r.job_id, r.was_ok))
            job = self.pipegraph.jobs[r.job_id]
            job.stop_time = time.time()
            job.was_done_on.add(r.slave_id)
            job.stdout = r.stdout
            job.stderr = r.stderr
            job.exception = r.exception
            job.trace = r.trace
            job.failed = not r.was_ok
            if job.start_time:
                delta = job.stop_time - job.start_time
                if delta > 5:
                    # Only log runtimes for jobs slow enough to matter.
                    self.pipegraph.logger.warning(
                        "%s runtime: %.2fs (%.2fs w/oque)" % (r.job_id, delta, r.runtime))
                job.runtime = delta
            else:
                job.runtime = -1
            if job.failed:
                try:
                    # Exceptions that could not be pickled arrive as a
                    # "STR"-prefixed byte string; keep the string as-is.
                    if job.exception.startswith("STR".encode("UTF-8")):
                        job.exception = job.exception[3:]
                        raise pickle.UnpicklingError(
                            "String Transmission")  # what an ugly control flow...
                    job.exception = pickle.loads(r.exception)
                except (
                    pickle.UnpicklingError,
                    EOFError,
                    TypeError,
                    AttributeError,
                ):  # some exceptions can't be pickled, so we send a string instead
                    pass
                if job.exception:
                    self.pipegraph.logger.warning(
                        "Job returned with exception: %s" % job)
                    self.pipegraph.logger.warning("Exception: %s" % repr(r.exception))
                    self.pipegraph.logger.warning("Trace: %s" % r.trace)
            if r.new_jobs is not False:
                # The job generated new jobs at runtime; only jobs declaring
                # modifies_jobgraph() may do that.
                if not job.modifies_jobgraph():  # pragma: no cover
                    job.exception = ValueError(
                        "This branch should not be reached.")
                    job.failed = True
                else:
                    new_jobs = pickle.loads(r.new_jobs)
                    self.pipegraph.logger.debug(
                        "We retrieved %i new jobs from %s" % (len(new_jobs), job))
                    self.pipegraph.new_jobs_generated_during_runtime(new_jobs)
            more_jobs = self.pipegraph.job_executed(job)
            if not more_jobs:
                # All jobs are done and none are still running.
                break
            self.pipegraph.start_jobs()
        except (queue.Empty, IOError):
            # Either the timeout hit or the queue failed; loop and re-check.
            pass
    self.que.close()
    self.que.join_thread()  # wait for the que to close
    if self.interactive:  # pragma: no cover - interactive
        if not interactive.interpreter.stay:
            interactive.interpreter.terminated = True
        interactive_thread.join()
        signal.signal(signal.SIGINT, s)  # restore the previous SIGINT handler
    self.pipegraph.logger.debug("Leaving loop")
def find_class(self, module, name):
    """Resolve globals only when they live in ``__main__``.

    Raises:
        pickle.UnpicklingError: for any other module.
    """
    if module != "__main__":
        raise pickle.UnpicklingError(
            "global '%s.%s' is forbidden" % (module, name))
    return super().find_class(module, name)
def unpickle_lock(locked, *args):
    """Reconstruct a threading.Lock from pickled state.

    Bug fix: the original never returned the lock, so unpickling always
    produced ``None`` (compare ``UnPickleLock``, which does return it).

    Raises:
        pickle.UnpicklingError: if the fresh lock cannot be re-acquired.
    """
    lock = threading.Lock()
    if locked:
        if not lock.acquire(False):
            raise pickle.UnpicklingError("Cannot acquire lock")
    return lock
def refuse_to_unpickle(self):
    """Unconditionally refuse unpickling for this object."""
    reason = 'Refused'
    raise pickle.UnpicklingError(reason)
def __setstate__(self, d):
    """Refuse direct unpickling of this object; callers should pickle the dict.

    Bug fix: the original used the Python 2 ``print`` statement, which is a
    SyntaxError on Python 3; the parenthesized call form works on both.
    """
    print('L setstate ')
    raise pickle.UnpicklingError("Not supported, pickle the dict")
def test_unpickle_pipe_unpickle_errors(self, mock_echo, mock_pickle):
    """Unpickling failures from the pipe propagate and are echoed to the user."""
    err = pickle.UnpicklingError("Error")
    mock_pickle.side_effect = err
    with self.assertRaises(pickle.UnpicklingError):
        self.gdb.unpickle_pipe("Fifo Data")
    mock_echo.assert_called_with(
        "Error retrieving data from process: Error")
def restore_testcase_from_id(cls, id_):
    """Look up a live test case by its id; fail if it has been discarded.

    Raises:
        pickle.UnpicklingError: when no test case with that id is registered.
    """
    found = cls.__active_test_cases.get(id_)
    if found is None:
        raise pickle.UnpicklingError("the test case no longer exists")
    return found
def find_class(self, module, name):
    """Remap legacy ``BlenderAndMBDyn`` paths onto this package; block exec/eval."""
    if module.startswith("BlenderAndMBDyn"):
        # Rewrite the historical top-level package name to the current one.
        tail = module.split(".", 1)[1]
        module = ".".join((__package__, tail))
    elif module == "builtins" and name in ("exec", "eval"):
        raise pickle.UnpicklingError(
            "global " + ".".join((module, name)) + " is forbidden")
    return super().find_class(module, name)
def persistent_load(self, pid: Any) -> Any:
    """Resolve a ``(tag, key)`` persistent id through the instance lookup table.

    Raises:
        pickle.UnpicklingError: for any tag other than ``"_persistent_id_"``.
    """
    tag, key = pid
    if tag != "_persistent_id_":
        raise pickle.UnpicklingError("unsupported persistent object")
    return self._lookup_table[key]
def enter_loop(self):
    """Run the master-side job-execution loop (tuple-based result protocol).

    Spawns the slaves, then repeatedly reads result tuples from the queue,
    records outcome on each job, unpickles transmitted exceptions and
    runtime-generated jobs, and starts follow-up jobs until the pipegraph
    reports there is nothing left to run.
    """
    self.spawn_slaves()
    self.que = MPQueueFixed()
    logger.info("Starting first batch of jobs")
    self.pipegraph.start_jobs()
    while True:
        # Whether we timed out or a job finished, reap dead slaves first.
        self.slave.check_for_dead_jobs()
        self.see_if_output_is_requested()
        try:
            logger.info("Listening to que")
            # One finished job per queue entry, as a fixed-order tuple.
            slave_id, was_ok, job_id_done, stdout, stderr, exception, trace, new_jobs = self.que.get(
                block=True, timeout=self.timeout)
            logger.info("Job returned: %s, was_ok: %s" % (job_id_done, was_ok))
            logger.info("Remaining in que (approx): %i" % self.que.qsize())
            job = self.pipegraph.jobs[job_id_done]
            job.was_done_on.add(slave_id)
            job.stdout = stdout
            job.stderr = stderr
            job.exception = exception
            job.trace = trace
            job.failed = not was_ok
            job.stop_time = time.time()
            if job.start_time:
                logger.info("%s runtime: %is" % (job_id_done, job.stop_time - job.start_time))
            if job.failed:
                try:
                    # Exceptions that could not be pickled arrive as a
                    # 'STR'-prefixed byte string; keep the string as-is.
                    if job.exception.startswith('STR'.encode('UTF-8')):
                        job.exception = job.exception[3:]
                        raise pickle.UnpicklingError(
                            "String Transmission")  # what an ugly control flow...
                    logger.info("Before depickle %s" % type(exception))
                    job.exception = pickle.loads(exception)
                    logger.info("After depickle %s" % type(job.exception))
                    logger.info("exception stored at %s" % (job))
                except (
                    pickle.UnpicklingError,
                    EOFError
                ):  # some exceptions can't be pickled, so we send a string instead
                    pass
                if job.exception:
                    logger.info("Exception: %s" % repr(exception))
                    logger.info("Trace: %s" % trace)
                    logger.info("stdout: %s" % stdout)
                    logger.info("stderr: %s" % stderr)
            if not new_jobs is False:
                # The job generated new jobs at runtime; only jobs declaring
                # modifies_jobgraph() may do that.
                if not job.modifies_jobgraph():
                    job.exception = ppg_exceptions.JobContractError(
                        "%s created jobs, but was not a job with modifies_jobgraph() returning True" % job)
                    job.failed = True
                else:
                    new_jobs = pickle.loads(new_jobs)
                    logger.info("We retrieved %i new jobs from %s" % (len(new_jobs), job))
                    self.pipegraph.new_jobs_generated_during_runtime(new_jobs)
            more_jobs = self.pipegraph.job_executed(job)
            #if job.cores_needed == -1:
            #self.cores_available = self.max_cores_to_use
            #else:
            #self.cores_available += job.cores_needed
            if not more_jobs:
                # All jobs are done and none are still running.
                break
            self.pipegraph.start_jobs()
        except (queue.Empty, IOError):
            # Either the timeout hit or the queue failed; loop and re-check.
            pass
    self.que.close()
    self.que.join_thread()  # wait for the que to close
    logger.info("Leaving loop")
def find_class(self, module, name):
    """Resolve names only from the running ``__main__`` module.

    Raises:
        pickle.UnpicklingError: for any other module.
    """
    if module != '__main__':
        raise pickle.UnpicklingError(
            "global '%s.%s' is forbidden" % (module, name))
    return getattr(sys.modules['__main__'], name)
def find_class(self, module, name):
    """Delegate to the base unpickler only for whitelisted classes.

    Raises:
        pickle.UnpicklingError: when ``is_safe_class`` rejects the global.
    """
    if not self.is_safe_class(module, name):
        raise pickle.UnpicklingError(
            "global \"%s.%s\" is forbidden" % (module, name))
    return super(SafeUnpickler, self).find_class(module, name)
def find_class(self, module, name):
    """Resolve only whitelisted builtins; every other global is refused.

    Raises:
        pickle.UnpicklingError: for anything outside ``safe_builtins``.
    """
    # Forbid everything else.
    if module != "builtins" or name not in safe_builtins:
        raise pickle.UnpicklingError(
            "global '%s.%s' is forbidden" % (module, name))
    return getattr(builtins, name)
def get_extension(self, code):
    """Refuse pickle extension-registry lookups entirely."""
    reason = "extension is forbidden"
    raise pickle.UnpicklingError(reason)
def find_class(self, module, name):
    """Permit only classes under the ``babel`` package.

    *.dat files must have compatible classes between Python 2 and 3.

    Raises:
        pickle.UnpicklingError: for any module outside ``babel``.
    """
    root = module.split('.')[0]
    if root == 'babel':
        return pickle.Unpickler.find_class(self, module, name)
    raise pickle.UnpicklingError(
        "global '%s.%s' is forbidden" % (module, name))
def e(*s):
    """Raise for a disabled pickle opcode.

    ``c`` is the current opcode, a free variable from the enclosing scope.
    """
    message = "encountered disabled instruction %r" % c
    raise pickle.UnpicklingError(message)
def __call__(self, *args):
    """Refuse instantiation: Java-pickled objects need the JUnpickler."""
    reason = "Unpickling Java requires JUnpickler"
    raise pickle.UnpicklingError(reason)
def find_class(self, module, name):
    """Allow only Snake* save-game classes from ``__main__``.

    The name must start with ``Snake``, contain at most one dot, and be no
    longer than the longest legitimate name (``SnakeSave.HighScores``).

    Raises:
        pickle.UnpicklingError: for anything outside that whitelist.
    """
    allowed = (
        module == "__main__"
        and name.startswith("Snake")
        and name.count(".") <= 1
        and len(name) <= len("SnakeSave.HighScores")
    )
    if not allowed:
        raise pickle.UnpicklingError(f"HACKING DETECTED")
    return super().find_class(module, name)
def test_deserialize_unpickling_error(self): self._test_deserialize_error(pickle.UnpicklingError("TEST ERROR"))
def find_class(self, module, name):
    """Reject every global reference found in the pickle stream."""
    # Forbid all globals
    raise pickle.UnpicklingError(
        "global '%s.%s' is forbidden" % (module, name))
def persistent_load(self, pid):
    """Decode a ``(tag, value)`` persistent id holding a pickled sympy expression.

    Raises:
        pickle.UnpicklingError: for any tag other than ``"DaCeSympyExpression"``.
    """
    tag, payload = pid
    if tag != "DaCeSympyExpression":
        raise pickle.UnpicklingError("unsupported persistent object")
    return _sunpickle(payload)
def find_class(self, module: str, name: str) -> typing.Any:
    """Allow global lookups from ``builtins`` only.

    Raises:
        pickle.UnpicklingError: for any non-builtins module.
    """
    if module == "builtins":
        return super().find_class(module, name)
    raise pickle.UnpicklingError(f"Invalid object: {module}.{name}")