def test_inner_exceptions(self):
    """
    Verify that an object not at the global level of this file cannot be
    pickled properly: the remote end fails to unpickle the nested exception
    class, and the original raise site is echoed back through stderr.
    """
    try:
        RemoteOperation(RaiseOperation_Nested(), "localhost").run()
    except ExecutionError as e:
        # The nested class cannot be re-imported remotely, so stderr carries
        # the traceback of the original raise statement.
        self.assertTrue(e.cmd.get_results().stderr.strip().endswith(
            "raise RaiseOperation_Nested.MyException2()"))
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("ExecutionError expected")
class DeleteCurrentDump(Operation):
    """
    Delete the dump set identified by 'timestamp' from the master data
    directory and from every primary segment's data directory.

    Deletion is best-effort: an OSError on any host is logged as a warning
    and processing continues with the remaining hosts.
    """

    def __init__(self, timestamp, master_datadir, master_port, ddboost):
        self.timestamp = timestamp            # dump timestamp key identifying the dump set
        self.master_datadir = master_datadir  # master's data directory
        self.master_port = master_port        # port used to read the segment catalog
        self.ddboost = ddboost                # DDBoost flag; stored but not referenced in execute()

    def execute(self):
        # Remove the master's copy of the dump first.
        try:
            DeleteCurrentSegDump(self.timestamp, self.master_datadir).run()
        except OSError:
            # Fix: the original bound the exception ('except OSError, e') but
            # never used it; the binding is dropped.
            logger.warn("Error encountered during deletion of %s on master" % self.timestamp)

        # Fan the same deletion out to every primary segment, one host at a time.
        gparray = GpArray.initFromCatalog(dbconn.DbURL(port=self.master_port),
                                          utility=True)
        primaries = [seg for seg in gparray.getDbList()
                     if seg.isSegmentPrimary(current_role=True)]
        for seg in primaries:
            try:
                RemoteOperation(
                    DeleteCurrentSegDump(self.timestamp,
                                         seg.getSegmentDataDirectory()),
                    seg.getSegmentHostName()).run()
            except OSError:
                logger.warn("Error encountered during deletion of %s on %s"
                            % (self.timestamp, seg.getSegmentHostName()))
def test_Remote_harden(self):
    """
    Ensure that some logging occurs in event of error.
    """
    # One case encountered thus far is the raising of a pygresql DatabaseError,
    # which due to the import from a shared object (I think), does not behave
    # nicely in terms of imports and namespacing.
    # (Fix: a stray unmatched triple-quote that followed these comments in the
    # original has been removed.)
    try:
        RemoteOperation(RaiseOperation_Unpicklable(), "localhost").run()
    except ExecutionError as e:
        self.assertTrue(e.cmd.get_results().stderr.strip().endswith(
            "raise pg.DatabaseError()"))
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("ExecutionError expected")
def execute(self):
    """
    Run a ValidateResultFile check remotely on each segment in
    self.to_validate, in parallel, and bucket each segment's content id
    under the VerificationState its check reported.

    @return: dict mapping VerificationState -> list of segment content ids
    """
    # One bucket per possible verification outcome.
    results = dict((state, []) for state in (VerificationState.RUNNING,
                                             VerificationState.SUCCEEDED,
                                             VerificationState.ABORTED,
                                             VerificationState.FAILED))
    # One remote validation per segment under verification.
    remotes = [RemoteOperation(ValidateResultFile(token=self.token,
                                                  datadir=pseg.getSegmentDataDirectory(),
                                                  content=pseg.getSegmentContentId()),
                               pseg.getSegmentHostName())
               for pseg in self.to_validate]
    ParallelOperation(remotes, self.batch_default).run()
    # Collect each remote's reported state and file its content id.
    for remote in remotes:
        results[remote.get_ret()].append(remote.operation.content)
    return results
def execute(self):
    # Purpose: confirm gp_toolkit is usable, then check free disk space on
    # every primary segment in parallel before a dump.
    ValidateGpToolkit(database=self.dump_database,
                      master_port=self.master_port).run()
    operations = []
    gparray = GpArray.initFromCatalog(dbconn.DbURL(port=self.master_port),
                                      utility=True)
    # Only primaries hold the data being dumped; skip mirrors.
    segs = [
        seg for seg in gparray.getDbList()
        if seg.isSegmentPrimary(current_role=True)
    ]
    for seg in segs:
        operations.append(
            RemoteOperation(
                ValidateSegDiskSpace(
                    free_space_percent=self.free_space_percent,
                    compress=self.compress,
                    dump_database=self.dump_database,
                    include_dump_tables=self.include_dump_tables,
                    datadir=seg.getSegmentDataDirectory(),
                    segport=seg.getSegmentPort()),
                seg.getSegmentHostName()))
    ParallelOperation(operations, self.batch_default).run()
    # Count segments whose disk-space check came back without error.
    success = 0
    for remote in operations:
        host = remote.host
        try:
            remote.get_ret()
        except NotEnoughDiskSpace, e:
            logger.error(
                "%s has insufficient disk space. [Need: %dK, Free %dK]"
                % (host, e.needed_space, e.free_space))
        else:
            success += 1
    # NOTE(review): 'success' is tallied but never used within the visible
    # span -- this block appears truncated here; presumably the original
    # continues by comparing success against len(operations). Confirm
    # against the full source before relying on this method's return value.
def BuildRemoteTableDump(restore_tables, real_filename, fake_filename, compress, host):
    """
    Convenience factory: wrap a BuildTableDump of the given tables/files
    in a RemoteOperation targeted at 'host'.
    """
    table_dump = BuildTableDump(restore_tables, real_filename,
                                fake_filename, compress)
    return RemoteOperation(table_dump, host)
# NOTE(review): fragment -- this chunk begins inside a list comprehension
# (the 'segs = [ seg for seg in ...' assignment opens in an earlier, unseen
# chunk) and ends at a bare 'except StatusFileError, e:' header whose body
# is also unseen. Only comments added; tokens unchanged. Indentation below
# is a reconstruction -- confirm against the full source.
        if seg.isSegmentPrimary(current_role=True)
    ]
    for seg in segs:
        # Segment dump files may live under an alternate backup directory.
        path = self.backup_dir if self.backup_dir is not None else seg.getSegmentDataDirectory(
        )
        # Dumps are grouped by date: <path>/<DUMP_DIR>/YYYYMMDD.
        path = os.path.join(path, DUMP_DIR, timestamp[0:8])
        status_file = os.path.join(
            path, "%s%d_%s" %
            (SEG_STATUS_PREFIX, seg.getSegmentDbId(), timestamp))
        dump_file = os.path.join(
            path, "%s%d_%s" %
            (SEG_DBDUMP_PREFIX, seg.getSegmentDbId(), timestamp))
        if self.compress:
            dump_file += ".gz"
        operations.append(
            RemoteOperation(
                PostDumpSegment(status_file=status_file, dump_file=dump_file),
                seg.getSegmentHostName()))
    # Run the per-segment post-dump checks in parallel.
    ParallelOperation(operations, self.batch_default).run()
    # Count segments whose post-dump check raised nothing.
    success = 0
    for remote in operations:
        host = remote.host
        status_file = remote.operation.status_file
        dump_file = remote.operation.dump_file
        try:
            remote.get_ret()
        except NoStatusFile, e:
            logger.warn('Status file %s not found on %s' %
                        (status_file, host))
        except StatusFileError, e:
def test_proper_exceptions_with_args(self):
    """
    An exception whose init args survive pickling should come back across
    the remote boundary with its attributes intact.
    """
    try:
        RemoteOperation(RaiseOperation_Safe(), "localhost").run()
    except ExceptionWithArgs as e:
        self.assertTrue(e.x == 1 and e.y == 2)
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("ExceptionWithArgs expected")
def test_proper_exceptions_sanity(self):
    """
    Sanity check: a picklable remote exception is re-raised locally as its
    own type.
    """
    try:
        RemoteOperation(RaiseOperation_Safe(), "localhost").run()
    except ExceptionWithArgs:
        # Unused 'e' binding dropped from the original 'except ..., e:'.
        pass
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("ExceptionWithArgs expected")
def test_unsafe_exceptions_with_args(self):
    """
    A remotely raised exception that is not pickle-friendly surfaces
    locally as a TypeError.
    """
    try:
        RemoteOperation(RaiseOperation_Unsafe(), "localhost").run()
    except TypeError:
        # Because Exceptions don't retain init args, they are not pickle-able normally
        pass
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("TypeError expected")
def test_Remote_exceptions(self):
    """
    Test that an Exception returned remotely will be raised locally.
    """
    try:
        RemoteOperation(RaiseOperation(), "localhost").run()
    except MyException:
        # Unused 'e' binding dropped from the original 'except ..., e:'.
        pass
    else:
        # Previously the test passed silently when nothing was raised.
        self.fail("MyException expected")
def test_Remote_basic(self):
    """ Basic RemoteOperation test: local and remote runs agree. """
    # assertEqual reports both values on failure, unlike assertTrue(a == b).
    self.assertEqual(TestOperation().run(),
                     RemoteOperation(TestOperation(), "localhost").run())