def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main._force = 1
    Main._restore_root_set = 0

    Globals.security_level = "override"
    cmdpairs = _internal_get_cmd_pairs(mirror_local, dest_local,
                                       mirror_dir, dest_dir)

    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = list(map(SetConnections.cmdpair2rp, cmdpairs))
    # propagate EA/ACL test settings to every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main._misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching a second time
        Main._action_restore(inc, dest_rp)
    else:  # use alternate syntax
        Main._restore_timestr = str(time)
        Main._action_restore(mirror_rp, dest_rp, restore_as_of=1)
    Main._cleanup()
def InternalBackup(source_local, dest_local, src_dir, dest_dir,
                   current_time = None, eas = None, acls = None):
    """Backup src to dest internally

    This is like rdiff_backup but instead of running a separate
    rdiff-backup script, use the separate *.py files.  This way the
    script doesn't have to be rebuild constantly, and stacktraces have
    correct line/file references.
    """
    Globals.current_time = current_time
    # _reset_connections()
    Globals.security_level = "override"
    remote_schema = '%s'

    # fake a remote end by spawning server.py from a sub-directory
    if not source_local:
        src_dir = "cd test1; python ../server.py ../%s::../%s" % \
            (SourceDir, src_dir)
    if not dest_local:
        dest_dir = "cd test2/tmp; python ../../server.py ../../%s::../../%s" \
            % (SourceDir, dest_dir)

    cmdpairs = SetConnections.get_cmd_pairs([src_dir, dest_dir], remote_schema)
    Security.initialize("backup", cmdpairs)
    rpin, rpout = map(SetConnections.cmdpair2rp, cmdpairs)
    # propagate EA/ACL test settings to every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([rpin, rpout])
    Main.Backup(rpin, rpout)
    Main.cleanup()
def set_special_escapes(self, repo):
    """
    Set escape_dos_devices and escape_trailing_spaces from
    rdiff-backup-data dir, just like chars_to_quote
    """
    escapes = repo.get_special_escapes()
    if escapes is None:
        # nothing recorded in the repository, derive from file systems
        src_fsa = getattr(self, "src_fsa", None)
        if src_fsa is None:
            # Single filesystem operation
            needs_edd = self.dest_fsa.escape_dos_devices
            needs_ets = self.dest_fsa.escape_trailing_spaces
        else:
            needs_edd = (src_fsa.escape_dos_devices
                         and not self.dest_fsa.escape_dos_devices)
            needs_ets = (src_fsa.escape_trailing_spaces
                         and not self.dest_fsa.escape_trailing_spaces)
    else:
        # the repository record takes precedence
        needs_edd = "escape_dos_devices" in escapes
        needs_ets = "escape_trailing_spaces" in escapes
    SetConnections.UpdateGlobal('escape_dos_devices', needs_edd)
    log.Log("Backup: escape_dos_devices = {dd}".format(dd=needs_edd),
            log.INFO)
    SetConnections.UpdateGlobal('escape_trailing_spaces', needs_ets)
    log.Log("Backup: escape_trailing_spaces = {ts}".format(ts=needs_ets),
            log.INFO)
def InternalBackup(source_local, dest_local, src_dir, dest_dir,
                   current_time=None, eas=None, acls=None):
    """Backup src to dest internally

    This is like rdiff_backup but instead of running a separate
    rdiff-backup script, use the separate *.py files.  This way the
    script doesn't have to be rebuild constantly, and stacktraces have
    correct line/file references.
    """
    Globals.current_time = current_time
    Globals.security_level = "override"
    cmdpairs = _internal_get_cmd_pairs(source_local, dest_local,
                                       src_dir, dest_dir)
    Security.initialize("backup", cmdpairs)
    rpin, rpout = [SetConnections.cmdpair2rp(pair) for pair in cmdpairs]
    # propagate EA/ACL test settings to every connection
    for ea_attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(ea_attr, eas)
    for acl_attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(acl_attr, acls)
    Main._misc_setup([rpin, rpout])
    Main._action_backup(rpin, rpout)
    Main._cleanup()
def setup(self, src_dir=None, owners_map=None):
    """
    Prepare the repository for use.

    Creates it if it must be writable, sets up the shadow repository
    (API >= 201), applies the proper set of global settings depending on
    whether a source directory is given, and initializes quoting and
    owner mapping.  Returns 0 on success, a non-zero error code else.
    """
    if self.must_be_writable and not self._create():
        return 1

    if (self.can_be_sub_path
            and self.base_dir.conn is Globals.local_connection):
        Security.reset_restrict_path(self.base_dir)

    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200

    if Globals.get_api_version() >= 201:  # compat200
        if self.base_dir.conn is Globals.local_connection:
            # should be more efficient than going through the connection
            from rdiffbackup.locations import _repo_shadow
            self._shadow = _repo_shadow.RepoShadow
        else:
            self._shadow = self.base_dir.conn._repo_shadow.RepoShadow
        if self.must_be_writable:
            self.fs_abilities = self._shadow.get_fs_abilities_readwrite(
                self.base_dir)
        else:
            self.fs_abilities = self._shadow.get_fs_abilities_readonly(
                self.base_dir)
        if not self.fs_abilities:
            return 1  # something was wrong
        else:
            log.Log("--- Repository file system capabilities ---\n"
                    + str(self.fs_abilities), log.INFO)

        if src_dir is None:
            self.remote_transfer = None  # just in case
            ret_code = fs_abilities.SingleRepoSetGlobals(self)()
            if ret_code != 0:
                return ret_code
        else:
            # FIXME this shouldn't be necessary, and the setting of variable
            # across the connection should happen through the shadow
            SetConnections.UpdateGlobal("backup_writer", self.base_dir.conn)
            self.base_dir.conn.Globals.set("isbackup_writer", True)
            # this is the new way, more dedicated but not sufficient yet
            self.remote_transfer = (src_dir.base_dir.conn
                                    is not self.base_dir.conn)
            ret_code = fs_abilities.Dir2RepoSetGlobals(src_dir, self)()
            if ret_code != 0:
                return ret_code
        self.init_quoting()

    if owners_map is not None:
        ret_code = self.init_owners_mapping(**owners_map)
        if ret_code != 0:
            return ret_code

    return 0  # all is good
def _update_triple(self, fsa_support, attr_triple):
    """
    Update global vars from single fsa test
    """
    active_attr, write_attr, conn_attr = attr_triple
    if Globals.get(active_attr) == 0:
        return  # don't override an explicit 0
    # clear all three settings before possibly re-enabling them
    for name in (active_attr, write_attr, conn_attr):
        SetConnections.UpdateGlobal(name, None)
    if not fsa_support:
        return
    SetConnections.UpdateGlobal(active_attr, 1)
    SetConnections.UpdateGlobal(write_attr, 1)
    self.conn.Globals.set_local(conn_attr, 1)
def set_special_escapes(self, repo):
    """
    Escaping DOS devices and trailing periods/spaces works like regular
    filename escaping.  If only the destination requires it, then we do it.
    Otherwise, it is not necessary, since the files couldn't have been
    created in the first place.  We also record whether we have done it in
    order to handle the case where a volume which was escaped is later
    restored by an OS that does not require it.
    """
    # what the current source/destination file systems suggest
    suggested_edd = (self.dest_fsa.escape_dos_devices
                     and not self.src_fsa.escape_dos_devices)
    suggested_ets = (self.dest_fsa.escape_trailing_spaces
                     and not self.src_fsa.escape_trailing_spaces)

    se = repo.get_special_escapes()
    if se is None:
        # nothing recorded yet: persist the suggested values
        actual_edd, actual_ets = suggested_edd, suggested_ets
        se = set()
        if actual_edd:
            se.add("escape_dos_devices")
        if actual_ets:
            se.add("escape_trailing_spaces")
        repo.set_special_escapes(se)
    else:
        # the repository record overrides the fs-based suggestion
        actual_edd = ("escape_dos_devices" in se)
        actual_ets = ("escape_trailing_spaces" in se)
        if actual_edd != suggested_edd and not suggested_edd:
            log.Log(
                "System no longer needs DOS devices to be escaped, "
                "but we will retain for backwards compatibility",
                log.WARNING)
        if actual_ets != suggested_ets and not suggested_ets:
            log.Log(
                "System no longer needs trailing spaces or periods to be "
                "escaped, but we will retain for backwards compatibility",
                log.WARNING)

    SetConnections.UpdateGlobal('escape_dos_devices', actual_edd)
    log.Log("Backup: escape_dos_devices = {dd}".format(dd=actual_edd),
            log.INFO)
    SetConnections.UpdateGlobal('escape_trailing_spaces', actual_ets)
    log.Log("Backup: escape_trailing_spaces = {ts}".format(ts=actual_ets),
            log.INFO)
def set_chars_to_quote(self, repo):
    """
    Set chars_to_quote setting for backup session

    Unlike most other options, the chars_to_quote setting also depends
    on the current settings in the rdiff-backup-data directory, not
    just the current fs features.
    """
    ctq = self._compare_ctq_file(repo, self._get_ctq_from_fsas())
    quote_re, unquote_re = map_filenames.get_quoting_regexps(
        ctq, Globals.quoting_char)
    # push the three quoting settings to every connection
    for name, value in (('chars_to_quote', ctq),
                        ('chars_to_quote_regexp', quote_re),
                        ('chars_to_quote_unregexp', unquote_re)):
        SetConnections.UpdateGlobal(name, value)
def setup(self):
    """
    Set up the repository's 'rdiff-backup-data' and 'increments'
    sub-directories, creating them if needed.

    Returns 0 on success, 1 if a sub-directory could not be created.
    """
    ret_code = super().setup()
    if ret_code != 0:
        return ret_code

    Globals.rbdir = self.data_dir  # compat200

    # define a few essential subdirectories
    if not self.data_dir.lstat():
        try:
            self.data_dir.mkdir()
        except (OSError, IOError) as exc:
            self.log(
                "Could not create 'rdiff-backup-data' sub-directory "
                "in '{rp}' due to '{exc}'. "
                "Please fix the access rights and retry.".format(
                    rp=self.base_dir, exc=exc), self.log.ERROR)
            return 1
    elif self._is_failed_initial_backup():
        # a data dir exists but the very first backup never completed
        self._fix_failed_initial_backup()
    if not self.incs_dir.lstat():
        try:
            self.incs_dir.mkdir()
        except (OSError, IOError) as exc:
            self.log(
                "Could not create 'increments' sub-directory "
                "in '{rp}' due to '{exc}'. "
                "Please fix the access rights and retry.".format(
                    rp=self.data_dir, exc=exc), self.log.ERROR)
            return 1

    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200

    return 0
def connect(self):
    """
    Connect to potentially provided locations arguments, remote or local.

    Defines the current time as being the time of a potentially upcoming
    backup.
    Returns self, to be used as context manager.
    """
    if 'locations' in self.values:
        # TODO encapsulate the following lines into one
        # connections/connections_mgr construct, so that the action doesn't
        # need to care about cmdpairs and Security (which would become a
        # feature of the connection).
        cmdpairs = SetConnections.get_cmd_pairs(
            self.values.locations,
            remote_schema=self.remote_schema,
            ssh_compression=self.values.ssh_compression,
            remote_tempdir=self.remote_tempdir,
            term_verbosity=log.Log.term_verbosity)
        Security.initialize(self.get_security_class(), cmdpairs)
        self.connected_locations = list(
            map(SetConnections.get_connected_rpath, cmdpairs))
    else:
        # no locations given: initialize security without connections
        Security.initialize(self.get_security_class(), [])
        self.connected_locations = []

    # once the connection is set, we can define "now" as being the current
    # time, unless the user defined a fixed a current time.
    Time.set_current_time(self.values.current_time)

    return self
def setUp(self):
    """Start server"""
    # verbose logging so test failures carry enough context
    Log.setverbosity(5)
    Globals.change_source_perms = 1
    SetConnections.UpdateGlobal('checkpoint_interval', 3)
    # initialize user and group mappings before running the tests
    user_group.init_user_mapping()
    user_group.init_group_mapping()
def init_quoting(self): """ Set QuotedRPath versions of important RPaths if chars_to_quote is set. Return True if quoting needed to be done, False else. """ # FIXME the problem is that the chars_to_quote can come from the command # line but can also be a value coming from the repository itself, # set globally by the fs_abilities.xxx_set_globals functions. if not Globals.chars_to_quote: return False if Globals.get_api_version() < 201: # compat200 FilenameMapping.set_init_quote_vals() self.base_dir = FilenameMapping.get_quotedrpath(self.base_dir) self.data_dir = FilenameMapping.get_quotedrpath(self.data_dir) self.incs_dir = FilenameMapping.get_quotedrpath(self.incs_dir) else: self.base_dir = map_filenames.get_quotedrpath(self.base_dir) self.data_dir = map_filenames.get_quotedrpath(self.data_dir) self.incs_dir = map_filenames.get_quotedrpath(self.incs_dir) SetConnections.UpdateGlobal('rbdir', self.data_dir) # compat200 return True
def _update_triple(self, src_support, dest_support, attr_triple):
    """
    Many of the settings have a common form we can handle here
    """
    active_attr, write_attr, conn_attr = attr_triple
    if Globals.get(active_attr) == 0:
        return  # don't override an explicit 0
    # reset all three settings first
    for name in (active_attr, write_attr, conn_attr):
        SetConnections.UpdateGlobal(name, None)
    if not src_support:
        # source can't provide the feature, nothing to enable
        return
    SetConnections.UpdateGlobal(active_attr, 1)
    self.in_conn.Globals.set_local(conn_attr, 1)
    if dest_support:
        # destination supports the feature too, so it may be written
        SetConnections.UpdateGlobal(write_attr, 1)
        self.out_conn.Globals.set_local(conn_attr, 1)
def _reset_connections(src_rp, dest_rp):
    """Reset some global connection information"""
    Globals.security_level = "override"
    Globals.isbackup_reader = None
    Globals.isbackup_writer = None
    # Globals.connections = [Globals.local_connection]
    # Globals.connection_dict = {0: Globals.local_connection}
    SetConnections.UpdateGlobal('rbdir', None)
    Main.misc_setup([src_rp, dest_rp])
def _internal_get_cmd_pairs(src_local, dest_local, src_dir, dest_dir):
    """Function returns a tuple of connections based on the given parameters.

    One or both directories are faked for remote connection if not local,
    and the connections are set accordingly.
    Note that the function relies on the global variables abs_remote1_dir,
    abs_remote2_dir and abs_testing_dir.
    """
    remote_schema = b'%s'  # compat200: replace with {h}
    remote_format = b"cd %s; %s/server.py::%s"
    if not src_local:
        src_dir = remote_format % (abs_remote1_dir, abs_testing_dir, src_dir)
    if not dest_local:
        dest_dir = remote_format % (abs_remote2_dir, abs_testing_dir, dest_dir)
    locations = [src_dir, dest_dir]
    if src_local and dest_local:
        # fully local: no remote schema required
        return SetConnections.get_cmd_pairs(locations)
    return SetConnections.get_cmd_pairs(locations, remote_schema)
def set_chars_to_quote(self, repo):
    """
    Set chars_to_quote from rdiff-backup-data dir
    """
    if Globals.chars_to_quote is not None:
        return  # already overridden

    ctq = repo.get_chars_to_quote()
    if ctq is not None:
        regexp, unregexp = map_filenames.get_quoting_regexps(
            ctq, Globals.quoting_char)
        SetConnections.UpdateGlobal("chars_to_quote", ctq)
        SetConnections.UpdateGlobal('chars_to_quote_regexp', regexp)
        SetConnections.UpdateGlobal('chars_to_quote_unregexp', unregexp)
    else:
        # fix: dropped a pointless no-argument .format() call on this string
        log.Log(
            "chars_to_quote config not found, assuming no quoting "
            "required in backup repository", log.WARNING)
        SetConnections.UpdateGlobal("chars_to_quote", b"")
def setup(self):
    """Set up the location; returns 0 on success, an error code else."""
    status = super().setup()
    if status != 0:
        return status
    if self.base_dir.conn is Globals.local_connection:
        # path restrictions only apply to the local side
        Security.reset_restrict_path(self.base_dir)
    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200
    return 0  # all is good
def setup(self):
    """Create the location if required and register it globally."""
    if self.must_be_writable and not self._create():
        return 1
    is_local = self.base_dir.conn is Globals.local_connection
    if self.can_be_sub_path and is_local:
        Security.reset_restrict_path(self.base_dir)
    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200
    return 0  # all is good
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main.force = 1
    Main.restore_root_set = 0
    remote_schema = b'%s'
    Globals.security_level = "override"
    # fake a remote end by spawning server.py from a remote directory
    if not mirror_local:
        mirror_dir = b"cd %s; %s/server.py::%s" % (abs_remote1_dir,
                                                   abs_testing_dir,
                                                   mirror_dir)
    if not dest_local:
        dest_dir = b"cd %s; %s/server.py::%s" % (abs_remote2_dir,
                                                 abs_testing_dir,
                                                 dest_dir)
    cmdpairs = SetConnections.get_cmd_pairs([mirror_dir, dest_dir],
                                            remote_schema)
    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = list(map(SetConnections.cmdpair2rp, cmdpairs))
    # propagate EA/ACL test settings to every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching a second time
        Main.Restore(inc, dest_rp)
    else:  # use alternate syntax
        Main.restore_timestr = str(time)
        Main.Restore(mirror_rp, dest_rp, restore_as_of=1)
    Main.cleanup()
def setPathnames(self, src_path, src_return, dest_path, dest_return):
    """Start servers which will run in src_path and dest_path respectively

    If either is None, then no server will be run and local process
    will handle that end.  src_return and dest_return are the prefix
    back to the original rdiff-backup directory.  So for instance if
    src_path is "test2/tmp", then src_return will be '../'.
    """
    # Clear old data that may rely on deleted connections
    Globals.isbackup_writer = None
    Globals.isbackup_reader = None
    Globals.rbdir = None

    # fix: use the print() function instead of the Python 2 print statement
    print("Setting up connection")
    self.src_prefix, self.src_conn = \
        self.get_prefix_and_conn(src_path, src_return)
    self.dest_prefix, self.dest_conn = \
        self.get_prefix_and_conn(dest_path, dest_return)
    SetConnections.BackupInitConnections(self.src_conn, self.dest_conn)

    Globals.restrict_path = "/"  # we aren't testing security here

    # remove any leftovers from a previous run before creating fixtures
    assert not os.system("rm -rf testfiles/output* "
                         "testfiles/restoretarget* "
                         "testfiles/noperms_output testfiles/root_output "
                         "testfiles/unreadable_out")

    self.inc1rp = self.get_src_rp("testfiles/increment1")
    self.inc2rp = self.get_src_rp('testfiles/increment2')
    self.inc3rp = self.get_src_rp('testfiles/increment3')
    self.inc4rp = self.get_src_rp('testfiles/increment4')

    self.rpout_inc = self.get_dest_rp('testfiles/output_inc')
    self.rpout1 = self.get_dest_rp('testfiles/restoretarget1')
    self.rpout2 = self.get_dest_rp('testfiles/restoretarget2')
    self.rpout3 = self.get_dest_rp('testfiles/restoretarget3')
    self.rpout4 = self.get_dest_rp('testfiles/restoretarget4')

    self.rpout = self.get_dest_rp('testfiles/output')
    self.set_rbdir(self.rpout)

    self.noperms = self.get_src_rp('testfiles/noperms')
    self.noperms_out = self.get_dest_rp('testfiles/noperms_output')

    self.rootfiles = self.get_src_rp('testfiles/root')
    self.rootfiles_out = self.get_dest_rp('testfiles/root_output')
    self.rootfiles2 = self.get_src_rp('testfiles/root2')
    self.rootfiles21 = self.get_src_rp('testfiles/root2.1')
    self.rootfiles_out2 = self.get_dest_rp('testfiles/root_output2')

    self.one_unreadable = self.get_src_rp('testfiles/one_unreadable')
    self.one_unreadable_out = self.get_dest_rp('testfiles/unreadable_out')
def _update_triple(self, src_support, dest_support, attr_triple):
    """
    Update global settings for feature based on fsa results

    This is slightly different from Dir2RepoSetGlobals._update_triple
    because (using the mirror_metadata file) rpaths from the source may
    have more information than the file system supports.
    """
    active_attr, write_attr, conn_attr = attr_triple
    if Globals.get(active_attr) == 0:
        return  # don't override an explicit 0
    # reset all three settings before possibly re-enabling them
    for name in (active_attr, write_attr, conn_attr):
        SetConnections.UpdateGlobal(name, None)
    if not dest_support:
        return  # if dest doesn't support, do nothing
    SetConnections.UpdateGlobal(active_attr, 1)
    # the destination both carries the feature and gets written to
    self.out_conn.Globals.set_local(conn_attr, 1)
    self.out_conn.Globals.set_local(write_attr, 1)
    if src_support:
        self.in_conn.Globals.set_local(conn_attr, 1)
def setup(self):
    """
    Prepare the backup action: set up source dir and target repository,
    initialize connections and logging.

    Returns 0 on success, a non-zero error code else.
    """
    # in setup we return as soon as we detect an issue to avoid changing
    # too much
    return_code = super().setup()
    if return_code != 0:
        return return_code

    return_code = self._set_no_compression_regexp()
    if return_code != 0:
        return return_code

    return_code = self.dir.setup()
    if return_code != 0:
        return return_code

    owners_map = {
        "users_map": self.values.user_mapping_file,
        "groups_map": self.values.group_mapping_file,
        "preserve_num_ids": self.values.preserve_numerical_ids
    }
    return_code = self.repo.setup(self.dir, owners_map=owners_map)
    if return_code != 0:
        return return_code

    # TODO validate how much of the following lines and methods
    # should go into the directory/repository modules
    if Globals.get_api_version() < 201:  # compat200
        SetConnections.BackupInitConnections(self.dir.base_dir.conn,
                                             self.repo.base_dir.conn)
        self.repo.base_dir.conn.fs_abilities.backup_set_globals(
            self.dir.base_dir, self.values.force)
        self.repo.setup_quoting()

    # refuse to backup if the previous session appears to be in the future
    previous_time = self.repo.get_mirror_time()
    if previous_time >= Time.getcurtime():
        log.Log("The last backup is not in the past. Aborting.",
                log.ERROR)
        return 1

    if log.Log.verbosity > 0:
        try:  # the target repository must be writable
            log.Log.open_logfile(self.repo.data_dir.append("backup.log"))
        except (log.LoggerError, Security.Violation) as exc:
            log.Log("Unable to open logfile due to '{ex}'".format(ex=exc),
                    log.ERROR)
            return 1
    log.ErrorLog.open(Time.getcurtimestr(),
                      compress=self.values.compression)

    (select_opts, select_data) = selection.get_prepared_selections(
        self.values.selections)
    self.dir.set_select(select_opts, select_data)
    self._warn_if_infinite_recursion(self.dir.base_dir, self.repo.base_dir)

    return 0
def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Context manager interface to exit with-as context.

    Returns False to propagate potential exception, else True.
    """
    self.log("Cleaning up", self.log.INFO)
    self.errlog.close()
    self.log.close_logfile()
    # a server process doesn't own the connections, so leave them open
    if self.security != "server":
        SetConnections.CloseConnections()
    return False
def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Context manager interface to exit with-as context.

    Returns False to propagate potential exception, else True.
    """
    log.Log("Cleaning up", log.INFO)
    if hasattr(self, 'repo'):
        self.repo.exit()
    # only the client side tears down logging and connections
    if self.security != "server":
        log.ErrorLog.close()
        log.Log.close_logfile()
        SetConnections.CloseConnections()
    return False
def setup(self):
    """
    Prepare the execution of the action.
    """
    # Set default change ownership flag, umask, relay regexps
    os.umask(0o77)
    Time.setcurtime(Globals.current_time)
    SetConnections.UpdateGlobal("client_conn", Globals.local_connection)
    Globals.postset_regexp('no_compression_regexp',
                           Globals.no_compression_regexp_string)
    for connection in Globals.connections:
        connection.robust.install_signal_handlers()
        connection.Hardlink.initialize_dictionaries()
    return 0
def pre_check(self):
    """Validate that all given locations are remote; returns error code."""
    return_code = super().pre_check()
    for location in self.values.locations:
        (file_host, file_path, err) = SetConnections.parse_location(location)
        if err:
            self.log(err, self.log.ERROR)
            return_code |= 1  # binary 'or' to always get 1
            continue
        if not file_host:
            self.log(
                "Only remote locations can be tested but '{loc}' "
                "isn't remote.".format(loc=location), self.log.ERROR)
            return_code |= 1  # binary 'or' to always get 1
    return return_code
def pre_check(self):
    """Validate that all given locations are remote; returns error code."""
    return_code = super().pre_check()
    for location in self.values.locations:
        (file_host, file_path, err) = SetConnections.parse_location(location)
        if err:
            log.Log(err, log.ERROR)
            return_code |= Globals.RET_CODE_ERR
            continue
        if not file_host:
            log.Log(
                "Only remote locations can be tested but location "
                "'{lo}' isn't remote".format(lo=location), log.ERROR)
            return_code |= Globals.RET_CODE_ERR
    return return_code
def setup(self):
    """
    Prepare the backup: set up source and target locations, connections,
    quoting, user/group mapping, logging and file selection.

    Returns 0 on success, a non-zero error code else.
    """
    # in setup we return as soon as we detect an issue to avoid changing
    # too much
    return_code = super().setup()
    if return_code != 0:
        return return_code

    return_code = self.source.setup()
    if return_code != 0:
        return return_code

    return_code = self.target.setup()
    if return_code != 0:
        return return_code

    # TODO validate how much of the following lines and methods
    # should go into the directory/repository modules
    SetConnections.BackupInitConnections(self.source.base_dir.conn,
                                         self.target.base_dir.conn)
    self.target.base_dir.conn.fs_abilities.backup_set_globals(
        self.source.base_dir, self.values.force)
    self.target.init_quoting(self.values.chars_to_quote)
    self._init_user_group_mapping(self.target.base_dir.conn)

    # refuse to backup if the previous session appears to be in the future
    previous_time = self.target.get_mirror_time()
    if previous_time >= Time.curtime:
        self.log("The last backup is not in the past. Aborting.",
                 self.log.ERROR)
        return 1

    if self.log.verbosity > 0:
        try:  # the target repository must be writable
            self.log.open_logfile(
                self.target.data_dir.append("backup.log"))
        except (log.LoggerError, Security.Violation) as exc:
            self.log(
                "Unable to open logfile due to '{exc}'".format(exc=exc),
                self.log.ERROR)
            return 1
    # TODO could we get rid of the error log?
    self.errlog.open(Time.curtimestr, compress=self.values.compression)

    (select_opts, select_data) = selection.get_prepared_selections(
        self.values.selections)
    self.source.set_select(select_opts, select_data)
    self._warn_if_infinite_recursion(self.source.base_dir,
                                     self.target.base_dir)

    return 0
def setup(self):
    """
    Create the repository if needed, register it globally and select the
    shadow repository implementation (local or remote) for API >= 201.

    Returns 0 on success, 1 on failure.
    """
    if self.must_be_writable and not self._create():
        return 1

    if (self.can_be_sub_path
            and self.base_dir.conn is Globals.local_connection):
        Security.reset_restrict_path(self.base_dir)

    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200

    if Globals.get_api_version() >= 201:  # compat200
        if self.base_dir.conn is Globals.local_connection:
            # should be more efficient than going through the connection
            from rdiffbackup.locations import _repo_shadow
            self._shadow = _repo_shadow.ShadowRepo
        else:
            self._shadow = self.base_dir.conn._repo_shadow.ShadowRepo

    return 0  # all is good
def test_rorpiter_xfer(self):
    """Test if hashes are transferred in files, rorpiter"""
    Globals.security_level = 'override'
    # spawn a local server subprocess to act as the remote end
    conn = SetConnections.init_connection(
        b'%b %b/server.py' % (os.fsencode(sys.executable), abs_testing_dir))
    assert conn.reval("lambda x: x+1", 4) == 5  # connection sanity check

    # transfer a single wrapped file and verify its hash survives the trip
    fp = hash.FileWrapper(io.BytesIO(self.s1.encode()))
    conn.Globals.set('tmp_file', fp)
    fp_remote = conn.Globals.get('tmp_file')
    assert fp_remote.read() == self.s1.encode()
    assert fp_remote.close().sha1_digest == self.s1_hash

    # Tested xfer of file, now test xfer of files in rorpiter
    root = MakeOutputDir()
    rp1 = root.append('s1')
    rp1.write_string(self.s1)
    rp2 = root.append('s2')
    rp2.write_string(self.s2)
    rp1.setfile(hash.FileWrapper(rp1.open('rb')))
    rp2.setfile(hash.FileWrapper(rp2.open('rb')))

    rpiter = iter([rp1, rp2])
    conn.Globals.set('tmp_conn_iter', rpiter)
    remote_iter = conn.Globals.get('tmp_conn_iter')

    rorp1 = next(remote_iter)
    fp = hash.FileWrapper(rorp1.open('rb'))
    read_s1 = fp.read().decode()
    assert read_s1 == self.s1, "Read string 1 %s isn't the same as written string %s" % (
        read_s1, self.s1)
    ret_val = fp.close()
    # close() must return a hash.Report carrying the sha1 of the content
    assert isinstance(ret_val, hash.Report), ret_val
    assert ret_val.sha1_digest == self.s1_hash
    rorp2 = next(remote_iter)
    fp2 = hash.FileWrapper(rorp2.open('rb'))
    read_s2 = fp2.read().decode()
    assert read_s2 == self.s2, "Read string 2 %s isn't the same as written string %s" % (
        read_s2, self.s2)
    assert fp2.close().sha1_digest == self.s2_hash

    conn.quit()
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main.force = 1
    Main.restore_root_set = 0
    remote_schema = '%s'
    Globals.security_level = "override"
    # _reset_connections()
    if not mirror_local:
        mirror_dir = "cd test1; python ../server.py ../%s::../%s" % \
            (SourceDir, mirror_dir)
    if not dest_local:
        dest_dir = "cd test2/tmp; python ../../server.py ../../%s::../../%s" \
            % (SourceDir, dest_dir)

    cmdpairs = SetConnections.get_cmd_pairs([mirror_dir, dest_dir],
                                            remote_schema)
    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = map(SetConnections.cmdpair2rp, cmdpairs)
    # propagate EA/ACL test settings to every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching a second time
        Main.Restore(inc, dest_rp)
    else:  # use alternate syntax
        Main.restore_timestr = str(time)
        Main.Restore(mirror_rp, dest_rp, restore_as_of=1)
    Main.cleanup()
def get_prefix_and_conn(self, path, return_path):
    """Return (prefix, connection) tuple"""
    if not path:
        # no server for this end; handle it in the local process
        return ("./", Globals.local_connection)
    return (return_path,
            SetConnections.init_connection("./chdir-wrapper " + path))