def InternalBackup(source_local, dest_local, src_dir, dest_dir,
                   current_time=None, eas=None, acls=None):
    """Backup src_dir to dest_dir internally.

    This is like rdiff_backup but instead of running a separate
    rdiff-backup script, use the separate *.py files.  This way the
    script doesn't have to be rebuild constantly, and stacktraces have
    correct line/file references.

    source_local/dest_local -- when false, the respective directory is
        wrapped into a server.py invocation to exercise the remote
        (piped connection) code path.
    current_time -- optional fixed "now" for the backup session.
    eas/acls -- value pushed to the extended-attribute / ACL globals on
        every connection (None leaves auto-detection alone).
    """
    Globals.current_time = current_time
    Globals.security_level = "override"
    remote_schema = '%s'

    if not source_local:
        src_dir = "cd test1; python ../server.py ../%s::../%s" % \
            (SourceDir, src_dir)
    if not dest_local:
        dest_dir = "cd test2/tmp; python ../../server.py ../../%s::../../%s" \
            % (SourceDir, dest_dir)

    cmdpairs = SetConnections.get_cmd_pairs([src_dir, dest_dir],
                                            remote_schema)
    Security.initialize("backup", cmdpairs)
    # list(...) for consistency with the other Internal* helpers
    rpin, rpout = list(map(SetConnections.cmdpair2rp, cmdpairs))
    # propagate EA and ACL settings to every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([rpin, rpout])
    Main.Backup(rpin, rpout)
    Main.cleanup()
def connect(self):
    """Delegate to the parent's connect, then apply access restrictions.

    Security is initialized with the configured restrict mode and path
    only when the parent connection attempt reports success; the parent's
    return value is passed through unchanged.
    """
    result = super().connect()
    if not result:
        return result
    # connection established: enforce the user-requested restrictions
    Security.initialize(
        self.get_security_class(),
        [],
        security_level=self.values.restrict_mode,
        restrict_path=self.values.restrict_path,
    )
    return result
def connect(self):
    """
    Connect to potentially provided locations arguments, remote or local.

    Defines the current time as being the time of a potentially upcoming
    backup.
    Returns self, to be used as context manager.
    """
    if 'locations' not in self.values:
        Security.initialize(self.get_security_class(), [])
        self.connected_locations = []
    else:
        # TODO encapsulate the following lines into one
        # connections/connections_mgr construct, so that the action doesn't
        # need to care about cmdpairs and Security (which would become a
        # feature of the connection).
        pairs = SetConnections.get_cmd_pairs(
            self.values.locations,
            remote_schema=self.remote_schema,
            ssh_compression=self.values.ssh_compression,
            remote_tempdir=self.remote_tempdir,
            term_verbosity=log.Log.term_verbosity)
        Security.initialize(self.get_security_class(), pairs)
        self.connected_locations = [
            SetConnections.get_connected_rpath(pair) for pair in pairs]

    # once the connection is set, we can define "now" as being the current
    # time, unless the user defined a fixed a current time.
    Time.set_current_time(self.values.current_time)

    return self
def InternalBackup(source_local, dest_local, src_dir, dest_dir,
                   current_time=None, eas=None, acls=None):
    """Backup src_dir to dest_dir internally.

    This is like rdiff_backup but instead of running a separate
    rdiff-backup script, use the separate *.py files.  This way the
    script doesn't have to be rebuild constantly, and stacktraces have
    correct line/file references.
    """
    Globals.current_time = current_time
    Globals.security_level = "override"
    pairs = _internal_get_cmd_pairs(source_local, dest_local,
                                    src_dir, dest_dir)
    Security.initialize("backup", pairs)
    rpin, rpout = [SetConnections.cmdpair2rp(pair) for pair in pairs]
    # push EA and ACL settings onto every connection
    for value, attrs in (
            (eas, ('eas_active', 'eas_write', 'eas_conn')),
            (acls, ('acls_active', 'acls_write', 'acls_conn'))):
        for attr in attrs:
            SetConnections.UpdateGlobal(attr, value)
    Main._misc_setup([rpin, rpout])
    Main._action_backup(rpin, rpout)
    Main._cleanup()
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time.

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main._force = 1
    Main._restore_root_set = 0
    Globals.security_level = "override"
    cmdpairs = _internal_get_cmd_pairs(mirror_local, dest_local,
                                       mirror_dir, dest_dir)
    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = list(map(SetConnections.cmdpair2rp, cmdpairs))
    # push EA and ACL settings onto every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main._misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching again
        Main._action_restore(inc, dest_rp)
    else:  # use alternate syntax
        Main._restore_timestr = str(time)
        Main._action_restore(mirror_rp, dest_rp, restore_as_of=1)
    Main._cleanup()
def setup(self):
    """Run the parent setup, then prepare this location for use.

    On success, lifts the path restriction for a local base directory and
    publishes the data directory as 'rbdir' (compat200).  Returns 0 on
    success, else the parent's non-zero return code.
    """
    status = super().setup()
    if status != 0:
        return status
    is_local = self.base_dir.conn is Globals.local_connection
    if is_local:
        Security.reset_restrict_path(self.base_dir)
    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200
    return 0  # all is good
def setup(self, src_dir=None, owners_map=None):
    """Prepare the repository for use, possibly paired with a source dir.

    src_dir -- source location of a backup, or None when the repository
        is used on its own (restore, regress, ...).
    owners_map -- keyword arguments forwarded to init_owners_mapping,
        or None to skip owner mapping.

    Returns 0 on success, 1 (or a helper's non-zero code) on failure.
    NOTE(review): reconstructed from mangled whitespace — confirm which
    trailing statements sit inside the API >= 201 branch.
    """
    # a writable repository must exist (or be creatable) before anything else
    if self.must_be_writable and not self._create():
        return 1
    if (self.can_be_sub_path
            and self.base_dir.conn is Globals.local_connection):
        Security.reset_restrict_path(self.base_dir)
    Globals.set_all('rbdir', self.data_dir)  # compat200
    if Globals.get_api_version() >= 201:  # compat200
        if self.base_dir.conn is Globals.local_connection:
            # should be more efficient than going through the connection
            from rdiffbackup.locations import _repo_shadow
            self._shadow = _repo_shadow.RepoShadow
        else:
            self._shadow = self.base_dir.conn._repo_shadow.RepoShadow
        # probe the filesystem with the access mode the action requires
        if self.must_be_writable:
            self.fs_abilities = self._shadow.get_fs_abilities_readwrite(
                self.base_dir)
        else:
            self.fs_abilities = self._shadow.get_fs_abilities_readonly(
                self.base_dir)
        if not self.fs_abilities:
            return 1  # something was wrong
        else:
            log.Log("--- Repository file system capabilities ---\n"
                    + str(self.fs_abilities), log.INFO)
        if src_dir is None:
            self.remote_transfer = None  # just in case
            ret_code = fs_abilities.SingleRepoSetGlobals(self)()
            if ret_code != 0:
                return ret_code
        else:
            # FIXME this shouldn't be necessary, and the setting of variable
            # across the connection should happen through the shadow
            Globals.set_all("backup_writer", self.base_dir.conn)
            self.base_dir.conn.Globals.set_local("isbackup_writer", True)
            # this is the new way, more dedicated but not sufficient yet
            self.remote_transfer = (src_dir.base_dir.conn
                                    is not self.base_dir.conn)
            ret_code = fs_abilities.Dir2RepoSetGlobals(src_dir, self)()
            if ret_code != 0:
                return ret_code
    self.setup_quoting()
    self.setup_paths()
    if owners_map is not None:
        ret_code = self.init_owners_mapping(**owners_map)
        if ret_code != 0:
            return ret_code
    return 0  # all is good
def setup(self):
    """Prepare the location: create it if required and publish 'rbdir'.

    A writable location that cannot be created is an error (returns 1).
    For a local base directory that may be a sub-path, the security path
    restriction is lifted.  Returns 0 on success.
    """
    needs_creation = self.must_be_writable
    if needs_creation and not self._create():
        return 1
    is_local = self.base_dir.conn is Globals.local_connection
    if self.can_be_sub_path and is_local:
        Security.reset_restrict_path(self.base_dir)
    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200
    return 0  # all is good
def setup(self):
    """Prepare the repository: create it if required, publish 'rbdir',
    and select the shadow implementation for API >= 201.

    Returns 1 when a writable repository cannot be created, else 0.
    """
    if self.must_be_writable and not self._create():
        return 1
    is_local = self.base_dir.conn is Globals.local_connection
    if self.can_be_sub_path and is_local:
        Security.reset_restrict_path(self.base_dir)
    SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200
    if Globals.get_api_version() >= 201:  # compat200
        if is_local:
            # a direct import avoids going through the connection
            from rdiffbackup.locations import _repo_shadow
            self._shadow = _repo_shadow.ShadowRepo
        else:
            self._shadow = self.base_dir.conn._repo_shadow.ShadowRepo
    return 0  # all is good
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time.

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main.force = 1
    Main.restore_root_set = 0
    remote_schema = b'%s'
    Globals.security_level = "override"
    # wrap non-local directories into a server.py invocation so the
    # remote (piped connection) code path is exercised
    if not mirror_local:
        mirror_dir = b"cd %s; %s/server.py::%s" % (abs_remote1_dir,
                                                   abs_testing_dir,
                                                   mirror_dir)
    if not dest_local:
        dest_dir = b"cd %s; %s/server.py::%s" % (abs_remote2_dir,
                                                 abs_testing_dir, dest_dir)

    cmdpairs = SetConnections.get_cmd_pairs([mirror_dir, dest_dir],
                                            remote_schema)
    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = list(map(SetConnections.cmdpair2rp, cmdpairs))
    # push EA and ACL settings onto every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching again
        Main.Restore(inc, dest_rp)
    else:  # use alternate syntax
        Main.restore_timestr = str(time)
        Main.Restore(mirror_rp, dest_rp, restore_as_of=1)
    Main.cleanup()
def connect(self):
    """
    Connect to potentially provided locations arguments, remote or local.

    Returns self, to be used as context manager.
    """
    if 'locations' not in self.values:
        self.connected_locations = []
        return self
    # TODO encapsulate the following lines into one
    # connections/connections_mgr construct, so that the action doesn't
    # need to care about cmdpairs and Security (which would become a
    # feature of the connection).
    pairs = SetConnections.get_cmd_pairs(
        self.values.locations,
        remote_schema=self.remote_schema,
        ssh_compression=self.values.ssh_compression)
    Security.initialize(self.get_security_class(), pairs)
    self.connected_locations = [
        SetConnections.get_connected_rpath(pair) for pair in pairs]
    return self
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time,
                    eas=None, acls=None):
    """Restore mirror_dir to dest_dir at given time.

    This will automatically find the increments.XXX.dir representing the
    time specified.  The mirror_dir and dest_dir are relative to the
    testing directory and will be modified for remote trials.
    """
    Main.force = 1
    Main.restore_root_set = 0
    remote_schema = '%s'
    Globals.security_level = "override"
    # wrap non-local directories into a server.py invocation so the
    # remote (piped connection) code path is exercised
    if not mirror_local:
        mirror_dir = "cd test1; python ../server.py ../%s::../%s" % \
            (SourceDir, mirror_dir)
    if not dest_local:
        dest_dir = "cd test2/tmp; python ../../server.py ../../%s::../../%s" \
            % (SourceDir, dest_dir)

    cmdpairs = SetConnections.get_cmd_pairs([mirror_dir, dest_dir],
                                            remote_schema)
    Security.initialize("restore", cmdpairs)
    mirror_rp, dest_rp = map(SetConnections.cmdpair2rp, cmdpairs)
    # push EA and ACL settings onto every connection
    for attr in ('eas_active', 'eas_write', 'eas_conn'):
        SetConnections.UpdateGlobal(attr, eas)
    for attr in ('acls_active', 'acls_write', 'acls_conn'):
        SetConnections.UpdateGlobal(attr, acls)
    Main.misc_setup([mirror_rp, dest_rp])
    inc = get_increment_rp(mirror_rp, time)
    if inc:
        # reuse the increment already found instead of searching again
        Main.Restore(inc, dest_rp)
    else:  # use alternate syntax
        Main.restore_timestr = str(time)
        Main.Restore(mirror_rp, dest_rp, restore_as_of=1)
    Main.cleanup()
def setup(self, src_dir=None, owners_map=None):
    """Prepare the repository for use, possibly paired with a source dir.

    src_dir -- source location of a backup, or None when the repository
        is used on its own (restore, regress, ...).
    owners_map -- keyword arguments forwarded to init_owners_mapping,
        or None to skip owner mapping.

    Returns Globals.RET_CODE_OK on success, Globals.RET_CODE_ERR (or a
    helper's non-zero code) on failure.
    NOTE(review): reconstructed from mangled whitespace — confirm which
    trailing statements sit inside the API >= 201 branch.
    """
    # a writable repository must exist (or be creatable) before anything else
    if self.must_be_writable and not self._create():
        return Globals.RET_CODE_ERR
    if (self.can_be_sub_path
            and self.base_dir.conn is Globals.local_connection):
        Security.reset_restrict_path(self.base_dir)
    Globals.set_all('rbdir', self.data_dir)  # compat200
    if Globals.get_api_version() >= 201:  # compat200
        if self.base_dir.conn is Globals.local_connection:
            # should be more efficient than going through the connection
            from rdiffbackup.locations import _repo_shadow
            self._shadow = _repo_shadow.RepoShadow
        else:
            self._shadow = self.base_dir.conn._repo_shadow.RepoShadow
        # a held lock means another action is running on this repository;
        # --force downgrades the error to a warning at the user's risk
        if not self.lock():
            if self.force:
                log.Log(
                    "Repository is locked by file {lf}, another "
                    "action is probably on-going. Enforcing anyway "
                    "at your own risk".format(lf=self.lockfile),
                    log.WARNING)
            else:
                log.Log(
                    "Repository is locked by file {lf}, another "
                    "action is probably on-going. Either wait, remove "
                    "the lock or use the --force option".format(
                        lf=self.lockfile), log.ERROR)
                return Globals.RET_CODE_ERR
        # probe the filesystem with the access mode the action requires
        if self.must_be_writable:
            self.fs_abilities = self._shadow.get_fs_abilities_readwrite(
                self.base_dir)
        else:
            self.fs_abilities = self._shadow.get_fs_abilities_readonly(
                self.base_dir)
        if not self.fs_abilities:
            return Globals.RET_CODE_ERR
        else:
            log.Log("--- Repository file system capabilities ---\n"
                    + str(self.fs_abilities), log.INFO)
        if src_dir is None:
            self.remote_transfer = None  # just in case
            ret_code = fs_abilities.SingleRepoSetGlobals(self)()
            if ret_code != 0:
                return ret_code
        else:
            # FIXME this shouldn't be necessary, and the setting of variable
            # across the connection should happen through the shadow
            Globals.set_all("backup_writer", self.base_dir.conn)
            self.base_dir.conn.Globals.set_local("isbackup_writer", True)
            # this is the new way, more dedicated but not sufficient yet
            self.remote_transfer = (src_dir.base_dir.conn
                                    is not self.base_dir.conn)
            ret_code = fs_abilities.Dir2RepoSetGlobals(src_dir, self)()
            if ret_code != 0:
                return ret_code
    self.setup_quoting()
    self.setup_paths()
    if owners_map is not None:
        ret_code = self.init_owners_mapping(**owners_map)
        if ret_code != 0:
            return ret_code
    return Globals.RET_CODE_OK