def syncLocalDir(self, local_dir, verbose=False):
    if os.path.exists(local_dir):
        if not os.path.isdir(local_dir):
            sys.exit("'%s' already exists and is not a directory => EXIT." %
                     local_dir)
    else:
        os.mkdir(local_dir)
    oldcwd = os.getcwd()
    os.chdir(local_dir)
    if self.host == None:
        # self is a local dir too
        cmd = "%s -rlptz %s/ %s" % (self.rsync_cmd, self.path, '.')
    else:
        # self is a remote dir
        cmd = "%s -rlptz %s/ %s" % (self.rsync_rsh_cmd,
                                    self.get_full_remote_path(), '.')
    if verbose:
        print "BBS> Syncing local '%s' with %s" % (local_dir, self.label)
    jobs.tryHardToRunJob(cmd, 3, None, 1800.0, 60.0, verbose)
    ## Workaround a strange problem observed so far on Windows Server
    ## 2008 R2 Enterprise (64-bit) only. After running rsync (from Cygwin)
    ## on this machine to sync a local folder, the local filesystem seems
    ## to be left in a state that confuses the 'tar' command (from Cygwin
    ## or Rtools) i.e. 'tar zcvf ...' fails when trying to store all or
    ## part of the local folder into a tarball complaining that for some
    ## subfolders "file changed as we read it".
    ## Traversing the entire local folder, with e.g. a call to
    ## 'chmod a+r . -R', seems to "fix" the state of the filesystem and
    ## to make 'tar' work again on it.
    if sys.platform == "win32":
        cmd = "chmod a+r . -R"  # from Cygwin (or Rtools)
        jobs.runJob(cmd, None, 300.0, verbose)
    os.chdir(oldcwd)
    return

def Put(self, src_path, failure_is_fatal=True, verbose=False):
    if sys.platform == "win32" and os.path.exists(src_path) \
       and os.path.isfile(src_path):
        #os.chmod(src_path, 0644)  # This doesn't work
        ## This works better but requires Cygwin.
        cmd = "chmod +r " + src_path
        jobs.runJob(cmd, None, 60.0, verbose)
    if self.host == None:
        # self is a local dir
        cmd = "%s %s %s" % (self.rsync_cmd, src_path, self.path)
    else:
        # self is a remote dir
        cmd = "%s %s %s" % (self.rsync_rsh_cmd, src_path,
                            self.get_full_remote_path())
    maxtime = 120.0 + \
        fileutils.total_size(src_path) / bandwidth_in_bytes_per_sec
    if verbose:
        if self.host != None:
            action = "Putting"
        else:
            action = "Copying"
        print "BBS> %s %s in %s/:" % (action, src_path, self.label)
    jobs.tryHardToRunJob(cmd, 5, None, maxtime, 30.0, failure_is_fatal,
                         verbose)
    return

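## The 'maxtime' passed to tryHardToRunJob() above scales the timeout with
## the size of what is being transferred. Below is a minimal standalone
## sketch of the same idea (the 120.0 s floor and the bandwidth-based term
## mirror the code above; 'estimate_transfer_timeout' is a hypothetical
## helper for illustration only, not part of BBS or of its fileutils module):

import os

def estimate_transfer_timeout(src_path, bandwidth_in_bytes_per_sec,
                              floor=120.0):
    ## Total size in bytes of a single file or of a whole directory tree.
    if os.path.isfile(src_path):
        total_size = os.path.getsize(src_path)
    else:
        total_size = sum(os.path.getsize(os.path.join(dirpath, name))
                         for dirpath, dirnames, filenames in os.walk(src_path)
                         for name in filenames)
    ## Fixed overhead plus the time the transfer is expected to take at the
    ## assumed bandwidth.
    return floor + total_size / bandwidth_in_bytes_per_sec
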
def set_readable_flag(path, verbose=False):
    if sys.platform == "win32" and \
       os.path.exists(path) and \
       os.path.isfile(path):
        #os.chmod(path, 0644)  # this doesn't work
        # This works better but requires Cygwin:
        cmd = "chmod +r " + path
        jobs.runJob(cmd, None, 60.0, verbose)
    return

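## set_readable_flag() relies on BBS's jobs.runJob() to invoke Cygwin's
## 'chmod'. A rough standalone equivalent using only the standard library
## (a sketch, assuming 'chmod' from Cygwin or Rtools is on the PATH;
## 'set_readable_flag_standalone' is hypothetical and this is not how
## jobs.runJob() itself is implemented):

import os
import subprocess
import sys

def set_readable_flag_standalone(path, timeout=60.0):
    if sys.platform == "win32" and os.path.isfile(path):
        ## os.chmod() is reported above not to have the desired effect here,
        ## so shell out to Cygwin's chmod instead.
        subprocess.run(["chmod", "+r", path], check=True, timeout=timeout)
    return
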
def syncLocalDir(self, local_dir, verbose=False):
    if os.path.exists(local_dir):
        if not os.path.isdir(local_dir):
            sys.exit("'%s' already exists but is not a directory => EXIT." %
                     local_dir)
    else:
        os.mkdir(local_dir)
    oldcwd = os.getcwd()
    os.chdir(local_dir)
    if self.host == None or self.host == 'localhost':
        # self is a local dir too
        rsync_cmd = self.rsync_cmd
        src = self.path
    else:
        # self is a remote dir
        rsync_cmd = self.rsync_rsh_cmd
        src = self.get_full_remote_path()
    rsync_options = self.rsync_options
    if sys.platform == "win32":
        # Transform symlink into referent file/dir (-L)
        rsync_options += ' -rLptz'
    else:
        # Copy symlinks as symlinks (-l)
        rsync_options += ' -rlptz'
    cmd = "%s %s %s/ %s" % (rsync_cmd, rsync_options, src, '.')
    if verbose:
        print("BBS> Syncing local '%s' with %s" % (local_dir, self.label))
    ## This can take a veeeeeeeeery long time on Windows!
    jobs.tryHardToRunJob(cmd, 3, None, 2400.0, 30.0, True, verbose)
    ## Workaround a strange problem observed so far on Windows Server
    ## 2008 R2 Enterprise (64-bit) only. After running rsync (from Cygwin)
    ## on this machine to sync a local folder, the local filesystem seems
    ## to be left in a state that confuses the 'tar' command (from Cygwin
    ## or Rtools) i.e. 'tar zcvf ...' fails when trying to store all or
    ## part of the local folder into a tarball complaining that for some
    ## subfolders "file changed as we read it".
    ## Traversing the entire local folder, with e.g. a call to
    ## 'chmod a+r . -R', seems to "fix" the state of the filesystem and
    ## to make 'tar' work again on it.
    if sys.platform == "win32":
        cmd = "chmod a+r . -R"  # from Cygwin (or Rtools)
        ## This can time out on Azure VMs palomino or palomino2 when
        ## syncing the local meat folder with central MEAT0 if 'maxtime'
        ## is set to 5 min so now we allow 10 min.
        jobs.runJob(cmd, None, 600.0, verbose)
    os.chdir(oldcwd)
    return

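## A rough standalone sketch of the rsync call that syncLocalDir() builds
## above, showing the platform-dependent symlink handling (-L on Windows,
## -l elsewhere). It assumes a plain 'rsync' executable is on the PATH and
## that 'src' is either a local path or a 'user@host:/path' spec;
## 'sync_dir_standalone' is hypothetical and BBS itself goes through
## self.rsync_cmd / self.rsync_rsh_cmd and jobs.tryHardToRunJob() instead:

import subprocess
import sys

def sync_dir_standalone(src, dest=".", extra_options=(), timeout=2400.0):
    if sys.platform == "win32":
        ## Transform symlinks into their referent file/dir (-L).
        options = ["-rLptz"]
    else:
        ## Copy symlinks as symlinks (-l).
        options = ["-rlptz"]
    ## Trailing slash on 'src' copies the directory's contents, not the
    ## directory itself, as in the cmd string built above.
    cmd = ["rsync"] + list(extra_options) + options + [src + "/", dest]
    subprocess.run(cmd, check=True, timeout=timeout)
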
def Put(self, src_path, verbose=False):
    if sys.platform == "win32" and os.path.exists(src_path) \
       and os.path.isfile(src_path):
        #os.chmod(src_path, 0644)  # This doesn't work
        ## This works better but requires Cygwin.
        cmd = "chmod +r " + src_path
        jobs.runJob(cmd, None, 60.0, verbose)
    if self.host == None:
        # self is a local dir
        cmd = "%s %s %s" % (self.rsync_cmd, src_path, self.path)
    else:
        # self is a remote dir
        cmd = "%s %s %s" % (self.rsync_rsh_cmd, src_path,
                            self.get_full_remote_path())
    maxtime = 60.0 + \
        fileutils.total_size(src_path) / bandwidth_in_bytes_per_sec
    if verbose:
        if self.host != None:
            action = "Putting"
        else:
            action = "Copying"
        print "BBS> %s %s in %s/:" % (action, src_path, self.label)
    jobs.tryHardToRunJob(cmd, 5, None, maxtime, 30.0, verbose)
    return