def _submit_job(self):
    exec_scheme, exec_parts = uriparse(self.task.job.exec_backend)
    working_scheme, working_parts = uriparse(self.working_output_dir_uri())
    self.working_dir = working_parts.path
    self.submission_script_name = self.executer.generate_remote_script_name()
    self.task.job_identifier = self.submission_script_name
    self.task.save()
    self.task_logger.info("Creating submission script %s" % self.submission_script_name)
    self.submission_script_body = self.get_submission_script(exec_parts.hostname, self.working_dir)
    wrapper_script = self._get_submission_wrapper_script()
    self.task_logger.info("Executing script:\n\n%s" % wrapper_script)
    exit_code, stdout, stderr = self.executer.exec_script(wrapper_script)
    result = self.parser.parse_sub(exit_code, stdout, stderr)
    if result.status != result.JOB_SUBMITTED:
        self.task_logger.error("Yabi Task Name = %s" % self._yabi_task_name())
        self.task_logger.error("Submission script name = %s" % self.submission_script_name)
        self.task_logger.error("Submission script body = %s" % self.submission_script_body)
        self.task_logger.error("stderr:\n")
        lines = "\n".join(stderr)
        self.task_logger.error(lines)
    return result
def submit_task(self):
    """
    For local exec, submitting a task executes the task and blocks the
    current process. It is not intended for large-scale real-world usage.
    """
    exec_scheme, exec_parts = uriparse(self.task.job.exec_backend)
    working_scheme, working_parts = uriparse(self.working_output_dir_uri())
    script = self.get_submission_script(exec_parts.hostname, working_parts.path)
    logger.debug('script {0}'.format(script))
    script_name = self.create_script(script)
    if os.path.exists(working_parts.path):
        shutil.rmtree(working_parts.path)
    os.makedirs(working_parts.path)
    try:
        stdout = open(os.path.join(working_parts.path, 'STDOUT.txt'), 'w')
        stderr = open(os.path.join(working_parts.path, 'STDERR.txt'), 'w')
        logger.debug('Running in {0}'.format(working_parts.path))
        args = shlex.split(self.task.command.encode('utf-8'))

        def set_remote_id(pid):
            self.task.remote_id = pid
            self.task.save()

        args = [script_name]
        status = blocking_execute(args=args, stderr=stderr, stdout=stdout,
                                  cwd=working_parts.path,
                                  report_pid_callback=set_remote_id)
        if status != 0:
            if self.is_aborting():
                return None
            logger.error('Non zero exit status [{0}]'.format(status))
            raise RetryException('Local exec of command "{0}" returned non-zero code {1}'.format(" ".join(args), status))
    except Exception as exc:
        raise RetryException(exc)
    finally:
        try:
            stdout.close()
            stderr.close()
        except Exception as exc:
            logger.error(exc)
        try:
            os.unlink(script_name)
        except:
            logger.exception("Couldn't delete script file %s", script_name)
    return status
def __init__(self, *args, **kwargs):
    ret = Task.__init__(self, *args, **kwargs)
    # basic stuff used by both stagein types
    self.fsscheme, self.fsbackend_parts = uriparse(self.job.fs_backend)
    self.execscheme, self.execbackend_parts = uriparse(self.job.exec_backend)
    return ret
def local_copy(self, src_uri, dst_uri): """A local copy within this backend.""" logger.debug('local_copy {0} -> {1}'.format(src_uri, dst_uri)) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) try: shutil.copy2(src_parts.path, dst_parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def symbolic_link(self, target_uri, link_uri): """symbolic link to target_uri called link_uri.""" logger.debug('symbolic_link {0} -> {1}'.format(target_uri, link_uri)) target_scheme, target_parts = uriparse(target_uri) link_scheme, link_parts = uriparse(link_uri) target = target_parts.path try: if not os.path.exists(target): raise FileNotFoundError("Source of symbolic link '%s' doesn't exist" % target) os.symlink(target, link_parts.path) except OSError as ose: raise RetryException(ose, traceback.format_exc())
def symbolic_link(self, src_uri, dst_uri): """symbolic link to target_uri called link_uri.""" logger.debug("SFTPBackend.symbolic_link: %s => %s", src_uri, dst_uri) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) logger.debug('{0} -> {1}'.format(src_uri, dst_uri)) executer = create_executer(self.yabiusername, src_uri) try: executer.local_symlink(src_parts.path, dst_parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def symbolic_link(self, src_uri, dst_uri): """symbolic link to target_uri called link_uri.""" logger.debug("SFTPBackend.symbolic_link: %s => %s", src_uri, dst_uri) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) logger.debug("{0} -> {1}".format(src_uri, dst_uri)) executer = create_executer(self.yabiusername, src_uri) try: executer.local_symlink(src_parts.path, dst_parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def local_copy(self, src_uri, dst_uri): """Copy src_uri to dst_uri on the remote backend""" logger.debug("SFTPBackend.local_copy: %s => %s", src_uri, dst_uri) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) logger.debug("{0} -> {1}".format(src_uri, dst_uri)) # Given paramiko does not support local copy, we # use cp on server via exec backend executer = create_executer(self.yabiusername, src_uri) try: executer.local_copy(src_parts.path, dst_parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def local_copy(self, src_uri, dst_uri): """Copy src_uri to dst_uri on the remote backend""" logger.debug("SFTPBackend.local_copy: %s => %s", src_uri, dst_uri) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) logger.debug('{0} -> {1}'.format(src_uri, dst_uri)) # Given paramiko does not support local copy, we # use cp on server via exec backend executer = create_executer(self.yabiusername, src_uri) try: executer.local_copy(src_parts.path, dst_parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def local_copy_recursive(self, src_uri, dst_uri): """A local copy within this backend.""" logger.debug('local_copy {0} -> {1}'.format(src_uri, dst_uri)) src_scheme, src_parts = uriparse(src_uri) dst_scheme, dst_parts = uriparse(dst_uri) try: for item in os.listdir(src_parts.path): src = os.path.join(src_parts.path, item) dst = os.path.join(dst_parts.path, item) if os.path.isdir(src): shutil.copytree(src, dst) else: shutil.copy2(src, dst) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def get_exec_backendcredential_for_uri(yabiusername, uri): """ Looks up a backend credential based on the supplied uri, which should include a username. Returns bc, will log and reraise ObjectDoesNotExist and MultipleObjectsReturned exceptions if more than one credential """ logger.debug('yabiusername: %s uri: %s' % (yabiusername, uri)) # parse the URI into chunks schema, rest = uriparse(uri) # enforce Exec scehmas only from ..backend import ExecBackend if not ExecBackend.get_backend_cls_for_scheme(schema): logger.error("get_exec_backendcredential_for_uri was asked to get an fs schema! This is forbidden.") raise ValueError("Invalid schema in uri passed to get_exec_backendcredential_for_uri: asked for %s" % schema) path = rest.path if path != "/": logger.error("get_exec_backendcredential_for_uri was passed a uri with a path! This is forbidden. Path must be / for exec backends") raise ValueError("Invalid path in uri passed to get_exec_backendcredential_for_uri: path passed in was: %s" % path) # get our set of credential candidates bcs = _get_credential_candidates(yabiusername, schema, rest.username, rest.hostname) if len(bcs) == 0: raise ObjectDoesNotExist("Could not find backendcredential") return bcs[0]
def get_fs_backendcredential_for_uri(yabiusername, uri):
    """
    Look up a backend credential based on the supplied uri, which should
    include a username. Returns the backend credential; will log and reraise
    ObjectDoesNotExist and MultipleObjectsReturned if no credential or more
    than one credential is found.
    """
    logger.debug('yabiusername: %s uri: %s' % (yabiusername, uri))
    schema, rest = uriparse(uri)
    _enforce_FS_schema_only(schema)
    path = _normalise_path(rest.path)
    bc_candidates = _get_credential_candidates(yabiusername, schema, rest.username, rest.hostname)

    logger.debug("credential candidates [%s]" % (",".join([str(x) for x in bc_candidates])))

    matches_path = partial(_does_path_match_be_cred, path)
    matching_bcs = filter(matches_path, bc_candidates)

    if len(matching_bcs) == 0:
        raise ObjectDoesNotExist("Could not find backendcredential")

    cred = _find_be_cred_with_longest_path(matching_bcs)
    logger.info("chose cred {0} {1} {2}".format(cred.id, cred.backend.path, cred.homedir))

    return cred
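# Hedged sketch (not part of the module): illustrates the "longest matching
# backend path wins" selection performed by get_fs_backendcredential_for_uri
# above, using plain (backend_path, name) tuples in place of backend credential
# objects. The stand-ins below are assumptions about what the real helpers
# _does_path_match_be_cred and _find_be_cred_with_longest_path do.
from functools import partial


def _path_matches(path, cred):
    # cred is a (backend_path, name) tuple in this sketch
    return path.startswith(cred[0])


def _longest_path_cred(creds):
    return max(creds, key=lambda cred: len(cred[0]))


def choose_credential_sketch(path, candidates):
    matching = list(filter(partial(_path_matches, path), candidates))
    if not matching:
        raise LookupError("Could not find backendcredential")
    return _longest_path_cred(matching)


# choose_credential_sketch("/data/projects/run1/out.txt",
#                          [("/", "root cred"), ("/data/projects/", "project cred")])
# -> ("/data/projects/", "project cred")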
def exec_script(self, script): logger.debug("SSHExex.exec_script...") logger.debug('script content = {0}'.format(script)) exec_scheme, exec_parts = uriparse(self.uri) ssh = sshclient(exec_parts.hostname, exec_parts.port, self.credential) sftp = None try: sftp = ssh.open_sftp() script_name = self.upload_script(sftp, script) stdin, stdout, stderr = ssh.exec_command(script_name, bufsize=-1, timeout=None, get_pty=False) stdin.close() exit_code = stdout.channel.recv_exit_status() logger.debug("sshclient exec'd script OK") self.remove_script(sftp, script_name) return exit_code, stdout.readlines(), stderr.readlines() except paramiko.SSHException as sshe: raise RetryException(sshe, traceback.format_exc()) finally: try: if sftp is not None: sftp.close() if ssh is not None: ssh.close() except: pass
def remote_file_upload(yabiusername, filename, uri):
    """Use a local fifo to upload to a remote file."""
    logger.debug('local_fifo -> {0}'.format(uri))

    # we need a ref to the backend
    backend = FSBackend.urifactory(yabiusername, uri)
    scheme, parts = uriparse(uri)

    # uri for an upload must specify filename. we can't rely on the
    # source name as we copy from a fifo with a random name
    if not uri.endswith(filename):
        if not uri.endswith('/'):
            uri = uri + '/'
        uri = uri + filename

    try:
        # create a fifo, start the write to/read from fifo
        fifo = create_fifo('remote_file_upload_' + yabiusername + '_' + parts.hostname)
        thread, queue = backend.fifo_to_remote(uri, fifo)
        outfile = open(fifo, "wb")
        try:
            os.unlink(fifo)
        except OSError:
            logger.exception("Couldn't delete remote file upload fifo")
        return outfile, queue
    except Exception as exc:
        raise RetryException(exc, traceback.format_exc())
def remote_file_download(yabiusername, uri, is_dir=False):
    """Use a local fifo to download a remote file."""
    logger.debug('{0} -> local fifo'.format(uri))

    # we need a ref to the backend
    backend = FSBackend.urifactory(yabiusername, uri)
    scheme, parts = uriparse(uri)

    try:
        # create a fifo, start the write to/read from fifo
        fifo = create_fifo('remote_file_download_' + yabiusername + '_' + parts.hostname)
        if is_dir:
            thread, queue = backend.remote_dir_to_fifo(uri, fifo)
        else:
            thread, queue = backend.remote_to_fifo(uri, fifo)
        infile = open(fifo, "rb")
        try:
            os.unlink(fifo)
        except OSError:
            logger.exception("Couldn't delete remote file download fifo")
        return infile, thread, queue
    except Exception as exc:
        raise RetryException(exc, traceback.format_exc())
def exec_script(self, script): logger.debug("SSHExex.exec_script...") logger.debug("script content = {0}".format(script)) exec_scheme, exec_parts = uriparse(self.uri) ssh = sshclient(exec_parts.hostname, exec_parts.port, self.credential) sftp = None try: sftp = ssh.open_sftp() script_name = self.upload_script(sftp, script) stdin, stdout, stderr = ssh.exec_command(script_name, bufsize=-1, timeout=None, get_pty=False) stdin.close() exit_code = stdout.channel.recv_exit_status() logger.debug("sshclient exec'd script OK") self.remove_script(sftp, script_name) return exit_code, stdout.readlines(), stderr.readlines() except paramiko.SSHException as sshe: raise RetryException(sshe, traceback.format_exc()) finally: try: if sftp is not None: sftp.close() if ssh is not None: ssh.close() except: pass
def parse_s3_uri(self, uri):
    if uri.endswith(DELIMITER):
        # collapse any run of trailing delimiters down to a single one
        uri = uri.rstrip(DELIMITER) + DELIMITER
    scheme, parts = uriparse(uri)
    bucket_name = parts.hostname.split('.')[0]
    path = parts.path
    return bucket_name, path
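# Hedged sketch (not part of the backend): shows the bucket/key split that
# parse_s3_uri above relies on. It assumes uriparse behaves like
# urllib.parse.urlparse and that DELIMITER is "/"; both are assumptions, not
# confirmed by this module.
from urllib.parse import urlparse

SKETCH_DELIMITER = "/"


def parse_s3_uri_sketch(uri):
    if uri.endswith(SKETCH_DELIMITER):
        uri = uri.rstrip(SKETCH_DELIMITER) + SKETCH_DELIMITER  # collapse trailing delimiters to one
    parts = urlparse(uri)
    bucket_name = parts.hostname.split('.')[0]  # "mybucket.s3.amazonaws.com" -> "mybucket"
    return bucket_name, parts.path


# parse_s3_uri_sketch("s3://mybucket.s3.amazonaws.com/data/reads.fastq")
# -> ("mybucket", "/data/reads.fastq")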
def remote_file_copy(yabiusername, src_uri, dst_uri):
    """Use a local fifo to copy a single file from src_uri to dst_uri."""
    logger.debug('remote_file_copy {0} -> {1}'.format(src_uri, dst_uri))

    # we need refs to the src and dst backends
    src_backend = FSBackend.urifactory(yabiusername, src_uri)
    dst_backend = FSBackend.urifactory(yabiusername, dst_uri)
    src_scheme, src_parts = uriparse(src_uri)
    dst_scheme, dst_parts = uriparse(dst_uri)

    # Making sure dst_uri is always a file not a dir
    if dst_parts.path.endswith("/"):  # Looks like a dir
        dst_file_uri = "%s/%s" % (dst_uri.rstrip('/'), src_backend.basename(src_parts.path))
        dst_scheme, dst_parts = uriparse(dst_uri)
    else:
        dst_file_uri = dst_uri

    fifo = None
    try:
        src_stat = src_backend.remote_uri_stat(src_uri)

        # create a fifo, start the write to/read from fifo
        fifo = create_fifo('remote_file_copy_' + yabiusername + '_' + src_parts.hostname + '_' + dst_parts.hostname)
        src_cmd, src_queue = src_backend.remote_to_fifo(src_uri, fifo)
        dst_cmd, dst_queue = dst_backend.fifo_to_remote(dst_file_uri, fifo)
        src_cmd.join()
        dst_cmd.join()
        try:
            os.unlink(fifo)
        except OSError:
            pass
        src_success = src_queue.get()
        dst_success = dst_queue.get()

        # check exit status
        if not src_success:
            raise RetryException('remote_file_copy remote_to_fifo failed')
        if not dst_success:
            raise RetryException('remote_file_copy fifo_to_remote failed')

        if src_stat:
            atime = src_stat.get('atime')
            mtime = src_stat.get('mtime')
            dst_backend.set_remote_uri_times(dst_file_uri, atime, mtime)
    except Exception as exc:
        raise RetryException(exc, traceback.format_exc())
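# Hedged sketch (not part of the module): a minimal, self-contained illustration
# of the FIFO pattern used by remote_file_copy above -- one worker streams bytes
# into a named pipe while another drains it, so the file is never fully buffered
# locally. The real code wires the pipe to remote backends via remote_to_fifo /
# fifo_to_remote; this POSIX-only version just copies between two local files.
import os
import shutil
import tempfile
import threading


def fifo_copy_sketch(src_path, dst_path):
    fifo = os.path.join(tempfile.mkdtemp(), "copy_fifo")
    os.mkfifo(fifo)

    def writer():
        # analogous to remote_to_fifo: stream the source into the pipe
        with open(src_path, "rb") as src, open(fifo, "wb") as pipe:
            shutil.copyfileobj(src, pipe)

    def reader():
        # analogous to fifo_to_remote: drain the pipe into the destination
        with open(fifo, "rb") as pipe, open(dst_path, "wb") as dst:
            shutil.copyfileobj(pipe, dst)

    w = threading.Thread(target=writer)
    r = threading.Thread(target=reader)
    w.start()
    r.start()
    w.join()
    r.join()
    os.unlink(fifo)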
def download_dir(self, uri, outfile):
    logger.debug("SFTPBackend.download_dir: %s => tarball => %s", uri, outfile)
    scheme, parts = uriparse(uri)
    executer = create_executer(self.yabiusername, uri)
    try:
        return executer.download_dir_as_tarball(parts.path, outfile)
    except Exception as exc:
        raise RetryException(exc, traceback.format_exc())
def download_file(self, uri, outfile):
    scheme, parts = uriparse(uri)
    return self._sftp_copy(host=parts.hostname,
                           port=parts.port,
                           credential=self.cred.credential,
                           localfo=outfile,
                           remotepath=parts.path,
                           copy='get',
                           hostkey=None)
def sshclient(self):
    exec_scheme, exec_parts = uriparse(self.uri)
    ssh = sshclient(exec_parts.hostname, exec_parts.port, self.credential)
    try:
        yield ssh
    finally:
        try:
            ssh.close()
        except:
            pass
def _convert(self, uri):
    """convert a uri into a full remote path"""
    schema, rest = uriparse(uri)
    return self.uri_conversion_string % {
        'schema': schema,
        'hostname': rest.hostname,
        'username': rest.username,
        'port': rest.port,
        'fullpath': rest.path,
        'filename': rest.path.rsplit("/", 1)[-1],
    }
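# Hedged sketch (not part of the module): shows how a %-style
# uri_conversion_string expands in _convert above. The template and example
# values below are hypothetical, and urlparse stands in for uriparse (an
# assumption); the real template comes from backend configuration not shown here.
from urllib.parse import urlparse


def convert_sketch(uri, conversion_string="scp://%(username)s@%(hostname)s:%(port)s%(fullpath)s"):
    rest = urlparse(uri)
    return conversion_string % {
        'schema': rest.scheme,
        'hostname': rest.hostname,
        'username': rest.username,
        'port': rest.port,
        'fullpath': rest.path,
        'filename': rest.path.rsplit("/", 1)[-1],
    }


# convert_sketch("sftp://alice@files.example.org:22/home/alice/in.txt")
# -> "scp://alice@files.example.org:22/home/alice/in.txt"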
def urifactory(yabiusername, uri):
    assert uri
    fsscheme, fsbackend_parts = uriparse(uri)
    backend = FSBackend.create_backend_for_scheme(fsscheme)
    if backend is None:
        raise Exception("No backend can be found for uri %s with fsscheme %s for user %s" % (uri, fsscheme, yabiusername))
    backend.yabiusername = yabiusername
    backend.cred = fs_credential(yabiusername, uri)
    return backend
def mkdir(self, uri): """mkdir at uri""" logger.debug('mkdir {0}'.format(uri)) scheme, parts = uriparse(uri) if os.path.exists(parts.path) and os.path.isdir(parts.path): return try: os.makedirs(parts.path) except OSError as ose: raise RetryException(ose, traceback.format_exc())
def get_backend_by_uri(uri):
    """
    Look up a backend object purely by uri, ignoring the username and thus
    not considering credentials -- just the scheme, hostname and port.
    Used by the HostKey system to find which hostkeys are allowed.
    """
    schema, rest = uriparse(uri)
    netloc = rest.netloc
    if ':' in netloc:
        host, port = netloc.split(':')
    else:
        host, port = netloc, None

    return Backend.objects.filter(scheme=schema, hostname=host, port=port)
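# Hedged sketch (not part of the module): the host/port split used by
# get_backend_by_uri above, shown on its own. urlparse stands in for uriparse
# here, which is an assumption about uriparse's behaviour.
from urllib.parse import urlparse


def split_netloc_sketch(uri):
    netloc = urlparse(uri).netloc
    if ':' in netloc:
        host, port = netloc.split(':')
    else:
        host, port = netloc, None
    return host, port


# split_netloc_sketch("ssh://hpc.example.org:2222/") -> ("hpc.example.org", "2222")
# split_netloc_sketch("ssh://hpc.example.org/") -> ("hpc.example.org", None)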
def rm(self, uri): """recursively delete a uri""" scheme, parts = uriparse(uri) logger.debug("{0}".format(parts.path)) ssh = sshclient(parts.hostname, parts.port, self.cred.credential) try: sftp = ssh.open_sftp() self._rm(sftp, parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc()) finally: try: if ssh is not None: ssh.close() except: pass
def rm(self, uri): """recursively delete a uri""" scheme, parts = uriparse(uri) logger.debug('{0}'.format(parts.path)) ssh = sshclient(parts.hostname, parts.port, self.cred.credential) try: sftp = ssh.open_sftp() self._rm(sftp, parts.path) except Exception as exc: raise RetryException(exc, traceback.format_exc()) finally: try: if ssh is not None: ssh.close() except: pass
def set_remote_uri_times(self, uri, atime, mtime):
    scheme, parts = uriparse(uri)
    remotepath = parts.path
    ssh = None
    try:
        ssh = pool_manager.borrow(parts.hostname, parts.port, self.cred.credential)
        sftp = ssh.open_sftp()
        sftp.utime(remotepath, (atime, mtime))
    except Exception:
        logger.exception("Exception while setting times for '%s'", uri)
        raise
    finally:
        if ssh is not None:
            sftp.close()
            pool_manager.give_back(ssh, parts.hostname, parts.port, self.cred.credential)
def rm(self, uri): """rm uri""" logger.debug('rm {0}'.format(uri)) scheme, parts = uriparse(uri) logger.debug('{0}'.format(parts.path)) if not os.path.exists(parts.path): raise Exception('rm target ({0}) is not a file or directory'.format(parts.path)) try: path = parts.path.rstrip('/') if os.path.isfile(path) or os.path.islink(path): os.unlink(path) elif os.path.isdir(path): shutil.rmtree(path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def remote_uri_stat(self, uri): scheme, parts = uriparse(uri) remotepath = parts.path ssh = None try: ssh = pool_manager.borrow(parts.hostname, parts.port, self.cred.credential) sftp = ssh.open_sftp() stat = sftp.stat(remotepath) return {"atime": stat.st_atime, "mtime": stat.st_mtime} except Exception: logger.exception("Exception while stating '%s'", uri) raise finally: if ssh is not None: sftp.close() pool_manager.give_back(ssh, parts.hostname, parts.port, self.cred.credential)
def local_copy_recursive(self, src_uri, dst_uri): """recursively copy src_uri to dst_uri on the remote backend""" logger.debug("SFTPBackend.local_copy_recursive: %s => %s", src_uri, dst_uri) dst_scheme, dst_parts = uriparse(dst_uri) dst_path = dst_parts.path listing = self.ls(src_uri) executer = create_executer(self.yabiusername, src_uri) try: for key in listing: for listing_file in listing[key]["files"]: file_path = os.path.join(key, listing_file[0]) executer.local_copy(file_path, dst_path) for listing_dir in listing[key]["directories"]: dir_path = os.path.join(key, listing_dir[0]) executer.local_copy(dir_path, dst_path, recursive=True) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def rm(self, uri): """rm uri""" logger.debug('rm {0}'.format(uri)) scheme, parts = uriparse(uri) logger.debug('{0}'.format(parts.path)) if not os.path.exists(parts.path): raise Exception( 'rm target ({0}) is not a file or directory'.format( parts.path)) try: path = parts.path.rstrip('/') if os.path.isfile(path) or os.path.islink(path): os.unlink(path) elif os.path.isdir(path): shutil.rmtree(path) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def local_copy_recursive(self, src_uri, dst_uri): """recursively copy src_uri to dst_uri on the remote backend""" logger.debug("SFTPBackend.local_copy_recursive: %s => %s", src_uri, dst_uri) dst_scheme, dst_parts = uriparse(dst_uri) dst_path = dst_parts.path listing = self.ls(src_uri) executer = create_executer(self.yabiusername, src_uri) try: for key in listing: for listing_file in listing[key]['files']: file_path = os.path.join(key, listing_file[0]) executer.local_copy(file_path, dst_path) for listing_dir in listing[key]['directories']: dir_path = os.path.join(key, listing_dir[0]) executer.local_copy(dir_path, dst_path, recursive=True) except Exception as exc: raise RetryException(exc, traceback.format_exc())
def ls(self, uri):
    """ls at uri"""
    self.set_cred(uri)
    scheme, parts = uriparse(uri)
    ssh = sshclient(parts.hostname, parts.port, self.cred.credential)
    try:
        sftp = ssh.open_sftp()
        results = self._do_ls(sftp, parts.path)
        output = {}
        output[parts.path] = results
        return output
    except FileNotFoundError:
        return {}
    except Exception as exc:
        logger.exception("ls: %s" % uri)
        raise RetryException(exc, traceback.format_exc())
    finally:
        try:
            if ssh is not None:
                ssh.close()
        except:
            pass
def mkdir(self, uri): """mkdir at uri""" self.set_cred(uri) scheme, parts = uriparse(uri) path = parts.path ssh = sshclient(parts.hostname, parts.port, self.cred.credential) try: sftp = ssh.open_sftp() try: self._rm(sftp, path) logger.debug("deleted existing directory %s OK" % path) except Exception as ex: logger.debug("could not remove directory %s: %s" % (path, ex)) def full_path(result, d): previous = result[-1] if result else "" result.append("%s/%s" % (previous, d)) return result dirs = [p for p in path.split("/") if p.strip() != ''] dir_full_paths = reduce(full_path, dirs, []) non_existant_dirs = dropwhile(lambda d: self.path_exists(sftp, d), dir_full_paths) for d in non_existant_dirs: sftp.mkdir(d) logger.debug("created dir %s OK" % path) except Exception as exc: logger.error(exc) raise RetryException(exc, traceback.format_exc()) finally: try: if ssh is not None: ssh.close() except: pass
def mkdir(self, uri): """mkdir at uri""" self.set_cred(uri) scheme, parts = uriparse(uri) path = parts.path ssh = sshclient(parts.hostname, parts.port, self.cred.credential) try: sftp = ssh.open_sftp() try: self._rm(sftp, path) logger.debug("deleted existing directory %s OK" % path) except Exception as ex: logger.debug("could not remove directory %s: %s" % (path, ex)) def full_path(result, d): previous = result[-1] if result else "" result.append("%s/%s" % (previous, d)) return result dirs = [p for p in path.split("/") if p.strip() != ""] dir_full_paths = reduce(full_path, dirs, []) non_existant_dirs = dropwhile(lambda d: self.path_exists(sftp, d), dir_full_paths) for d in non_existant_dirs: sftp.mkdir(d) logger.debug("created dir %s OK" % path) except Exception as exc: logger.error(exc) raise RetryException(exc, traceback.format_exc()) finally: try: if ssh is not None: ssh.close() except: pass
def get_exec_backendcredential_for_uri(yabiusername, uri): """ Looks up a backend credential based on the supplied uri, which should include a username. Returns bc, will log and reraise ObjectDoesNotExist and MultipleObjectsReturned exceptions if more than one credential """ logger.debug('yabiusername: %s uri: %s' % (yabiusername, uri)) # parse the URI into chunks schema, rest = uriparse(uri) # enforce Exec scehmas only from ..backend import ExecBackend if not ExecBackend.get_backend_cls_for_scheme(schema): logger.error( "get_exec_backendcredential_for_uri was asked to get an fs schema! This is forbidden." ) raise ValueError( "Invalid schema in uri passed to get_exec_backendcredential_for_uri: asked for %s" % schema) path = rest.path if path != "/": logger.error( "get_exec_backendcredential_for_uri was passed a uri with a path! This is forbidden. Path must be / for exec backends" ) raise ValueError( "Invalid path in uri passed to get_exec_backendcredential_for_uri: path passed in was: %s" % path) # get our set of credential candidates bcs = _get_credential_candidates(yabiusername, schema, rest.username, rest.hostname) if len(bcs) == 0: raise ObjectDoesNotExist("Could not find backendcredential") return bcs[0]
def isdir(self, uri):
    """is the uri a dir?"""
    scheme, parts = uriparse(uri)
    return os.path.exists(parts.path) and os.path.isdir(parts.path)
def matches_filename(self, filename):
    _, parts = uriparse(self.src)
    return filename == os.path.basename(parts.path)