def check_destination_path(self, path, mkdir_destination=None, ssh=None):
    """Returns the destination_dir or raises an Exception if destination_dir
    does not exist on the remote host.

    @param path: path on remote host
    @type path: byte or str
    @param mkdir_destination: if True attempts to create the remote folder.
    @type mkdir_destination: boolean
    @param ssh: optional SSH client; falls back to self.ssh when omitted.
    @raise FileNotFoundError: if path does not exist and mkdir_destination
        is False
    @return path
    """
    ssh = ssh if ssh else self.ssh
    # SFTP does not perform shell tilde expansion, so resolve a leading '~'
    # by asking the remote shell for its working directory.
    if path[0:1] == b'~' or path[0:1] == '~':
        _, stdout, _ = ssh.exec_command("pwd")
        # NOTE(review): path.replace('~/', '') uses a str pattern — a bytes
        # path starting with b'~' would raise TypeError here; confirm whether
        # bytes paths are actually passed by callers.
        path = os.path.join(stdout.readlines()[0].strip(),
                            path.replace('~/', ''))
    with SFTPClient.from_transport(ssh.get_transport()) as sftp:
        try:
            # chdir doubles as an existence probe: it raises IOError when the
            # remote directory is missing.
            sftp.chdir(path)
        except IOError:
            if mkdir_destination or self.mkdir_destination:
                self.mkdir_p(sftp, path)
            else:
                raise FileNotFoundError(
                    '{} not found on remote host.'.format(path))
        return path
    # Unreachable: the with-block above always returns or raises.
    return None
def _patch_resolve(self, strict=False): """Add the argument strict to signature in Python>3,<3.6.""" resolved = Path().old_resolve() / self if strict and not resolved.exists(): raise FileNotFoundError(resolved) return resolved
def _resolve_with_filenotfound(path, **kwargs): """ Raise FileNotFoundError instead of OSError """ try: return path.resolve(**kwargs) except OSError as e: if isinstance(e, FileNotFoundError): raise raise FileNotFoundError(str(path))
def _patch_resolve(self, strict=False): """Raise FileNotFoundError instead of OSError with pathlib2.""" try: resolved = self.old_resolve(strict=strict) except OSError: raise FileNotFoundError(self.old_resolve()) return resolved
def check_destination_path(self, path, mkdir_destination=None):
    """Return *path* after ensuring it exists locally.

    @param path: destination directory path
    @param mkdir_destination: if True (or if self.mkdir_destination is
        set) create the directory tree when it is missing.
    @raise FileNotFoundError: if the path is missing and directory
        creation was not requested.
    @return: path
    """
    if mkdir_destination or self.mkdir_destination:
        # An already-existing destination is not an error.
        try:
            os.makedirs(path)
        except FileExistsError:
            pass
    elif not os.path.exists(path):
        # Fix: only a genuinely missing destination is an error — the old
        # code raised even when the path existed.
        raise FileNotFoundError('{} does not exist.'.format(path))
    return path
def check_local_path(self, path):
    """Return *path* (tilde-expanded, trailing slash stripped) after
    verifying it exists locally.

    Creates the directory tree when self.mkdir_local is truthy; otherwise
    raises FileNotFoundError for a missing path.
    """
    expanded = os.path.expanduser(path)
    if expanded.endswith('/'):
        expanded = expanded[:-1]
    if os.path.exists(expanded):
        return expanded
    if not self.mkdir_local:
        raise FileNotFoundError(expanded)
    os.makedirs(expanded)
    return expanded
def _generate_images_and_masks(images_iter, mask_dir): for image_file in images_iter: file_part = _extract_filename(image_file) mask_file = os.path.join(mask_dir, file_part + "_mask.png") if os.path.exists(mask_file): yield image_file, mask_file else: raise FileNotFoundError( f"Could not find mask {mask_file} corresponding to {image_file}" )
def load_glade_trees_from_files():
    """Load XML trees from glade files.

    :return: List of XML trees, as parsed by etree.
    :raise FileNotFoundError: when no .glade files are available to test.
    """
    glade_files = testfilelist(lambda name: name.endswith('.glade'))
    if not glade_files:
        raise FileNotFoundError("Found no glade files to test.")

    # Parse every discovered glade file into a tree.
    log.info("Parsing glade files...")
    return [etree.parse(glade_file) for glade_file in glade_files]
def deed_adapter(deed_reference, use_system=False):
    """An adapter for the deed to enhance and return in the required form.

    :param deed_reference: reference identifying the deed to load
    :param use_system: when True use the system-level deed lookup
    :return: The deed with status and token attributes set
    :rtype: deed
    """
    lookup = Deed().get_deed_system if use_system else Deed().get_deed
    deed = lookup(deed_reference)
    if deed is None:
        raise FileNotFoundError(
            "There is no deed associated with deed id '{0}'.".format(
                deed_reference,
            ))
    # Surface token/status on the serialized payload.
    deed.deed['token'] = deed.token
    deed.deed['status'] = deed.status
    return deed
usd_no_python_dir_path) = download_usd_binaries( args.usd_version, PYTHON_VERSION, library_path) except KeyError: exit() else: usd_python_dir_path = os.path.join( library_path, usd_binaries_dirname(args.usd_version, PYTHON_VERSION), usd_python_dirname(args.usd_version, PYTHON_VERSION)) usd_no_python_dir_path = os.path.join( library_path, usd_binaries_dirname(args.usd_version, PYTHON_VERSION), usd_no_python_dirname(args.usd_version)) if not os.path.exists(usd_python_dir_path): raise FileNotFoundError(usd_python_dir_path) if not os.path.exists(usd_no_python_dir_path): raise FileNotFoundError(usd_no_python_dir_path) build_dir = "./build" if args.component == "usdcs": build_dir += "_usdcs" elif args.component == "usdnet": build_dir += "_usdnet" elif args.component == "tests": build_dir += "_tests" if not os.path.exists(build_dir): os.mkdir(build_dir) cmake_cmd = " ".join([ "cmake -S . -B {} ", '-DPXR_USD_LOCATION="{}" '
def _raise_file_not_found_error(*args): raise FileNotFoundError()
def __init__(self, filepath: str): if not os.path.isfile(filepath): raise FileNotFoundError("data_info.csv is not found in {}".format(filepath)) self.filepath = filepath self.data_dir = os.path.dirname(filepath)
def run_on(self, driver_type, pasta_user=None, capabilities=None):
    """Webdriver activation.

    driver_type (string): web browser type
    pasta_user (PastaSauce): optional API access for saucelabs
    capabilities (dict): browser settings; copy object to avoid overwrite
        Defaults: DesiredCapabilities.ANDROID/CHROME/EDGE/FIREFOX/
        HTMLUNIT/HTMLUNITWITHJS/INTERNETEXPLORER/IPAD/IPHONE/OPERA/
        SAFARI .copy()
        Keys: platform, browserName, version, javascriptEnabled
    wait (int): standard time, in seconds, to wait for Selenium commands
    opera_driver (string): Chromium location

    :raise FileNotFoundError: when the webdriver cannot be started
    """
    # Fix: capabilities used a mutable default ({}), shared across calls.
    if capabilities is None:
        capabilities = {}
    print('Driver type input: %s' % driver_type)
    if pasta_user:
        driver = 'saucelabs'
        print('Driver type: %s' % driver)
    elif driver_type and driver_type != 'chrome':
        # Fix: was "is not 'chrome'" — identity comparison with a str
        # literal; a computed 'chrome' string would take this branch and
        # leave option_set undefined for the dispatch below.
        driver = driver_type
        print('Driver type: %s' % driver)
    else:
        option_set = options.Options()
        option_set.add_argument("disable-infobars")
        option_set.add_experimental_option(
            'prefs', {
                'credentials_enable_service': False,
                'profile': {
                    'password_manager_enabled': False
                }
            })
        driver = 'chrome'
        print('Driver type: %s' % driver)
    try:
        return {
            'firefox': lambda: webdriver.Firefox(),
            'chrome': lambda: webdriver.Chrome(chrome_options=option_set),
            'headlesschrome': lambda: self.start_headless(),
            'ie': lambda: webdriver.Ie(),
            'opera': lambda: self.start_opera(self.opera_driver),
            'safari': lambda: webdriver.Safari(),
            # NOTE(review): URL restored from an obfuscation-garbled
            # literal — confirm the saucelabs endpoint.
            'saucelabs': lambda: webdriver.Remote(command_executor=(
                'http://%s:%s@ondemand.saucelabs.com:80/wd/hub' %
                (pasta_user.get_user(), pasta_user.get_access_key())),
                desired_capabilities=capabilities),
        }[driver]()
    except WebDriverException as err:
        raise FileNotFoundError(err)
    # Fix: removed no-op "except Exception as err: raise err" — other
    # exceptions propagate unchanged anyway.
metavar='', default=None, help='compute backend') parser.add_argument('-s', '--storage', metavar='', default=None, help='storage backend') parser.add_argument('-d', '--debug', action='store_true', default=False, help='activate debug logging') args = parser.parse_args() if args.config: if os.path.exists(args.config): args.config = load_yaml_config(args.config) else: raise FileNotFoundError( "Provided config file '{}' does not exist".format(args.config)) log_level = logging.INFO if not args.debug else logging.DEBUG setup_lithops_logger(log_level) if args.test == 'help': print_help() else: run_tests(args.test, args.config, args.mode, args.backend, args.storage)
def copy_to(self, source: str, target: str, timeout=None, timestamp=None, recurse=0, retry=0):
    """Copy a local file to *target* on the MTP device.

    *source* may be a local path (str) or an open file descriptor (int).
    Returns 0 on success or an errno-style code on failure; may retry by
    reopening the device (the ``recurse`` parameter tracks retry depth).
    ``retry`` is accepted but unused in this body.
    """
    # Mutable cell so the SIGALRM handler can report a timeout back to us.
    timeouterr = [
        0,
    ]

    def timeout_handler(sig, frame):
        timeouterr[0] = errno.EINTR

    direntry, entry, dirpath, name = self.__entry_and_dir(target)
    if entry is None:
        # Target does not exist yet; its parent directory must.
        if not direntry is None and not direntry.is_directory():
            raise NotADirectoryError("Target directory does not exist")
    else:
        if entry.is_directory():
            return errno.EISDIR
        # Target file exists: delete it first so the send creates it fresh.
        if entry.get_id() >= 0:
            err = self.libmtp.LIBMTP_Delete_Object(self.open_device.device, entry.get_id())
            if err != 0:
                self.log.error("Delete object %d (%s) failed" % (entry.get_id(), entry.get_path()))
            else:
                if not direntry is None:
                    direntry.must_refresh = True
                    direntry.refresh()
    fh = -1
    if type(source) == str or type(source) == unicode:
        # NOTE(review): `unicode` is a Python 2 name — on Python 3 a
        # non-str source reaching this test would raise NameError unless
        # `unicode` is provided elsewhere in the file; confirm.
        if not os.path.exists(source):
            raise FileNotFoundError("Source file is not found")
        pfile = self.__new_filet(direntry, entry, name=name, localpath=source, timestamp=timestamp)
    else:
        try:
            fh = int(source)
            pfile = self.__new_filet(direntry, entry, name=name, handle=fh, timestamp=timestamp)
        except:
            self.log.error(
                "mtp.copy_to(source, target): source must be a string (local path) or a integer (file handle)"
            )
            return errno.EINVAL
    if bool(pfile):
        try:
            pfile[0].item_id = 0
            self.libmtp.LIBMTP_Clear_Errorstack(self.open_device.device)
            oldhandler = None
            # Arm a SIGALRM-based timeout around the transfer when requested.
            if not timeout is None:
                oldhandler = signal.signal(signal.SIGALRM, timeout_handler)
                signal.alarm(timeout)
            try:
                if fh >= 0:
                    # Descriptor source: rewind before handing it to libmtp.
                    os.lseek(fh, 0, os.SEEK_SET)
                    err = self.libmtp.LIBMTP_Send_File_From_File_Descriptor(
                        self.open_device.device, fh, pfile,
                        MTP.PROGRESS_FUNC_P(), c_void_p())
                else:
                    err = self.libmtp.LIBMTP_Send_File_From_File(
                        self.open_device.device,
                        c_char_p(bytes(source, "utf8")), pfile,
                        MTP.PROGRESS_FUNC_P(), c_void_p())
                signal.alarm(0)
            finally:
                # Always restore the previous SIGALRM handler.
                if not oldhandler is None:
                    signal.signal(signal.SIGALRM, oldhandler)
            if timeouterr[0] != 0 or err != 0:
                if timeout:
                    print("Error transferring %s to %s in %d seconds" % (str(source), str(target), timeout))
                else:
                    print("Error transferring %s to %s" % (str(source), str(target)))
                self.libmtp.LIBMTP_Dump_Errorstack(self.open_device.device)
                if not self.check():
                    # Device looks wedged: try close/reopen, recursing with
                    # an incremented depth so we give up after two attempts.
                    devno = "%04x:%04x" % (self.open_device.vendor_id, self.open_device.product_id)
                    if recurse == 0:
                        # self.log.warn('Resetting device ' + devno)
                        # ret = self.libmtp.LIBMTP_Reset_Device(self.open_device.device)
                        self.close()
                        if self.open(devno, must_refresh=False):
                            return self.copy_to(source, target, timeout, timestamp, recurse=1)
                        else:
                            recurse = 1
                    if recurse == 1:
                        self.log.error(
                            'Reset device failed. Attempting to reopen')
                        self.close()
                        if not self.open(devno, must_refresh=True):
                            self.log.error("Could not reopen device.")
                            return errno.EINTR
                        return self.copy_to(source, target, timeout, timestamp, recurse=2)
                    if recurse == 2:
                        return errno.EINTR
            # Transfer done: invalidate and refresh the parent directory.
            direntry.must_refresh = True
            direntry.refresh()
            return 0
        finally:
            self.__delete_filet(pfile)
    return errno.EIO
import argparse
import os

from builtins import FileNotFoundError

from parser import AssemblyParser

if __name__ == '__main__':
    # Assemble a hack source file into binary machine code.
    parser = argparse.ArgumentParser(
        # Fix: corrected user-facing typo 'Assemly'.
        description='Assemble the hack machine code')
    parser.add_argument('infile', type=str, help='Input file for hack code')
    parser.add_argument('outfile',
                        type=str,
                        help='Output file for binary assembled code')
    args = parser.parse_args()

    # Resolve the input relative to the current working directory.
    infile = os.path.join(os.getcwd(), args.infile)
    if not os.path.exists(infile):
        raise FileNotFoundError(
            f'file {infile} was not found in the current directory')

    assembly_parser = AssemblyParser(infile)
    assembly_parser.parse()

    # Write the assembled lines, newline-separated, to the output file.
    outfile = os.path.join(os.getcwd(), args.outfile)
    with open(outfile, 'w') as out:
        out.write('\n'.join(assembly_parser.lines_to_write))
def retrieve_PFAM_full_alignment_by_AC(pfam_ac):
    """Retrieve and parse the full alignment for a Pfam accession code.

    :param pfam_ac: Pfam accession code
    :return: dict with keys:
        'AC'         : pfam accession code,
        'alignments' : all alignments for this accession, each a dict with
                       seq_id, seq_start, seq_stop, alignment and
                       alignment_consensus,
        'consensus'  : {'identifier': ..., 'sequence': ...} derived from
                       the fetched HMM,
        'SS_cons'    : (optional) secondary-structure markup for sequences
                       of known structure,
        'seq_cons'   : (optional) consensus sequence markup (60%+ column
                       agreement; single-letter classes plus: alcohol
                       o=S,T; aliphatic l=I,L,V; any .; aromatic a=F,H,W,Y;
                       charged c=D,E,H,K,R; hydrophobic h; negative -=D,E;
                       polar p; positive +=H,K,R; small s; tiny u=A,G,S;
                       turnlike t)
    :raise FileNotFoundError: when the full alignment file is absent
    :raise FoundMoreThanOnePfamHMMException: ambiguous Pfam id for this ac
    :raise FoundNoPfamHMMException: no Pfam id matches this ac
    """
    # Construct the filepath of the alignment file
    alignment_file = PFAM_ALIGNMENT_DIR + pfam_ac + "/full"

    # check if the path exists
    if not os.path.exists(alignment_file):
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                alignment_file)

    # Resolve exactly one Pfam id for the provided accession code.
    pfam_hmm_meta_data = retrieve_PFAM_ID_by_AC(pfam_ac)
    found_ids = 0
    for pfam_id, _ in pfam_hmm_meta_data:
        found_ids += 1
    if found_ids > 1:
        raise FoundMoreThanOnePfamHMMException(
            "Found more than one Pfam ids that match the Pfam ac '" +
            pfam_ac + "' when searching for matching HMMER HMM's")
    if found_ids == 0:
        raise FoundNoPfamHMMException(
            "Found no matching Pfam ids that for Pfam ac '" + pfam_ac +
            "' when searching for matching HMMER HMM's")

    # create a temporary HMM file
    tmp_hmm_file = tempfile.NamedTemporaryFile(suffix=".hmm", delete=False)

    # fetch the HMM
    fetch_args = [
        HMMFETCH_EXECUTABLE, "-o", tmp_hmm_file.name, PFAM_HMM, pfam_id
    ]
    try:
        # Fix: subprocess.call never raises CalledProcessError, so the
        # handler below was dead code; check_call actually reports failures.
        subprocess.check_call(fetch_args)
    except subprocess.CalledProcessError as e:
        _log.error("{}".format(e.output))

    consensus_identifier, consensus_sequence = retrieve_consensus_sequence(
        tmp_hmm_file.name)

    pfam_alignment_output = {
        'AC': pfam_ac,
        'alignments': [],
        'consensus': {
            'identifier': consensus_identifier,
            'sequence': consensus_sequence
        },
        'SS_cons': None,
        'seq_cons': None
    }

    # interpret the full alignments input for this domain
    n_sequences = 0
    try:
        with open(alignment_file) as a:
            # retrieve the aligned sequences from the output file
            Pfam_alignments = a.readlines()
            for line in Pfam_alignments:
                if not line.startswith("#") and not line.startswith("//"):
                    # An alignment row: "<id>/<start>-<stop> <alignment>".
                    alignment_row = [
                        al for al in line.strip().split(" ") if len(al) > 0
                    ]
                    alignment = alignment_row[1]
                    alignment_consensus = convert_pfam_fasta_alignment_to_strict_consensus_sequence(
                        alignment)
                    sequence_origin = alignment_row[0].split("/")
                    sequence_identifier = sequence_origin[0]
                    sequence_region = sequence_origin[1].split('-')
                    sequence_start = int(sequence_region[0])
                    sequence_stop = int(sequence_region[1])

                    # add the alignment to the output
                    pfam_alignment_output['alignments'].append({
                        'seq_id': sequence_identifier,
                        'seq_start': sequence_start,
                        'seq_stop': sequence_stop,
                        'alignment': alignment,
                        'alignment_consensus': alignment_consensus
                    })
                elif line.startswith("#=GC SS_cons"):
                    pfam_alignment_output['SS_cons'] = [
                        al for al in line.strip().split(" ") if len(al) > 0
                    ][1]
                elif line.startswith("#=GC seq_cons"):
                    pfam_alignment_output['seq_cons'] = [
                        al for al in line.strip().split(" ") if len(al) > 0
                    ][1]
                elif line.startswith("#=GF SQ"):
                    # Number of sequences, start of alignment.
                    n_sequences = int([
                        al for al in line.strip().split(" ") if len(al) > 0
                    ][2])
    except IOError as e:
        # Fix: IOError has no 'output' attribute — log the exception itself.
        _log.error("{}".format(e))

    if n_sequences != len(pfam_alignment_output['alignments']):
        # Fix: the old message concatenated ints onto str, which raised
        # TypeError exactly when this branch was hit; format instead.
        _log.error(
            "Number of sequences in full alignment for Pfam {} ('{}') does "
            "not match the actual number of sequences '{}'".format(
                pfam_ac, n_sequences,
                len(pfam_alignment_output['alignments'])))

    # remove the temporary files
    os.remove(tmp_hmm_file.name)

    # return the output
    return pfam_alignment_output