def go(self):
    """Convert the input .nex file to .xtg by invoking TreeGraph2.

    Reads ``self.inFn`` (input .nex), writes ``self.outFn`` (output .xtg)
    via the TreeGraph2 jar at ``self.pathToTG2``. Exits the process with an
    error message on failure.
    """
    # 1. Check if nex-file present
    if not _isfile(self.inFn):
        sys.exit(" ERROR: .nex file not found.")
    # 2. Performing nex2xtg via Treegraph2
    cmdL = ["java -jar", self.pathToTG2, "-convert",
            self.inFn, "-xtg", self.outFn]
    CFO.extprog(cmdL)
    # 3. Check if xtg-file was produced
    # BUG FIX: the original re-checked self.inFn here, so a failed
    # conversion (missing self.outFn) was never detected.
    if not _isfile(self.outFn):
        sys.exit(" ERROR: Conversion .nex to .xtg unsuccessful.")
def _create_account(username, region, instance_ids, passwordless, sudo):
    # Create a user account on a remote host via Fabric and install the
    # local "<username>.pub" SSH key as its authorized_keys.
    # `region` and `instance_ids` are unused here — presumably kept so all
    # account tasks share one signature; TODO confirm against callers.
    if not _isfile(username + '.pub'):
        abort("%s.pub does not exist" % username)
    env.ssh_key = username + '.pub'
    env.username = username  # Own attribute for string formatting.
    if passwordless:
        # Account without a password; login is key-only.
        _sudo('adduser --disabled-password %(username)s' % env)
        if sudo:
            # Uncomment the NOPASSWD rule in /etc/sudoers (implicit string
            # concatenation joins the two literals into one sed command).
            _sudo('sed -i "s/# %sudo ALL=NOPASSWD: ALL/'
                  '%sudo ALL=NOPASSWD: ALL/" /etc/sudoers')
            for group in ['sudo']:
                with settings(group=group):
                    _sudo('adduser %(username)s %(group)s' % env)
    else:
        # Interactive adduser (prompts for a password on the remote side).
        _sudo('adduser %(username)s' % env)
        if sudo:
            for group in ['adm', 'admin', 'staff']:
                with settings(group=group):
                    _sudo('adduser %(username)s %(group)s' % env)
    # Install the public key with correct ownership and permissions.
    _sudo('mkdir -p /home/%(username)s/.ssh' % env)
    _sudo('touch /home/%(username)s/.ssh/authorized_keys' % env)
    _sudo('chown -R %(username)s: /home/%(username)s/.ssh' % env)
    _sudo('chmod 700 /home/%(username)s/.ssh' % env)
    put(env.ssh_key, '/home/%(username)s/.ssh/authorized_keys' % env,
        use_sudo=True)
    _sudo('chown -R %(username)s: /home/%(username)s/.'
          'ssh/authorized_keys' % env)
    _sudo('chmod 600 /home/%(username)s/.ssh/authorized_keys' % env)
def _verify_and_save(out_file, data, sha1_sum, quiet): """ Verify checksum and save file locally. Parameters ---------- out_file : str File destination. data : bytes File content sha1_sum : str SHA1 sum Hex digest to verify. """ checksum = _sha1() checksum.update(data) if checksum.hexdigest() != sha1_sum + "s": raise RuntimeError(f'"{_basename(out_file)}" checksum does not match.') updated = _isfile(out_file) with open(out_file, "wb") as file: file.write(data) if not quiet: print( f'"{_basename(out_file)}" has been {"updated" if updated else "installed"}.' )
def _already_exists(file_name, sha1_sum, quiet): """ Checks if file with same name and checksum already exists. Parameters ---------- file_name : str: File name to verify. sha1_sum : str SHA1 sum Hex digest to verify. Returns ------- not_exists: bool True if file with same version already exist. """ if _isfile(file_name): checksum = _sha1() with open(file_name, "rb") as file: checksum.update(file.read()) if checksum.hexdigest() == sha1_sum: if not quiet: print(f'"{_basename(file_name)}" is up to date.') return True return False
def _validate_file(self, file_name):
    """function _validate_file

    Input:  file_name - The file to be validated
    Output: validated file_name

    Verifies that `file_name` is a string, resolves it to a real path,
    checks that the file exists, and checks (case-insensitively) that its
    extension is one of `self.extensions`. Raises ParseError otherwise.
    Override this hook to change or drop the checks, as long as the
    file_name is returned.
    """
    if not isinstance(file_name, _string_types):
        raise ParseError(
            '{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise ParseError('{}: File not found.'.format(file_name))
    pieces = file_name.split('.')
    allowed = [ext.upper() for ext in self.extensions]
    if len(pieces) < 2 or pieces[-1].upper() not in allowed:
        raise ParseError('{name}: Unsupported extension. Supported '
                         'extensions are {exts}'.format(
                             name=file_name,
                             exts='(' + ', '.join(self.extensions) + ')'))
    return file_name
def list_directories(dir_paths, endswith=None, contains=None, startswith=None,
                     contains_not=None):
    """Return a sorted list of subdirectory paths found in `dir_paths`.

    Args:
        dir_paths: A directory path or list of paths to scan.
        endswith: Keep only names ending with this string (e.g. '.jpg').
        contains: Keep only names containing this substring.
        startswith: Keep only names starting with this string.
        contains_not: Drop names containing this substring.

    Raises:
        ValueError: If none of the given paths could be listed.
    """
    files = []
    n_ok = 0
    # FIX: use isinstance instead of `type(x) != type([])`.
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    for path in dir_paths:
        # FIX: catch only OSError instead of a bare except that could hide
        # unrelated bugs (e.g. bad filter arguments).
        try:
            entries = _listdir(path)
        except OSError:
            print("path <", path, "> is invalid")
            continue
        n_ok += 1
        files += [
            (_join(path, f) if path != "." else f)
            for f in entries
            # Keep non-files only (directories); `is None` instead of `== None`.
            if not _isfile(_join(path, f))
            and (startswith is None or f.startswith(startswith))
            and (endswith is None or f.endswith(endswith))
            and (contains is None or contains in f)
            and (contains_not is None or contains_not not in f)
        ]
    if n_ok == 0:
        print('list_directories():: All paths were invalid!')
        raise ValueError()
    files.sort()
    return files
def _validate_file(self, file_name):
    """function _validate_file

    Input:  file_name - The file to be validated
    Output: validated file_name

    Verifies that `file_name` is a string, resolves it to a real path,
    checks that the file exists, and checks that its extension is listed in
    `self.extensions` (case-sensitive). Raises IOError otherwise. Override
    this hook to change or drop the checks, as long as the file_name is
    returned.
    """
    if not isinstance(file_name, _string_types):
        raise IOError('{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise IOError('{}: File not found.'.format(file_name))
    extension = file_name.split('.')[-1]
    if extension not in self.extensions:
        raise IOError('{name}: Unsupported extension. Supported '
                      'extensions are {exts}'.format(
                          name=file_name,
                          exts='(' + ', '.join(self.extensions) + ')'
                      ))
    return file_name
def _get_xrt_lib(): """ Detect XRT installation path: """ prefix = '/usr' if _isfile(_join(prefix, 'bin', 'xbutil')): return prefix raise RuntimeError('Unable to find Xilinx XRT')
def _getfileobj(fil, action):
    '''Return a file object if given a filename, otherwise return file'''
    # Non-string arguments (or non-existing paths) are passed through
    # unchanged — the caller already has a file object.
    if not (isinstance(fil, basestring) and _isfile(fil)):
        return fil
    # Read-only commands open read-only; everything else needs read/write.
    mode = 'rb' if action in _stringallowedcommands else 'r+b'
    return file(fil, mode)
def _get_xrt_lib(): """ Detect XRT installation path: """ for prefix in (_environ.get("XILINX_XRT", "/opt/xilinx/xrt"), '/usr', '/usr/local'): if _isfile(_join(prefix, 'bin', 'xbutil')): return prefix raise RuntimeError('Unable to find Xilinx XRT')
def get_cached_file(fn, cd=cached_dir):
    """Return the parsed JSON cache for *fn* from directory *cd*.

    Returns None when the cache file is missing or cannot be parsed
    (parse errors are printed, not raised).
    """
    cache_path = f"{_join(cd, fn)}.data.json"
    if not _isfile(cache_path):
        return None
    with open(cache_path) as handle:
        try:
            return _load(handle)
        except Exception as err:
            print(err)
            return None
def Check(FilePath):
    """Check(FilePath)
    Check the permission of file and return value

    Returns the permission bits as an int of octal digits (e.g. 644) when
    the path exists, otherwise the string "File Not Found".
    """
    if not (_isfile(FilePath) or _isdir(FilePath)):
        return "File Not Found"
    # Last three octal digits of the mode, e.g. '0o644' -> '644' -> 644.
    mode_str = oct(_stat(FilePath).st_mode & 0O777)
    return int(mode_str[-3:])
def _get_driver():
    """
    Get FPGA driver

    Returns:
        ctypes.CDLL: FPGA driver.
    """
    xrt_path = FpgaDriver._get_xrt_lib()
    # Prefer the AWS-specific library, then the generic Xilinx one.
    candidates = (
        ('Loading XRT API library for AWS targets', 'libxrt_aws.so'),
        ('Loading XRT API library for Xilinx targets', 'libxrt_core.so'),
    )
    for message, lib_name in candidates:
        lib_path = _join(xrt_path, 'lib', lib_name)
        if _isfile(lib_path):
            print(message)
            return _cdll.LoadLibrary(lib_path)
    raise RuntimeError('Unable to find Xilinx XRT Library')
def download_file(url, filename=None, overwrite=True):
    """Download *url* to a local file and return the file name.

    Args:
        url (str): URL to fetch.
        filename (str): Destination file name. When falsy, it is taken from
            the Content-Disposition header, falling back to the last URL
            path segment (query string stripped).
        overwrite (bool): If True, replace an existing file; if False, keep
            the existing file untouched.

    Returns:
        str: The destination file name.
    """
    response = _urlopen(url)
    if not filename:
        _, params = _cgi.parse_header(
            response.headers.get('Content-Disposition', ''))
        filename = params.get('filename', url.split('/')[-1].split('?')[0])
    # BUG FIX: `overwrite` was accepted but ignored — existing files were
    # never replaced even with overwrite=True (the documented default).
    if overwrite or not _isfile(filename):
        with open(filename, 'wb') as f:
            f.write(response.read())
    return filename
def __init__(self, createSqliteInMemory=True, configFilePath=None,
             configDict={}):
    """fields in configDict or in configFile:
    keePassFilePath keePassNotesSeparator driversTypes password
    (not recommended)

    the configFile is first imported and its value is override by the
    configDict parameter each field must be given in the config file or
    the configDict parameter with the exception of password if password
    is not set, a prompt will ask you for it
    """
    # NOTE(review): mutable default `configDict={}` is shared across calls;
    # safe only because it is read, never mutated, here.
    from pykeepass import PyKeePass as _PyKeePass
    from pykeepass.exceptions import CredentialsIntegrityError as _CredentialsIntegrityError
    self.config = {}
    # Default config path: <home>/.connectionManagerpy/config.py, where
    # <home> depends on the OS.
    if configFilePath is None:
        if _system() == "Windows":
            configFilePath = _environ["USERPROFILE"]
        elif _system() in ("Linux", "Darwin"):
            configFilePath = _environ["HOME"]
        configFilePath = _join(configFilePath, ".connectionManagerpy",
                               "config.py")
    # File config is loaded first, then overridden by configDict.
    # NOTE(review): nesting reconstructed from a collapsed source line —
    # confirm whether this check also applies to an explicit configFilePath.
    if _isfile(configFilePath):
        configDictInFile = _utils.importFromPath(configFilePath).configDict
        self.config.update(configDictInFile)
        self.config.update(configDict)
    else:
        self.config.update(configDict)
    # Required fields (KeyError if missing from both sources).
    self.keePassFilePath = self.config["keePassFilePath"]
    self.keePassNotesSeparator = self.config["keePassNotesSeparator"]
    self.driversTypes = self.config["driversTypes"]
    try:
        pwd = self.config.get("password")
        if pwd is None:
            # No stored password: prompt interactively.
            from getpass import getpass as _getpass
            pwd = _getpass("mdp: ")
        self.keyPassFile = _PyKeePass(self.keePassFilePath, pwd)
    except FileNotFoundError:
        raise FileNotFoundError(self.keePassFilePath + " inexistant")
    except _CredentialsIntegrityError:
        raise ValueError("mot de passe incorrect pour " +
                         self.keePassFilePath)
    if createSqliteInMemory:
        # Convenience: an in-memory sqlite driver, already connected.
        self.inMemory = self.getDriver("sql", "sqlite")
        self.inMemory = self.inMemory()
        self.inMemory.connect(self.inMemory.inmemory)
def list_files(dir_paths, endswith=None, contains=None, startswith=None,
               contains_not=None):
    """Return a sorted list of file paths found in `dir_paths`.

    Args:
        dir_paths: A directory path or list of paths to scan.
        endswith: Keep only names ending with this string (e.g. '.jpg').
        contains: Keep only names containing this substring.
        startswith: Keep only names starting with this string.
        contains_not: Drop names containing this substring.
    """
    files = []
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    for path in dir_paths:
        # FIX: catch only OSError instead of a bare except that could hide
        # unrelated bugs (e.g. bad filter arguments).
        try:
            entries = _listdir(path)
        except OSError:
            print("path", path, "invalid")
            continue
        files += [
            (_join(path, f) if path != "." else f)
            for f in entries
            # `is None` instead of `== None` for the optional filters.
            if _isfile(_join(path, f))
            and (startswith is None or f.startswith(startswith))
            and (endswith is None or f.endswith(endswith))
            and (contains is None or contains in f)
            and (contains_not is None or contains_not not in f)
        ]
    files.sort()
    return files
def list_files(dir_paths, endswith=None, contains=None, startswith=None,
               contains_not=None):
    """Return a sorted list of file paths found in `dir_paths`.

    Args:
        dir_paths: A directory path or list of paths to scan.
        endswith: Keep only names ending with this string (e.g. '.jpg').
        contains: Keep only names containing this substring.
        startswith: Keep only names starting with this string.
        contains_not: Drop names containing this substring.
    """
    files = []
    # FIX: isinstance instead of `type(x) != type([])`.
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    for path in dir_paths:
        # FIX: catch only OSError instead of a bare except that could hide
        # unrelated bugs (e.g. bad filter arguments).
        try:
            entries = _listdir(path)
        except OSError:
            print("path", path, "invalid")
            continue
        files += [
            (_join(path, f) if path != "." else f)
            for f in entries
            # `is None` instead of `== None` for the optional filters.
            if _isfile(_join(path, f))
            and (startswith is None or f.startswith(startswith))
            and (endswith is None or f.endswith(endswith))
            and (contains is None or contains in f)
            and (contains_not is None or contains_not not in f)
        ]
    files.sort()
    return files
def go(self):
    """Add a unique 'UniqueName' attribute to every <Node> element of the
    .xtg file at ``self.inFn`` (labels Node_1, Node_2, ... in document
    order) and write the result back in place. Exits the process with an
    error message on failure.
    """
    # 1. Check if .xtg file present
    if not _isfile(self.inFn):
        sys.exit(" ERROR: .xtg file not found.")
    # 2. Parsing of .xtg file:
    try:
        tree = _ET.parse(self.inFn)
        root = tree.getroot()
    except Exception as err:
        # BUG FIX: the original concatenated the message with
        # sys.exc_info()[0] (an exception *class*), raising a TypeError
        # instead of reporting the parse error.
        sys.exit(" ERROR: Parsing of XML code unsuccessful: " + str(err))
    # 3. Adding node labels:
    for c, n in enumerate(root.iter('Node')):
        n.attrib["UniqueName"] = "Node_" + str(c + 1)
    tree.write(self.inFn)
def encrypt(in_file, passwd='', cipher='seed', iter=0,
            out_file=_strftime('enc_%y%m%d_%H%M%S'), pbkdf2=False, b64=False):
    """Encrypt *in_file* with the openssl command-line tool.

    Args:
        in_file (str): Path of the file to encrypt.
        passwd (str): Encryption password (forwarded to openssl -k).
        cipher (str): Cipher name; must appear in CIPHERS.
        iter (int): Iteration count; > 0 adds `-iter N`.
        out_file (str): Output path. NOTE: the default timestamp is
            evaluated once at import time, not per call.
        pbkdf2 (bool): Add `-pbkdf2`.
        b64 (bool): Add `-a` (base64-encode output).

    Returns:
        dict: {"status": "positive", ...} on success, or
              {"status": "negative", "error": ...} on invalid input.
    """
    # eliminate
    if not _isfile(in_file):
        return {"status": "negative", "error": f"file not found: {in_file}"}
    # BUG FIX: the original check `not str(iter).isdigit() and not iter >= 0`
    # let digit *strings* through (crashing later on `iter > 0`) and used
    # `and` where the intent is "reject anything but a non-negative int".
    if not (isinstance(iter, int) and iter >= 0):
        return {"status": "negative", "error": "Iter: Invalied Itration"}
    if cipher not in CIPHERS:
        return {"status": "negative", "error": "Cipher: Invalied Cipher"}
    if len(out_file) == 0:
        return {"status": "negative", "error": "OutFile: Nofile given"}
    # Proceed
    # SECURITY: the command is built from unquoted user input (in_file,
    # out_file, passwd) and run through the shell — injection is possible.
    # Kept as-is to preserve behavior; consider subprocess.run([...]).
    const_string = f'openssl {cipher} -in {in_file} -out {out_file} -k "{passwd}"'
    if iter > 0:
        const_string += f' -iter {str(iter)}'
    if pbkdf2:
        const_string += ' -pbkdf2'
    if b64:
        const_string += ' -a'
    # exec
    _system(const_string)
    return {
        "status": "positive",
        "cipher": cipher,
        "in_file": in_file,
        "out_file": out_file,
        "iter": iter,
        "passwd": "*" * len(passwd),
        "pbkdf2": pbkdf2,
        "base64": b64
    }
def _check_path(self, path): """ Check path and ensure it is absolute. Args: path (str): path Returns: str: Absolute path """ with self._handle_exception(OSError): path = _realpath(_expanduser(path)) if not _isfile(path): raise FileNotFoundError('No such file: ' + path) return path
def __init__(self) -> None:
    """
    set all class variables

    :return: None

    :since: 0.1.0
    """
    from os.path import isfile as _isfile
    # In-memory store of user-defined variables.
    self._user_variables = {}
    if _isfile(_aion_variable_file):
        # NOTE(review): mode "w+" TRUNCATES the existing variable file on
        # every init — if the file is meant to persist variables across
        # runs, "r+" or "a+" is probably intended. TODO confirm.
        self._aion_open_variable_file = open(_aion_variable_file, "w+")
    else:
        # No variable file on disk: nothing to keep open.
        self._aion_open_variable_file = None
def file_exists(file_path, boolean=False, raise_error=False, head_message=''):
    """Check that *file_path* is an existing file.

    :param file_path: Path to check.
    :param boolean: If True, return the existence flag instead of the path.
    :param raise_error: If True, raise when the file is missing.
    :param head_message: Prefix for the error message; a non-empty value
        also enables raising on a missing file.
    :return: The existence flag when `boolean` is True, else `file_path`.
    """
    exists = _isfile(file_path)
    must_report = raise_error or head_message != ''
    if must_report and not exists:
        raise Exception('{}{} File does not exist.'.format(
            head_message, file_path))
    return exists if boolean else file_path
def go(self):
    """Render the .xtg tree at ``self.inFn`` to both .png and .svg via
    TreeGraph2 (``self.pathToTG2``). ``self.flags`` selects phylogram
    ("P"/"PHYLO") rendering; anything else (e.g. "C"/"CLADO") uses the
    default cladogram layout.
    """
    #cwd = os.getcwd()
    # 1. Check if xtg-file present
    if not _isfile(self.inFn):
        sys.exit(" ERROR: .xtg file not found.")
    # 2. Set if plotting as phylo- or cladogram
    resolut = "-width 600mm -res 120ppi"
    # FIX: removed the original no-op branch
    # `if self.flags.upper() in ["C", "CLADO"]: pass` — cladogram is the
    # default, so the branch had no effect.
    if self.flags.upper() in ["P", "PHYLO"]:
        resolut = "-phyl " + resolut
    # 3. Save as .svg and as .png
    for fEnd in [".png", ".svg"]:
        outPath = self.inFn + fEnd
        cmdL = ["java -jar", self.pathToTG2, "-image", self.inFn,
                outPath, resolut]
        CFO.extprog(cmdL)
def __init__(self, xl, wbObject=None):
    """Wrap an Excel workbook.

    Args:
        xl: Excel COM application object.
        wbObject: None to create a new workbook, a path string to open an
            existing workbook file, or an already-open workbook object.

    Raises:
        IOError: If *wbObject* is a string that is not an existing file.
    """
    self._xl = xl
    # wbPath: path of the workbook
    if wbObject is None:
        # Create and wrap a brand-new workbook.
        self._xl.Workbooks.Add()
        # BUG FIX: was `self.xl.ActiveWorkbook` — the attribute set above
        # is `_xl`, so this raised AttributeError.
        self._wb = self._xl.ActiveWorkbook
        self._sh = self._updateSheets()
    elif isinstance(wbObject, (str, unicode)):
        if not _isfile(wbObject):
            raise IOError(wbObject + " is not a file")
        # BUG FIX: was `self.xl.Workbooks.open` (same AttributeError).
        self._wb = self._xl.Workbooks.open(wbObject)
        self._sh = self._updateSheets()
    else:
        # Assume an already-open workbook object and wrap it directly.
        self._wb = wbObject
        self._sh = self._updateSheets()
def __init__(self, fname_or_element: _Union[str, _ET.Element]):
    """
    Args:
        fname_or_element: File name of a xml file or a python xml `Element`
    """
    if isinstance(fname_or_element, str):
        # A string must name an existing XML file.
        if not _isfile(fname_or_element):
            raise FileNotFoundError('The given file could not be found')
        self.root = _ET.parse(fname_or_element).getroot()
    else:
        self.root = fname_or_element
    # The root is registered under id 0; every element reachable from it
    # (including the root again) is also keyed by a random 9-digit id.
    self.root_id = 0
    self.elements = {0: self.root}
    for node in self.root.iter():
        self.elements[_randint(111111111, 999999999)] = node
def find_file_by(_path, prefix=None, sufix=None):
    """Return the single file name in *_path* matching the filter.

    :param _path: Directory to scan.
    :param prefix: If given, match names starting with it (takes
        precedence over `sufix`).
    :param sufix: Otherwise, match names ending with it.
    :return: The matching file name, or None if nothing matches.
    :raises Exception: If more than one file matches.
    """
    found = None
    for entry in _listdir(_path):
        if not _isfile(_join(_path, entry)):
            continue
        matches = entry.startswith(prefix) if prefix else entry.endswith(sufix)
        if not matches:
            continue
        if found:
            raise Exception('Multiple benchmark files in a same folder.')
        found = entry
    return found
def get_accelize_cred(*src):
    """
    Initialize Accelize Credentials.

    Args:
        src (str): Directories.

    Returns:
        str: Path to cred.json
    """
    # Return the first cred.json found among the candidate directories.
    for directory in get_sources_dirs(*src):
        candidate = _join(directory, 'cred.json')
        if _isfile(candidate):
            return candidate
    raise _AuthenticationException(
        'No Accelize credential found. Please, make sure to have your '
        f'"cred.json" file installed in "{HOME_DIR}" or current directory')
def list_directories(dir_paths, endswith=None, contains=None, startswith=None,
                     contains_not=None):
    """Return a sorted list of subdirectory paths found in `dir_paths`.

    Args:
        dir_paths: A directory path or list of paths to scan.
        endswith: Keep only names ending with this string (e.g. '.jpg').
        contains: Keep only names containing this substring.
        startswith: Keep only names starting with this string.
        contains_not: Drop names containing this substring.

    Raises:
        ValueError: If none of the given paths could be listed.
    """
    files = []
    n_ok = 0
    # FIX: isinstance instead of `type(x) != type([])`.
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    for path in dir_paths:
        # FIX: catch only OSError instead of a bare except that could hide
        # unrelated bugs (e.g. bad filter arguments).
        try:
            entries = _listdir(path)
        except OSError:
            print("path <", path, "> is invalid")
            continue
        n_ok += 1
        files += [
            (_join(path, f) if path != "." else f)
            for f in entries
            # Keep directories only; `is None` instead of `== None`.
            if not _isfile(_join(path, f))
            and (startswith is None or f.startswith(startswith))
            and (endswith is None or f.endswith(endswith))
            and (contains is None or contains in f)
            and (contains_not is None or contains_not not in f)
        ]
    if n_ok == 0:
        print('list_directories():: All paths were invalid!')
        raise ValueError()
    files.sort()
    return files
def get(url, output=".", extract=False):
    """
    Get a file or archive from an URL.

    Args:
        output (str): Output file or directory path.
        url (str): Input URL.
        extract (bool): If True, extract archive.
    """
    response = _request("GET", url, stream=True)
    response.raise_for_status()
    if extract:
        # Stream the archive straight into the extractor.
        with _tarfile_open(fileobj=response.raw) as archive:
            archive.extractall(output)
        return
    # When output is not an existing file, treat it as a directory and
    # derive the file name from the URL path.
    if not _isfile(output):
        output = _join(output, _basename(_urlparse(url).path))
    with open(output, "wb") as dest:
        for chunk in response.iter_content():
            dest.write(chunk)
def _program_fpga(self, fpga_image): """ Program the FPGA with the specified image. Args: fpga_image (str): FPGA image. """ ''' # Vitis does not reprogram a FPGA that has already the bitstream. # So to force it we write another bitstream first. clear_image = _join(SCRIPT_DIR, 'clear.awsxclbin') load_image = _run( [self._xbutil, 'program', '-d', str(self._fpga_slot_id), '-p', clear_image], stderr=_STDOUT, stdout=_PIPE, universal_newlines=True, check=False) if load_image.returncode: raise RuntimeError(load_image.stdout) # Now load the real image fpga_image = _realpath(_fsdecode(fpga_image)) load_image = _run( [self._xbutil, 'program', '-d', str(self._fpga_slot_id), '-p', fpga_image], stderr=_STDOUT, stdout=_PIPE, universal_newlines=True, check=False) if load_image.returncode: raise RuntimeError(load_image.stdout) ''' # Init global specific variables self.shm_pages = list() self.ctrl_sw_exec = None # Start Controller SW fpga_image = '' if not _isfile(fpga_image): pass # raise RuntimeError('Controller SW executable path is invald: ', fpga_image) # self.ctrl_sw_exec = Popen([self.ctrl_sw_exec, self._fpga_image], shell=False, stdout=_PIPE, stderr=_STDOUT) print('Programmed AWS SoM with', self.ctrl_sw_exec)
def load_files(data_file_location):
    """Collect, sort and de-duplicate data files from the given directories.

    Each directory in *data_file_location* is scanned for regular files;
    per-directory results are sorted, concatenated, stripped of duplicates
    via strip_files_of_doubles, and finally ordered by sort_func.
    """
    # Filter placeholders — all disabled, so every regular file matches.
    startswith = None
    endswith = None
    contains = None
    contains_not = None
    data = []
    for path in data_file_location:
        matched = []
        for name in _listdir(path):
            if not _isfile(_join(path, name)):
                continue
            if startswith is not None and not name.startswith(startswith):
                continue
            if endswith is not None and not name.endswith(endswith):
                continue
            if contains is not None and contains not in name:
                continue
            if contains_not is not None and contains_not in name:
                continue
            matched.append(_join(path, name) if path != "." else name)
        data.append(matched)
    # Sort the lists:
    combined_list = []
    for file_group in data:
        combined_list = combined_list + sorted(file_group)
    combined_list = strip_files_of_doubles(combined_list)
    combined_list = sorted(combined_list, key=sort_func)
    return combined_list
    # NOTE(review): fragment — the enclosing function's `def` line is outside
    # this chunk; `img`, `rect_sz`, `pos` and `reverse` come from its
    # signature. It blanks a rectangular region of `img` (or, reversed,
    # keeps only that region).
    rect = Image.new("RGBA", rect_sz, 4 * (0, ))
    removed = img.copy()
    removed.paste(rect, pos)
    if not reverse:
        return removed
    else:
        return _inv(_sub(img, removed))


def _scale(section, dims):
    # Scale each bound of *section* by the matching dimension; `2 * dims`
    # tiles the size pair so a 4-tuple box scales against (w, h, w, h).
    return tuple(b * d for b, d in zip(section, 2 * dims))


# Pre-process noun images: resize to 100x100 and save under processed/.
npath = "nouns/"
for noun in (_f for _f in _listdir(npath) if _isfile(npath + _f)):
    resized = _resize(Image.open((npath + noun)).convert("RGBA"), (100, 100))
    resized.save(((npath + "processed/") + noun))

# Verb images additionally get the verb marker composited on top.
vpath = "verbs/"
vmark = Image.open("verb_mark.png").convert("RGBA")
for verb in (_f for _f in _listdir(vpath) if _isfile(vpath + _f)):
    resized = _resize(Image.open((vpath + verb)).convert("RGBA"), (100, 100))
    _add(resized, vmark).save(((vpath + "processed/") + verb))

# Adjective images get the adjective marker composited on top.
apath = "adjectives/"
amark = Image.open("adjective_mark.png").convert("RGBA")
for adj in (_f for _f in _listdir(apath) if _isfile(apath + _f)):
    resized = _resize(Image.open((apath + adj)).convert("RGBA"), (100, 100))
    _add(resized, amark).save(((apath + "processed/") + adj))
def isfile(self, path):
    """Return True if *path* resolves (via getsyspath) to a regular file."""
    sys_path = self.getsyspath(path)
    return _isfile(sys_path)
# NOTE: Python 2 module (print statements in the __main__ block below).

def updateape(fil, fields = {}, removefields = []):
    '''Update APE tag in fil with the information in fields'''
    # NOTE(review): mutable defaults ({}/[]) are shared between calls; safe
    # here only because they are forwarded, not mutated.
    _checkremovefields(removefields)
    return _tag(_ape, fil, 'update', callback=_updateapetagcallback,
                callbackkwargs={'fields':fields,
                                'removefields':removefields})

def updateid3(fil, fields = {}):
    '''Update ID3v1 tag in fil with the information in fields'''
    return _tag(_id3, fil, 'update', callback=_updateid3tagcallback,
                callbackkwargs={'fields':fields})

def updatetags(fil, fields = {}, removefields = []):
    '''Update both APE and ID3v1 tags on fil with the information in fields'''
    _checkremovefields(removefields)
    # updateid3=True makes the APE update also refresh the ID3v1 tag.
    return _tag(_ape, fil, 'update', callback=_updateapetagcallback,
                callbackkwargs={'fields':fields,
                                'removefields':removefields},
                updateid3=True)

if __name__ == '__main__':
    # CLI: print the tags of every file given on the command line.
    import sys
    for filename in sys.argv[1:]:
        if _isfile(filename):
            print '\n%s' % filename
            try:
                printtags(filename)
            except TagError:
                print 'Missing APE or ID3 Tag'
        else:
            print "%s: file doesn't exist" % filename
def _get_xbutil():
    """Locate the XRT 'xbutil' board utility and return its path."""
    xbutil_path = _join(FpgaDriver._get_xrt_lib(), 'bin', 'xbutil')
    if _isfile(xbutil_path):
        return xbutil_path
    raise RuntimeError('Unable to find Xilinx XRT Board Utility')
# Package __init__: auto-import every sibling module.
# Builds one "from .<module> import *" line per non-underscore *.py file in
# this directory and exec's them, re-exporting all their public names.
from os.path import dirname as _dirname, basename as _basename, isfile as _isfile
import glob as _glob
exec('\n'.join(map(lambda name: "from ." + name + " import *",
                   [_basename(f)[:-3]
                    for f in _glob.glob(_dirname(__file__) + "/*.py")
                    if _isfile(f) and not _basename(f).startswith('_')])))
# Importing the dependencies from os.path import dirname as _dirname, basename as _basename,\ isfile as _isfile import glob as _glob exec('\n'.join( map(lambda name: "from ." + name + " import *", [ _basename(f)[:-3] for f in _glob.glob(_dirname(__file__) + "/*.py") if _isfile(f) and not _basename(f).startswith('_') ])))
def read_url_from_file(filename):
    """Read newline-separated URLs from *filename*.

    Returns a list of lines (whitespace-trimmed at both ends of the file),
    or None when *filename* is falsy or not an existing file.
    """
    # Guard clause replaces the original nested ifs; short-circuit keeps
    # _isfile from ever seeing a falsy filename.
    if not filename or not _isfile(filename):
        return None
    with open(filename) as input_file:
        return input_file.read().strip().split('\n')
        'update', callback=_updateid3tagcallback,
        callbackkwargs={'fields': fields})
# NOTE(review): the two lines above are a fragment — they continue a
# `return _tag(...)` call whose opening line is outside this chunk
# (Python 2 module; see the print statements below).


def updatetags(fil, fields={}, removefields=[]):
    '''Update both APE and ID3v1 tags on fil with the information in fields'''
    # NOTE(review): mutable defaults ({}/[]) are shared between calls; safe
    # here only because they are forwarded, not mutated.
    _checkremovefields(removefields)
    # updateid3=True makes the APE update also refresh the ID3v1 tag.
    return _tag(_ape, fil,
                'update', callback=_updateapetagcallback,
                callbackkwargs={
                    'fields': fields,
                    'removefields': removefields
                },
                updateid3=True)


if __name__ == '__main__':
    # CLI: print the tags of every file given on the command line.
    import sys
    for filename in sys.argv[1:]:
        if _isfile(filename):
            print '\n%s' % filename
            try:
                printtags(filename)
            except TagError:
                print 'Missing APE or ID3 Tag'
        else:
            print "%s: file doesn't exist" % filename