def path_ensdf_file():
    """Return the full path of every file inside each ENSDF data directory."""
    return [
        _path.join(directory, entry)
        for directory in path_ensdf_dir()
        for entry in _listdir(directory)
    ]
def cleanup_files(root_path: str, ttl: int) -> tuple:
    """Remove obsolete files and empty directories.

    Args:
        root_path: Root of the directory tree to clean.
        ttl: Time-to-live in seconds; a file is removed when its
            modification time plus ``ttl`` is older than now.

    Returns:
        tuple: ``(success, failed)`` where ``success`` is a list of removed
        file paths and ``failed`` is a list of ``(path, exception)`` pairs.
    """
    now = _current_time()
    success = []
    failed = []
    for root, d_names, f_names in _walk(root_path):
        for f_name in f_names:
            f_path = _path.join(root, f_name)
            try:
                # getmtime is inside the try: the file may vanish between
                # the walk and this call (previously crashed the cleanup).
                m_time = _path.getmtime(f_path)
                if m_time + ttl < now:
                    _unlink(f_path)
                    success.append(f_path)
            except Exception as e:
                failed.append((f_path, e))
        # Remove empty directories; tolerate races where a directory
        # becomes non-empty or disappears before the rmdir.
        for d_name in d_names:
            d_path = _path.join(root, d_name)
            try:
                if not _listdir(d_path):
                    _rmdir(d_path)
            except OSError:
                pass
    return success, failed
def ListFullDir(dirname):
    """Shortcut around listdir that returns backslash-joined full paths.

    Saves having to join the directory and each filename by hand every
    time the full paths are wanted.
    """
    full_paths = []
    for filename in _listdir(dirname):
        full_paths.append('\\'.join((dirname, filename)))
    return full_paths
def _get_lib_path_parital_qualname(name, base, search_dirs, splitext=_splitext): """ Internal function to search for partially qualified names @param name: filename @type name: str @param base: parially qualified base to join with name @type base: str @param search_dirs: search dirs @type search_dirs: list[str] or tuple[str] @return: str @rtype: str """ error = FileNotFoundError ext = splitext(name)[1] if not (base.startswith('\\') or base.startswith('/')): base = '\\' + base if ext: for fldr in search_dirs: path = fldr + base try: files = _listdir(path) except error: continue for file in files: if file == name: return '\\'.join((path, name)) else: for fldr in search_dirs: path = fldr + base try: files = _listdir(path) except error: continue for file in files: file, ext = splitext(file) # test presence of ext to exclude dirs if ext and file == name: return ''.join((path, "\\", name, ext)) raise FileNotFoundError("Partially qualified name %s not found" % '\\'.join((base, name)))
def change_location(src, tgt, move=False, verbose=True):
    '''
    Copies/moves/deletes locations

    :param src: Source location where to copy from
    :param tgt: Target location where to copy to

        * To backup `src`, set `tgt` explicitly to ``True``. \
        `tgt` will be set to `src` + '_backup_' + \
        :func:`util.system.get_timestamp` then

    :param move: Deletes original location after copy (a.k.a. move)

        * To delete `src` , set `tgt` explicitly to ``False`` \
        and `move` to ``True`` (be careful!!1!)

    :param verbose: Show warnings
    '''
    from photon.util.system import shell_notify

    if _path.exists(src):
        if tgt:
            if _path.isfile(src):
                # single file: resolve/create the target location, then copy
                _copy2(src, search_location(
                    tgt, create_in=_path.dirname(tgt), verbose=verbose)
                )
            else:
                # directory: recurse entry by entry.
                # NOTE(review): `move` and `verbose` are not forwarded to the
                # recursive call — children are always copied with defaults;
                # deletion of the whole tree is handled below instead.
                for l in _listdir(src):
                    change_location(
                        _path.abspath(_path.join(src, l)),
                        _path.abspath(_path.join(tgt, l))
                    )
        if move:
            # delete the original (runs even when tgt is falsy — that is
            # the documented "delete src" mode)
            if _path.isdir(src) and not _path.islink(src):
                _rmtree(src)
            else:
                _remove(src)
        if verbose:
            shell_notify(
                '%s location' % (
                    'deleted' if not tgt and move else
                    'moved' if move else
                    'copied'
                ),
                more=dict(src=src, tgt=tgt)
            )
def list_files(dir_paths, endswith=None, contains=None, startswith=None,
               contains_not=None):
    """Collect files from one or more directories, optionally filtered.

    Args:
        dir_paths: A directory path or a list of directory paths.
        endswith: Keep only filenames ending with this string (e.g. '.jpg').
        contains: Keep only filenames containing this substring.
        startswith: Keep only filenames starting with this string.
        contains_not: Drop filenames containing this substring.

    Returns:
        Sorted list of matching file paths (bare filenames when the
        directory is ".").  Invalid paths are reported and skipped.
    """
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    files = []
    for path in dir_paths:
        try:
            entries = _listdir(path)
        except OSError:
            # narrow except: only directory-access errors are "invalid path"
            print("path", path, "invalid")
            continue
        for f in entries:
            if not _isfile(_join(path, f)):
                continue
            if startswith is not None and not f.startswith(startswith):
                continue
            if endswith is not None and not f.endswith(endswith):
                continue
            if contains is not None and contains not in f:
                continue
            if contains_not is not None and contains_not in f:
                continue
            files.append(_join(path, f) if path != "." else f)
    files.sort()
    return files
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    """
    global _dll
    rel = set()
    # safety prompt when falling back to a full C:\ walk
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return
    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)  # regex match function
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir + _sep + nfile.string)
            if not rel:
                for root, dirs, files in _walk(root):  # no luck, walk the dir tree
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_Error(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                # version suffix looks like "-X.Y-"; keep max major, then max minor
                ver = _compile('-[\d]{1,2}.[\d]{1,2}-')
                vers = tuple(
                    zip(map(lambda x: _search(ver, x).group().strip('-'), rel),
                        rel))
                vers_max = max(vers)[0].split('.')[0]
                mtup = tuple((x[0].split('.')[1], x[1])
                             for x in vers
                             if x[0].split('.')[0] == vers_max)
                mtup_max = max(mtup)[0]
                rel = set(x[1] for x in mtup if x[0] == mtup_max)
            # find the most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        _dll = _WinDLL(dllpath)
        print("+ Using Module ", dllpath)
        print("+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \
Engine")
        else:
            print("- Failed to Connect to Service \
Engine")
        return True  # indicate the lib was loaded
    except Exception as e:
        raise TOSDB_CLibError("unable to initialize library", e)
def run_command():
    """Sign packages"""
    from os import environ
    from argparse import ArgumentParser

    parser = ArgumentParser(prog="sign_packages",
                            description="Sign RPM or DEB packages.")
    parser.add_argument("packages_dir", help="Input packages directory")
    parser.add_argument("--quiet", "-q", help="Disable verbosity",
                        action="store_true")
    args = parser.parse_args()

    # the private key comes from the environment; abort when missing
    private_key = environ.get("GPG_PRIVATE_KEY", "")
    if not private_key:
        parser.exit(1, message="No private key\n")
        return  # defensive: parser.exit() raises SystemExit

    packages_dir = _realpath(args.packages_dir)

    # pick the signing backend from the first recognized package extension
    for file in _listdir(packages_dir):
        ext = _splitext(file)[1].lower()
        if ext == ".rpm":
            sign = sign_rpm_packages
            break
        elif ext == ".deb":
            sign = sign_deb_packages
            break
    else:
        # loop completed without break: no .rpm/.deb present
        parser.exit(1, "No package to sign\n")
        return  # defensive: parser.exit() raises SystemExit

    try:
        sign(
            packages_dir=packages_dir,
            private_key=private_key,
            public_key=environ.get("GPG_PUBLIC_KEY", ""),
            pass_phrase=environ.get("GPG_PASS_PHRASE", ""),
            quiet=args.quiet,
        )
    except _CalledProcessError as exception:
        parser.exit(exception.returncode, exception.stdout)
    except RuntimeError as exception:
        parser.exit(1, str(exception))

    if not args.quiet:
        parser.exit(message="Signature successful\n")
def init(dllpath = None, root = "C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    """
    global _dll
    rel = set()
    # safety prompt when falling back to a full C:\ walk
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return
    try:
        if dllpath is None:
            matcher = _partial( _match, _REGEX_DLL_NAME)  # regex match function
            for nfile in map( matcher, _listdir( _curdir )):
                if nfile:  # try the current dir first
                    rel.add( _curdir+ _sep + nfile.string )
            if not rel:
                for root,dirs, files in _walk(root):  # no luck, walk the dir tree
                    for file in map( matcher, files):
                        if file:
                            rel.add( root + _sep + file.string )
                if not rel:  # if still nothing throw
                    raise TOSDB_Error(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                # version suffix looks like "-X.Y-"; keep max major, then max minor
                ver = _compile('-[\d]{1,2}.[\d]{1,2}-')
                vers = tuple( zip( map(
                    lambda x: _search(ver,x).group().strip('-'), rel), rel) )
                vers_max = max(vers)[0].split('.')[0]
                mtup = tuple( (x[0].split('.')[1],x[1])
                              for x in vers
                              if x[0].split('.')[0] == vers_max)
                mtup_max = max(mtup)[0]
                rel = set( x[1] for x in mtup if x[0] == mtup_max )
            # find the most recently updated
            d = dict( zip(map( lambda x : _stat(x).st_mtime, rel), rel ) )
            rec = max(d)
            dllpath = d[ rec ]
        _dll = _WinDLL( dllpath )
        print( "+ Using Module ", dllpath )
        print( "+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \
Engine")
        else:
            print("- Failed to Connect to Service \
Engine")
        return True  # indicate the lib was loaded
    except Exception as e:
        raise TOSDB_CLibError( "unable to initialize library", e )
def run_command():
    """Sign packages"""
    from argparse import ArgumentParser

    parser = ArgumentParser(prog='sign_packages',
                            description='Sign RPM or DEB packages.')
    parser.add_argument('packages_dir', help='Input packages directory')
    parser.add_argument('--private_key', '-k', help='Private GPG key')
    parser.add_argument('--public_key', '-b', help='Public GPG key')
    parser.add_argument('--pass_phrase', '-p', help='GPG key pass phrase')
    parser.add_argument('--quiet', '-q', help='Disable verbosity',
                        action='store_true')
    args = parser.parse_args()
    packages_dir = _realpath(args.packages_dir)

    if args.private_key is not None and not args.private_key:
        # Passed and empty value: command called from build script but
        # not private key is defined. In this case, signature is disabled.
        return

    # pick the signing backend from the first recognized package extension
    for file in _listdir(packages_dir):
        ext = _splitext(file)[1].lower()
        if ext == '.rpm':
            sign = sign_rpm_packages
            break
        elif ext == '.deb':
            sign = sign_deb_packages
            break
    else:
        # loop completed without break: no .rpm/.deb present
        parser.exit(1, 'No package to sign.')
        return  # defensive: parser.exit() raises SystemExit

    try:
        sign(packages_dir=packages_dir, private_key=args.private_key,
             public_key=args.public_key, pass_phrase=args.pass_phrase,
             quiet=args.quiet)
    except _CalledProcessError as exception:
        parser.exit(exception.returncode, exception.stdout)
    except RuntimeError as exception:
        parser.exit(1, str(exception))

    if not args.quiet:
        parser.exit(message='Signature successful')
def list_directories(dir_paths, endswith=None, contains=None, startswith=None,
                     contains_not=None):
    """Collect subdirectories from one or more directories, with filters.

    Args:
        dir_paths: A directory path or a list of directory paths.
        endswith: Keep only names ending with this string.
        contains: Keep only names containing this substring.
        startswith: Keep only names starting with this string.
        contains_not: Drop names containing this substring.

    Returns:
        Sorted list of matching directory paths (bare names when the
        parent path is ".").

    Raises:
        ValueError: when every supplied path was invalid.
    """
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    files = []
    n_ok = 0
    for path in dir_paths:
        try:
            entries = _listdir(path)
        except OSError:
            # narrow except: only directory-access errors count as invalid
            print("path <", path, "> is invalid")
            continue
        n_ok += 1
        for f in entries:
            if _isfile(_join(path, f)):
                continue  # keep directories only
            if startswith is not None and not f.startswith(startswith):
                continue
            if endswith is not None and not f.endswith(endswith):
                continue
            if contains is not None and contains not in f:
                continue
            if contains_not is not None and contains_not in f:
                continue
            files.append(_join(path, f) if path != "." else f)
    if n_ok == 0:
        print('list_directories():: All paths were invalid!')
        raise ValueError()
    files.sort()
    return files
def find_file_by(_path, prefix=None, sufix=None):
    """Locate the single file in *_path* matching *prefix* (or *sufix*).

    :param _path: directory to scan
    :param prefix: match filenames starting with this (takes precedence)
    :param sufix: match filenames ending with this (used when no prefix)
    :return: the matching filename, or None when nothing matches
    :raises Exception: when more than one file matches
    """
    matches = [
        entry for entry in _listdir(_path)
        if _isfile(_join(_path, entry))
        and (entry.startswith(prefix) if prefix else entry.endswith(sufix))
    ]
    if len(matches) > 1:
        raise Exception('Multiple benchmark files in a same folder.')
    return matches[0] if matches else None
def clear(self, path):
    """Recursively delete unwanted entries under *path*.

    Directories whose name — and files whose last dot-separated extension —
    appears in ``self.extensions_to_delete`` are removed; other directories
    are descended into.  Permission errors are reported, not raised.
    """
    for entry in _listdir(path):
        entry_path = path_join(path, entry)
        if isdir(entry_path):
            if entry in self.extensions_to_delete:
                try:
                    rmtree(entry_path)
                    print('Removed', ''.join((entry_path, '/')))
                except OSError:
                    print('Permission denied', entry_path)
            else:
                self.clear(entry_path)
        else:
            try:
                if entry.split('.')[-1] in self.extensions_to_delete:
                    remove(entry_path)
                    print('Removed', entry_path)
            except OSError:
                print('Permission denied', entry_path)
def nbconvert(): """helper function to convert all .ipynb files in the current directory to html files. The output html files are in a directory named `html` under the current directory. Existing html files with the same name are overwritten. If no directory named `html` is present, it will be created (jupyter nbconvert does that automatically) """ # Get the current directory #cdir = path.dirname(path.realpath(__file__)) cdir = _getcwd() # Get a list of .ipynb files files2convert = [f for f in _listdir(cdir) if f.endswith('.ipynb')] # Convert the files within a try-except block count_files_successfully_converted = 0 failedFiles = [] for i, f in enumerate(files2convert): _getMD5(f) print(">>> [{}] Converting file ... ".format(i+1)) if _convert(f): count_files_successfully_converted += 1 else: failedFiles.append(f) # Print some human readable feedback print("\n") print("*******************************************") print(" REPORT ") print("*******************************************") print("\nCurrent Directory: ", cdir) print("Number of IPython notebooks found: ", len(files2convert)) print("Number of files successfully converted to html:", count_files_successfully_converted) print("Number of files failed to convert to html:", len(failedFiles)) if failedFiles: print("Files that failed to convert:") for f in failedFiles: print(f) print("\nDONE!") raw_input("Press ENTER to close the appliation ...")
def _get_lib_path_no_extension(filepath, splitext=_splitext): """Internal function to find file when given basepath but no file extension. Scan the directory of the file and try to match the head of the filepath to an entry in that directory. @param filepath: a filename with extension but no basename To make function not require xl param, just use xl = newExcel(False, False) and close at the end. @type filepath: str @rtype: str """ base, head = _split(filepath) for entry in _listdir(base): filename, ext = splitext(entry) # if not ext, then we have a dir, not filename # test ext first to short circuit non-files if ext and filename == head: return '\\'.join((base, entry)) raise FileNotFoundError("File '%s' not found via no-extension search" % filepath)
def create_compiled_schedule(self, networkjson, weightdir, inps):
    """Build an execution schedule for the network (Python 2 code).

    Reads one weight file per layer blob from *weightdir* (first line:
    4 header tokens then the flat values), then walks the network layers
    in *networkjson* and instantiates a scheduled layer for each
    supported type.  *inps* names the graph inputs.
    """
    import numpy as np
    weights = _listdir(weightdir)
    weights = [_join(weightdir, wt) for wt in weights]
    const = {}
    for wtfile in weights:
        with open(wtfile, 'r') as wts:
            line = wts.readline()
            toks = line.strip().split()
            print len(toks)
            if len(toks) > 4:
                print toks[0], toks[1], toks[2], toks[3], len(toks[4:])
                if toks[0] not in const:
                    const[toks[0]] = {}
                # files named "...bias..." carry biases, others weights;
                # only the basename (after the last '/') is inspected
                if "bias" in wtfile[wtfile.rfind('/'):]:
                    const[toks[0]]['bias'] = np.array(
                        [float(x) for x in toks[4:]])
                else:
                    const[toks[0]]['weights'] = np.array(
                        [float(x) for x in toks[4:]])
    schedule = []
    for layer in networkjson['network']:
        if layer['type'] in _available_layers:
            print layer['type'], layer['name'], layer[
                'bottoms'], layer['type'] not in [
                    'Convolution', 'InnerProduct'
                ] or (layer['name'] in const
                      and len(const[layer['name']]) == 2)
            xdlf_layer = copy.deepcopy(
                _available_layers(layer['type'], mode='NCHW'))
            # attach constants when this layer has learned parameters
            if layer['name'] in const:
                xdlf_layer.set_layer_params(layer, const[layer['name']])
            else:
                xdlf_layer.set_layer_params(layer)
            schedule.append(xdlf_layer)
        elif layer['name'] in inps:
            print "Detected input : ", layer['name'], layer['type'], layer[
                'outputshapes']
    print schedule
    return schedule
def list_files(dir_paths, endswith=None, contains=None, startswith=None,
               contains_not=None):
    """Collect files from one or more directories, optionally filtered.

    Args:
        dir_paths: A directory path or a list of directory paths.
        endswith: Keep only filenames ending with this string (e.g. '.jpg').
        contains: Keep only filenames containing this substring.
        startswith: Keep only filenames starting with this string.
        contains_not: Drop filenames containing this substring.

    Returns:
        Sorted list of matching file paths (bare filenames when the
        directory is ".").  Invalid paths are reported and skipped.
    """
    if not isinstance(dir_paths, list):
        dir_paths = [dir_paths]
    files = []
    for path in dir_paths:
        try:
            entries = _listdir(path)
        except OSError:
            # narrow except: only directory-access errors are "invalid path"
            print("path", path, "invalid")
            continue
        for f in entries:
            if not _isfile(_join(path, f)):
                continue
            if startswith is not None and not f.startswith(startswith):
                continue
            if endswith is not None and not f.endswith(endswith):
                continue
            if contains is not None and contains not in f:
                continue
            if contains_not is not None and contains_not in f:
                continue
            files.append(_join(path, f) if path != "." else f)
    files.sort()
    return files
def _build_buffers() -> None:
    """Populate the module-level pid/comm/cmdline caches from ``/proc``.

    Scans every numeric entry in /proc, reading ``comm`` and ``cmdline``
    for each; processes that exit between the listing and the reads are
    skipped entirely (previously they were removed by popping from the
    list being enumerated, which skipped entries and, after the first
    removal, popped the wrong index).
    """
    global _buffer_map_pids_to_comms
    global _buffer_map_pids_to_cmdlines
    global _buffer_running_pids
    global _buffer_list_of_comms
    global _buffer_map_comms_to_pids

    _buffer_map_pids_to_comms = {}     # Dict[str, str]
    _buffer_map_pids_to_cmdlines = {}  # Dict[str, List[str]]
    _buffer_list_of_comms = []         # List[str]
    _buffer_map_comms_to_pids = {}     # Dict[str, List[str]]

    candidate_pids = [pid for pid in _listdir('/proc') if pid.isdigit()]
    alive_pids = []
    for pid in candidate_pids:
        try:
            # context managers guarantee the handles close even on error
            with open(_join('/proc', pid, 'comm'), 'r') as comm_file:
                comm = comm_file.read().rstrip('\n')
            with open(_join('/proc', pid, 'cmdline'), 'r') as cmd_file:
                cmdline = cmd_file.read().rstrip('\n').split('\x00')
        except FileNotFoundError:
            # process exited between listdir and the reads: drop it
            continue
        alive_pids.append(pid)
        _buffer_map_pids_to_comms[pid] = comm
        _buffer_map_pids_to_cmdlines[pid] = cmdline
        if comm not in _buffer_map_comms_to_pids:
            _buffer_list_of_comms.append(comm)
            _buffer_map_comms_to_pids[comm] = []
        _buffer_map_comms_to_pids[comm].append(pid)
    _buffer_running_pids = alive_pids
def sign_deb_packages(packages_dir, private_key=None, public_key=None,
                      pass_phrase=None, quiet=False):
    """
    Sign all DEB packages in a directory.

    Args:
        packages_dir (str): Directory containing packages.
        private_key (str): Path to GPG private key to use. If no private
            key specified, use current GPG configuration.
        public_key (str): Path to GPG public key to use.
        pass_phrase (str): GPG key pass phrase.
        quiet (bool): Hide packages manager output.
    """
    _init_gpg_configuration(private_key)

    # Collect every .deb file in the directory.
    packages = [
        name for name in _listdir(packages_dir)
        if _splitext(name)[1].lower() == ".deb"
    ]

    # Sign with dpkg-sig, then verify the signatures.
    sign_command = ["dpkg-sig"]
    if pass_phrase:
        sign_command.extend(["-g", '--passphrase "%s"' % pass_phrase])
    sign_command += ["--sign", "builder"]
    _run(sign_command + packages, quiet=quiet, cwd=packages_dir)
    _run(["dpkg-sig", "--verify"] + packages, quiet=quiet, cwd=packages_dir)

    if not quiet:
        print("Signed packages:\n - %s" % "\n- ".join(packages))
def load_files(data_file_location):
    """Collect, sort and de-duplicate data files from the given directories.

    Args:
        data_file_location: Iterable of directory paths to scan.

    Returns:
        De-duplicated list of file paths (bare names when the directory
        is "."), finally ordered by ``sort_func``.
    """
    # The original filter parameters (startswith/endswith/contains/
    # contains_not) were local constants always set to None, so their
    # dead conditions have been removed: keep every regular file.
    data = []
    for path in data_file_location:
        entries = [
            _join(path, f) if path != "." else f
            for f in _listdir(path)
            if _isfile(_join(path, f))
        ]
        data.append(entries)

    combined_list = []
    # Sort each directory's entries before concatenating, as before.
    for entries in data:
        combined_list += sorted(entries)

    combined_list = strip_files_of_doubles(combined_list)
    return sorted(combined_list, key=sort_func)
def install_accelize_drm_library(packages_dir, quiet=False):
    """
    Install Accelize DRM library packages.

    Args:
        packages_dir (str): Directory containing packages.
        quiet (bool): Hide packages manager output.
    """
    packages_dir = _realpath(packages_dir)

    # Select .deb/.rpm packages, excluding development ("-dev") packages.
    packages = []
    for package_file in _listdir(packages_dir):
        if '-dev' in package_file:
            continue
        if _splitext(package_file)[-1].lower() not in ('.deb', '.rpm'):
            continue
        packages.append(_join(packages_dir, package_file))

    # Run the platform's package manager over the selected packages.
    run_kwargs = (
        dict(stdout=_PIPE, stderr=_PIPE, universal_newlines=True)
        if quiet else dict()
    )
    _run(detect_package_manager() + ' '.join(packages),
         shell=True, **run_kwargs).check_returncode()
## get_status: converts a solver specific status flag to a cobra pie flag. ## set_parameter: takes solver specific parameter strings and sets them. ## solve: solves the optimization problem. this is where one should put ## in logic on what to try if the problem ## isn't optimal ## solve_problem: dumb and fast which will set parameters, if provided ##note that for some solvers ## update_problem: changes bounds and linear objective coefficient of the ## solver specific problem file, given the complementary cobra.mod from os import listdir as _listdir from os import path as _path for i in _listdir(_path.split(_path.abspath(__file__))[0]): if i.startswith("_") or i.startswith(".") or i == 'legacy.py': continue if not i.endswith(".py"): continue try: m = i.strip(".py") exec("from . import %s" % m) solver_name = m if solver_name.endswith('_solver'): solver_name = solver_name[:-len('_solver')] solver_dict[solver_name] = eval(m) except Exception, e: pass del _path del _listdir
def sign_rpm_packages(packages_dir, private_key=None, public_key=None,
                      pass_phrase=None, quiet=False):
    """
    Sign all RPM packages in a directory.

    Args:
        packages_dir (str): Directory containing packages.
        private_key (str): Path to GPG private key to use. If no private
            key specified, use current GPG configuration.
        public_key (str): Path to GPG public key to use.
        pass_phrase (str): GPG key pass phrase.
        quiet (bool): Hide commands output.
    """
    _init_gpg_configuration(private_key)

    # Import public key
    if public_key:
        _run(["rpm", "--import", public_key], quiet=quiet)

    # Sign packages
    packages = [
        package for package in _listdir(packages_dir)
        if _splitext(package)[1].lower() == ".rpm"
    ]

    # Extract the GPG user id from the exported key packets.
    gpg_info = _run("gpg --export | gpg --list-packets", shell=True,
                    quiet=True).stdout
    for line in gpg_info.strip().splitlines():
        if ":user ID packet" in line:
            gpg_user_id = line.rsplit(":", 1)[1].strip().strip('"')
            break
    else:
        raise RuntimeError("Unable to read GPG User ID")

    # rpm macros steering the signing configuration
    macros = [
        "_signature gpg",
        "_gpg_path %s" % _expanduser("~/.gnupg"),
        "_gpg_name %s" % gpg_user_id,
    ]
    if pass_phrase:
        # Non-interactive signing: inject the pass phrase into the gpg
        # command line used by rpm.
        macros += [
            "_gpgbin /usr/bin/gpg",
            " ".join((
                "__gpg_sign_cmd %{__gpg}",
                "gpg",
                "--force-v3-sigs",
                "--batch",
                "--verbose",
                "--no-armor",
                '--passphrase "%s"' % pass_phrase,
                "--no-secmem-warning",
                "-u",
                '"%{_gpg_name}"',
                "-sbo",
                "%{__signature_filename}",
                "--digest-algo",
                "sha256",
                "%{__plaintext_filename}",
            )),
        ]
    define = []
    for macro in macros:
        define.extend(["--define", macro])
    _run(["rpm", "--addsign"] + define + packages, quiet=True,
         cwd=packages_dir)

    # Verify signatures
    result = _run(["rpm", "--checksig"] + packages, quiet=True,
                  cwd=packages_dir)
    for line in result.stdout.splitlines():
        line = line.rstrip()
        if (not line.endswith("gpg OK")
                and not line.endswith("pgp md5 OK")
                and not line.endswith("digests signatures OK")):
            raise RuntimeError("Package signature verification failure: %s"
                               % line)

    if not quiet:
        print("Signed packages:\n - %s" % "\n- ".join(packages))
def sign_rpm_packages(packages_dir, private_key=None, public_key=None,
                      pass_phrase=None, quiet=False):
    """
    Sign all RPM packages in a directory.

    Args:
        packages_dir (str): Directory containing packages.
        private_key (str): Path to GPG private key to use. If no private
            key specified, use current GPG configuration.
        public_key (str): Path to GPG public key to use.
        pass_phrase (str): GPG key pass phrase.
        quiet (bool): Hide commands output.
    """
    _init_gpg_configuration(private_key)

    # Import public key
    if public_key:
        _run(['rpm', '--import', public_key], quiet=quiet)

    # Sign packages
    packages = [
        package for package in _listdir(packages_dir)
        if _splitext(package)[1].lower() == '.rpm'
    ]

    # Extract the GPG user id from the exported key packets.
    gpg_info = _run('gpg --export | gpg --list-packets', shell=True,
                    quiet=True).stdout
    for line in gpg_info.strip().splitlines():
        if ':user ID packet' in line:
            gpg_user_id = line.rsplit(':', 1)[1].strip().strip('"')
            break
    else:
        raise RuntimeError('Unable to read GPG User ID')

    # rpm macros steering the signing configuration
    macros = [
        '_signature gpg',
        '_gpg_path %s' % _expanduser("~/.gnupg"),
        '_gpg_name %s' % gpg_user_id
    ]
    if pass_phrase:
        # Non-interactive signing: inject the pass phrase into the gpg
        # command line used by rpm.
        macros += [
            '_gpgbin /usr/bin/gpg',
            ' '.join(
                ('__gpg_sign_cmd %{__gpg}', 'gpg', '--force-v3-sigs',
                 '--batch', '--verbose', '--no-armor',
                 '--passphrase "%s"' % pass_phrase, '--no-secmem-warning',
                 '-u', '"%{_gpg_name}"', '-sbo', '%{__signature_filename}',
                 '--digest-algo', 'sha256', '%{__plaintext_filename}'))
        ]
    define = []
    for macro in macros:
        define.extend(["--define", macro])
    _run(['rpm', '--addsign'] + define + packages, quiet=True,
         cwd=packages_dir)

    # Verify signatures
    result = _run(['rpm', '--checksig'] + packages, quiet=True,
                  cwd=packages_dir)
    for line in result.stdout.splitlines():
        line = line.rstrip()
        if (not line.endswith('gpg OK')
                and not line.endswith('pgp md5 OK')
                and not line.endswith('digests signatures OK')):
            raise RuntimeError('Package signature verification failure: %s'
                               % line)
rect = Image.new("RGBA", rect_sz, 4 * (0, )) removed = img.copy() removed.paste(rect, pos) if not reverse: return removed else: return _inv(_sub(img, removed)) def _scale(section, dims): return tuple(b * d for b, d in zip(section, 2 * dims)) npath = "nouns/" for noun in (_f for _f in _listdir(npath) if _isfile(npath + _f)): resized = _resize(Image.open((npath + noun)).convert("RGBA"), (100, 100)) resized.save(((npath + "processed/") + noun)) vpath = "verbs/" vmark = Image.open("verb_mark.png").convert("RGBA") for verb in (_f for _f in _listdir(vpath) if _isfile(vpath + _f)): resized = _resize(Image.open((vpath + verb)).convert("RGBA"), (100, 100)) _add(resized, vmark).save(((vpath + "processed/") + verb)) apath = "adjectives/" amark = Image.open("adjective_mark.png").convert("RGBA") for adj in (_f for _f in _listdir(apath) if _isfile(apath + _f)): resized = _resize(Image.open((apath + adj)).convert("RGBA"), (100, 100)) _add(resized, amark).save(((apath + "processed/") + adj))
def listdir(path):
    """List directory entries, decoding them when *path* is unicode.

    Python 2 / Jython helper: delegates to the saved original listdir
    (``os._listdir``) and, for unicode paths, round-trips each entry
    through a java ``String`` to obtain unicode objects.
    """
    items = os._listdir(path)
    if isinstance(path, unicode):
        items = [unicode(String(i).toString()) for i in items]
    return items
def os_listdir(path):
    """listdir wrapper: normalize *path* and apply long-path handling first."""
    prepared = longpathify(uni(path))
    return os._listdir(prepared)
def listdir(dirpath):
    """Full paths of the plain files in *dirpath*.

    Hidden entries, compiled Python files (``.pyc``), editor backups
    (``~``) and subdirectories are excluded.
    """
    results = []
    for fname in _listdir(dirpath):
        if fname.startswith('.') or fname.endswith(('.pyc', '~')):
            continue
        full = join(dirpath, fname)
        if not isdir(full):
            results.append(full)
    return results
from __future__ import unicode_literals import logging from os import listdir as _listdir # To not conflict with operator from os.path import dirname, basename, join, isdir, abspath, expanduser from abc import ABCMeta, abstractmethod from PyInquirer import prompt import cookiecutter as cc from cookiecutter.context_manager import work_in logger = logging.getLogger(__name__) # TODO: Allow for imports of custom operators and subdirectories. __all__ = [ basename(f)[:-3] for f in _listdir(dirname(__file__)) if f[-3:] == ".py" and not f.endswith("__init__.py") ] class BaseOperator(metaclass=ABCMeta): """Base operator mixin class.""" def __init__( self, operator_dict=None, context=None, context_key=None, no_input=False, cc_dict=None, env=None, key=None,
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL and try to connect.

    Returns True if library was successfully loaded, not necessarily that
    it was also able to connect. Details are sent to stdout.

    init(dllpath=None, root="C:\\", bypass_check=False)

    dllpath :: str :: exact path of the DLL
    -or-
    root :: str :: directory to start walking/searching to find the DLL
    bypass_check :: bool :: used by virtual layer implemenation (DO NOT SET)

    returns -> bool
    throws TOSDB_InitError TOSDB_Error
    """
    global _dll, _dll_depend1
    rel = set()
    # safety prompt when falling back to a full C:\ walk
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return False

    def _remove_older_versions():
        # keep only paths carrying the highest major.minor version suffix
        nonlocal rel
        getver = lambda x: _search(_REGEX_VER_SFFX, x).group().strip('-')
        vers = tuple(zip(map(getver, rel), rel))
        vers_max = max(vers)[0].split('.')[0]
        mtup = tuple(( x[0].split('.')[1],x[1]) \
                     for x in vers if x[0].split('.')[0] == vers_max )
        mtup_max = max(mtup)[0]
        rel = set(x[1] for x in mtup if x[0] == mtup_max)

    def _get_depends1_dll_path(dllpath):
        # dependency DLL lives next to the main DLL; use the debug build
        # when the main DLL path matches the debug pattern
        d = _path.dirname(dllpath)
        dbg = _match(_REGEX_DBG_DLL_PATH, dllpath)
        base = d + "/" + DLL_DEPENDS1_NAME + "-" + SYS_ARCH_TYPE
        path = base + ("_d.dll" if dbg else ".dll")
        return path

    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir + _sep + nfile.string)
            if not rel:  # no luck, walk the dir tree
                for root, dirs, files in _walk(root):
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_InitError(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                _remove_older_versions()
            # most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        dllpath_depends1 = _get_depends1_dll_path(dllpath)
        _dll_depend1 = _CDLL(dllpath_depends1)
        _dll = _CDLL(dllpath)
        print("+ Using Module(s) ", dllpath)
        print(" ", dllpath_depends1)
        print("+ Last Update:", _asctime(_localtime(_stat(dllpath).st_mtime)))
        print("+ Process ID:", str(_getpid()))
        if connect():
            print("+ Succesfully Connected to Service\Engine")
            if connected():
                print("+ Succesfully Connected to TOS")
            else:
                print("- Failed to Connect to TOS")
        else:
            print("- Failed to Connect to Service\Engine")
            print("- Failed to Connect to TOS")
        return True  # indicate the lib was loaded (but not if connect succeeded)
    except Exception as e:
        raise TOSDB_InitError("unable to initialize library:", e)
from os import listdir as _listdir from os.path import dirname as _dirname from importlib import import_module as _import_module base_package = __name__.replace('.base', '') def _not_implemented_conversion(item): raise NotImplementedError("This conversion has not been implemented yet") list_api_forms = [ filename.split('.')[0] for filename in _listdir(_dirname(__file__)) if filename.startswith('api') ] dict_api_forms = {} list_forms = [] dict_converter = {} dict_is_form = {} for api_form in list_api_forms: module_api_form = _import_module('.' + api_form, base_package) form_name = module_api_form.form_name list_forms.append(form_name) dict_api_forms[form_name] = module_api_form dict_is_form.update(module_api_form.is_form) for form_name in list_forms: dict_converter[form_name] = {} for method in dict_api_forms[form_name].__dict__.keys():
from datetime import date as _date from json import load as _load, loads as _loads from os import listdir as _listdir from os.path import dirname as _dirname, join as _join, splitext as _splitext from pybars import Compiler as _Compiler from ssl_config._helpers import HELPERS as _HELPERS from ssl_config._versions import Version as _Version _DATA_DIR = _join(_dirname(__file__), '_data') #: Supported server software SERVERS = tuple( sorted( _splitext(name)[0] for name in _listdir(_join(_DATA_DIR, 'templates')))) with open(_join(_DATA_DIR, 'guidelines.json'), 'rt') as json_file: #: Guidelines information as dict GUIDELINES = _load(json_file) #: Mozilla SSL configuration levels #: #: Modern: #: Services with clients that support TLS 1.3 and don't need #: backward compatibility. #: #: Intermediate #: General-purpose servers with a variety of clients, recommended for #: almost all systems. #:
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    """
    global _dll, _dll_depend1
    rel = set()
    # safety prompt when falling back to a full C:\ walk
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return False

    def _remove_older_versions():
        # keep only paths carrying the highest major.minor version suffix
        nonlocal rel
        getver = lambda x: _search(_REGEX_VER_SFFX, x).group().strip('-')
        vers = tuple(zip(map(getver, rel), rel))
        vers_max = max(vers)[0].split('.')[0]
        mtup = tuple(( x[0].split('.')[1],x[1]) \
                     for x in vers if x[0].split('.')[0] == vers_max )
        mtup_max = max(mtup)[0]
        rel = set(x[1] for x in mtup if x[0] == mtup_max)

    def _get_depends1_dll_path(dllpath):
        # dependency DLL lives next to the main DLL
        d = _path.dirname(dllpath)
        return d + "/" + DLL_DEPENDS1_NAME + "-" + SYS_ARCH_TYPE + ".dll"

    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)  # regex match function
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir + _sep + nfile.string)
            if not rel:  # no luck, walk the dir tree
                for root, dirs, files in _walk(root):
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_InitError(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                _remove_older_versions()
            # most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        dllpath_depends1 = _get_depends1_dll_path(dllpath)
        _dll_depend1 = _CDLL(dllpath_depends1)
        _dll = _CDLL(dllpath)
        print("+ Using Module(s) ", dllpath)
        print(" ", dllpath_depends1)
        print("+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \
Engine")
        else:
            print("- Failed to Connect to Service \
Engine")
        return True  # indicate the lib was loaded (but not if connect succeeded)
    except TOSDB_Error:
        raise
    except Exception as e:
        raise TOSDB_InitError("unable to initialize library", e)
def listdir(directory):
    """Return the entries of *directory* in sorted order."""
    entries = _listdir(directory)
    return sorted(entries)
from os import listdir as _listdir

# Build the package's public API from the modules in this package directory.
# NOTE(review): str.replace strips *every* ".py" occurrence, not just the
# suffix, and non-.py entries (e.g. subdirectories) are also kept — fine
# only if the package layout guarantees neither case occurs.
__all__ = [
    _module.replace(".py", "")
    for _module in _listdir(__path__[0])
    if not _module.endswith(".pyc") and not _module.startswith("__")
]
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    """
    global _dll, _dll_depend1
    rel = set()
    # safety prompt when falling back to a full C:\ walk
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return False

    def _remove_older_versions():
        # keep only paths carrying the highest major.minor version suffix
        nonlocal rel
        getver = lambda x: _search(_REGEX_VER_SFFX,x).group().strip('-')
        vers = tuple(zip(map(getver, rel), rel))
        vers_max = max(vers)[0].split('.')[0]
        mtup = tuple(( x[0].split('.')[1],x[1]) \
                     for x in vers if x[0].split('.')[0] == vers_max )
        mtup_max = max(mtup)[0]
        rel = set(x[1] for x in mtup if x[0] == mtup_max)

    def _get_depends1_dll_path(dllpath):
        # dependency DLL lives next to the main DLL
        d = _path.dirname(dllpath)
        return d + "/" + DLL_DEPENDS1_NAME + "-" + SYS_ARCH_TYPE + ".dll"

    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir+ _sep + nfile.string)
            if not rel:  # no luck, walk the dir tree
                for root, dirs, files in _walk(root):
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_InitError(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                _remove_older_versions()
            # most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        dllpath_depends1 = _get_depends1_dll_path(dllpath)
        _dll_depend1 = _CDLL(dllpath_depends1)
        _dll = _CDLL(dllpath)
        print("+ Using Module(s) ", dllpath)
        print(" ", dllpath_depends1)
        print("+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \
Engine")
        else:
            print("- Failed to Connect to Service \
Engine")
        return True  # indicate the lib was loaded (but not if connect succeeded)
    except TOSDB_Error:
        raise
    except Exception as e:
        raise TOSDB_InitError("unable to initialize library", e)
def listdir(dirpath):
    """Return full paths of the plain files in *dirpath*.

    Hidden entries, ``.pyc`` files, editor backups (``~``) and
    directories are excluded.
    """
    def _wanted(fname):
        # filter out hidden / compiled / backup entries and directories
        if fname.startswith('.') or fname.endswith('.pyc') or fname.endswith('~'):
            return False
        return not isdir(join(dirpath, fname))

    return [join(dirpath, fname) for fname in _listdir(dirpath) if _wanted(fname)]
def path_ensdf_dir():
    """Return paths of the ENSDF dataset directories under the raw-data root.

    Matches entries named like ``ensdf_YYMMDD_NNN`` (six digits, underscore,
    three digits).
    """
    # Raw string: avoids invalid-escape-sequence warnings for \d on
    # modern Python; the pattern itself is unchanged.
    regex = _re.compile(r"ensdf_[\d]{6}_[\d]{3}")
    root = path_rawdata()
    return [_path.join(root, i) for i in _listdir(root) if regex.match(i)]