def __send_probe(host):
    """Ping `host` once via self.m and record the outcome in self.__probe_results.

    Always stores {'up': bool}; when the host answered, additionally stores
    the per-packet ms samples, the packet-loss percentage and the rtt stats
    parsed out of the ping output.
    """
    ping = self.m(
        '',
        cmdd=dict(cmd=' '.join([
            self.__ping_cmd, self.__num, self.__net_if,
            self.__packetsize, host
        ])),
        critical=False,
        verbose=False)
    # returncode 0 means at least one echo reply came back
    up = ping.get('returncode') == 0
    self.__probe_results[host] = {'up': up}
    if up:
        p = ping.get('out')
        loss = _search(rxlss, p)
        ms = _findall(rxmst, p)
        rtt = _search(rxrtt, p)
        if loss:
            loss = loss.group('loss')
        # guard rtt like loss is guarded: some ping builds omit the rtt
        # summary line and rtt.groupdict() on None raised AttributeError
        self.__probe_results[host].update(dict(
            ms=ms,
            loss=loss,
            rtt=rtt.groupdict() if rtt else None))
def _pull_topics_from_header(verbose=True):
    """Parse the header file at _HEADER_PATH and return the topic enum names
    found between the two _MAGIC_TOPIC_STR marker lines.

    verbose: print the header locations where the enum begins/ends

    Raises TOSDB_SetupError if a line inside the enum cannot be parsed.
    """
    read_flag = False
    topics = []
    lineno = 0
    with open(_HEADER_PATH, 'r') as hfile:
        for hline in hfile:
            lineno += 1
            if read_flag:
                if _MAGIC_TOPIC_STR in hline:  # closing marker -> done
                    read_flag = False
                    if verbose:
                        print('', _HEADER_PATH + ":" + str(lineno + 1)
                              + ": topic enum END")
                    break
                try:
                    t = hline.split('=')[0].strip()
                except Exception as e:
                    raise TOSDB_SetupError(
                        "failed to parse topic enum line", e.args)
                # keep only names made purely of word characters
                # (raw string: '[\W]' as a plain literal is an invalid
                # escape and warns on modern Python)
                if not _search(r'[\W]', t):
                    topics.append(t)
            elif _MAGIC_TOPIC_STR in hline:  # opening marker -> start reading
                read_flag = True
                if verbose:
                    print('', _HEADER_PATH + ":" + str(lineno - 1)
                          + ": topic enum BEGIN")
    return topics
def _pick(self, sentence, n_im):
    """Translate the 'pick' clause of `sentence` into an index-token string
    and adjust the image count `n_im` accordingly.

    Supports 'all', 'all_except <list>' (inverted into complementary
    slices) and an explicit comma list of ints/slices.
    Returns (pick_token, n_im).
    """
    pick_token = _extract('pick', sentence)
    if pick_token == 'all':
        pick_token = ':'
    elif _search('all_except', pick_token) is not None:  # idiomatic None test
        # invert the exclusion list into the complementary slice token
        pick_token = pick_token.split(' ')[1]
        reverse_pick_token = ':'
        for n in pick_token.split(','):
            if _is_int(n):
                reverse_pick_token += n + ',' + str(_to_int(n) + 1) + ':'
                n_im -= 1
            elif _is_slice(n):
                n = _to_slice(n)
                reverse_pick_token += str(n.start) + ',' + str(n.stop + 1) + ':'
                n_im -= n.stop - n.start
        pick_token = reverse_pick_token
    else:
        # explicit selection: recount how many images end up picked
        n_im = 0
        for n in pick_token.split(','):
            if _is_int(n):
                n_im += 1
            elif _is_slice(n):
                n = _to_slice(n)
                n_im += n.stop - n.start
    return pick_token, n_im
def _calculate_reduced_mass(self, molecule, database=None):
    """Compute the reduced mass of a diatomic molecule string (e.g. '1H35Cl').

    Looks up both atomic masses in `database` (or the default
    AtomicDatabase.database), records masses/symbols/mass-numbers on self,
    and returns m1*m2/(m1+m2).  Exits via SystemExit on a malformed string.
    """
    err_msg = f'Error: Molecule {molecule} not in the correct format'
    match = _search(r'^(\d+)(\D+)(\d+)(\D+)\b', molecule.strip())
    if match is None:
        raise SystemExit(err_msg)
    groups = match.groups()
    if len(groups) != 4:
        raise SystemExit(err_msg)
    symbols = (groups[1].strip(), groups[3].strip())
    numbers = (int(groups[0]), int(groups[2]))
    default_db = AtomicDatabase.database
    chosen_db = database or default_db
    mass1 = self._match_atomic_mass(chosen_db, symbols[0], numbers[0])
    mass2 = self._match_atomic_mass(chosen_db, symbols[1], numbers[1])
    reduced = mass1 * mass2 / (mass1 + mass2)
    self.atomic_db.append((mass1, mass2))
    self.atomic_symbols.append((symbols[0], symbols[1]))
    self.atomic_mass_nums.append((numbers[0], numbers[1]))
    return reduced
def read_from_grid(self, grid_file):
    """Open `grid_file` with GDAL and delegate to read_from_grid_obj.

    gzip-compressed files (name ending in 'gz') are routed through GDAL's
    /vsigzip/ virtual filesystem so they are decompressed transparently.
    """
    import osgeo.gdal as gd
    # str.endswith is clearer than the old _search('gz$', ...) regex
    if grid_file.endswith('gz'):
        grid_file = '/vsigzip/' + grid_file
    source = gd.Open(grid_file)
    self.read_from_grid_obj(source)
def _pull_topics_from_header(verbose=True):
    """Parse the header file at _HEADER_PATH and return the topic enum names
    found between the two _MAGIC_TOPIC_STR marker lines.

    verbose: print the header locations where the enum begins/ends

    Raises TOSDB_SetupError if a line inside the enum cannot be parsed.
    """
    read_flag = False
    topics = []
    lineno = 0
    with open(_HEADER_PATH, 'r') as hfile:
        for hline in hfile:
            lineno += 1
            if read_flag:
                if _MAGIC_TOPIC_STR in hline:  # closing marker -> done
                    read_flag = False
                    if verbose:
                        print('', _HEADER_PATH + ":" + str(lineno + 1)
                              + ": topic enum END")
                    break
                try:
                    t = hline.split('=')[0].strip()
                except Exception as e:
                    raise TOSDB_SetupError(
                        "failed to parse topic enum line", e.args)
                # keep only names made purely of word characters
                # (raw string: '[\W]' as a plain literal is an invalid
                # escape and warns on modern Python)
                if not _search(r'[\W]', t):
                    topics.append(t)
            elif _MAGIC_TOPIC_STR in hline:  # opening marker -> start reading
                read_flag = True
                if verbose:
                    print('', _HEADER_PATH + ":" + str(lineno - 1)
                          + ": topic enum BEGIN")
    return topics
def _remove_older_versions(): nonlocal rel getver = lambda x: _search(_REGEX_VER_SFFX, x).group().strip('-') vers = tuple(zip(map(getver, rel), rel)) vers_max = max(vers)[0].split('.')[0] mtup = tuple(( x[0].split('.')[1],x[1]) \ for x in vers if x[0].split('.')[0] == vers_max ) mtup_max = max(mtup)[0] rel = set(x[1] for x in mtup if x[0] == mtup_max)
def _remove_older_versions(): nonlocal rel getver = lambda x: _search(_REGEX_VER_SFFX,x).group().strip('-') vers = tuple(zip(map(getver, rel), rel)) vers_max = max(vers)[0].split('.')[0] mtup = tuple(( x[0].split('.')[1],x[1]) \ for x in vers if x[0].split('.')[0] == vers_max ) mtup_max = max(mtup)[0] rel = set(x[1] for x in mtup if x[0] == mtup_max)
def _extract(token, sentence): tokens = ('take', 'pick', 'apply', 'name_it', '$') token_idx = {'take': 0, 'pick': 1, 'apply': 2, 'name_it': 3} left_token = tokens[token_idx[token]] right_token = tokens[token_idx[token] + 1] result = _search(left_token + '(.+?)' + right_token, sentence) result = _sub('take|pick|apply|name_it', '', result.group()) result = result.strip() return result
def _get_current_branch():
    """Returns name of current git branch. Creates new branch if not
    currently on any branch."""
    out = local('git branch')
    # the current branch line starts with '* '; it is either a real name
    # or the detached-HEAD placeholder '(no branch)'
    current = _search(r'\* (?P<branch>\(no branch\)|\S+)', out).group(1)
    if current != '(no branch)':
        return current
    # detached HEAD: list remote branches, ask for a name, then create it
    local('git branch -r', capture=False)
    current = prompt('Please specify new branch name:',
                     validate=_check_git_branch_name)
    local('git checkout -b %s' % current)
    return current
def __send_probe(host):
    """Ping `host` once via self.m and record the outcome in self.__probe_results.

    Always stores {'up': bool}; for a reachable host it additionally stores
    the ms samples, loss percentage and rtt statistics parsed from the
    ping output.
    """
    ping = self.m('',
                  cmdd=dict(cmd=' '.join([
                      self.__ping_cmd, self.__num, self.__net_if,
                      self.__packetsize, host
                  ])),
                  critical=False,
                  verbose=False)
    # returncode 0 means at least one echo reply came back
    up = ping.get('returncode') == 0
    self.__probe_results[host] = {'up': up}
    if up:
        p = ping.get('out')
        loss = _search(rxlss, p)
        ms = _findall(rxmst, p)
        rtt = _search(rxrtt, p)
        if loss:
            loss = loss.group('loss')
        # guard rtt like loss: rtt.groupdict() on a failed match raised
        # AttributeError when the rtt summary line was missing
        self.__probe_results[host].update(
            dict(ms=ms, loss=loss, rtt=rtt.groupdict() if rtt else None))
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    bypass_check: skip the interactive warning when using the defaults

    Returns True once the library is loaded; raises TOSDB_CLibError on
    any failure during discovery/loading.
    """
    global _dll
    rel = set()
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return
    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)  # regex match function
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir + _sep + nfile.string)
            if not rel:
                for root, dirs, files in _walk(root):  # no luck, walk the dir tree
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_Error(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                # fixed: escape the dot (unescaped '.' matched ANY char,
                # e.g. '-1x2-') and use a raw string for the regex
                ver = _compile(r'-[\d]{1,2}\.[\d]{1,2}-')
                vers = tuple(
                    zip(map(lambda x: _search(ver, x).group().strip('-'), rel),
                        rel))
                vers_max = max(vers)[0].split('.')[0]
                mtup = tuple((x[0].split('.')[1], x[1]) for x in vers
                             if x[0].split('.')[0] == vers_max)
                mtup_max = max(mtup)[0]
                rel = set(x[1] for x in mtup if x[0] == mtup_max)
            # find the most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        _dll = _WinDLL(dllpath)
        print("+ Using Module ", dllpath)
        print("+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \ Engine")
        else:
            print("- Failed to Connect to Service \ Engine")
        return True  # indicate the lib was loaded
    except Exception as e:
        raise TOSDB_CLibError("unable to initialize library", e)
def _apply(self, sentence):
    """Resolve the 'apply' clause of `sentence` into the list of augment
    names to run.

    Supports 'all', 'all_except a,b' and an explicit comma list.
    Returns (applied_augments, count).
    """
    apply_token = _extract('apply', sentence)
    # public augment names only (skip underscore-prefixed attributes)
    augment_names = [
        x for x in self.augments.__dict__.keys() if x[0] != '_'
    ]
    if apply_token == 'all':
        applied_augments = augment_names
    elif _search('all_except', apply_token) is not None:  # idiomatic None test
        not_applied_augments = apply_token.split(' ')[1].split(',')
        applied_augments = [
            x for x in augment_names if x not in not_applied_augments
        ]
    else:
        applied_augments = apply_token.split(',')
    return applied_augments, len(applied_augments)
def init(dllpath=None, root="C:\\", bypass_check=False):
    """ Initialize the underlying tos-databridge DLL

    dllpath: string of the exact path of the DLL
    root: string of the directory to start walking/searching to find the DLL
    bypass_check: skip the interactive warning when using the defaults

    Returns True once the library is loaded; raises TOSDB_CLibError on
    any failure during discovery/loading.
    """
    global _dll
    rel = set()
    if not bypass_check and dllpath is None and root == "C:\\":
        if abort_init_after_warn():
            return
    try:
        if dllpath is None:
            matcher = _partial(_match, _REGEX_DLL_NAME)  # regex match function
            for nfile in map(matcher, _listdir(_curdir)):
                if nfile:  # try the current dir first
                    rel.add(_curdir + _sep + nfile.string)
            if not rel:
                for root, dirs, files in _walk(root):  # no luck, walk the dir tree
                    for file in map(matcher, files):
                        if file:
                            rel.add(root + _sep + file.string)
                if not rel:  # if still nothing throw
                    raise TOSDB_Error(" could not locate DLL")
            if len(rel) > 1:  # only use the most recent version(s)
                # fixed: escape the dot (unescaped '.' matched ANY char,
                # e.g. '-1x2-') and use a raw string for the regex
                ver = _compile(r'-[\d]{1,2}\.[\d]{1,2}-')
                vers = tuple(
                    zip(map(lambda x: _search(ver, x).group().strip('-'), rel),
                        rel))
                vers_max = max(vers)[0].split('.')[0]
                mtup = tuple((x[0].split('.')[1], x[1]) for x in vers
                             if x[0].split('.')[0] == vers_max)
                mtup_max = max(mtup)[0]
                rel = set(x[1] for x in mtup if x[0] == mtup_max)
            # find the most recently updated
            d = dict(zip(map(lambda x: _stat(x).st_mtime, rel), rel))
            rec = max(d)
            dllpath = d[rec]
        _dll = _WinDLL(dllpath)
        print("+ Using Module ", dllpath)
        print("+ Last Update ", _asctime(_localtime(_stat(dllpath).st_mtime)))
        if connect():
            print("+ Succesfully Connected to Service \ Engine")
        else:
            print("- Failed to Connect to Service \ Engine")
        return True  # indicate the lib was loaded
    except Exception as e:
        raise TOSDB_CLibError("unable to initialize library", e)
def _get_max_supported_compiler_dialect(self): """Check the maximally supported C++ dialect. Returns ------- str Flag string of the latest supported dialect """ _, report = self._run_clang_command( [str(self.cpp_compiler), "-std=dummpy", "-x", "c++", "-E", "-"] ) for line in reversed(report.splitlines()): if "draft" in line or "gnu" in line: continue return "-std=" + _search(r"'(c\+\+..)'", line).group(1) raise RuntimeError("Could not find a supported C++ standard.")
def get_exact_args(ctx: _Context, additional_parameters: int = 0) -> str:
    """Return the raw text of ctx.message.content that follows the invoked
    command name plus `additional_parameters` space-separated arguments.

    Returns '' when the message doesn't match the command pattern or when
    anything unexpected goes wrong while building/matching the pattern.
    """
    try:
        if ctx.command.full_parent_name:
            full_parent_command = f'{ctx.prefix}{ctx.command.full_parent_name} '
        else:
            full_parent_command = f'{ctx.prefix}'
        command_names = [ctx.command.name]
        if ctx.command.aliases:
            command_names.extend(ctx.command.aliases)
        # escape everything user-defined before embedding it in the regex
        command_names = [
            _escape(command_name) for command_name in command_names
        ]
        rx_command_names = '|'.join(command_names)
        rx_command = (f'{_escape(full_parent_command)}({rx_command_names})'
                      f' (.*? ){{{additional_parameters}}}')
        rx_match = _search(rx_command, ctx.message.content)
        if rx_match is not None:
            return str(ctx.message.content[len(rx_match.group(0)):])
        return ''
    except Exception:  # was bare 'except:' — don't swallow SystemExit/KeyboardInterrupt
        return ''
from ..Input_Data_Formatter import Input_Data_Formatter as _IDF
import pandas as _pd
from re import search as _search

# directory containing this module: everything up to and including the
# last '/' of __file__ (assumes '/'-separated paths — TODO confirm on Windows)
_curr_dir = _search('.*/', __file__).group()

# build the default augmentation batch from the bundled images + metadata
_idf = _IDF()
_idf.add_sources(_curr_dir + 'default_ims/')
meta = _pd.read_csv(_curr_dir + 'default_metadata.csv')
_idf.add_metadata(meta)
default_aug_original_im_batch = _idf.get_results()
def _is_int(input_str): if _search('^[-0-9]+$', input_str) == None: return False else: return True
def _is_slice(input_str): if _search('^[-0-9]*:[-0-9]*$', input_str) == None: return False else: return True