def exec_command(capfd, command, get_out=True, get_err=False):
    """Run a CLI command and capture its output via the capfd fixture.

    :param capfd: pytest capture fixture used to read stdout/stderr
    :param command: the full command as one space-separated string
    :param get_out: pretty-print and return the captured stdout lines
    :param get_err: pretty-print and return the captured stderr lines
    :return: (out, err), out, err, or None depending on the flags
    """
    arguments = ArgParser(args=command.split(" "))

    try:
        Application(arguments)
    # NOTE: docker-compose calls SystemExit at the end of the command...
    except SystemExit:
        log.info('completed')

    captured_out, captured_err = capfd.readouterr()
    out = captured_out.replace('\r', '').split("\n")
    err = captured_err.replace('\r', '').split("\n")

    if get_out:
        pp(out)
    if get_err:
        pp(err)

    if get_out and get_err:
        return out, err
    if get_out:
        return out
    if get_err:
        return err
    return None
def handle_others(args, category, usages):
    """Parse the usage cases selected by the numeric '<n>' arguments.

    :param args: docopt-style dict; reads '<n>' and '--show'
    :param category: category name, used only in error messages
    :param usages: list of usage strings indexed by the '<n>' values
    Exits with status 1 when any '<n>' argument is not numeric.
    """
    non_numeric = []
    if args['<n>'] == ['all']:
        numeric = list(range(len(usages)))
    else:
        numeric = []
        for arg in args['<n>']:
            try:
                numeric.append(int(arg))
            # FIX: was a bare 'except:' -- narrowed to the conversion error
            # so unrelated failures (e.g. KeyboardInterrupt) are not hidden.
            except ValueError:
                non_numeric.append(arg)

    if len(non_numeric) > 0:
        print('')
        print(
            f"Error: '{category}' only takes numeric arguments, not : {repr(non_numeric)}"
        )
        print('')
        print(__doc__)
        sys.exit(1)

    parser = DocOptParserPEG()

    for n in numeric:
        if n >= len(usages):
            print(f"error: case number {n} too large.")
            continue
        usage = usages[n]
        if args['--show']:
            print(f"input = '{usage}'")
        parse_tree = parser.parse(usage)  # , print_raw=True)
        if args['--show']:
            pp(parse_tree)
            print('')
def perform_pass(cfg, _pass, fcn, *args):
    """Run one parser pass, optionally displaying and persisting its result.

    :param cfg: config mapping; reads '--<pass-name>', '--post', '--write'
                and the attributes cfg.doc / cfg.dst (a Path)
    :param _pass: numeric pass index, used in the output file name 'p<N>'
    :param fcn: callable that performs the pass
    :param args: arguments forwarded to fcn
    :return: the parse tree produced by fcn
    """
    name = PASSES[_pass]
    parse_tree = fcn(*args)
    if cfg[f'--{name}']:
        pp(parse_tree)
    if cfg['--post']:
        print(f"\ninput = '{cfg.doc}'\n")
    if cfg['--write']:
        file_name = cfg.dst / f"p{_pass}"
        with open(file_name, 'w') as f:
            pp(parse_tree, stream=f)
        # Maintain a '<name>' symlink pointing at the newest 'p<N>' dump.
        target = Path(file_name).name
        symlink = cfg.dst / name
        # BUG FIX: 'if symlink.exists:' lacked the call parentheses, so the
        # bound method was always truthy, os.unlink always ran, and the
        # resulting FileNotFoundError was swallowed by a bare 'except'.
        try:
            if symlink.exists():
                os.unlink(symlink)
        except OSError:
            # best-effort removal; a stale link is replaced below anyway
            pass
        os.symlink(target, symlink)
    return parse_tree
def write_scratch(**kwargs):
    """Dump named values into the scratch directories, plain and colorized.

    Each keyword becomes a file: a plain rendering under
    write_scratch.scratch and a colorized one under write_scratch.color
    (both are Path attributes expected to be set by the caller).
    The special keyword _clean=True removes existing files first.
    """
    scratch = write_scratch.scratch
    if not (scratch.exists() and scratch.is_dir()):
        scratch.mkdir(exist_ok=True)
    color = write_scratch.color
    if not (color.exists() and color.is_dir()):
        color.mkdir(exist_ok=True)

    # FIX: pop '_clean' unconditionally so the flag can never leak into the
    # write loop below and be treated as a value to dump to a file named
    # '_clean' (previously it was only removed on the True path).
    if kwargs.pop('_clean', False) is True:
        for file in iglob(str(scratch / '*')):
            if not Path(file).is_dir():
                os.unlink(file)
        for file in iglob(str(color / '*')):
            if not Path(file).is_dir():
                os.unlink(file)

    for name in kwargs:
        with open(scratch / name, 'w') as f:
            pp_plain(kwargs[name], stream=f)
        with open(color / name, 'w') as f:
            pp(kwargs[name], stream=f)
def test_004__n1_eq_t1__with_eq(self):
    # Exercise NonTerminal structural equality after
    # NonTerminal_enable_structural_eq() has been installed.
    if False:
        # debug aid, disabled: pretty-print the two fixtures under test
        from prettyprinter import cpprint as pp
        import p
        print(f": test_004__n1_eq_t1__with_eq")
        print(f": n1 :")
        pp(self.n1)
        print(f": t1 :")
        pp(self.t1)
    NonTerminal_enable_structural_eq()
    # A NonTerminal must never compare equal to a bare Terminal.
    with self.assertRaises(AssertionError) as context:
        assert self.n1 == self.t1  # now it fails
    # NOTE(review): assertTrue on a non-empty string literal always passes,
    # so this line can never fail -- it only documents intent.
    self.assertTrue('Internal error, AssertionError not raised !!!')
    assert self.n1 != self.t1
    # Equality with a deep copy of itself must hold.
    assert self.n1 == deepcopy(self.n1)
    # A structurally identical tree built from the same rule: equal.
    t2 = Terminal(self.dot, 0, 'one')
    n2 = NonTerminal(self.dot, [t2])
    assert self.n1 == n2
    t2 = Terminal(self.dot, 0, 'one')
    n2 = NonTerminal(self.dot, [t2])
    assert self.n1 == n2
    # Same shape but a different rule ('bang'): not equal.
    bang = StrMatch('!', rule_name='bang')
    t3 = Terminal(bang, 0, 'one')
    n3 = NonTerminal(bang, [t3])
    assert self.n1 != n3
def perform_pass(self, _pass, name, fcn, *args, **kwargs):
    """Execute one pipeline pass and return its result.

    When self.debug is not False it is treated as a numeric threshold:
    passes whose number is >= the threshold announce themselves before
    running and pretty-print their output afterwards.
    """
    if self.debug is not False and _pass >= self.debug:
        print(f": pass {_pass} : {name}")

    result = fcn(*args, **kwargs)

    if self.debug is not False and _pass >= self.debug:
        pp(result)
        print('')

    return result
def _visit_option_help(self, node, children):
    """Collapse option-help children into one newline-joined Terminal.

    Trailing list children are flattened into the main child list; a
    trailing Terminal contributes its raw string value instead of itself.
    """
    print(f": visit_option_help : {node.name}")
    pp(children)
    # Splice nested trailing lists up into the main child list.
    while isinstance(children[-1], list):
        tail = children[-1]
        children = children[:-1]
        children.extend(tail)
    last = children[-1]
    if isinstance(last, Terminal):
        children[-1] = last.value
    return Terminal(node.rule, 0, '\n'.join(children))
def validate(self, action_block: dict):
    """
    Validates a given task_create block

    High-level schema validation of the whole block first, then
    per-source validation of each entry in 'labels'.

    :param action_block: raw action dict; must carry 'action', 'name'
                         and 'labels' keys
    :return: the validated (and possibly coerced) action block
    """
    self.log.debug("Validating...{}".format(action_block))
    self.action = action_block['action']
    self.name = action_block['name']
    # Voluptuous can indicate 'where' the error was, but it relies on the caller (us) passing that info in
    # So we generate a simple location 'slug' based on the action and the user given name
    ##
    # Use the action/name to generate a location 'root'
    self.location = 'job://{1} (type:{0})'.format(self.action, self.name)
    # We start with making sure that we have the high level keys required for project_* actions
    # If there are any extra high-level keys, make noise
    self.log.debug("Checking high level keys...")
    # PREVENT_EXTRA turns any key not declared in base_schema into an error.
    self._schema = self._schema.extend(base_schema, extra=PREVENT_EXTRA)
    # Do high level validation and store the validated (so far...) action block
    action_block = SchemaCheck(action_block, self._schema, self.location).result()
    # If nothing blew up, then the high-level schema is valid. We now need to validate the filters
    ##
    self.log.debug(
        "...Valid! Now validating {} filters from '{}'...".format(
            len(action_block['labels']), self.action))
    _idx = 0
    _valid_sources = []
    for obj in action_block['labels']:
        # NOTE(review): _loc is built but never used below -- confirm it was
        # meant to be passed to the filter validation for error reporting.
        _loc = "{}.from#{}".format(self.location, _idx)
        # pp(obj)
        # exit()
        # Each obj will be a dict w/ only one key: from. Unwrap the from to get either filters or "ID"
        if 'id' in obj['from']:
            # Explicit ids are accepted as-is -- no filter validation.
            self.log.debug("Assuming that id:{} is valid...".format(
                obj['from']['id']))
            _valid_sources.append(obj['from'])
        else:
            self.log.debug(
                "Validating filter block on from#{}".format(_idx))
            _valid_sources.extend(self._validate_filters([obj['from']]))
        _idx += 1
    # Replace the raw label sources with the validated/coerced ones.
    action_block['labels'] = _valid_sources
    # We've validated/coerced everything, return :)
    pp(action_block)
    return action_block
class Test_Case(unittest.TestCase):
    # Fixtures and shared drivers for wrap()/unwrap() behavior on
    # Terminal / NonTerminal parse-tree nodes.

    def setUp(self):
        # A trivial '.' rule shared by all terminal fixtures.
        self.dot = StrMatch('.', rule_name='self.dot')
        self.s1 = 's1 : string'
        self.s2 = 's2 : string'
        self.s3 = 's3 : string'
        # rule, position, value
        self.t1 = Terminal(self.dot, 0, 'one')
        self.t2 = Terminal(self.dot, 0, 'two')
        self.t3 = Terminal(self.dot, 0, 'three')
        assert not isinstance(self.t1, list)
        assert not isinstance(self.t2, list)
        assert not isinstance(self.t3, list)
        # rule, value : a list where the first element is a node
        # self.n1 = NonTerminal(self.dot, self.t1)
        # TypeError: 'Terminal' object is not subscriptable
        self.n2 = NonTerminal(self.dot, [self.t1])
        self.n3 = NonTerminal(self.dot, self.n2)
        self.n4 = NonTerminal(self.dot, [self.n2])
        # NonTerminal behaves as (subclasses) a list.
        assert isinstance(self.n2, list)
        assert isinstance(self.n3, list)
        assert isinstance(self.n4, list)
        # Values exercised by the wrap/unwrap cases below.
        self.v0 = self.n2
        self.v1 = [self.s1, self.s2]
        self.v2 = self.t1
        self.v3s = self.s3
        self.v3t = (self.s1, self.s2)

    def test_000_wrap_unneccessary(self):
        # Wrapping a value that needs no wrapping must be a no-op.
        w = wrap(deepcopy(self.v0))
        assert not hasattr(w, '_please_unwrap')
        assert w == self.v0

    #------------------------------------------------------------------------------

    def fcn_00x_wrap_and_unwrap(self, case, value):
        # Shared driver: wrap() must tag a *copy* with the expected
        # '_please_unwrap' case and unwrap() must restore the original.
        w = wrap(deepcopy(value))
        assert hasattr(w, '_please_unwrap')
        assert w._please_unwrap == case
        assert w is not value
        if False:
            # debug aid, disabled
            pp(value)
            pp(unwrap(deepcopy(w)))
        assert unwrap(w) == value
def unwrap_extend(dst, wrapped):
    """appends unwrapped element(s) to the end list 'dst'"""
    debug = False

    if debug:
        print(f"\n[ unwrap_extend : enter")
        print("[wrapped]")
        pp(wrapped)

    value = unwrap(wrapped)

    if debug:
        print("[value]")
        pp(value)
        print("[dst] : before")
        pp(dst)

    # Sequences are splatted into dst; scalars are appended whole.
    append_or_extend = dst.extend if is_unpackable_sequence(value) else dst.append
    append_or_extend(value)

    if debug:
        print("[dst] : after")
        pp(dst)
    return dst
def NonTerminal_eq_structural(self, other, verbose=False):
    """Structural equality for NonTerminal trees.

    Two nodes are equal when 'other' is also a NonTerminal with the same
    rule_name, the same number of children, and pairwise-equal children
    (NonTerminal children are compared recursively).

    :param self: the reference NonTerminal
    :param other: candidate node to compare against
    :param verbose: when True, print the first difference found
    :return: True when structurally equal, False otherwise
    """
    # (removed an unreachable 'if False:' debug block that re-imported
    #  prettyprinter and printed both operands)
    if not isinstance(other, NonTerminal):
        if verbose:
            print(f"eq issue: '{self.name}' vs '{other.name}' : "
                  f"Wrong type, other is {str(type(other))}")
        return False
    if self.rule_name != other.rule_name:
        if verbose:
            print(
                f"eq issue: '{self.name}' vs '{other.name}' : rules differ : "
                f"{self.rule_name} != {other.rule_name}")
        return False
    if len(self) != len(other):
        if verbose:
            print(
                f"eq issue: '{self.name}' vs '{other.name}' : lengths differ : "
                f"{len(self)} vs {len(other)}")
        return False
    for idx in range(len(self)):
        if isinstance(self[idx], NonTerminal):
            # Recurse into subtree children.
            if not NonTerminal_eq_structural(
                    self[idx], other[idx], verbose=verbose):
                return False
        elif self[idx] != other[idx]:
            if verbose:
                print(f"eq issue: '{self.name}' vs '{other.name}' : "
                      f"terminal children differ : "
                      f"{pp_str(self[idx])} vs {pp_str(other[idx])}")
            return False
    return True
def handle_files(args):
    # Parse each requested usage file; optionally echo the input,
    # pretty-print the parse tree, and/or write it next to its source.
    if len(args['<doc>']) > 0:
        fnames = args['<doc>']
    elif args['<p-name>'] == ['all']:
        fnames = sx.files()
    else:
        fnames = []
        for fname in args['<p-name>']:
            # SECURITY NOTE(review): eval() of sx.fname_pattern executes
            # arbitrary code; acceptable only if the pattern is a trusted,
            # local constant -- confirm it can never come from user input.
            fnames.append(eval(sx.fname_pattern))
    parser = DocOptParserPEG()
    for file_path in fnames:
        print(f"file: {file_path}")
        if not Path(file_path).exists():
            print('')
            print(f"Error: No such file '{file_path}'")
            print('')
            print(__doc__)
            sys.exit(1)
        usage = slurp(file_path)
        if args['--show']:
            print(f"input = '{usage}'")
        parse_tree = parser.parse(usage)  # , print_raw=True)
        if args['--write']:
            # The parse output lives beside its input: doc.txt -> parse.txt
            parse_path = file_path.replace('/doc.txt', '/parse.txt')
            print(f"write: {parse_path}")
            with open(parse_path, 'w') as outf:
                pp(parse_tree, stream=outf)
        if args['--show']:
            pp(parse_tree)
            print('')
    sys.exit(0)
def launch(args: argparse.Namespace):
    """
    :param args: the argparse args
    :return:
    """
    # Frequently-used argparse values, pulled out for readability.
    component = args.component
    object_id = args.id
    as_json = args.json

    # Parse the config file and build an API client from its token.
    client_config = get_todoist_file(args.config_file)
    todo_client = todoist.TodoistAPI(client_config['todoist']['api']['token'])

    if args.do_sync:
        print("doing sync...")
        pp(todo_client.sync())

    # Otherwise, get the manager for that component
    manager = getattr(todo_client, component)

    if args.id is None:
        if as_json:
            # Collect all the JSON friendly bits of all the objects
            print(json.dumps([_get_json_from_obj(o) for o in manager.all()],
                             sort_keys=True, indent=2))
        else:
            # Keep non-JSON chatter off the console when JSON was requested
            print("no --id set so dumping ALL for component:{}".format(component))
            pp(manager.all())
    else:
        obj = manager.get_by_id(object_id)
        if as_json:
            print(json.dumps(_get_json_from_obj(obj), sort_keys=True, indent=2))
        else:
            # Keep non-JSON chatter off the console when JSON was requested
            print("Attempting to get {}://{}...".format(component, object_id))
            pp(obj)
# NOTE(review): this is a fragment -- the enclosing 'def' line is not
# visible in this chunk.  From the debug label it appears to be the body
# of a 'fcn_20x_unwrap_extend' test helper receiving 'case' and 'value'
# and using the self.t1 fixture.
debug = False
wrapped = wrap(deepcopy(value))
# Keep a copy for debug display; unwrap_extend may consume 'wrapped'.
_wrapped = deepcopy(wrapped)
# wrap() must have tagged the value with the expected unwrap case.
assert wrapped._please_unwrap == case
dst = [self.t1]
if debug:
    print(f": fcn_20x_unwrap_extend : case {case} : value = {value}")
    print(f" value = {value}")
    print(f" wrapped = {_wrapped}")
    print(f" dst prior : ")
    pp(dst)
# Build the expected result by hand: sequences extend, scalars append.
expect = deepcopy(dst)
if is_unpackable_sequence(value):
    expect.extend(deepcopy(value))
else:
    expect.append(deepcopy(value))
result = unwrap_extend(dst, wrapped)
if debug:
    print(f": produced :")
    pp(dst)
    print(": expected :")
    pp(expect)
def print_parsed(parsed):
    """Display a parse result: tree form for parse-tree nodes, pretty-print otherwise."""
    if not isinstance(parsed, ParseTreeNode):
        pp(parsed)
    else:
        tprint(parsed.tree_str())
def pp_str(obj):
    """Return the pretty-printed representation of obj as a string."""
    buffer = io.StringIO()
    # pp() writes to stdout, so divert stdout into the buffer while it runs.
    with redirect_stdout(buffer):
        pp(obj)
    return buffer.getvalue()
# X = pd.get_dummies(df) # In the case of categorical variables: use one-hot encoding scaler = StandardScaler() X = scaler.fit_transform(df) y = np.ravel(scaler.fit_transform(y)) X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size=.20, random_state=2) pp({ 'Type': 'Regression', 'Model': 'MLPRegression', 'Predictors': df.columns.values.tolist(), 'Target': target, 'Size': df.shape[0], 'Training%': 80 }) hyperparameters = [{ 'hidden_layer_sizes': [[40, 40], [30, 30, 30], [20, 20, 20, 20]], 'activation': ['relu'], 'solver': ['adam', 'lbfgs'], 'alpha': [0.0001], 'batch_size': ['auto'], 'learning_rate': ['constant'], 'learning_rate_init': [0.0001], 'max_iter': [5000], 'random_state': np.arange(100)
contents=list(value),
)
# NOTE(review): the two lines above are the tail of a pretty-printer
# registration helper whose start is not visible in this chunk.


# @register_pretty(Unwrap)
def _pretty_Unwrap(value, ctx):
    # Pretty-printer hook: render Unwrap instances as a constructor call.
    return pretty_call(
        ctx,
        Unwrap,
        value=value.value,
    )


import p

#------------------------------------------------------------------------------

# Ad-hoc smoke test: build a tiny two-level parse tree and pretty-print it.
x = Terminal(StrMatch('.', 'x'), 0, 'value:x')
y = Terminal(StrMatch('.', 'y'), 0, 'value:y')
v = NonTerminal(StrMatch('.', 'x_y'), [x, y])
t = NonTerminal(StrMatch('.', 'v_v'), [v, v])
pp(v)
pp(t, indent=2)

#------------------------------------------------------------------------------
def visit(self, node, depth=0, path=None):
    """Depth-first visitor: process children first, then dispatch to a
    visit_<rule_name> method when one exists.

    :param node: parse-tree node (NonTerminal / Terminal /
                 SemanticActionResults); any other type passes through
    :param depth: recursion depth, used only for indented debug output
    :param path: list of ancestor rule names, used for diagnostics
    :return: the (possibly reformed) node
    """
    # FIX: 'path=[]' was a mutable default argument; use None instead.
    if path is None:
        path = []
    i = ' ' * 3 * depth
    dprint('')
    dprint(f"{i} : visit : {node.name} -- START")

    # Unknown node types pass through untouched.
    if not isinstance(node, (NonTerminal, Terminal, SemanticActionResults)):
        dprint(f"{i} ** Invalid type '{str(type(node))}'")
        dprint(f"{i} => {_res(node)}")
        dprint(f"{i} : visit : {node.name} -- DONE")
        return node

    #----------------------------------------------------------------------
    dprint('')
    dprint(f"{i} Process Children -- START")
    dprint(f"{i} # essentially, thus :")
    dprint(f"{i} children = []")
    dprint(f"{i} for child in node :")
    dprint(f"{i} response = visit(child)")
    dprint(f"{i} if response is not None :")
    dprint(f"{i} children.append(response) # generally reformed")
    dprint('')

    children = []
    if isinstance(node, (NonTerminal, SemanticActionResults)):
        # these object types are lists
        for child in node:  # NonTerminal IS the list
            if hasattr(child, 'name'):
                dprint(f"{i} - '{child.name}'")
            elif hasattr(child, '__name__'):
                dprint(f"{i} - '{child.__name__}'")
            else:
                dprint(f"{i} - id = {id(child)} : {str(type(child))}")
            response = self.visit(child, depth=1 + depth,
                                  path=path + [node.name])
            # FIX: this debug line previously read child.name unguarded,
            # which raises AttributeError for nameless children (the guard
            # above shows such children exist).
            if hasattr(child, 'name'):
                dprint(f"{i} - '{child.name}'")
            dprint(f"{i} : response = {_res(response)}")
            if response is not None:
                value = unwrap(response)
                dprint(f"{i} : unwrapped = {_res(value)}")
                children.append(value)
        dprint('')
        dprint(f"[ children : final ]\n{_res(children)}")
        dprint(f"{i} Process Children -- Done\n")
    dprint('')

    #----------------------------------------------------------------------
    # In extreme circumstances, rule_name may be list.  Note, that such
    # probably means unwrapping has gone too far and your node is merely
    # an empty list.
    rule_name = str(node.rule_name)
    method = f"visit_{rule_name}"
    if hasattr(self, method):
        dprint(f"\n*** VISIT_{node.name} -- START")
        out = getattr(self, method)(node, children)
        dprint(f" => {_res(out)}\n")
        # FIX: missing 'f' prefix -- '{node.name}' was printed literally.
        dprint(f"*** VISIT_{node.name} -- DONE\n")
        dprint('')
        return out

    if isinstance(node, Terminal):
        dprint(f"{i} Terminal without a visit method. Return unchanged.")
        dprint(f"{i} => {_res(node)}")
        dprint(f"{i} : visit : {node.name} -- DONE")
        dprint('')
        return node

    if len(children) > 0:
        # A lone nested list is divulged (flattened one level).
        if type(children) is list and len(children) == 1:
            if type(children[0]) is list:
                dprint(
                    f": visit : {node.name} : list w/ single child, divulge"
                )
                children = children[0]
        if isinstance(children, (list, NonTerminal)):
            # (removed unused local 'which')
            if isinstance(children[0], ParseTreeNode):
                dprint(
                    f": visit : {node.name} : list w/ children => NonTerminal"
                )
                out = NonTerminal(node.rule, children)
                verb = 'is'
                #
                # *NO* : it strips rule info which we need.
                # was :
                #   out = NonTerminal(node.rule, wrap(None))
                #   del out[0]
                #   out.extend(children)
                #
            else:
                out = NonTerminal(node.rule, wrap(children))
                verb = 'is not'
            dprint('')
            dprint(f"{i} : list or NonTerminal and [0] {verb} a node")
            dprint(f"{i} => {_res(out)}")
            dprint(f"{i} : visit : {node.name} -- DONE")
            dprint('')
            return out
        internal_error(
            context, node, "Has children but neither a list nor "
            "ParseTreeNode. Nothing left to try. ")
        raise ValueError(f"Visiting {node.name}, nothing left to try.")

    # - node can't be a terminal node (as they bailed earlier).
    # - node can't be the result of a visit_* method (bailed earlier).
    #
    # - Academically, we should crash.  Let's continue in Battle Mode
    #   and wrap it in a Terminal -- complaining first.
    with redirect_stdout(sys.stderr):
        print(
            f"INTERNAL ERROR : Unhandled configuration for children of a NonTerminal"
        )
        print('')
        print(f" path : ", end='')
        _path = path + [node.name]
        prefix = ''
        for idx in range(len(_path)):
            i = ' ' * 3 * idx
            print(f"{prefix}{i}{_path[idx]}")
            prefix = ' ' * 10
        print('')
        print(f" node = {node.name} : depth {depth}")
        seq = isinstance(children, Sequence)
        seq_text = ': is a sequence' if seq else ''
        print(f" children type = {str(type(children))} {seq_text}")
        if seq:
            print(f" children[0] type = {str(type(children[0]))}")
        print(": children =")
        pp(children)
        print(f"Please report this scenario to the maintainer.")
    out = Terminal(node.rule, 0, wrap(children))
    dprint(f": visit : {node.name} => {_res(out)}")
    dprint('')
    return out
def pprint(self):
    """Pretty-print this object via the third-party prettyprinter package."""
    # Imported lazily so the dependency is only required when called.
    from prettyprinter import pprint as _pretty
    _pretty(self)
def check_versions(skip_angular=False):
    # Collect pinned dependency versions from compose files, Dockerfiles,
    # requirements.txt files, package.json and the two setup.py projects,
    # filter out the unversioned/ignored ones, run check_updates() on each
    # survivor, and pretty-print the filtered result.
    #
    # :param skip_angular: when True, skip npm and package.json scanning
    dependencies = {}
    # 1) compose services: record each non-rapydo image.
    backend = load_yaml_file("../rapydo-confs/confs/backend.yml")
    services = backend.get("services", {})
    for service in services:
        definition = services.get(service)
        image = definition.get('image')
        if image.startswith("rapydo/"):
            continue
        if service not in dependencies:
            dependencies[service] = {}
        dependencies[service]['compose'] = image
    # 2) Dockerfiles: base image, npm/pip pins, and the ACME version.
    for d in glob("../build-templates/*/Dockerfile"):
        if 'not_used_anymore_' in d:
            continue
        with open(d) as f:
            # Service name is the Dockerfile's parent directory.
            service = d.replace("../build-templates/", "")
            service = service.replace("/Dockerfile", "")
            if service not in dependencies:
                dependencies[service] = {}
            for line in f:
                if line.startswith("#"):
                    continue
                if 'FROM' in line:
                    line = line.replace("FROM", "").strip()
                    dependencies[service]['Dockerfile'] = line
                elif not skip_angular and 'RUN npm install' in line:
                    # NOTE(review): comment lines were already skipped at the
                    # top of the loop, so this check is redundant.
                    if line.startswith("#"):
                        continue
                    # '@' marks a version-pinned npm package token.
                    tokens = line.split(" ")
                    for t in tokens:
                        t = t.strip()
                        if '@' in t:
                            if service not in dependencies:
                                dependencies[service] = {}
                            if "npm" not in dependencies[service]:
                                dependencies[service]["npm"] = []
                            dependencies[service]["npm"].append(t)
                elif 'RUN pip install' in line or 'RUN pip3 install' in line:
                    # NOTE(review): redundant comment check, as above.
                    if line.startswith("#"):
                        continue
                    # '==' marks a version-pinned pip package token.
                    tokens = line.split(" ")
                    for t in tokens:
                        t = t.strip()
                        if '==' in t:
                            if service not in dependencies:
                                dependencies[service] = {}
                            if "pip" not in dependencies[service]:
                                dependencies[service]["pip"] = []
                            dependencies[service]["pip"].append(t)
                elif 'ENV ACMEV' in line:
                    line = line.replace("ENV ACMEV", "").strip()
                    line = line.replace("\"", "").strip()
                    dependencies[service]['ACME'] = "ACME:{}".format(line)
    # 3) requirements.txt files: every line is taken as a pip dependency.
    for d in glob("../build-templates/*/requirements.txt"):
        with open(d) as f:
            service = d.replace("../build-templates/", "")
            service = service.replace("/requirements.txt", "")
            for line in f:
                line = line.strip()
                if service not in dependencies:
                    dependencies[service] = {}
                if "pip" not in dependencies[service]:
                    dependencies[service]["pip"] = []
                dependencies[service]["pip"].append(line)
    # 4) Angular package.json: dependencies + devDependencies as name:version.
    if not skip_angular:
        package_json = None
        if os.path.exists('../frontend/src/package.json'):
            package_json = '../frontend/src/package.json'
        elif os.path.exists('../rapydo-angular/src/package.json'):
            package_json = '../rapydo-angular/src/package.json'
        if package_json is not None:
            with open(package_json) as f:
                package = json.load(f)
                package_dependencies = package.get('dependencies', {})
                package_devDependencies = package.get('devDependencies', {})
                if 'angular' not in dependencies:
                    dependencies['angular'] = {}
                if "package.json" not in dependencies['angular']:
                    dependencies['angular']["package.json"] = []
                for dep in package_dependencies:
                    ver = package_dependencies[dep]
                    lib = "{}:{}".format(dep, ver)
                    dependencies['angular']["package.json"].append(lib)
                for dep in package_devDependencies:
                    ver = package_devDependencies[dep]
                    lib = "{}:{}".format(dep, ver)
                    dependencies['angular']["package.json"].append(lib)
    # 5) setup.py projects: install_requires lists (stored as plain lists).
    # NOTE(review): distutils is deprecated since Python 3.10 -- consider
    # migrating to setuptools when upgrading.
    controller = distutils.core.run_setup("../do/setup.py")
    http_api = distutils.core.run_setup("../http-api/setup.py")
    dependencies['controller'] = controller.install_requires
    dependencies['http-api'] = http_api.install_requires
    # 6) Filter: keep only properly pinned entries, call check_updates()
    # on each, and drop services/categories that end up empty.
    filtered_dependencies = {}
    for service in dependencies:
        if service in ['talib', 'react', 'icat']:
            continue
        service_dependencies = dependencies[service]
        if isinstance(service_dependencies, list):
            # Flat list (setup.py style): keep '=='/'>=' pins only.
            filtered_dependencies[service] = []
            for d in service_dependencies:
                skipped = False
                if '==' not in d and '>=' not in d:
                    skipped = True
                else:
                    filtered_dependencies[service].append(d)
                    check_updates(service, d)
                if skipped:
                    log.debug("Filtering out {}", d)
            if len(filtered_dependencies[service]) == 0:
                log.debug("Removing empty list: {}", service)
                del filtered_dependencies[service]
        elif isinstance(service_dependencies, dict):
            # Per-category dict (compose/Dockerfile/npm/pip/...).
            for category in service_dependencies:
                if service not in filtered_dependencies:
                    filtered_dependencies[service] = {}
                deps = service_dependencies[category]
                # A bare string (e.g. compose image) is handled as a
                # one-element list but stored back as a string.
                was_str = False
                if isinstance(deps, str):
                    deps = [deps]
                    was_str = True
                else:
                    filtered_dependencies[service][category] = []
                for d in deps:
                    skipped = False
                    if d == 'b2safe/server:icat':
                        skipped = True
                    elif d == 'node:carbon':
                        skipped = True
                    elif re.match(r'^git\+https://github\.com.*@master$', d):
                        skipped = True
                    elif d == 'docker:dind':
                        skipped = True
                    elif d.endswith(':latest'):
                        skipped = True
                    elif '==' in d or ':' in d:
                        if was_str:
                            filtered_dependencies[service][category] = d
                            check_updates(category, d)
                        else:
                            filtered_dependencies[service][category].append(d)
                            check_updates(category, d)
                    elif '@' in d:
                        filtered_dependencies[service][category].append(d)
                        check_updates(category, d)
                    else:
                        skipped = True
                    if skipped:
                        log.debug("Filtering out {}", d)
                if category in filtered_dependencies[service]:
                    if len(filtered_dependencies[service][category]) == 0:
                        log.debug("Removing empty list: {}.{}", service, category)
                        del filtered_dependencies[service][category]
            if len(filtered_dependencies[service]) == 0:
                log.debug("Removing empty list: {}", service)
                del filtered_dependencies[service]
        else:
            log.warning("Unknown dependencies type: {}", type(service_dependencies))
    # print(service)
    pp(filtered_dependencies)
    log.info("Note: very hard to upgrade ubuntu:16.04 from backendirods and icat")
    log.info("gssapi: versions >1.5.1 does not work and requires some effort...")
    log.info("typescript: angular.cli 8.2.14 requires typescript < 3.6.0, so that max ver is 3.5.3, cannot upgade to ver 3.7.3")
def print_parsed(parsed):
    """Pretty-print a parse result.

    The tree_str() path was guarded by 'if False :' (the type check was
    commented out), making it unreachable dead code -- only the pp()
    branch could ever run, so the dead branch has been removed.
    """
    # 'p' module add pretty printing for ParseTreeNode types
    pp(parsed)