def main(unused_argv): logging.set_verbosity(logging.INFO) with tf.Session() as sess: src = gen_parser_ops.document_source(batch_size=32, corpus_name=FLAGS.corpus_name, task_context=FLAGS.task_context) sentence = sentence_pb2.Sentence() while True: documents, finished = sess.run(src) logging.info('Read %d documents', len(documents)) for d in documents: sentence.ParseFromString(d) #print '...Sentence string before serialization: ', d tr = asciitree.LeftAligned() d = to_dict(sentence) print 'Input: %s' % sentence.text serializedStr = sentence.SerializeToString() #print '...Sentence string protobuf: ', serializedStr file = open("/Users/yihed/Documents/workspace/Other/src/thmp/data/serializedSentence.txt", "wb") #file = open("serializedSentence.txt", "wb") file.write(serializedStr) file.close() print 'Parse:' print tr(d) if finished: break
def print_logging_tree(include_handlers=True):
    """Print the hierarchy of loggers, handlers, and filters as an ASCII tree.

    Args:
        include_handlers: Include logging handlers and filters in the output.

    Raises:
        TypeError: if loggerDict contains an object that is neither a
            Logger nor a PlaceHolder.
    """
    root = __getHandlers(logging.root) if include_handlers else {}
    tree = OD()
    tree[__make_tree_label(logging.root)] = root
    loggers = {'root': root}
    # loggerDict maps dotted names to Logger/PlaceHolder objects; sorting
    # guarantees every parent name is processed before its children.
    keys = sorted(logging.Logger.manager.loggerDict.keys())
    for loggername in keys:
        parent = '.'.join(loggername.split('.')[:-1])
        if parent == '':
            parent = 'root'
        parentlogger = loggers[parent]
        logger = logging.Logger.manager.loggerDict[loggername]
        if isinstance(logger, logging.Logger):
            cur_colorname = __make_tree_label(logger)
        elif isinstance(logger, logging.PlaceHolder):
            cur_colorname = __color('<Logger {}>'.format(loggername), 'white')
        else:
            # Was `assert False`, which is silently stripped under `python -O`;
            # raise explicitly so an unexpected entry cannot pass unnoticed.
            raise TypeError('unexpected entry in loggerDict: {!r}'.format(logger))
        cur_logger = __getHandlers(logger) if include_handlers else {}
        parentlogger[cur_colorname] = cur_logger
        loggers[loggername] = cur_logger
    LA = asciitree.LeftAligned()
    print('\n{}\n'.format(LA(tree)))
def pretty_print():
    """Re-parse the captured output and print each sentence as an ASCII
    dependency tree, stripping the trailing ``@<index>`` token markers."""
    _write_input(_read_output().strip())
    logging.set_verbosity(logging.INFO)
    marker_re = re.compile(r'\s*@\d+$')
    with tf.Session() as sess:
        src = gen_parser_ops.document_source(
            batch_size=32,
            corpus_name='input-from-file-conll',
            task_context=task_context_path)
        sentence = sentence_pb2.Sentence()
        finished = False
        while not finished:
            documents, finished = sess.run(src)
            logging.info('Read %d documents', len(documents))
            for serialized in documents:
                sentence.ParseFromString(serialized)
                renderer = asciitree.LeftAligned()
                tree = to_dict(sentence)
                print('Input: %s' % sentence.text)
                print('Parse:')
                for line in renderer(tree).splitlines():
                    print(marker_re.sub('', line))
def render(self, colorize: bool = False) -> None:
    """Renders an ASCII tree using our user hierarchical namespace
    traversal object."""
    traversal = UserNamespaceTraversal(self._nstypename, colorize)
    style = BoxStyle(gfx=BOX_LIGHT, horiz_len=2)
    renderer = asciitree.LeftAligned(traverse=traversal, draw=style)
    print(renderer(self._roots))
def print_tree(tree, indent=0):
    """Pretty-print a nested-dict tree.

    Uses asciitree when it is installed; otherwise falls back to a simple
    recursive, indentation-based rendering.
    """
    try:
        import asciitree
    except ModuleNotFoundError:
        # asciitree is optional: degrade to plain indented output.
        for key, subtree in tree.items():
            print(' ' * indent, key)
            print_tree(subtree, indent + 1)
    else:
        print(asciitree.LeftAligned()(tree))
def process(irr_host, afi, db, as_set, search): import datetime now = datetime.datetime.now() now = now.strftime("%Y-%m-%d %H:%M") print "IRRTree (%s) report for '%s' (IPv%i), using %s at %s" \ % (irrtree.__version__, as_set, afi, irr_host, now) if search and "-" not in db.keys(): if not search in db.keys(): print "NOT_FOUND: %s not present in %s or any of its members" % ( search, as_set) sys.exit() def print_member(as_set, db, search): if not "-" in as_set: res = "%s (%s pfxs)" % (as_set, resolve_prefixes(db, as_set)) elif search: res = "%s (%s ASNs)" % (as_set, len(db[as_set]['origin_asns'])) else: res = "%s (%s ASNs, %s pfxs)" % (as_set, len(db[as_set]['origin_asns']), resolve_prefixes(db, as_set)) return res def getasncount(db, item): v = db[item] if type(v) == set: ret = (0, len(v)) else: ret = (len(v['origin_asns']), resolve_prefixes(db, item)) return ret def resolve_tree(as_set, db, tree=OD(), seen=set()): seen.add(as_set) for member in sorted(db[as_set]['members'], key=lambda x: getasncount(db, x), reverse=True): if member in seen: tree["%s - already expanded" % print_member(member, db, search)] = {} continue if "-" in member: seen.add(member) tree["%s" % print_member(member, db, search)] = resolve_tree( member, db, OD(), seen) else: if not search or search == member: tree["%s" % print_member(member, db, search)] = {} else: continue return tree tree = OD() tree["%s" % print_member(as_set, db, search)] = resolve_tree(as_set, db) tr = asciitree.LeftAligned() print tr(tree)
def render_tree(repository: "Repository") -> None:
    """Draws the repository's commit graph as a Git-like tree.

    Each node shows the image hash, its tags, its creation time and a
    truncated comment; children are images whose parent_id points at the
    node. One tree is printed per root image.
    """
    import asciitree

    from splitgraph.core.output import truncate_line

    # Get all commits in ascending time order
    all_images = {i.image_hash: i for i in repository.images()}
    if not all_images:
        return
    latest = repository.images["latest"]
    # Map image hash -> list of its tags; "latest" is attached explicitly.
    tag_dict = defaultdict(list)
    for img, img_tag in repository.get_all_hashes_tags():
        tag_dict[img].append(img_tag)
    tag_dict[latest.image_hash].append("latest")

    class ImageTraversal(asciitree.DictTraversal):
        # Custom traversal: render hash + tags + timestamp + comment per node.
        def get_text(self, node):
            image = all_images[node[0]]
            result = format_image_hash(image.image_hash)
            result += format_tags(tag_dict[image.image_hash])
            result += format_time(image.created)
            if image.comment:
                result += " " + truncate_line(image.comment)
            return result

    # Start with every image as a root; children get re-hung below.
    tree: OrderedDict[str, OrderedDict] = OrderedDict(
        (image, OrderedDict()) for image in all_images
    )
    tree_elements = tree.copy()

    # Join children to parents to prepare a tree structure for asciitree
    for image in all_images.values():
        if image.parent_id is None or image.parent_id not in tree_elements:
            # If we only pulled a single image, it's possible that we won't have
            # the metadata for the image's parent.
            continue
        # Re-hang the child under its parent and remove it from the root set.
        tree_elements[image.parent_id][image.image_hash] = tree_elements[image.image_hash]
        del tree[image.image_hash]

    # tree = _pull_up_children(tree)
    renderer = asciitree.LeftAligned(
        draw=asciitree.BoxStyle(
            gfx=asciitree.drawing.BOX_ASCII if SG_CMD_ASCII else asciitree.drawing.BOX_LIGHT,
            label_space=1,
            horiz_len=0,
        ),
        traverse=ImageTraversal(),
    )
    # Render one asciitree per remaining root (typically just one).
    for root, root_tree in tree.items():
        click.echo(renderer({root: root_tree}))
def handle(self, *args, **options):
    """Print an ASCII tree of projects > families > individuals > samples.

    Args:
        options["project_id"]: optional list of keywords; only projects
            whose guid or name contains a keyword are shown. Empty/absent
            means all projects.
    """
    if not options["project_id"]:
        projects = Project.objects.all()
    else:
        projects = []
        for project_id in options["project_id"]:
            matched_projects = Project.objects.filter(
                Q(guid__icontains=project_id) | Q(name__icontains=project_id))
            if not matched_projects:
                # logger.warn is a deprecated alias of logger.warning.
                logger.warning("No matching project found for keyword: %s" % project_id)
            projects.extend(matched_projects)

    projects_tree = OrderedDict()
    for project_i, project in enumerate(projects):
        project_label = "P%s project: %s. GRCh%s" % (
            project_i + 1, project, project.genome_version)
        project_tree = projects_tree[project_label] = OrderedDict()
        for family_i, family in enumerate(
                Family.objects.filter(project=project)):
            family_label = "F%s family: %s" % (family_i + 1, family,)
            family_tree = project_tree[family_label] = OrderedDict()
            for individual_i, individual in enumerate(
                    Individual.objects.filter(family=family)):
                individual_label = "I%s individual: %s" % (
                    individual_i + 1, individual,)
                individual_tree = family_tree[individual_label] = OrderedDict()
                for sample_i, sample in enumerate(
                        Sample.objects.filter(individual=individual)):
                    sample_label = "S%s sample: %s" % (
                        sample_i + 1,
                        "{sample_type}, elasticsearch_index: {elasticsearch_index} {dataset_file_path}"
                        .format(**sample.json()),
                    )
                    individual_tree[sample_label] = OrderedDict()

    # `apply(f, [x])` is Python-2-only and equivalent to a direct call.
    print(
        asciitree.LeftAligned(draw=asciitree.BoxStyle(
            gfx=asciitree.drawing.BOX_HEAVY, horiz_len=1))(
                {'Projects:': projects_tree}))
def project_tree_view(router):
    """Draw the features hierarchy tree on the router's screen, wait for a
    keypress, then switch back to the launch menu view."""
    screen = router.screen
    screen.clear()
    screen.addstr(1, 4, "Features hierarchy tree:", curses.A_BOLD)
    brief = FileUtils.get_brief_tree(router.root, router.alternatives)
    rendered = asciitree.LeftAligned()({router.root.name: brief})
    for offset, text in enumerate(rendered.splitlines()):
        screen.addstr(offset + 3, 4, text)
    screen.getch()
    router.current_view = ViewsNames.LUNCH_MENU
def visualize(self) -> str:
    """Render this sentence's dependency tree as an ASCII tree string.

    Node labels are ``<form> @<1-based index>``; index 0 is a synthetic
    'root' node. Children are grouped by each token's head index.
    """
    labels = ['root']
    adjacency: List[List[int]] = [[] for _ in range(len(self))]
    for idx, tok in enumerate(self.tokens[self.start:], 1):
        labels.append(f'{tok.form} @{idx}')
        adjacency[tok.head].append(idx)

    def build(node: int) -> OrderedDict:
        subtree: OrderedDict = OrderedDict()
        for child in adjacency[node]:
            subtree[labels[child]] = build(child)
        return subtree

    forest: OrderedDict = OrderedDict()
    forest[labels[0]] = build(0)
    return asciitree.LeftAligned()(forest)
def make_skilltree(session, char, fmt=lambda sk: '{0.id}'.format(sk)):
    """Render the character's skill hierarchy as an ASCII tree string.

    Args:
        session: database session used to query Skill rows.
        char: character whose skills are rendered.
        fmt: callable mapping a Skill to its node label (default: its id).

    Returns:
        The rendered tree, with leading spaces replaced by figure-space
        characters (keeps alignment when displayed in proportional fonts).
    """
    tr = asciitree.LeftAligned(
        draw = asciitree.BoxStyle(
            gfx = asciitree.drawing.BOX_HEAVY,
            horiz_len=0,
            indent=0,
            label_space=1
        )
    )
    space_char = '\u2001'  # '\u2000' for BOX_LIGHT
    # Top-level skills for this character: no parent, oldest first.
    root_skills = session.query(Skill).filter(Skill.char == char,
        Skill.parent == None).order_by(Skill.created)
    d = OrderedDict()
    for skill in root_skills:
        d[fmt(skill)] = get_tree(session, skill, fmt)
    msg = tr(d)
    # NOTE(review): assumes module-level `tree_re` splits each line into a
    # (leading tree-drawing prefix, remainder) pair -- confirm its pattern
    # matches every line asciitree emits.
    return '\n'.join(a.replace(' ', space_char) + b
                     for a, b in (tree_re.match(l).groups()
                                  for l in msg.split('\n')))
def main(unused_argv):
    """Read parser documents from the corpus and print each sentence's text
    followed by its ASCII dependency tree."""
    logging.set_verbosity(logging.INFO)
    with tf.Session() as sess:
        source = gen_parser_ops.document_source(batch_size=32,
                                                corpus_name=FLAGS.corpus_name,
                                                task_context=FLAGS.task_context)
        sentence = sentence_pb2.Sentence()
        finished = False
        while not finished:
            documents, finished = sess.run(source)
            logging.info('Read %d documents', len(documents))
            for serialized in documents:
                sentence.ParseFromString(serialized)
                tree = to_dict(sentence)
                print('Input: %s' % sentence.text)
                print('Parse:')
                print(asciitree.LeftAligned()(tree))
def ensure_rule(d):
    """Turn a parsed dependency tree into a comprehension-question string.

    Args:
        d: single-rooted nested dict from the parse step; its one key is the
           root token (Python 2: ``d.keys()[0]`` indexes the keys list).

    Returns:
        A question string, or None when no rewrite rule matches.
    """
    tr = asciitree.LeftAligned()
    root_tok = d.keys()[0]
    print(tr(d))
    # Prefer an adverbial-clause / prepositional attachment off the root.
    root, advcl = get_rel(d[d.keys()[0]], ['advcl', 'prep'])
    _, subject = get_rel(d[d.keys()[0]], ['nsubj'])
    if advcl:
        string = get_string_from_root(root, advcl)
        if string.endswith(','):
            string = string[:-1]
        return string + ', which of the following must be satisfied?'
    else:
        # Otherwise fall back to a clausal complement (ccomp) off the root.
        root, ccomp = get_rel(d[d.keys()[0]], ['ccomp'])
        if ccomp:
            n_root, nsubj = get_rel(root[ccomp], ['nsubj', 'nsubjpass'])
            if nsubj:
                if nsubj.word == 'it':
                    # Expletive subject: ask about the direct object instead.
                    _, dobj = get_rel(root[ccomp], ['dobj'])
                    if dobj:
                        string = get_string_from_root(root[ccomp], dobj)
                        if subject:
                            subject_string = get_string_from_root(
                                d[d.keys()[0]], subject)
                            return 'Which of the following is true regarding {} that {} {}'.format(
                                string, subject_string, ccomp.word)
                else:
                    # if copular, make it about the relationship
                    _, cop = get_word(n_root, ['is', 'are'])
                    if cop or ccomp.word in ['is', 'are']:
                        string = get_string_from_root(n_root, nsubj)
                        return 'Which of the following is true regarding {}'.format(
                            string)
                    else:
                        string = get_string_from_root(n_root, nsubj, ['WDT'])
                        if string.endswith(','):
                            string = string[:-1]
                        return 'Where ' + string + ', which of the following must be satisfied?'
def main(unused_argv): logging.set_verbosity(logging.INFO) with tf.Session() as sess: src = gen_parser_ops.document_source(batch_size=32, corpus_name=FLAGS.corpus_name, task_context=FLAGS.task_context) sentence = sentence_pb2.Sentence() while True: documents, finished = sess.run(src) logging.info('Read %d documents', len(documents)) for d in documents: sentence.ParseFromString(d) tr = asciitree.LeftAligned() d = to_dict(sentence) print 'Input: %s' % sentence.text print 'Parse:' tr_str = tr(d) pat = re.compile(r'\s*@\d+$') for tr_ln in tr_str.splitlines(): print pat.sub('', tr_ln) if finished: break
def run(self):
    """ Runs the CLI login flow.

    Builds and prints an ASCII tree of the GCP resource hierarchy
    (organization / folders / projects), rooted at either the configured
    organization or the configured folder.

    Returns:
        None
    """
    fold_parents, org_parents = self._get_projects()

    def recurse_and_attach(parent, tree):
        """ Given a tree (a dict) and a parent node, traverses depth-first
        in the org hierarchy, attaching children to parents in tree.

        Args:
            parent: parent node as str
            tree: mapping to use to construct the org hierarchy

        Returns:
            None
        """
        result = self._crm_v2.folders().list(parent=parent).execute()
        if 'folders' in result:
            for folder in result['folders']:
                f_str = '{} {} ({})'.format(_FOLDER, folder["displayName"],
                                            folder["name"].split("/")[1])
                tree[f_str] = {}
                _id = folder["name"].split('/')[1]
                # Attach this folder's direct child projects, then recurse
                # into its sub-folders.
                if _id in fold_parents:
                    for project in fold_parents[_id]:
                        proj_name = '{} {}'.format(_PROJ, project["projectId"])
                        tree[f_str][proj_name] = {}
                recurse_and_attach(folder['name'], tree[f_str])

    t = {}
    # If an org flag is set, finds the org by name or ID, walks the org
    # hierarchy starting at the organization, uses the tree and attaches
    # folders and projects recursively
    if self.organization:
        resp = self._crm_v1.organizations().search(body={}).execute()
        orgs = resp.get('organizations')
        if not orgs:
            raise SystemExit('No organizations found')
        # First org whose displayName or resource name contains the keyword.
        org_id = [
            org['name'].split('/')[1] for org in orgs
            if self.organization in org['displayName']
            or self.organization in org['name']
        ][0]
        if self.use_org_id:
            filter_func = functools.partial(resource_filter,
                                            self.organization, field='name')
        else:
            filter_func = functools.partial(resource_filter,
                                            self.organization)
        try:
            org = next(filter(filter_func, orgs))
        except StopIteration:
            raise SystemExit('Could not find organization {}'
                             ''.format(self.organization))
        org_name = '{} {} ({})'.format(_ORG, org["displayName"],
                                       org["name"].split('/')[1])
        t[org_name] = {}
        recurse_and_attach(org['name'], t[org_name])
        # Projects parented directly by the organization itself.
        for project in org_parents.get(org_id, []):
            proj_name = '{} {}'.format(_PROJ, project["projectId"])
            t[org_name][proj_name] = {}
    # If the folder flag is set, walks the hierarchy starting at that folder
    # node and attaches folders and projects.
    if self.folder:
        folder = self._crm_v2.folders().get(
            name='folders/{}'.format(self.folder)).execute()
        fold_name = '{} {} ({})'.format(_FOLDER, folder["displayName"],
                                        self.folder)
        t[fold_name] = {}
        recurse_and_attach(folder['name'], t[fold_name])
        for project in fold_parents.get(self.folder, []):
            proj_name = '{} {}'.format(_PROJ, project["projectId"])
            t[fold_name][proj_name] = {}
    tr = asciitree.LeftAligned(draw=asciitree.BoxStyle())
    print(tr(t))
The data nodes are also responsible for storing the data. Since dsch is built to support multiple storage backends, there are specific data node classes implementing the respective functionality. The classes in this module provide common functionality and are intended to be used as base classes. Different backends are implemented in the :mod:`dsch.backends` package. """ import datetime import importlib import asciitree from . import exceptions draw_tree = asciitree.LeftAligned( draw=asciitree.BoxStyle(gfx=asciitree.drawing.BOX_LIGHT, horiz_len=1) ) class ItemNode: """Generic data item node. :class:`ItemNode` is the base class for data nodes, providing common functionality and the common interface. Subclasses may add functionality depending on the node type and backend (e.g. compression settings). Note that this is only the base class for item nodes, i.e. nodes that directly hold data. Collection nodes, i.e. :class:`Compilation` and :class:`List` are *not* based on this class. Attributes:
if __name__ == '__main__':
    # Inspect parser output and generate questions from verb-rooted parses.
    output = check_parsey_output()
    print(output['root'].value_counts())
    ensures = output[output['pos'] == 'VERB']
    qs = []
    errs = 0
    renderer = asciitree.LeftAligned()
    for _, row in ensures.iterrows():
        parse_dict = row['dict']
        print(renderer(parse_dict))
        generated = rules_s_o_v(parse_dict, False)
        if generated is not None:
            qs.extend(generated)
def handle(self, *args, **options):
    """Print an ASCII tree of projects > families > individuals > samples
    (with each sample's datasets).

    Args:
        options["project_id"]: optional list of keywords; only projects
            whose guid or name contains a keyword are shown. Empty/absent
            means all projects.
    """
    if not options["project_id"]:
        projects = Project.objects.all()
    else:
        projects = []
        for project_id in options["project_id"]:
            matched_projects = Project.objects.filter(
                Q(guid__icontains=project_id) | Q(name__icontains=project_id))
            if not matched_projects:
                # logger.warn is a deprecated alias of logger.warning.
                logger.warning("No matching project found for keyword: %s" % project_id)
            projects.extend(matched_projects)

    try:
        # NOTE(review): pip.main() was removed in pip >= 10; installing at
        # runtime is best-effort and should move to a requirements file.
        import pip
        pip.main(["install", "asciitree"])
        import asciitree
        # Plain submodule imports instead of function-scope `import *`,
        # which is a SyntaxError in Python 3; only attribute access
        # (asciitree.drawing.BOX_HEAVY etc.) is used below.
        import asciitree.util
        import asciitree.drawing
    except ImportError as e:
        logger.error(e)
        return

    projects_tree = OrderedDict()
    for project_i, project in enumerate(projects):
        project_label = "P%s project: %s" % (project_i + 1, project,)
        project_tree = projects_tree[project_label] = OrderedDict()
        for family_i, family in enumerate(
                Family.objects.filter(project=project)):
            family_label = "F%s family: %s" % (family_i + 1, family,)
            family_tree = project_tree[family_label] = OrderedDict()
            for individual_i, individual in enumerate(
                    Individual.objects.filter(family=family)):
                individual_label = "I%s individual: %s" % (
                    individual_i + 1, individual,)
                individual_tree = family_tree[individual_label] = OrderedDict()
                for sample_i, sample in enumerate(
                        Sample.objects.filter(individual=individual)):
                    sample_label = "sample: %s" % (sample,)
                    datasets = Dataset.objects.filter(samples=sample)
                    sample_label += " - dataset(s): " + str([
                        "%s: %s" % (d, d.source_file_path) for d in datasets
                    ])
                    individual_tree[sample_label] = OrderedDict()

    pprint(projects_tree)
    # `apply(f, [x])` is Python-2-only and equivalent to a direct call.
    print(
        asciitree.LeftAligned(draw=asciitree.BoxStyle(
            gfx=asciitree.drawing.BOX_HEAVY, horiz_len=1))(
                {'Projects:': projects_tree}))
def pretty_print_dict(input_dict): tr = asciitree.LeftAligned() print tr(input_dict)
def _tree_as_string(d):
    """Render the nested dict as a box-drawn ASCII tree string.

    A dict with more than one top-level key is first wrapped under a
    synthetic ROOT node, since asciitree expects a single root.
    """
    if len(d) != 1:
        d = {'ROOT': d}
    renderer = asciitree.LeftAligned(
        draw=asciitree.BoxStyle(gfx=BOX_DOUBLE, horiz_len=1))
    return renderer(d)
def display_tree(self): root = self.app.model.root print asciitree.LeftAligned()({root: OD(root.items())})