def spider_closed(self):
    """Handler for the spider_closed signal: export the crawl tree and persist it to MongoDB."""
    print("Goodbye vermin")
    tree_dict = DictExporter().export(self.root)
    self.postToMongo(self.mongoIP, self.database, self.collection, tree_dict)
def get(self, request):
    """Reset both the persistent DB tree and the cache, then return the fresh DB tree."""
    db_tree = DBTree()
    db_tree.reset()
    CachedTree().reset()
    return Response(data=DictExporter().export(db_tree.tree))
def _prune_leaves(self, root_node) -> dict:
    """Prune all leaves that lack a 'value' attribute.

    In other words, remove the leaves that the user did not set in file.
    Pruning a leaf can expose its parent as a new value-less leaf, so the
    pass repeats until every remaining leaf carries a value.

    Parameters
    ----------
    root_node : dict
        Branch to be pruned.

    Returns
    -------
    dict
        Pruned branch.
    """
    tree = DictImporter().import_(root_node)
    # keep sweeping until no value-less leaf remains
    while not all(hasattr(leaf, 'value') for leaf in tree.leaves):
        for leaf in tree.leaves:
            if not hasattr(leaf, 'value'):
                # detach the leaf from the tree
                leaf.parent = None
    return DictExporter().export(tree)
def test_dict_exporter():
    """Dict Exporter."""
    root = AnyNode(id="root")
    sub0 = AnyNode(id="sub0", parent=root)
    AnyNode(id="sub0B", parent=sub0)
    AnyNode(id="sub0A", parent=sub0)
    sub1 = AnyNode(id="sub1", parent=root, foo="bar")
    AnyNode(id="sub1A", parent=sub1)
    AnyNode(id="sub1B", parent=sub1)
    sub1c = AnyNode(id="sub1C", parent=sub1)
    AnyNode(id="sub1Ca", parent=sub1c)
    # extra attributes (foo on sub1) must appear alongside id/children
    expected = {
        'id': 'root',
        'children': [
            {'id': 'sub0', 'children': [{'id': 'sub0B'}, {'id': 'sub0A'}]},
            {'id': 'sub1', 'foo': 'bar', 'children': [
                {'id': 'sub1A'},
                {'id': 'sub1B'},
                {'id': 'sub1C', 'children': [{'id': 'sub1Ca'}]},
            ]},
        ],
    }
    eq_(DictExporter().export(root), expected)
def test_dict_exporter_filter():
    """Dict Exporter."""
    root = Node("root")
    sub0 = Node("sub0", parent=root)
    Node("sub0B", parent=sub0)
    Node("sub0A", parent=sub0)
    sub1 = Node("sub1", parent=root, foo="bar")
    Node("sub1A", parent=sub1)
    Node("sub1B", parent=sub1)
    sub1c = Node("sub1C", parent=sub1)
    Node("sub1Ca", parent=sub1c)
    # attriter keeps only 'name'; the 'foo' attribute on sub1 must be dropped
    exporter = DictExporter(
        attriter=lambda attrs: [(k, v) for k, v in attrs if k == "name"])
    expected = {
        'name': 'root',
        'children': [
            {'name': 'sub0', 'children': [{'name': 'sub0B'}, {'name': 'sub0A'}]},
            {'name': 'sub1', 'children': [
                {'name': 'sub1A'},
                {'name': 'sub1B'},
                {'name': 'sub1C', 'children': [{'name': 'sub1Ca'}]},
            ]},
        ],
    }
    eq_(exporter.export(root), expected)
def test_dict_importer():
    """Dict Importer."""
    refdata = {
        'id': 'root',
        'children': [
            {'id': 'sub0', 'children': [{'id': 'sub0B'}, {'id': 'sub0A'}]},
            {'id': 'sub1', 'children': [
                {'id': 'sub1A'},
                {'id': 'sub1B'},
                {'id': 'sub1C', 'children': [{'id': 'sub1Ca'}]},
            ]},
        ],
    }
    data = deepcopy(refdata)
    root = DictImporter().import_(data)
    # importing must not mutate its input, and the round-trip must be exact
    eq_(data, refdata)
    eq_(DictExporter().export(root), data)
def __json_encode__(self):
    """Serialize this object for JSON: drop opt_model and reduce root_node to a name/tag-only tree dict."""
    attrs = dict(vars(self))
    del attrs['opt_model']
    keep = ("name", "tag")
    exporter = DictExporter(
        attriter=lambda items: [(k, v) for k, v in items if k in keep])
    attrs['root_node'] = exporter.export(self.root_node)
    return attrs
async def exportTree(self):
    """Export this node's subtree as a dict, keeping only whitelisted attributes.

    Returns:
        dict: tree export containing only the attributes listed below.
    """
    # Attributes that survive the export; everything else is stripped.
    # (set literal replaces the redundant set([...]) construction)
    properties = {
        'server_id', 'name', 'string_nodeid', 'namespace_index',
        'node_class', 'variant_type', 'node_id', 'permanent_id',
    }
    logging.info(properties)
    exporter = DictExporter(
        attriter=lambda attrs: [(k, v) for k, v in attrs if k in properties])
    return exporter.export(self)
def delete(self, request):
    """
    Delete node from cache

    :param request: {node_id: int}
    """
    tree = CachedTree()
    tree.delete_node(request.data['node_id'])
    return Response(data=DictExporter().export(tree.tree))
def save_tree(self, root, directory=None):
    """Pickle the tree rooted at `root` (minus each node's 'net' attribute) into the experiment's nn_data dir."""
    base = directory or '.'
    data_path = base + '/nn_data/' + self.config.experiment_name + '/spectral_tree.dict'

    def drop_net(attrs):
        # the network object is excluded from the pickled dump
        return [(k, v) for k, v in attrs if k != 'net']

    ## save as pickled dict
    treedict = DictExporter(attriter=drop_net).export(root)
    with open(data_path, 'wb') as outfile:
        pickle.dump(treedict, outfile)
def export_tree(self, filename):
    """Export the CST: keep an OrderedDict copy on self and write a JSON file to `filename`."""
    self.export_cst_dict = DictExporter(
        dictcls=OrderedDict, attriter=sorted).export(self.cst)
    with open(filename, 'w') as filehandle:
        JsonExporter(indent=2, sort_keys=True).write(self.cst, filehandle)
    print('CST tree export to JSON successful!')
    return
def put(self, request):
    """
    Change value in node

    :param request: {node_id: int, new_value: str}
    """
    tree = CachedTree()
    target = tree.get_node_by_id(request.data['node_id'])
    target.change_node_value(request.data['new_value'])
    return Response(data=DictExporter().export(tree.tree))
def post(self, request):
    """
    Create node in cache

    :param request: {parent_id: int, value: str}
    """
    tree = CachedTree()
    tree.create_node(parent_id=request.data['parent_id'],
                     value=request.data['value'])
    return Response(data=DictExporter().export(tree.tree))
def _update_subpanel(subpanel_obj, supb_changes):
    """Update the checkboxes of a subpanel according to checkboxes checked in the model preview.

    Args:
        subpanel_obj(dict): a subpanel object
        supb_changes(dict): terms to keep under a parent term.
            example: {"HP:0001250": ["HP:0020207", "HP:0020215", "HP:0001327"]}

    Returns:
        subpanel_obj(dict): an updated subpanel object
    """
    checkboxes = subpanel_obj.get("checkboxes", {})
    new_checkboxes = {}
    for parent, children_list in supb_changes.items():
        # create mini tree obj from terms in changes dict. Add all nodes at the top level initially
        root = Node(id="root", name="root", parent=None)
        all_terms = {}
        # loop over the terms to keep into the checkboxes dict
        for child in children_list:
            if child.startswith("OMIM"):
                # OMIM terms carry no ontology tree; copy them over unchanged
                new_checkboxes[child] = checkboxes[child]
                continue
            custom_name = None
            term_title = None
            if child in checkboxes:
                custom_name = checkboxes[child].get("custom_name")
                term_title = checkboxes[child].get("term_title")
            term_obj = store.hpo_term(child)
            # else it's an HPO term, and might have nested terms
            try:
                node = Node(child, parent=root, description=term_obj["description"])
            except Exception:
                # term_obj is None (or malformed) when the term is unknown to the store
                flash(f"Term {child} could not be found in database")
                continue
            all_terms[child] = term_obj
            if custom_name:
                node.custom_name = custom_name
            if term_title:
                node.term_title = term_title
        # Rearrange tree nodes according the HPO ontology
        root = store.organize_tree(all_terms, root)
        LOG.info(f"Updated HPO tree:{root}:\n{RenderTree(root)}")
        exporter = DictExporter()
        for child_node in root.children:
            # export node to dict
            new_checkboxes[child_node.name] = exporter.export(child_node)
    subpanel_obj["checkboxes"] = new_checkboxes
    subpanel_obj["updated"] = datetime.datetime.now()
    return subpanel_obj
def dir_structure_to_json_generator(self, start_dir, ffilter, dfilter, hasdirs):
    # Non recursive function
    # Walks `start_dir` with an explicit stack, building an AnyNode tree of
    # matching files (and optionally directories), then exports it as a dict.
    # - ffilter: comma-separated file filter string, or falsy for the default
    # - dfilter: directory regex filter passed to directory_regex_pattern_matching
    # - hasdirs: stringly-typed flag ('True' includes directory nodes)
    class Data:
        # Pairs a directory path with the tree node acting as its parent.
        def __init__(self, path, childD):
            self.path = path
            self.childD = childD
    if ffilter:
        filter_file_list = str(ffilter).split(',')
    else:
        # NOTE(review): default is the *string* '*.*' while the other branch
        # yields a list — assumes is_file_machting_filter accepts both; verify.
        filter_file_list = '*.*'
    if hasdirs is None:
        hasdirs = False
    else:
        # only the exact string 'True' enables directory nodes
        if hasdirs == 'True':
            hasdirs = True
        else:
            hasdirs = False
    flag = True  # True only until the root node has been created
    initial_root = None
    if start_dir and os.path.exists(start_dir):
        start = Data(start_dir, None)
        directories = [start]  # stack of directories still to visit
        while len(directories) > 0:
            directory = directories.pop()
            if flag:
                # first iteration: the start directory becomes the tree root
                root = AnyNode(type="Directory", path=directory.path)
                initial_root = root
                flag = False
            else:
                root = directory.childD
            for name in os.listdir(directory.path):
                fullpath = os.path.join(directory.path, name)
                if os.path.isfile(fullpath):
                    if self.directory_regex_pattern_matching(fullpath, dfilter) and self.is_file_machting_filter(fullpath, filter_file_list):
                        AnyNode(type="File", path=fullpath, lastmodified=str(self.modification_date(fullpath)), parent=root)
                elif os.path.isdir(fullpath):
                    if hasdirs and\
                            len(os.listdir(fullpath)) > 0 and self.directory_regex_pattern_matching(fullpath, dfilter) and\
                            self.has_files_by_filter(fullpath, filter_file_list):
                        # Check if the directory is not empty and if the directory contains any of the files from filter
                        child_directory = AnyNode(type="Directory", path=fullpath, parent=root)
                        directories.append(Data(fullpath, child_directory))  # It's a directory, store it.
        exporter = DictExporter()
        data = exporter.export(initial_root)
    else:
        data = {'Directory': 'Does not exist or was not given'}
    return data
def __init__(self, dictcls=OrderedDict, attriter=None, childiter=list, maxlevel=None):
    """Initialize the exporter, defaulting the output mapping type to OrderedDict."""
    super().__init__(
        dictcls=dictcls,
        attriter=attriter,
        childiter=childiter,
        maxlevel=maxlevel,
    )
def save_in_file(self, file_path):
    """Saves the tree in a json file.

    Each node's `name` is replaced by its dict form before export.
    """
    for node in PreOrderIter(self.root):
        node.name = node.to_dic()
    payload = {
        'main_player': self.main_player,
        'tree': DictExporter().export(self.root),
    }
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, 'w') as feedsjson:
        json.dump(payload, feedsjson, indent=4)
def huc_navigate(self, huc_navigation_tree, hu_digit):
    """Return the exported navigation subtree for the HUC level given by hu_digit."""
    huc_nodes = self.init_huc_navigation_tree(huc_navigation_tree)
    level_key = 'hu' + str(hu_digit)
    # the subtree is keyed by the hu_digit-length prefix of this HUC code
    subtree = huc_nodes[level_key][self.huc_code[0:hu_digit]]
    return {
        'hydrologic_digit': level_key,
        'data': DictExporter().export(subtree),
    }
def get_tree(request):
    """Build the collection tree for the requesting user and return it as JSON.

    For the virtual 'root' node the tree is assembled locally from the user's
    folder metadata filter; for any other node it is fetched via
    irods_interface.get_tree. (An unreachable `if False:` branch in the
    original has been removed; the else-branch was the only live path.)
    """
    session = request.session
    filters = get_metadata_filters(session)
    thumbs_ids = get_thumbs_ids(session)
    allowed_collection_ids = get_allowed_ids(session)
    node = request.GET.get('node')
    profile = request.user.profile
    is_root = node == 'root'
    if is_root:
        node = get_folder_metadata_filter(session, profile.root)
        root = Node('coll.name', id=1, path='coll.path')
        thumbs_ids_collection = None
        if allowed_collection_ids is None:
            for no in node.split(';'):
                coll_id = get_collection_id(no)
                Node(no, root, id=coll_id, path=no, expanded=False)
        else:
            # NOTE(review): pre-existing issue — coll_id is never assigned on
            # this path, so the thumbs_ids update below would raise; confirm
            # against callers before changing behavior.
            for key in sorted(allowed_collection_ids.keys()):
                Node(key, root, id=allowed_collection_ids[key], path=key, expanded=False)
    else:
        root, thumbs_ids_collection, coll_id = irods_interface.get_tree(
            node, profile.irods_user, filters, allowed_collection_ids)
    thumbs_ids[coll_id] = thumbs_ids_collection
    if root is not None:
        tree = DictExporter().export(root)
    else:
        tree = {}
    set_thumbs_ids(session, thumbs_ids)
    return JsonResponse(tree)
def tmpToMongo(leaf_url):
    """Export a leaf node and post it to Mongo as an RBT test-case document.

    ##TODO: test

    :param leaf_url: anytree node whose exported 'id' encodes the test-case name.
    """
    leaf_url = DictExporter().export(leaf_url)
    # last dotted component of the id is the test-case name
    leaf_name = leaf_url['id'].split(".")[-1]
    # raw string fixes the invalid '\d' escape (DeprecationWarning on 3.6+,
    # SyntaxError in future versions)
    numbers = re.findall(r'\d+', str(leaf_name))
    leaf_number = numbers[0] if numbers else None
    print(leaf_url['id'])
    print(leaf_number)
    testcase_data = {
        "type": "tc",
        "name": leaf_name,
        "number": leaf_number,
        "url": leaf_url,
        "Target test tool type": "RBT",
        "metadata": [
            {"tags": "AFG"},
            {"Type": "ut"},
        ],
    }
    postToMongo('http://142.133.174.148:8888/', 'localhost', 'RBT', 'filter_tests', testcase_data)
def to_yaml(self, outfile=None):
    """
    Export LopperYAML tree to a yaml output file

    Args:
        outfile (string): path to a yaml output file; dump to stdout if None

    Returns:
        Nothing
    """
    if not self.anytree:
        return
    dct = DictExporter(dictcls=OrderedDict).export(self.anytree)
    if outfile:
        with open(outfile, "w") as file:
            yaml.dump(dct, file, default_flow_style=False)
    else:
        print(yaml.dump(dct, default_flow_style=False, default_style='"'))
def save(self, filename):
    """
    Save tree with all its attributes and computed scores to binary pickle file.

    :param filename: filename of the pickle
    :type filename: str
    """
    # export to OrderedDict with attributes in sorted order
    tree_dict = DictExporter(dictcls=OrderedDict, attriter=sorted).export(self)
    if not filename.endswith(PKL_EXT):
        filename += PKL_EXT
    # serialize to pickle
    with open(filename, "wb") as f:
        pickle.dump(tree_dict, f, protocol=pickle.HIGHEST_PROTOCOL)
def test_json_importer():
    """Json Importer."""
    refdata = {
        'id': 'root',
        'children': [
            {'id': 'sub0', 'children': [{'id': 'sub0B'}, {'id': 'sub0A'}]},
            {'id': 'sub1', 'children': [
                {'id': 'sub1A'},
                {'id': 'sub1B'},
                {'id': 'sub1C', 'children': [{'id': 'sub1Ca'}]},
            ]},
        ],
    }
    json_text = "\n".join([
        '{',
        ' "children": [',
        ' {',
        ' "children": [',
        ' {',
        ' "id": "sub0B"',
        ' },',
        ' {',
        ' "id": "sub0A"',
        ' }',
        ' ],',
        ' "id": "sub0"',
        ' },',
        ' {',
        ' "children": [',
        ' {',
        ' "id": "sub1A"',
        ' },',
        ' {',
        ' "id": "sub1B"',
        ' },',
        ' {',
        ' "children": [',
        ' {',
        ' "id": "sub1Ca"',
        ' }',
        ' ],',
        ' "id": "sub1C"',
        ' }',
        ' ],',
        ' "id": "sub1"',
        ' }',
        ' ],',
        ' "id": "root"',
        '}',
    ])
    # import from a string ...
    imported = DictExporter().export(JsonImporter().import_(json_text))
    eq_(refdata, imported)
    # ... and from an open file handle
    with NamedTemporaryFile(mode="w+") as ref:
        ref.write(json_text)
        ref.seek(0)
        imported = DictExporter().export(JsonImporter().read(ref))
        eq_(refdata, imported)
def test_dict_importer_node():
    """Dict Importer."""
    refdata = {
        'name': 'root',
        'children': [
            {'name': 'sub0', 'children': [{'name': 'sub0B'}, {'name': 'sub0A'}]},
            {'name': 'sub1', 'children': [
                {'name': 'sub1A'},
                {'name': 'sub1B'},
                {'name': 'sub1C', 'children': [{'name': 'sub1Ca'}]},
            ]},
        ],
    }
    data = deepcopy(refdata)
    root = DictImporter(Node).import_(data)
    # importing must not mutate the input, and the round-trip must be exact
    eq_(data, refdata)
    eq_(DictExporter().export(root), data)
    rendering = RenderTree(root)
    expected = u"\n".join([
        u"Node('/root')",
        u"├── Node('/root/sub0')",
        u"│ ├── Node('/root/sub0/sub0B')",
        u"│ └── Node('/root/sub0/sub0A')",
        u"└── Node('/root/sub1')",
        u" ├── Node('/root/sub1/sub1A')",
        u" ├── Node('/root/sub1/sub1B')",
        u" └── Node('/root/sub1/sub1C')",
        u" └── Node('/root/sub1/sub1C/sub1Ca')",
    ])
    eq_str(str(rendering), expected)
def bizdatatree_to_dict(root_node: BizDataNode, reverse=True, top_n=0, min_threshold=0) -> OrderedDict:
    """Export a BizData tree to an OrderedDict with children ordered by numeric value.

    Children are sorted by float(value) (descending when `reverse`), then
    filtered by `min_threshold` (strictly greater), then truncated to `top_n`
    when either option is positive.
    """

    def _order_children(children):
        # sort first; filtering and truncation operate on the sorted list
        ordered = sorted(children, key=lambda c: float(c.value), reverse=reverse)
        if min_threshold > 0:
            ordered = [c for c in ordered if float(c.value) > min_threshold]
        if top_n > 0:
            ordered = ordered[:top_n]
        return ordered

    return DictExporter(dictcls=OrderedDict, childiter=_order_children).export(root_node)
def test_dict_exporter_mixin():
    """Dict Exporter."""
    class MyClass(NodeMixin):
        def __init__(self, foo, parent=None):
            super(MyClass, self).__init__()
            self.foo = foo
            self.parent = parent

    root = MyClass('root')
    sub0 = MyClass('s0', parent=root)
    MyClass('s0b', parent=sub0)
    MyClass('s0a', parent=sub0)
    sub1 = MyClass('s1', parent=root)
    MyClass('s1a', parent=sub1)
    MyClass('s1b', parent=sub1)
    sub1c = MyClass('s1c', parent=sub1)
    MyClass('s1ca', parent=sub1c)
    expected = {
        'foo': 'root',
        'children': [
            {'foo': 's0', 'children': [{'foo': 's0b'}, {'foo': 's0a'}]},
            {'foo': 's1', 'children': [
                {'foo': 's1a'},
                {'foo': 's1b'},
                {'foo': 's1c', 'children': [{'foo': 's1ca'}]},
            ]},
        ],
    }
    eq_(DictExporter().export(root), expected)
def build_phenotype_tree(self, hpo_id):
    """Creates an HPO Tree based on one or more given ancestors

    Args:
        hpo_id(str): an HPO term

    Returns:
        tree_dict(dict): a tree of all HPO children of the given term, as a dictionary
    """
    # all nodes start as direct children of this synthetic root; organize_tree
    # later rearranges them into the real ontology hierarchy
    root = Node(id="root", name="root", parent=None)
    all_terms = {}       # term_id -> term object fetched from the store
    unique_terms = set() # term_ids that already have a Node created

    def _hpo_terms_list(hpo_ids):
        # Recursively fetch each term and its descendants from the store.
        # NOTE(review): recursion happens even for already-seen term_ids;
        # assumes the HPO child relation is acyclic — verify, otherwise this
        # can recurse without bound.
        for term_id in hpo_ids:
            term_obj = self.hpo_term(term_id)
            if term_obj is None:
                continue
            # sort term children by ascending HPO number
            children = sorted(
                term_obj["children"],
                key=lambda x: int("".join([i for i in x if i.isdigit()])),
            )
            term_obj["children"] = children
            all_terms[term_id] = term_obj
            if term_id not in unique_terms:
                node = Node(term_id, parent=root, description=term_obj["description"])
                unique_terms.add(term_id)
            # recursive loop to collect children, children of children and so on
            _hpo_terms_list(term_obj["children"])

    # compile a list of all HPO term objects to include in the submodel
    _hpo_terms_list(
        [hpo_id])  # trigger the recursive loop to collect nested HPO terms
    # rearrange tree according to the HPO ontology
    root = self.organize_tree(all_terms, root)
    node_resolver = resolver.Resolver("name")
    # Extract a tree structure having the chosen HPO term (hpo_id) as ancestor of all the children terms
    term_node = node_resolver.get(root, hpo_id)
    LOG.info(
        f"Built ontology for HPO term:{hpo_id}:\n{RenderTree(term_node)}")
    exporter = DictExporter()
    # Export this tree structure as dictionary, so that can be saved in database
    tree_dict = exporter.export(term_node)
    return tree_dict
def post(self, request: Request):
    """
    Copy node to cache

    :param request: {node_id}
    """
    db_tree = DBTree()
    cached_tree = CachedTree()
    exporter = DictExporter()
    node_id = request.data['node_id']
    if cached_tree.get_node_by_id(node_id):
        # node is already cached — nothing to copy
        return Response(data=exporter.export(cached_tree.tree))
    node = db_tree.get_node_by_id(node_id)
    new_node = cached_tree.add_node(node)
    if new_node.is_deleted:
        # copying a deleted node immediately marks it deleted in the cache
        cached_tree.delete_node(node.id)
    return Response(data=exporter.export(cached_tree.tree))
def build_page_hierarchy(
        df: pd.DataFrame,
        prepruning_nodes: Optional[List] = None,
        update_fn: Callable = lambda x: x) -> Tuple[Dict, int]:
    """Build a URL-path tree with visit counts from "loading of page" events.

    Returns the exported tree dict and the maximum per-node count
    (used for color scaling by the caller).
    NOTE(review): relies on module-level `source_col` and `aggregation_nodes`
    defined elsewhere in the file.
    """
    root = Node('/', count=0)
    tree = {'/': root}  # path-pair id -> Node, to reuse existing nodes
    max_count = 0  # for the color
    for ua_name in update_fn(df[source_col]):
        if "loading of page" in ua_name:
            # the URL path is the last whitespace-separated token of the event text
            url_path = ua_name.split(' ')[-1]
            frags = url_path.split("/")[1:-1]
            frags = ['/'] + ['-' if f == '' else f for f in frags]  # fix root-node and handle empty parts
            parent_node = root
            for i, (parent, node) in enumerate(zip(frags[:-1], frags[1:])):  # add missing in between nodes
                # NOTE(review): `id` shadows the builtin; keys are "parent/child"
                # pairs, so distinct branches sharing a pair name collide.
                id = '/'.join([parent, node])
                if id not in tree:
                    # page indicates pagination of previous site, so just repeat it
                    if prepruning_nodes is not None:
                        if (parent in aggregation_nodes and node != 'author') or node == "page":
                            drop_last_n = len(frags) - i - 1
                            frags = frags[:-drop_last_n]  # drop resource for aggregation
                            # TODO fix dirty hack for pruned tree
                            break
                    parent_node = Node(node, parent_node, count=0)
                    tree[id] = parent_node
                else:
                    parent_node = tree[id]
            node_id = '/'.join(frags[-2:])  # use last two parts as id to avoid conflicts with other nodes with same name
            node = tree[node_id]
            node.count += 1
            if node.count > max_count:
                max_count = node.count
    exporter = DictExporter()
    d = exporter.export(root)
    return d, max_count
def get(self, request):
    """
    Save changes
    """
    db_tree = DBTree()
    cached_tree = CachedTree()
    # first pass: push every cached node into the DB tree
    for cache_node in PreOrderIter(cached_tree.tree):
        self.save(cache_node, db_tree)
    # second pass: sync deletion flags from the DB tree back into the cache
    for cache_node in PreOrderIter(cached_tree.tree):
        db_node = db_tree.get_node_by_id(cache_node.id)
        cache_node.is_deleted = db_node.is_deleted
    exporter = DictExporter()
    payload = {
        "db_tree": exporter.export(db_tree.tree),
        "cached_tree": exporter.export(cached_tree.tree),
    }
    return Response(data=payload)
def create_orgchart():
    """Generate orgchart dict from s3 json dump."""
    s3 = boto3.resource('s3')
    if waffle.switch_is_active('use_mock_hr'):
        # Do not import mock data in prod
        from mozillians.users.tests import MockOrgChart
        orgchart_json = MockOrgChart.generate_json()
    else:
        orgchart_object = s3.Object(settings.ORGCHART_BUCKET, settings.ORGCHART_KEY).get()
        orgchart_json = orgchart_object['Body'].read()
    data = json.loads(orgchart_json)
    entries = data['Report_Entry']
    graph = {
        'root': [],  # employees with no manager hang off the synthetic root
    }
    # Create adjacency list: manager EmployeeID -> list of report EmployeeIDs
    for entry in entries:
        if 'WorkersManagersEmployeeID' not in entry:
            graph['root'].append(entry['EmployeeID'])
            continue
        if entry['WorkersManagersEmployeeID'] not in graph:
            graph[entry['WorkersManagersEmployeeID']] = [entry['EmployeeID']]
        else:
            graph[entry['WorkersManagersEmployeeID']].append(entry['EmployeeID'])
    # Create nodes dict
    nodes = {
        'root': Node(name='root', title='root')
    }
    for entry in entries:
        # Encode values to utf8
        # NOTE(review): .encode('utf8') returns bytes; under Python 3 the
        # '{} {}'.format(...) below would render b'...' reprs — this code
        # appears Python-2 era, verify before running on Python 3.
        first_name = entry['PreferredFirstName'].encode('utf8')
        last_name = entry['Preferred_Name_-_Last_Name'].encode('utf8')
        name = '{} {}'.format(first_name, last_name)
        title = entry['businessTitle'].encode('utf8')
        href = get_profile_link_by_email(entry['PrimaryWorkEmail']).encode('utf8')
        nodes[entry['EmployeeID']] = Node(name=name, title=title, href=href)
    # Create graph: attach each report to its manager's node
    for key in graph:
        parent = nodes[key]
        for child in graph[key]:
            node = nodes[child]
            if node == parent:
                # Workaround for data incosistency: self-managed employees go under root
                node.parent = nodes['root']
                continue
            node.parent = parent
    exporter = DictExporter()
    return exporter.export(nodes['root'])