def test_json_exporter():
    """JsonExporter: full export, maxlevel-limited export, and file output."""
    root = AnyNode(id="root")
    s0 = AnyNode(id="sub0", parent=root)
    AnyNode(id="sub0B", parent=s0)
    AnyNode(id="sub0A", parent=s0)
    s1 = AnyNode(id="sub1", parent=root)
    AnyNode(id="sub1A", parent=s1)
    AnyNode(id="sub1B", parent=s1)
    s1c = AnyNode(id="sub1C", parent=s1)
    AnyNode(id="sub1Ca", parent=s1c)

    exporter = JsonExporter(indent=2, sort_keys=True)
    exported = exporter.export(root).split("\n")
    exported = [e.rstrip() for e in exported]  # just a fix for a strange py2x behavior.
    lines = [
        '{',
        '  "children": [',
        '    {',
        '      "children": [',
        '        {',
        '          "id": "sub0B"',
        '        },',
        '        {',
        '          "id": "sub0A"',
        '        }',
        '      ],',
        '      "id": "sub0"',
        '    },',
        '    {',
        '      "children": [',
        '        {',
        '          "id": "sub1A"',
        '        },',
        '        {',
        '          "id": "sub1B"',
        '        },',
        '        {',
        '          "children": [',
        '            {',
        '              "id": "sub1Ca"',
        '            }',
        '          ],',
        '          "id": "sub1C"',
        '        }',
        '      ],',
        '      "id": "sub1"',
        '    }',
        '  ],',
        '  "id": "root"',
        '}'
    ]
    eq_(exported, lines)

    # maxlevel=2 must cut the tree below the first level of children.
    exporter = JsonExporter(indent=2, sort_keys=True, maxlevel=2)
    exported = exporter.export(root).split("\n")
    exported = [e.rstrip() for e in exported]  # just a fix for a strange py2x behavior.
    limitedlines = [
        '{',
        '  "children": [',
        '    {',
        '      "id": "sub0"',
        '    },',
        '    {',
        '      "id": "sub1"',
        '    }',
        '  ],',
        '  "id": "root"',
        '}'
    ]
    eq_(exported, limitedlines)

    # write() must produce the same content as export().  delete=False plus
    # closing both files before comparing is required on Windows.
    try:
        with NamedTemporaryFile(mode="w+", delete=False) as ref:
            with NamedTemporaryFile(mode="w+", delete=False) as gen:
                ref.write("\n".join(lines))
                # BUG FIX: `exporter` still referred to the maxlevel=2 instance,
                # while `ref` holds the *full* tree; recreate the full exporter.
                exporter = JsonExporter(indent=2, sort_keys=True)
                exporter.write(root, gen)
        # BUG FIX: the result of filecmp.cmp() was previously discarded, so the
        # comparison never verified anything.
        assert filecmp.cmp(ref.name, gen.name)
    finally:
        os.remove(ref.name)
        os.remove(gen.name)
def main(json_filepath, out_dot_path, htmlTitle):
    """IO: read a JSON tree, re-serialize it, and embed it into a copy of treeViewer.html.

    Args:
        json_filepath: path of the input JSON tree.
        out_dot_path: path of the output HTML file.
        htmlTitle: heading text inserted into the generated page.
    """
    # Read JSON
    with open(json_filepath) as data_file:
        data = json.load(data_file)
    # Get edges
    edges, root = tree2graph(data)
    exporter = JsonExporter(indent=1, sort_keys=True)
    # Single quotes would terminate the value='...' HTML attribute below, so
    # they are replaced by spaces; trailing whitespace and newlines are dropped.
    # FIX: the old code wrote the export to a temp file, re-read it with
    # fileinput (stream never closed), and left the file handle open on error.
    # The same string is built directly here.
    jsonTreeString = ''.join(
        line.rstrip().replace("'", " ")
        for line in exporter.export(root).split("\n"))
    body = '<body onload="onLoadDocument();">'
    body = body + '<h1>' + htmlTitle + '</h1>'
    body = body + ' <input id="vdspdata" type="hidden" value=\'' + jsonTreeString + '\' />'
    body = body + '</body>'
    src = 'treeViewer.html'
    filepath = pkg_resources.resource_filename(__name__, src)
    dst = out_dot_path
    copyfile(filepath, dst)
    with open(dst, "a") as myfile:
        myfile.write(body)
def ast_anytree_to_json(root_node):
    """Serialize an anytree tree rooted at *root_node* to a JSON string."""
    from anytree.exporter import JsonExporter

    return JsonExporter(indent=2, sort_keys=True).export(root_node)
def json_export(self):
    """Export the tree rooted at the starting board state to ``tree.json``.

    The root node is looked up in ``globals()`` under the key
    ``"<starting fen>0"`` — NOTE(review): this global-registry scheme is
    fragile; confirm how the nodes are registered by the caller.
    """
    exporter = JsonExporter(indent=2, sort_keys=True, dictexporter=None)
    filename = "tree.json"
    root = globals()[str(self.starting_board_state.fen()) + str(0)]
    # BUG FIX: the file handle was never closed; use a context manager.
    with open(filename, 'w') as filehandle:
        filehandle.write(exporter.export(root))
class AstFileHandler:
    """Buffers exported ASTs in a DataFrame and appends them to a per-process CSV.

    The first save() writes the header; later calls append without it, so the
    file stays a single valid CSV across flushes.
    """

    def __init__(self, output_folder, use_compression, process_nr=0):
        self.output_folder = output_folder
        self.use_compression = use_compression
        self.df = pd.DataFrame(columns=['id', 'AST'])
        self.first_save = True
        self.process_nr = process_nr
        # Create exporter to export the tree to JSON format
        self.exporter = JsonExporter(indent=2)

    def add_ast(self, ast, id):
        """Export *ast* to JSON and buffer it under *id*."""
        output = self.exporter.export(ast)
        # BUG FIX: DataFrame.append() was deprecated in pandas 1.4 and removed
        # in 2.0 — build a one-row frame and concat instead.
        row = pd.DataFrame([{'id': id, 'AST': output}])
        self.df = pd.concat([self.df, row], ignore_index=True)

    def save(self):
        """Flush the buffered rows to the per-process CSV and reset the buffer."""
        path = (f'{self.output_folder}asts{self.process_nr}.csv'
                f'{".bz2" if self.use_compression else ""}')
        if self.first_save:
            self.df.to_csv(path, index=False)
            self.first_save = False
        else:
            # Append without the header so the CSV stays well-formed.
            self.df.to_csv(path, header=False, index=False, mode='a')
        self.df = pd.DataFrame(columns=['id', 'AST'])
def test_tree():
    """Smoke-test anytree: build a small family tree, render it, export JSON."""
    from anytree import Node, RenderTree

    udo = Node("Udo")
    marc = Node("Marc", parent=udo)
    Node("Lian", parent=marc)
    dan = Node("Dan", parent=udo)
    Node("Jet", parent=dan)
    Node("Jan", parent=dan)
    joe = Node("Joe", parent=dan)

    print("CHILD")
    print(dan.children)
    print(dan.name)
    print(udo)
    Node('/Udo')  # leftover doctest expected-output line; kept for identical behavior
    print(joe)
    Node('/Udo/Dan/Joe')  # same: creates and discards a detached node
    for pre, fill, node in RenderTree(udo):
        print("%s%s" % (pre, node.name))

    from anytree.exporter import JsonExporter

    print("EXPORT")
    print(JsonExporter(indent=2, sort_keys=True).export(udo))
def test_json_exporter():
    """Json Exporter."""
    root = AnyNode(id="root")
    s0 = AnyNode(id="sub0", parent=root)
    AnyNode(id="sub0B", parent=s0)
    AnyNode(id="sub0A", parent=s0)
    s1 = AnyNode(id="sub1", parent=root)
    AnyNode(id="sub1A", parent=s1)
    AnyNode(id="sub1B", parent=s1)
    s1c = AnyNode(id="sub1C", parent=s1)
    AnyNode(id="sub1Ca", parent=s1c)
    lines = [
        '{',
        '  "children": [',
        '    {',
        '      "children": [',
        '        {',
        '          "id": "sub0B"',
        '        },',
        '        {',
        '          "id": "sub0A"',
        '        }',
        '      ],',
        '      "id": "sub0"',
        '    },',
        '    {',
        '      "children": [',
        '        {',
        '          "id": "sub1A"',
        '        },',
        '        {',
        '          "id": "sub1B"',
        '        },',
        '        {',
        '          "children": [',
        '            {',
        '              "id": "sub1Ca"',
        '            }',
        '          ],',
        '          "id": "sub1C"',
        '        }',
        '      ],',
        '      "id": "sub1"',
        '    }',
        '  ],',
        '  "id": "root"',
        '}'
    ]
    exporter = JsonExporter(indent=2, sort_keys=True)
    exported = exporter.export(root).split("\n")
    exported = [e.rstrip() for e in exported]  # just a fix for a strange py2x behavior.
    eq_(exported, lines)
    with NamedTemporaryFile(mode="w+") as ref:
        with NamedTemporaryFile(mode="w+") as gen:
            ref.write("\n".join(lines))
            exporter.write(root, gen)
            # BUG FIX: flush both handles before comparing; otherwise buffered
            # data may not be on disk yet and cmp sees truncated files.
            # NOTE(review): re-opening a NamedTemporaryFile by name while it is
            # still open does not work on Windows — this test is POSIX-only.
            ref.flush()
            gen.flush()
            assert filecmp.cmp(ref.name, gen.name)
def save(self):
    """Persist the policy tree as JSON to ``../data/policies/<agent_name>.rpt``."""
    extension = ".rpt"
    dir_name = "../data/policies/"
    file_name = dir_name + self.agent_name + extension
    exporter = JsonExporter(indent=2, sort_keys=True)
    with open(file_name, 'w') as f:
        # BUG FIX: json.dump(exporter.export(...), f) double-encoded the tree,
        # writing one quoted/escaped JSON *string* instead of the JSON document.
        # export() already returns serialized JSON, so write it verbatim.
        f.write(exporter.export(self.tree))
def write_arvore_no_arquivo(arvore, arquivo):
    """Write the tree *arvore* as indented, key-sorted JSON to the path *arquivo*."""
    exporter = JsonExporter(indent=2, sort_keys=True)
    # FIX: the local was named `json`, shadowing the json module.
    json_str = exporter.export(arvore)
    with open(arquivo, 'w') as obj_file:
        obj_file.write(json_str)
def on_stop(self):
    """Persist the active note (if any) and the folder tree on shutdown."""
    if self.current.button:
        self.bank.add_note(self.current.note, self.current.text)
    self.bank.save_notes()
    storage = self.get_current_storage()
    serialized = TreeExporter(indent=2, sort_keys=True).export(storage.root_folder)
    self.bank.save_tree(serialized)
def _serialize_ontology(root, filename=None):
    """Serialize the ontology rooted at *root* to JSON.

    Writes to *filename* when one is given; otherwise returns the serialized
    tree as a string.
    """
    exporter = JsonExporter(indent=2, sort_keys=True)
    if not filename:
        return exporter.export(root)
    exporter.write(root, filename)
def _serialize_ontologies(roots, filename):
    """Serialize ontologies given by their roots to a JSON file.

    If *filename* is falsy, return the list of serialized trees instead.
    """
    exporter = JsonExporter(indent=2, sort_keys=True)
    forest = [exporter.export(root) for root in roots]
    if not filename:
        return forest
    with open(filename, 'w') as f:
        json.dump(forest, f, indent=2, sort_keys=True)
def load_serialize_evidence_dict():
    """Load the pickled evidence dict and store a JSON-serialized copy globally.

    Each entry's 'tree' is exported to JSON, then the whole entry is dumped to
    a JSON string; the result replaces the module-level ``server_evidence_dict``.
    """
    global server_evidence_dict
    print("Loading evidence dict...", end='')
    with open(evidence_dict_path, 'rb') as fd:
        evidence_dict = pickle.load(fd)
    print("{} evidences".format(len(evidence_dict)))
    print("Serializing evidence dict...")
    exporter = JsonExporter()
    for key in tqdm(evidence_dict):
        entry = evidence_dict[key]
        entry['tree'] = exporter.export(entry['tree'])
        evidence_dict[key] = json.dumps(entry)
    server_evidence_dict = evidence_dict
def export_conversation_trees_to_db(user_id):
    """Build Twitter conversation trees involving *user_id* and store them in Mongo.

    Collects root tweets from two queries (tweets *by* the user replying to
    someone, and tweets replying *to* the user), deduplicates them via the
    module-level `process_set`, builds an anytree per root with `populate_node`,
    and inserts each tree as a JSON document into `collection_trees`.
    NOTE(review): relies on module globals `collection`, `collection_trees`,
    `process_set`, `find_root_tweet`, `populate_node` — not visible here.
    """
    root_id_list = []
    root_nodes_list = []
    print("Selecting all tweets from id: {}...".format(user_id))
    # Mentioning somebody
    conversation_query = collection.find(
        {'user.id': user_id, 'in_reply_to_user_id': {"$ne": None}})
    # Mentioned by somebody
    conversation_query2 = collection.find({'in_reply_to_user_id': user_id})
    print("SET A: {} B: {} ".format(conversation_query.count(), conversation_query2.count()))
    for i, tweet in enumerate(conversation_query):
        # Walk up to the conversation root; dedupe across both queries.
        t = find_root_tweet(tweet)
        if t['id'] not in process_set:
            process_set.add(t['id'])
            root_id_list.append(t)
        if i % 1000 == 0:
            print("Processing mentions {}".format(i))
    for i, tweet in enumerate(conversation_query2):
        t = find_root_tweet(tweet)
        if t['id'] not in process_set:
            process_set.add(t['id'])
            root_id_list.append(t)
        if i % 1000 == 0:
            print("Processing mentioned {}".format(i))
    print("SET A: {} B: {} UNION: {}".format(conversation_query.count(),
                                             conversation_query2.count(),
                                             len(process_set)))
    for i, root_tweet in enumerate(root_id_list):
        root_nodes = AnyNode(id=root_tweet['id'],
                             name=root_tweet['user']['name'],
                             text=root_tweet['text'])
        root_nodes.id = root_tweet['id']  # Not sure if why I need to state this two times.
        if i % 1000 == 0:
            pprint.pprint("Populating: {}".format(i))
        populate_node(root_nodes)
        root_nodes_list.append(root_nodes)
    pprint.pprint('Total sum of root nodes: {}'.format(len(root_id_list)))
    # DotExporter(root_nodes_list[0]).to_picture("test.png")
    count = 0
    for i, tree in enumerate(root_nodes_list):
        # Node count of this conversation = descendants + the root itself.
        count = count + len(tree.descendants) + 1
        exporter = JsonExporter(indent=2, sort_keys=True)
        json_tree = exporter.export(tree)
        collection_trees.insert_one(json.loads(json_tree))
    print("Total conversation count: {}".format(count))
    # NOTE(review): raises ZeroDivisionError if no roots were found.
    print("Average conversation length: {}".format(count / (len(root_id_list))))
def to_json(self, sink=None, **kwargs):
    """Write region tree info to json.

    Arguments:
        sink (str or None): file to save to. If None, the JSON string is
            returned instead of being written.
        kwargs: additional arguments passed through to
            anytree.exporter.jsonexporter.JsonExporter / json.dumps.
    """
    exporter = JsonExporter(indent=2, **kwargs)
    if not sink:
        return exporter.export(self.tree_root)
    with open(sink, 'w') as outfile:
        exporter.write(self.tree_root, outfile)
    return sink
def convert_to_json(input="canopus_classyfire/classyfire.json",
                    output="canopus_classyfire/classyfire_ontology.json"):
    """Convert a ClassyFire ontology JSON dump into an anytree JSON file.

    Reads terms from *input*, links every node to its parent via chemont ids,
    and writes the resulting tree to *output*.
    """
    import json
    root = None  # the (single) term without a parent becomes the root
    nodes = {}
    # read the dump and cache all nodes in a dict{chemont_id: node}
    with open(input) as json_file:
        data = json.load(json_file)
    for term in data:
        term_id = term["chemont_id"]  # renamed: `id` shadowed the builtin
        name = term["name"]
        parent_id = term["parent_chemont_id"]
        # FIX: was `parent_id == None`; identity check is the idiomatic form.
        if parent_id is None or parent_id == "null":
            # create root node
            root = Node(name, id=term_id)
            nodes[term_id] = root
        else:
            # currently only uses one parent
            nodes[term_id] = Node(name, id=term_id, parent_id=parent_id)
    # link all nodes to their parents
    for key, node in nodes.items():
        # BUG FIX: was `key is not root.id` — identity comparison on strings,
        # which only works by accident of interning; compare by value.
        if key != root.id:
            try:
                # find parent in cached nodes and set to node
                node.parent = nodes[node.parent_id]
            except AttributeError as ex:
                print(ex)
                raise ex
    # generate json string (renamed: `json` shadowed the module)
    exporter = JsonExporter(indent=2, sort_keys=True)
    json_str = exporter.export(root)
    # print json and tree for debugging
    print(json_str)
    for pre, _, node in RenderTree(root):
        print("%s%s" % (pre, node.name))
    # export to json file
    print("Writing to {}".format(output))
    with open(output, "w") as file:
        print(json_str, file=file)
def convert_to_json(input="GFOP.owl", output="GFOP.json"):
    """Convert the GFOP OWL ontology into an anytree JSON file.

    Terms without a resolvable parent are attached directly to a synthetic
    "GFOP" root node.
    """
    # create a root to bundle everything
    root: Node = Node("GFOP")
    nodes = {}
    # read owl file and cache all nodes in a dict{name, node}
    obo = Ontology(input)
    for term in obo.terms():
        term_id = term.id  # renamed: `id` shadowed the builtin
        name = term.name
        # find parents in distance 1 (exclude self)
        parent_terms = term.superclasses(with_self=False, distance=1).to_set()
        if not parent_terms:
            nodes[name] = Node(name, id=term_id)
        else:
            # currently only uses one parent
            parent = parent_terms.pop()
            nodes[name] = Node(name, id=term_id, parent_id=parent.id,
                               parent_name=parent.name)
    # link all nodes to their parents
    for key, node in nodes.items():
        # BUG FIX: was `key is not root.name` — identity comparison on strings;
        # compare by value.
        if key != root.name:
            try:
                # find parent in cached nodes and set to node
                node.parent = nodes[node.parent_name]
            except AttributeError:
                # no parent_id/parent_name attribute - add to root
                node.parent = root
    # generate json string (renamed: `json` shadowed the module)
    exporter = JsonExporter(indent=2, sort_keys=True)
    json_str = exporter.export(root)
    # print json and tree for debugging
    print(json_str)
    for pre, _, node in RenderTree(root):
        print("%s%s" % (pre, node.name))
    # export to json file
    print("Writing to {}".format(output))
    with open(output, "w") as file:
        print(json_str, file=file)
def build_tree(root):
    """Hash every subtree bottom-up and record run info.

    Each node is temporarily marked as a root so its exported JSON (and thus
    its hash) is position-independent; the original flag and name are restored
    before moving on.
    """
    for node in PostOrderIter(root):
        saved_is_root = node.config.is_root
        node.config.is_root = True
        node.name = str(node.config)
        exporter = JsonExporter(sort_keys=True)
        subtree_bytes = exporter.export(node).encode('utf-8')
        digest = hashlib.md5(subtree_bytes).hexdigest()
        node.config.set_hash(digest)
        if node.config.is_run:
            save_info(node)
        # restore the node's original state
        node.config.is_root = saved_is_root
        node.name = str(node.config)
def treeBuilder():  # Tree Builder Function
    """Assemble the library tree from the SQLite tables and return it as JSON.

    Walks three levels: user-selected library locations, checked directories,
    then checked files, keying every node by its full path so parents can be
    looked up by the path stored in the child rows.
    NOTE(review): relies on module globals `c` (sqlite cursor) and `libNode`.
    """
    # NOTE(review): this inner helper is never called anywhere in the function.
    def returnJsonTree(d):
        exporter = JsonExporter(indent=2, sort_keys=True, ensure_ascii=False)
        return (exporter.export(d))

    exporter = JsonExporter(indent=2, sort_keys=True, ensure_ascii=False)
    # Create a dictionary to procedurally store node objects in
    libTreeDict = {}
    try:
        libTreeDict["myRoot"] = libNode("my Library Folders", fullpath="/", topDir=1)
        # Build first layer, user selected library locations
        for row in c.execute('SELECT location FROM location'):
            # and in the loop use the name as key when you add your instance:
            libTreeDict[(row[0])] = libNode(
                os.path.basename(Path(row[0])),
                fullpath=row[0],
                parent=libTreeDict[("myRoot")],
                topDir=1)  # root is parent '/' and locationDir ID is '/path'
        for row in c.execute(
                '''SELECT name, location, parent_dir, checked FROM directories WHERE checked == 1'''
        ):
            # TODO dictionary insert name variable, is it too long?
            libTreeDict[(row[1])] = libNode(
                row[0],
                fullpath=row[1],
                parent=libTreeDict[
                    row[2]])  # parent is stored in dir database as '/path'
        # End points, files. Directories or locations are possible parents
        for row in c.execute(
                '''SELECT name, location, parent_dir, track_id, type, size, checked FROM library WHERE checked == 1'''
        ):
            libTreeDict[(row[1])] = libNode(
                row[0],
                row[1],
                parent=libTreeDict[(row[2])],
                track_data=[row[3], row[0], row[4], row[5]
                            ])  # parent is stored in dir database as '/path'
        # returnTree
        return (exporter.export(libTreeDict["myRoot"]))
    except Exception as e:
        # NOTE(review): returns the error (message or exception object) instead
        # of raising, so callers must type-check the result.
        if hasattr(e, 'message'):
            return (getattr(e, 'message', str(e)))
        else:
            return (e)
def create_tree(self):
    """Build a small 3-ply MCTS demo tree from the opening position,
    backpropagate one simulation result from the leaf, render it, and dump
    the tree to ``test.json``.
    """
    d0 = MCTSNode(state=str(self.starting_board_state.fen()), wins=1, sims=3)
    self.starting_board_state.push_san("e4")
    # binding is unused, but constructing the node attaches it to d0
    d1n1 = MCTSNode(state=str(self.starting_board_state.fen()), wins=0, sims=1, parent=d0)
    self.starting_board_state.push_san("e5")
    d1n2 = MCTSNode(state=str(self.starting_board_state.fen()), wins=1, sims=2, parent=d0)
    self.starting_board_state.push_san("Qh5")
    d2n1 = MCTSNode(state=str(self.starting_board_state.fen()), wins=1, sims=1, parent=d1n2)

    print(RenderTree(d0))
    print("\n")

    # backpropagation: credit one win/sim to every ancestor of the leaf
    sim_node = d2n1
    while (sim_node.parent):
        sim_node.parent.wins += 1
        sim_node.parent.sims += 1
        sim_node = sim_node.parent

    for pre, _, node in RenderTree(d0):
        treestr = u"%s%s" % (pre, node.score)
        print(treestr.ljust(8), node.wins, node.sims)
    print("\n")

    exporter = JsonExporter(indent=2, sort_keys=True)
    filename = "test.json"
    # BUG FIX: the file handle was never closed; use a context manager.
    with open(filename, 'w') as filehandle:
        filehandle.write(exporter.export(d0))
def depthFirstTreeIteration(self, parentID):
    """POST this node to the region service, then recurse into its children.

    A node with no parent is POSTed to the collection endpoint; otherwise it
    is attached under *parentID* via ``/region/<parentID>/add``. The id
    returned by the service becomes the parentID for the children.
    """
    print(parentID, self.name)
    # FIX: removed a dead `JsonExporter(...).export(self)` whose result was
    # immediately overwritten by the dict below, and an unused `headers` dict
    # (requests sets Content-Type itself when `json=` is used).
    id = 0
    payload = {'name': self.name, 'title': self.name, 'url': self.url}
    print(payload)
    if parentID is None:
        # Root node: POST to the collection endpoint and capture the new id.
        print("POST")
        url = 'http://localhost:8080/region'
        regions_post_response = requests.post(url=url, json=payload)
        print(regions_post_response)
        if regions_post_response.status_code == 200:
            id = regions_post_response.json()['id']
        else:
            print("error")
            print(regions_post_response.status_code)
            return
    else:
        # Child node: attach under the parent.
        url = 'http://localhost:8080/region/' + parentID + "/add"
        print("POST on ", parentID)
        print(url)
        regions_post_response = requests.post(url=url, json=payload)
        if regions_post_response.status_code == 200:
            id = regions_post_response.json()['id']
        else:
            print("error")
            print(regions_post_response.status_code)
            return
    for child in self.children:
        child.depthFirstTreeIteration(id)
def agg_main():
    """CLI entry: run agglomerative clustering on a data file.

    Optionally writes the dendrogram tree as JSON to the output file and
    returns the final clusters at the given threshold.
    """
    args = get_args()
    fname = args['filename']
    threshold = int(args['threshold'])
    output_fn = args['out']
    output_res = (args['boolean'] == 'True')
    df = source.get_data(fname)
    res = agg_clustering(df)
    root = res[0][0]       # dendrogram root
    json_root = res[1][0]  # exportable copy of the tree
    #print_dendogram(root)
    exporter = JsonExporter(indent=2)
    # FIX: local was named `json`, shadowing the json module.
    json_str = exporter.export(json_root)
    c_final = print_clusters(root, threshold, output_res)
    if output_res:
        # FIX: file handle was opened without a context manager.
        with open(output_fn, "w") as fo:
            fo.write(json_str)
    return c_final
def agg_main():
    """CLI entry: run agglomerative clustering on document vectors vs. ground truth.

    Optionally writes the dendrogram tree as JSON to the output file and
    returns the final clusters at the given threshold.
    """
    args = get_args()
    groundTruth = args['gt_fn']
    doc_vectors = args['vec_fn']
    threshold = int(args['threshold'])
    output_fn = args['out']
    output_res = (args['boolean'] == 'True')
    data = source.get_data(groundTruth, doc_vectors)
    res = agg_clustering(data)
    root = res[0][0]       # dendrogram root
    json_root = res[1][0]  # exportable copy of the tree
    #print_dendogram(root)
    exporter = JsonExporter(indent=2)
    # FIX: local was named `json`, shadowing the json module.
    json_str = exporter.export(json_root)
    c_final = print_clusters(root, threshold, output_res)
    if output_res:
        # FIX: file handle was opened without a context manager.
        with open(output_fn, "w") as fo:
            fo.write(json_str)
    return c_final
def saving_tree_test():
    """Build a minimal TTree and persist it to JSON with anytree's JsonExporter."""
    # A TTree is always seeded with a root node
    root_node = Node(root)
    # Extra nodes to hang off the root
    a_node = Node(_a)
    b_node = Node(_b)
    tree_to_save = TTree("root", root_node)
    tree_to_save.add_node(root_node, a_node)
    tree_to_save.add_node(root_node, b_node)
    # Expected shape:
    #   root (0)
    #   ├── _a (1)
    #   └── _b (2)
    print('\n')
    print("Confirm that tree matches example code:")
    tree_to_save.print_tree(True)
    print('\n')

    from anytree.exporter import JsonExporter

    # `default=` tells json what to emit for values it cannot serialize
    js_exporter = JsonExporter(indent=2, sort_keys=True,
                               default=lambda o: '<not serializable>')
    with open("./ts_modeling/saved_trees/tree_to_save.json", 'w') as js_file:
        js_exporter.write(tree_to_save.root, js_file)
    print("Here is the json formatting:")
    print(js_exporter.export(tree_to_save.root))
    print('\n')
def dbgPrintTreeToJson(curNode, outputFile=None, prevCallback=None, postCallback=None):
    """Debug: dump a tree's relations to a json file.

    Args:
        curNode (Node): current tree node (single root or one of a top-node list)
        outputFile (str): output file; defaults to "AllNode.json"
        prevCallback (function): called before exporting, e.g. to convert
            Timestamp values to str
        postCallback (function): called afterwards, e.g. to convert the str
            values back to Timestamp
    """
    if not outputFile:
        outputFile = "AllNode.json"
    if prevCallback:
        prevCallback(curNode)
    print("Output to json %s ..." % outputFile)
    # create the folder up front so the json export cannot fail on a missing dir
    targetFolder = os.path.dirname(outputFile)
    if targetFolder:
        createFolder(targetFolder)
    exported = JsonExporter(indent=2, sort_keys=True).export(curNode)
    saveJsonToFile(outputFile, json.loads(exported))
    if postCallback:
        postCallback(curNode)
def getCreatedProcesses(self):
    """Build a process-creation tree from sorted event-log entries.

    Groups child processes under their parent pid nodes; a child whose ppid
    matches several cached nodes is marked unknown. The result is stored in
    ``self.data`` as a JSON array for bootstrap-treeview.js.
    """
    main_node = NewNode(00000)
    main_node.text = "Event log Processes"
    relevant_events = self.sort_events()
    print "Num of events - " + str(len(relevant_events))
    for event_item in relevant_events:
        pid, ppid, new_process_name, command_line, event_date, parent_process_name = event_item[
            0], event_item[1], event_item[2], event_item[3], event_item[
                4], event_item[5]
        # find if there is already a node of the parent id
        res = search.findall(main_node, filter_=lambda node: node.name == ppid)
        if len(res) == 0:
            # unseen parent: create it under the root, then attach the child
            parent_node = NewNode(ppid, parent=main_node)
            parent_node.tags.append(str(ppid))
            parent_node.text = parent_process_name
            child_node = NewNode(pid, parent=parent_node)
            self.setNodeInfo(child_node, command_line, new_process_name, pid,
                             event_date, parent_process_name)
        else:
            # attach the child under every node matching the ppid
            for parent_node in res:
                child_node = NewNode(pid, parent=parent_node)
                self.setNodeInfo(child_node, command_line, new_process_name,
                                 pid, event_date, parent_process_name)
            if len(res) > 1:
                # ambiguous parent: only the last-created child gets flagged
                # NOTE(review): earlier duplicates keep their normal text — confirm intended
                child_node.unknown = True
                child_node.text = "?" + new_process_name
    exporter = JsonExporter(indent=2, default=self.myconverter)
    d = exporter.export(main_node)
    # for bootstrap-treeview.js
    d2 = d.replace("children", "nodes")
    self.data = "[" + d2 + "]"
def print_tree_json(self):
    """Print the whole tree as indented, key-sorted JSON."""
    print(JsonExporter(indent=2, sort_keys=True).export(self.root))
def get(self, request, *args, **kwargs):
    """Return the validation hierarchy as a JSON tree for the frontend.

    Builds a gen -> platform -> os_group -> os -> env -> type -> validation
    branch per Validation, applies the optional filters from the `data` query
    parameter, and merges branches into one anytree, which is exported to JSON
    with the synthetic root level stripped.
    """
    filters_data = request.GET.get('data', {})
    if filters_data:
        filters_data = json.loads(filters_data)
        for f in filters_data:
            # a filter with both bounds is a date-range filter; parse ISO strings
            if all(key in f for key in ['start', 'end']):
                f['start'] = dateutil.parser.isoparse(f['start'])
                f['end'] = dateutil.parser.isoparse(f['end'])
    tree = Node('')
    validations_qs = Validation.objects.all() \
        .select_related('os__group', 'platform__generation', 'env', 'owner')
    for validation in validations_qs.order_by(
            '-platform__generation__weight', 'platform__weight',
            'os__group__name', 'os__name', 'env__name', 'name'):
        # shortcuts (NOTE: `os` shadows any module-level os import inside this loop)
        platform = validation.platform
        os = validation.os
        # tree branch data: gen -> platform -> os.group -> os -> env -> validation name
        branch = ({
            'obj': platform.generation,
            'name': platform.generation.name,
            'level': 'gen'
        }, {
            'obj': platform,
            'name': platform.short_name,
            'level': 'platform'
        }, {
            'obj': os.group,
            'name': os.group.name,
            'level': 'os_group'
        }, {
            # NOTE(review): `obj` is os.group here although the level is 'os' —
            # looks like it should be `os`; confirm before relying on node ids.
            'obj': os.group,
            'name': os.name,
            'level': 'os'
        }, {
            'obj': validation.env,
            'name': validation.env.name,
            'level': 'env'
        }, {
            'obj': validation.type,
            'name': validation.type.name,
            'level': 'validation_type'
        }, {
            'obj': validation,
            'name': validation.name,
            'level': 'validation'
        })
        # filter by input data: every filter must match (AND semantics via all(ok))
        if filters_data:
            ok = []
            for f in filters_data:
                # date range check
                if all(key in f for key in ['start', 'end']):
                    ok.append(f['start'] <= validation.date <= f['end'])
                # tree levels check
                else:
                    for node in branch:
                        if node['level'] == f['level']:
                            # validation name pattern check
                            if f['level'] == 'validation':
                                if f['value'].lower(
                                ) in node['name'].lower():
                                    ok.append(True)
                                    break
                            else:
                                # filter by id
                                if node['obj'].id in f['value']:
                                    ok.append(True)
                                    break
                        # filter validation nodes by owner/component/feature
                        if node['level'] == 'validation':
                            if f['level'] == 'user' and node[
                                    'obj'].owner.id in f['value']:
                                ok.append(True)
                                break
                            if f['level'] == 'component' and \
                                    set(node['obj'].components) & set(f['value']):
                                ok.append(True)
                                break
                            if f['level'] == 'feature' and \
                                    set(node['obj'].features) & set(f['value']):
                                ok.append(True)
                                break
                    else:
                        # for-else: no branch node satisfied this filter
                        ok.append(False)
            if not all(ok):
                continue
        parent = tree
        for node_data, icon_map in zip(branch, ICONS):
            # set icon according to tree level ICONS mapping
            icon, name = '', ''
            if isinstance(icon_map, tuple):
                # tuple entries map specific object names (aliases) to icons
                for alias in icon_map:
                    if node_data['obj'].name.lower() == alias[0]:
                        icon = alias[1]
            else:
                icon = icon_map
            name = node_data['name']
            node_main_params = {
                'parent': parent,
                'name': name,
                'text': name,
                'text_flat': name,
                'selected': False,
                'opened': True,
                'level': node_data['level'],
                'id': node_data['obj'].id,
                'klass': type(node_data['obj']).__name__,
                'icon': f'{icon} mdi tree-icon'
            }
            node_validation_params = {
                'passed': validation.passed,
                'failed': validation.failed,
                'error': validation.error,
                'blocked': validation.blocked,
                'skipped': validation.skipped,
                'canceled': validation.canceled,
                'owner': validation.owner.id,
                'date': validation.date.strftime('%a %b %d %Y')
            }
            # find node by name and level, if not create new one
            node = anytree.search.find(
                parent,
                lambda n: n.name == name and n.level == node_data['level'])
            if not node:
                if node_data['level'] == 'validation':
                    node = Node(**node_main_params, **node_validation_params)
                else:
                    node = Node(**node_main_params)
            parent = node
    exporter = JsonExporter()
    d = exporter.export(tree)
    # cut off root level to have Generation as first one on frontend
    d = json.loads(d).get('children', [])
    return Response(d)
def toFile(self, filename):
    """Write the index value, a newline, then the serialized tree to *filename*."""
    exporter = JsonExporter(indent=0, sort_keys=False)
    with open(filename, 'w') as out:
        out.write(str(self._index))
        out.write("\n")
        out.write(exporter.export(self._tree))
def monkertoJsonFile(path):
    """Parse a folder of Monker ``.rng`` range files into a game tree and dump it to JSON.

    Each file name encodes the action path (e.g. ``0.1.rng``); parents are
    found by dropping the last path segment. The resulting anytree is written
    to ``<folder name>.json`` in the working directory.
    """
    # print("give bbs (eg '30bb)")
    # bbs = input()
    # path = 'C:\\Users\\Teemu\\Desktop\\mttranges\\' + bbs
    print("loading... probably..")
    # inits / game parameters
    root = Node('root', id='root')
    nodeDict = {}
    txtDataDict = {}
    n_players = 0
    sb = 1
    bb = 2
    antes = 2

    def node_id(fn):
        # BUG FIX: fn.strip(".rng") strips any of the characters '.', 'r', 'n',
        # 'g' from BOTH ends of the name; we only want to drop the extension.
        return fn[:-len(".rng")] if fn.endswith(".rng") else fn

    # read file names & contents
    filenames = []
    for fn in os.listdir(path):
        filenames.append(fn)
        # FIX: handles were closed manually; use context managers
        with open(path + '/' + fn, "r") as f:
            txtDataDict[node_id(fn)] = f.read()
        # Max player count: the node where everybody folds ("0.0...0") has
        # len(segments) + 1 players; keep the largest such value seen.
        sortedNodeList = sorted(node_id(fn).split('.'))
        if (sortedNodeList[0] == '0' and sortedNodeList[-1] == '0'
                and len(sortedNodeList) + 1 > n_players):
            n_players = len(sortedNodeList) + 1

    # loop every node; each node's unique id is the action path so far, e.g. "0.1"
    for fn in sorted(filenames, key=lambda fn: len(fn.split('.'))):
        fn = node_id(fn)
        # the acting position is derived from the path and the player count
        pos = getPosition(fn, n_players)
        # action frequencies, e.g. call 60% / fold 40%
        freq = getNodeFreq(txtDataDict[fn])
        # the list of actions taken so far on the way to this node
        actionList = getActionList(fn, n_players, sb, bb, antes)
        # first actor hangs off the root; otherwise the parent is the path
        # minus the last '.'-separated segment
        if len(fn.split('.')) <= 1:
            parentNode = root
        else:
            parentNode = nodeDict['.'.join(fn.split('.')[:-1])]
        newNode = Node(actionList[-1] + " (" + str(round(freq * 100, 2)) + "%)",
                       id=fn,
                       data=txtDataDict[fn],
                       position=pos,
                       parent=parentNode,
                       freq=freq,
                       actionList=actionList)
        nodeDict[fn] = newNode

    # export to JSON, named after the last path component of the input folder
    exporter = JsonExporter(indent=2, sort_keys=True)
    with open(path.split("\\")[-1] + '.json', "w") as out_file:
        out_file.write(exporter.export(root))