def test_paste_tree(self):
    """Paste a two-node subtree under 'jane' and verify the link, then undo."""
    subtree = Tree()
    subtree.create_node("Jill", "jill")
    subtree.create_node("Mark", "mark", parent="jill")
    self.tree.paste("jane", subtree)
    self.assertEqual("jill" in self.tree.is_branch("jane"), True)
    # Restore the fixture tree for subsequent tests.
    self.tree.remove_node("jill")
def test_02_get_hierarchy_for_module_returns_single_node_when_nothing_depend_on_module(self, mock_client): """ Test that get_hierarchy_for_module returns a single node tree structure if no dependent modules are found :param mock_client: A mocked out version of erppeek.Client :return: """ # Mock Up mock_dp = DependencyGraph orig_mod_search = mock_dp.module_search orig_dep_search = mock_dp.dependency_search orig_client_search = mock_client.search mock_dp.module_search = MagicMock(return_value=[666]) mock_dp.dependency_search = MagicMock(return_value=[]) mock_dg = mock_dp('valid_module') test_hierarchy = Tree() test_hierarchy.create_node('valid_module', 'valid_module') self.assertEqual(mock_dg.hierarchy.to_json(), test_hierarchy.to_json(), 'get_hierarchy_for_module did not return [] when finding no dependent modules') # Mock Down mock_client.stop() mock_dp.module_search.stop() mock_client.search.stop() mock_client.search = orig_client_search mock_dp.dependency_search.stop() mock_dp.module_search = orig_mod_search mock_dp.dependency_search = orig_dep_search
class Conversation:
    """A tweet conversation stored as a tree rooted at the opening tweet."""

    def __init__(self, tweet):
        self.root_tweet = tweet
        self.conversation_tree = Tree()
        self.conversation_tree.create_node(tweet, tweet)
        self.depth = 0
        self.tweets_id = [tweet]
        self.width = 0

    def add_replay(self, tweet, parent_tweet):
        """Attach a reply tweet under its parent tweet."""
        self.conversation_tree.create_node(tweet, tweet, parent=parent_tweet)
        self.tweets_id.append(tweet)

    def set_depth(self):
        # Tree depth is zero-based; count levels instead.
        self.depth = self.conversation_tree.depth() + 1

    def find_depth(self):
        return self.depth

    def get_tweets_id(self):
        return self.tweets_id

    def set_width(self):
        self.width = len(self.tweets_id)

    def find_width(self):
        return self.width

    def get_conversation_tree(self):
        return self.conversation_tree
def visit_root(self, node, tree=None):
    """Build a fresh Tree rooted at repr(node) and visit each child into it.

    The incoming ``tree`` argument is accepted but ignored: a new Tree is
    always created (matches the original contract).
    """
    root_tag = repr(node)
    result = Tree()
    result.create_node(root_tag, root_tag)
    for child in node.children:
        result = self.visit(child, tree=result)
    return result
def test_show_data_property(self):
    """show() should accept a data_property naming an attribute on node data."""
    class Flower(object):
        def __init__(self, color):
            self.color = color

    tree = Tree()
    tree.create_node("Jill", "jill", data=Flower("white"))
    tree.show(data_property="color")
def main(): try: conf = open(args.config, 'r') tempConf = yaml.load_all(conf) for line in tempConf: list_path = line["ListPath"] write_missed = line["WriteMissed"] pack_list_file = open(list_path, "r+") pack_list = json.load(pack_list_file) checked = check(pack_list, write_missed) tree = Tree() tree.create_node(cur_time, "root") generate_tree(checked, tree, "root") print "\n" tree.show() print "\n" except KeyboardInterrupt: print '\nThe process was interrupted by the user' raise SystemExit
class Scansion(object):
    """
    .src : list of strings
    """
    #///////////////////////////////////////////////////////////////////////////
    def __init__(self, source_file):
        """
        Scansion.__init__

        source_file : (src) source file's name.

        Builds the hypothesis tree, then repeatedly extends every live
        (non-dead) leaf until none remain.
        """
        self.htree = Tree()
        self.src = []
        # creating root node (level 0) :
        self.htree.create_node(tag="root",
                               identifier="root",
                               data=Hypothesis(htree=self.htree,
                                               level=0,
                                               language=None,
                                               src=source_file))
        # calling root node :
        msg(0, "Calling the root node.")
        while True:
            live_leaves = [leaf for leaf in self.htree.leaves()
                           if not leaf.data.dead]
            if not live_leaves:
                break
            for leaf in live_leaves:
                leaf.data.go_on()
def _get_random_tree(self, start, max_depth=999):
    """ Returns a random tree from PCFG starting with symbol 'start'
    max_depth: the maximum depth of the tree; once the derivation reaches
    it, only rules from terminating_rule_ids are chosen so it can finish
    """
    t = Tree()
    t.create_node(ParseNode(start, ''))
    # get ids of not expanded nonterminals in tree
    nodes_to_expand, depth = self.__get_nodes_to_expand_and_depth(t)
    while len(nodes_to_expand) > 0:
        # for each non terminal, choose a random rule and apply it
        for node in nodes_to_expand:
            symbol = t[node].tag.symbol
            # if tree exceeded the allowed depth, expand nonterminals
            # using rules from terminating_rule_ids
            if depth >= (max_depth - 1):
                # choose from rules for nonterminal from terminating_rule_ids
                rhsix = np.random.choice(self.grammar.terminating_rule_ids[symbol], size=1)
            else:
                # choose from rules for nonterminal according to production probabilities
                rhsix = np.random.choice(len(self.grammar.rules[symbol]),
                                         p=self.grammar.prod_probabilities[symbol],
                                         size=1)
            t[node].tag.rule = rhsix[0]  # index of production rule used when expanding this node
            rhs = self.grammar.rules[symbol][rhsix[0]]
            # one child per right-hand-side symbol of the chosen rule
            for s in rhs:
                t.create_node(tag=ParseNode(s, ''), parent=node)
        nodes_to_expand, depth = self.__get_nodes_to_expand_and_depth(t)
    return t
def parse_xml(path):
    """Parse the billing XML into (list of bill trees, output destination).

    Each bill becomes a one-level tree: the root carries the bill's fixed
    amount (or the source e-mail when no fixed amount is set) and each
    child carries a user's split ratio, e.g.:

        (cox, "*****@*****.**")            (rent, 2000)
          /           \\            &        /         \\
    (Evan, 0.5)  (Jason, 0.5)     (Evan, 0.45)  (Jason, 0.55)
    """
    doc = ET.parse(path)
    bill_list = []
    destination = "console"
    for bill in doc.findall("bill"):
        billname = bill.get("name")
        # Fall back to the e-mail source when no fixed amount is given.
        amount = bill.get("fixed")
        if amount is None:
            amount = bill.get("from_email")
        bill_tree = Tree()
        bill_tree.create_node(tag=billname, identifier=billname, data=amount)
        for user in bill.findall("user"):
            bill_tree.create_node(tag=user.get("name"),
                                  identifier=user.get("name"),
                                  parent=billname,
                                  data=user.get("ratio"))
        bill_list.append(bill_tree)
    # Get the location to dump our results (the last <output> wins).
    for d in doc.findall("output"):
        destination = d.get("destination")
    return (bill_list, destination)
def build_directory_tree(service):
    """Build a treelib Tree of the Drive root folder's immediate children.

    service: an authorized Google Drive API service object.
    NOTE(review): the built tree is never returned; presumably `directory`
    is consumed via a closure or global elsewhere -- confirm.
    """
    print colored("*** Building directory tree ***", 'blue')
    # initialize a new directory structure
    directory = Tree()
    directory.create_node("Root", "root")
    page_token = None
    # Page through the children list until no nextPageToken is returned.
    while True:
        try:
            param = {}
            if page_token:
                param['pageToken'] = page_token
            # Get children of folderID
            children = service.children().list(folderId='root', **param).execute()
            # For each child in folder, get ID, name and Type
            # and write to the directory tree
            for child in children.get('items', []):
                try:
                    file__ = service.files().get(fileId=child['id']).execute()
                    directory.create_node(file__['title'], child['id'], parent='root',
                                          data=node('root', child['id'], file__['title'], file__['mimeType']))
                except errors.HttpError, error:
                    # Skip children whose metadata cannot be fetched.
                    print 'An error occurred: %s' % error
            # Get next page token for current folderID
            page_token = children.get('nextPageToken')
            if not page_token:
                break
        except errors.HttpError, error:
            print colored('An error occurred: %s', 'red') % error
            break
class TreePipeline(object):
    """Scrapy pipeline that accumulates breadcrumb paths into a category
    tree and dumps it to JSON/text files when the spider closes."""

    def open_spider(self, spider):
        self.tree = Tree()
        self.tree.create_node("root", "root")

    def process_item(self, item, spider):
        # Breadcrumb segments; '...'-truncated segments are cleaned and the
        # last one is replaced by the full page title.
        lst = item['text']
        lst = [x.strip() for x in [y.replace('...', '') for y in lst]]
        item['pagetitle'] = item['pagetitle'].replace('...', '')
        lst[-1] = item['pagetitle']
        for idx, elem in enumerate(lst):
            # Node identifiers are '|'-joined path prefixes so identical
            # category names under different parents stay distinct.
            if idx == 0:
                previous = "root"
            else:
                previous = "|".join(lst[:idx])
            elem = "|".join(lst[:idx + 1])
            # elem = elem.replace('...', '')
            elem = elem.encode('utf-8').decode('utf-8')
            if not self.tree.contains(elem):
                print "Adding node %s" % elem
                self.tree.create_node(elem, elem, parent=previous)
        # self.tree.show()
        return item

    def close_spider(self, spider):
        self.tree.show()
        with open(makepath('data/cats/tree.json'), 'w') as outfile:
            outfile.write(self.tree.to_json())
        self.tree.save2file(makepath('data/cats/tree.tree'))
def create_tree(indexed_titles, root, children=None):
    """Build a Tree rooted at `root` (id looked up in indexed_titles) and
    graft each child subtree onto the root."""
    tree = Tree()
    root_id = indexed_titles[root]
    tree.create_node(root, root_id)
    for subtree in (children or []):
        tree.paste(root_id, subtree)
    return tree
def test_modify_node_identifier_root(self):
    """update_node on the root should rewrite tree.root and re-key the node."""
    tree = Tree()
    tree.create_node("Harry", "harry")
    tree.create_node("Jane", "jane", parent="harry")
    root_id = tree['harry'].identifier
    tree.update_node(root_id, identifier='xyz', tag='XYZ')
    self.assertTrue(tree.root == 'xyz')
    self.assertTrue(tree['xyz'].tag == 'XYZ')
    # Children must now point at the renamed root.
    self.assertEqual(tree.parent('jane').identifier, 'xyz')
def merge_trees(t1, t2, tick):
    """Join two cluster trees under a fresh internal node.

    Internal nodes get negative identifiers (-tick); positive ids stay
    reserved for leaf nodes.
    """
    cluster_id = -tick
    cluster_name = "new_cluster_%s" % tick
    merged = Tree()
    merged.create_node(cluster_name, cluster_id)
    merged.paste(cluster_id, t1)
    merged.paste(cluster_id, t2)
    return merged, cluster_name
class AcquisitionChain(object):
    """A tree of acquisition masters/slaves, executed level by level."""

    def __init__(self):
        self._tree = Tree()
        self._root_node = self._tree.create_node("acquisition chain", "root")
        self._device_to_node = dict()

    def add(self, master, slave):
        """Attach `slave` under `master`, creating tree nodes on demand.

        Raises RuntimeError when an AcquisitionDevice is re-added under a
        different master.
        """
        slave_node = self._tree.get_node(slave)
        master_node = self._tree.get_node(master)
        if slave_node is not None and isinstance(slave, AcquisitionDevice):
            if slave_node.bpointer is not self._root_node and master_node is not slave_node.bpointer:
                raise RuntimeError(
                    "Cannot add acquisition device %s to multiple masters, current master is %s"
                    % (slave, slave_node._bpointer))
            else:  # user error, multiple add, ignore for now
                return
        if master_node is None:
            master_node = self._tree.create_node(tag=master.name, identifier=master, parent="root")
        if slave_node is None:
            slave_node = self._tree.create_node(tag=slave.name, identifier=slave, parent=master)
        else:
            # BUGFIX: treelib's Tree.move_node() takes *identifiers*; here the
            # devices themselves are the identifiers. Passing the Node
            # objects (as before) raises NodeIDAbsentError.
            self._tree.move_node(slave, master)

    def _execute(self, func_name):
        """Spawn `func_name` on every device, deepest level first, joining
        between levels so a whole level finishes before the one above starts."""
        tasks = list()
        prev_level = None
        for dev in reversed(list(self._tree.expand_tree(mode=Tree.WIDTH))[1:]):
            node = self._tree.get_node(dev)
            level = self._tree.depth(node)
            if prev_level != level:
                gevent.joinall(tasks)
                tasks = list()
                # BUGFIX: track the level we just entered; the original never
                # updated prev_level, so it joined after every single device
                # and serialized the whole chain.
                prev_level = level
            func = getattr(dev, func_name)
            tasks.append(gevent.spawn(func))
        gevent.joinall(tasks)

    def prepare(self, dm, scan_info):
        """Refresh each master's slave list from the tree, then run
        '_prepare' on all devices while the data manager prepares too."""
        # self._devices_tree = self._get_devices_tree()
        for master in (x for x in self._tree.expand_tree() if isinstance(x, AcquisitionMaster)):
            del master.slaves[:]
            for dev in self._tree.get_node(master).fpointer:
                master.slaves.append(dev)
        dm_prepare_task = gevent.spawn(dm.prepare, scan_info, self._tree)
        self._execute("_prepare")
        dm_prepare_task.join()

    def start(self):
        """Start all devices, then wait for each device's reading task and
        signal 'end' per device."""
        self._execute("_start")
        for acq_dev in (x for x in self._tree.expand_tree() if isinstance(x, AcquisitionDevice)):
            acq_dev.wait_reading()
            dispatcher.send("end", acq_dev)
def build_conversation(cursor):
    """Build one Tree per conversation root found in the database."""
    trees = list()
    for root in postgres_queries.find_conversations_root(cursor):
        root_id = root[0]
        tree = Tree()
        tree.create_node(root_id, root_id)
        # Recursively attach all descendant tweets.
        search_children(root_id, tree, cursor)
        trees.append(tree)
    return trees
def create_powerset_tree(self):
    """Build a tree over self.object_sets: the first set is the root, and
    every later set hangs under the set obtained by dropping its last
    (sorted) element."""
    tree = Tree(None)
    for position, obj_set in enumerate(self.object_sets):
        payload = (obj_set, self.get_max_timeset(obj_set))
        if position == 0:
            tree.create_node(payload, obj_set)
        else:
            parent_key = frozenset(sorted(list(obj_set)[0:-1]))
            tree.create_node(payload, obj_set, parent_key)
    return tree
def create_family_tree():
    """Build the sample family tree used by the examples."""
    tree = Tree()
    tree.create_node("Harry", "harry")  # root node
    members = (("Jane", "jane", "harry"),
               ("Bill", "bill", "harry"),
               ("Diane", "diane", "jane"),
               ("Mary", "mary", "diane"),
               ("Mark", "mark", "jane"))
    for tag, identifier, parent in members:
        tree.create_node(tag, identifier, parent=parent)
    return tree
def test_session_tree(beacon, capsys):
    """sessions_tree.show() must render the same as a hand-built reference."""
    session = beacon.get("test_session2")
    session.sessions_tree.show()
    printed_actual, _ = capsys.readouterr()
    reference = Tree()
    reference.create_node("test_session2", "test_session2")
    reference.create_node("test_session", "test_session", parent="test_session2")
    reference.show()
    printed_expected, _ = capsys.readouterr()
    assert printed_actual == printed_expected
class AcquisitionChain(object):
    """Tree of acquisition masters/slaves with preset and stop-flag support."""

    def __init__(self, parallel_prepare=False):
        self._tree = Tree()
        self._root_node = self._tree.create_node("acquisition chain", "root")
        self._device_to_node = dict()
        self._presets_list = list()
        self._parallel_prepare = parallel_prepare
        # device -> "one shot" flag; True means the device won't stop the scan.
        # WeakKeyDictionary so the chain does not keep devices alive.
        self._device2one_shot_flag = weakref.WeakKeyDictionary()

    @property
    def nodes_list(self):
        """All device nodes in expansion order, without the synthetic root."""
        nodes_gen = self._tree.expand_tree()
        nodes_gen.next()  # first node is 'root'
        return list(nodes_gen)

    def add(self, master, slave):
        """Attach `slave` under `master`, creating tree nodes on demand.

        Raises RuntimeError when an AcquisitionDevice is re-added under a
        different master.
        """
        self._device2one_shot_flag.setdefault(slave, False)
        slave_node = self._tree.get_node(slave)
        master_node = self._tree.get_node(master)
        if slave_node is not None and isinstance(slave, AcquisitionDevice):
            if(slave_node.bpointer is not self._root_node and
               master_node is not slave_node.bpointer):
                raise RuntimeError("Cannot add acquisition device %s to multiple masters, current master is %s" % (
                    slave, slave_node._bpointer))
            else:                 # user error, multiple add, ignore for now
                return
        if master_node is None:
            master_node = self._tree.create_node(
                tag=master.name, identifier=master, parent="root")
        if slave_node is None:
            slave_node = self._tree.create_node(
                tag=slave.name, identifier=slave, parent=master)
        else:
            self._tree.move_node(slave, master)
        slave.parent = master

    def add_preset(self, preset):
        """Register a preset hook to be applied around the scan."""
        self._presets_list.append(preset)

    def set_stopper(self, device, stop_flag):
        """
        By default any top master device will stop the scan.
        In case of several top master, you can define which one won't
        stop the scan
        """
        self._device2one_shot_flag[device] = not stop_flag

    def __iter__(self):
        # An empty chain (only the synthetic root) iterates as nothing.
        if len(self._tree) > 1:
            return AcquisitionChainIter(self, parallel_prepare=self._parallel_prepare)
        else:
            return iter(())
def test_modify_node_identifier_directly_failed(self):
    """Assigning node.identifier directly bypasses the tree registry: the
    node renames itself but stays filed under its old key."""
    tree = Tree()
    tree.create_node("Harry", "harry")
    tree.create_node("Jane", "jane", parent="harry")
    jane = tree.get_node("jane")
    self.assertTrue(jane.identifier == 'jane')
    # Failed to modify: direct attribute write, not update_node().
    jane.identifier = "xyz"
    self.assertTrue(tree.get_node("xyz") is None)
    self.assertTrue(tree.get_node("jane").identifier == 'xyz')
def test_modify_node_identifier_recursively(self):
    """update_node() must re-key the registry, unlike direct assignment."""
    tree = Tree()
    tree.create_node("Harry", "harry")
    tree.create_node("Jane", "jane", parent="harry")
    jane = tree.get_node("jane")
    self.assertTrue(jane.identifier == 'jane')
    # Success to modify: going through the tree API.
    tree.update_node(jane.identifier, identifier='xyz')
    self.assertTrue(tree.get_node("jane") is None)
    self.assertTrue(tree.get_node("xyz").identifier == 'xyz')
def to_tree(ts):
    """
    :param ts: list of stuff to be treed
    :return: treelib.Tree with the stuff run through encode() (LZ78-style
             (parent, symbol) pairs) and placed in the tree.
    """
    tree = Tree()
    tree.create_node("root", 0, data=[0, 0.0])
    for index, (parent_id, symbol) in enumerate(encode(ts)):
        tree.create_node(symbol, index + 1, parent=parent_id, data=[0, 0.0])
    return tree
def test_all_nodes_itr(self):
    """
    tests: Tree.all_nodes_iter
    Added by: William Rusnack
    """
    tree = Tree()
    self.assertEqual(len(tree.all_nodes_itr()), 0)
    created = [tree.create_node('root_node'),
               tree.create_node('second', parent=tree.root)]
    for node in tree.all_nodes_itr():
        self.assertTrue(node in created)
def conversation_regarding_language(cursor):
    """Build conversation trees (language-filtered roots) and collect
    depth/size histograms.

    Returns (flat list of all tweet ids as ints, list of conversation Trees).
    NOTE(review): tweet[2] is presumably in_reply_to (None => root) and
    tweet[5] a language flag -- confirm against find_conversation's query.
    """
    conversation_amount = postgres_queries.find_conversation_number(cursor)
    conversation_list = list()
    depth_dict = dict()
    depth_dict_long = dict()
    depth_dict_short = dict()
    number_of_tweets_dict = dict()
    test_i = 0
    for i in range(0, conversation_amount + 1, 1):
        conversation_tree = Tree()
        conversation = postgres_queries.find_conversation(i, cursor)
        test_i += len(conversation)
        for tweet in conversation:
            if tweet[2] is None and tweet[5] is True:
                conversation_tree.create_node(tweet[0], tweet[0])
                tweets_in_conversation = list()
                build_conversation_lang(tweet[0], conversation,
                                        conversation_tree,
                                        tweets_in_conversation)
        depth = conversation_tree.depth() + 1
        number_of_tweets = len(conversation_tree.all_nodes())
        # short/long histograms, split at 20 tweets
        if number_of_tweets >= 20:
            if depth in depth_dict_long:
                depth_dict_long[depth] += 1
            else:
                depth_dict_long[depth] = 1
        else:
            if depth in depth_dict_short:
                depth_dict_short[depth] += 1
            else:
                depth_dict_short[depth] = 1
        if number_of_tweets in number_of_tweets_dict:
            number_of_tweets_dict[number_of_tweets] += 1
        else:
            number_of_tweets_dict[number_of_tweets] = 1
        if depth in depth_dict:
            depth_dict[depth] += 1
        else:
            depth_dict[depth] = 1
        # check if conversation_tree is null- dont add
        if len(conversation_tree.all_nodes()) != 0:
            conversation_list.append(conversation_tree)
    # number = 0
    new_tweet_list_id = list()
    for con in conversation_list:
        nodes = con.all_nodes()
        for node in nodes:
            new_tweet_list_id.append(int(node.tag))
        # number += len(con.all_nodes())
    # print len(new_tweet_list_id)
    # for tweet_id in new_tweet_list_id:
    #     print tweet_id
    return new_tweet_list_id, conversation_list
def retrieve_dependencies(self, jarName):
    """Run `dosocs2 dependencies` for a jar and rebuild its dependency tree.

    jarName: jar to inspect; None means use the root jar of self.tree.
    Returns None (after printing a notice) when the jar has no child
    relationships; otherwise prints the reconstructed tree.

    FIXES vs. original:
    * `if rel is not None:` / `if dep is not None:` were always true for
      empty lists, so a missing match crashed with IndexError on `[0]`;
      now we test truthiness.
    * The nested list comprehensions reused the loop variable `item`,
      which clobbers it under Python 2's leaking comprehension scope;
      they now use distinct names.
    """
    if jarName is None:
        root = self.tree.get_node(self.tree.root)
        root = root.data.jarName
    else:
        root = jarName
    tgfOutput = subprocess.Popen('dosocs2 dependencies ' + root,
                                 stdout=subprocess.PIPE, shell=True)
    count = 0
    tree = Tree()
    dependencies = []
    relationships = []
    while True:
        line = tgfOutput.stdout.readline()
        if not line:
            break
        # "<id> - <name>" lines declare nodes; the first one is the root.
        match = re.match(r"(\d+) - (.*)", line)
        if match:
            if count == 0:
                count = count + 1
                tree.create_node(match.group(2), match.group(1))
            else:
                dependencies.append((match.group(2), match.group(1)))
        # "<parent> <child>" lines declare edges.
        match = re.match(r"(\d+) (\d+)", line)
        if match:
            relationships.append((match.group(1), match.group(2)))
    if not relationships:
        print("No child relationships for " + jarName)
        return None
    # Repeatedly attach children whose parent is already in the tree.
    while relationships:
        for item in relationships:
            node = tree.get_node(item[0])
            if node is not None:
                rels = [r for r in relationships
                        if int(r[0]) == int(node.identifier)]
                if rels:
                    rel = rels[0]
                    deps = [d for d in dependencies
                            if int(d[1]) == int(rel[1])]
                    if deps:
                        dep = deps[0]
                        tree.create_node(dep[0], dep[1], parent=node.identifier)
                        relationships.remove(rel)
                        dependencies.remove(dep)
    tree.show()
    if jarName is None:
        os.chdir(os.pardir)
def toy_demo():
    """Exercise genRange/gen_height_list/gen_suptag on small hand-built trees.

    Each dict maps parent -> children; the None key names the root. Leaf
    "children" are single-character strings, so iterating them still yields
    the leaf labels one by one.
    """
    tree_dict_list = []
    tree_dict_list.append({None: 'A', 'A': ['B', 'C'], 'B': 'D', 'C': 'E',
                           'E': ['F', 'G'],
                           'D': '1', 'F': '2', 'G': '3'})
    tree_dict_list.append({None: 'A', 'A': ['B', 'C'], 'B': ['D', 'E'],
                           'C': '1', 'D': '2', 'E': '3'})
    tree_dict_list.append({None: 'A', 'A': ['B', 'C', 'D', 'E'],
                           'B': '1', 'C': '2', 'D': '3', 'E': '4'})
    tree_dict_list.append({None: 'A', 'A': ['B', 'C', 'D'],
                           'B': ['E', 'F', 'G'], 'C': ['H', 'I'],
                           'D': ['J', 'K', 'L', 'M'], 'E': ['N', 'O'],
                           'N': '1', 'O': '2', 'F': '3', 'G': '4', 'H': '5',
                           'I': '6', 'J': '7', 'K': '8', 'L': '9', 'M': '0'})
    tree_dict_list.append({None: 'A', 'A': 'B', 'B': ['C', 'D', 'E'],
                           'C': ['H', 'I'], 'D': ['J', 'K', 'L', 'M'],
                           'E': ['N', 'O']})
    # Only the first toy tree is exercised here.
    for tree_dict in tree_dict_list[0:1]:
        tree = Tree()
        idx = 0
        ids = {None: None}  # label -> numeric node id
        for parents, children in sorted(tree_dict.items()):
            for child in children:
                ids[child] = idx
                tree.create_node(child, idx, parent=ids[parents], data=nRange(0, 0))
                idx += 1
        tree.show(idhidden=False)
        genRange(tree, tree.root, 0, 1)
        tree.show(data_property="mRange")
        # Dependency edges between leaves (by numeric id).
        tree_dep_dict = {ids['1']: ids['A'], ids['2']: ids['3'], ids['3']: ids['1']}
        hieght_dict = gen_height_list(tree, tree_dep_dict)
        print hieght_dict
        # hieght_dict = {ids['1']: 3, ids['2']: 3, ids['3']: 1}
        # hieght_dict = {ids['1']: 1, ids['2']: 1, ids['3']: 1, ids['4']: 2}
        # hieght_dict = {ids['1']: 1, ids['2']: 3, ids['3']: 1, ids['4']: 1,
        #                ids['5']: 3, ids['6']: 1, ids['7']: 1, ids['8']: 2,
        #                ids['9']: 1, ids['0']: 1}
        path_dict = {}
        gen_suptag(tree, tree.root, hieght_dict, path_dict)
        print sorted([(tree[k].tag, v) for k, v in path_dict.items()])
def types_of_conversation():
    """Build annotated conversation trees and print depth/size histograms.

    Returns the list of conversation Trees.
    NOTE(review): tweet[1] is presumably in_reply_to (None => root) --
    confirm against find_conversation's query.
    """
    conversation_amount = postgres_queries.find_annotated_conversation_number()
    conversation_list = list()
    depth_dict = dict()
    depth_dict_long = dict()
    depth_dict_short = dict()
    number_of_tweets_dict = dict()
    for i in range(0, conversation_amount + 1, 1):
        conversation_tree = Tree()
        converastion = postgres_queries.find_conversation(i)
        for tweet in converastion:
            if tweet[1] is None:
                conversation_tree.create_node(tweet[0], tweet[0])
                build_conversation(tweet[0], converastion, conversation_tree)
        depth = conversation_tree.depth() + 1
        number_of_tweets = len(conversation_tree.all_nodes())
        # short/long histograms, split at 20 tweets
        if number_of_tweets >= 20:
            if depth in depth_dict_long:
                depth_dict_long[depth] += 1
            else:
                depth_dict_long[depth] = 1
        else:
            if depth in depth_dict_short:
                depth_dict_short[depth] += 1
            else:
                depth_dict_short[depth] = 1
        if number_of_tweets in number_of_tweets_dict:
            number_of_tweets_dict[number_of_tweets] += 1
        else:
            number_of_tweets_dict[number_of_tweets] = 1
        if depth in depth_dict:
            depth_dict[depth] += 1
        else:
            depth_dict[depth] = 1
        conversation_list.append(conversation_tree)
    # print depth_dict
    print 'Depth of a conversation'
    for depth, count in depth_dict.iteritems():
        print depth, '\t', count
    print 'Number of tweets in a conversation'
    for number, count in number_of_tweets_dict.iteritems():
        print number, '\t', count
    print 'Depth of a long conversation'
    for depth, count in depth_dict_long.iteritems():
        print depth, '\t', count
    print 'Depth of a short conversation'
    for depth, count in depth_dict_short.iteritems():
        print depth, '\t', count
    return conversation_list
def test_unicode_filename(self):
    """export_to_dot must cope with non-ASCII output file names."""
    filename = 'ŕʩϢ.dot'
    tree = Tree()
    tree.create_node('Node 1', 'node_1')
    export_to_dot(tree, filename)
    expected = """\
digraph tree {
\t"node_1" [label="Node 1", shape=circle]
}"""
    self.assertTrue(os.path.isfile(filename),
                    "The file ŕʩϢ.dot could not be found.")
    generated = self.read_generated_output(filename)
    self.assertEqual(expected, generated,
                     "The generated file content is not the expected one")
    os.remove(filename)
def test_export_with_minus_in_filename(self):
    """Node identifiers containing '-' must be quoted in the dot output."""
    filename = 'id_with_minus.dot'
    tree = Tree()
    tree.create_node('Example Node', 'example-node')
    export_to_dot(tree, filename)
    expected = """\
digraph tree {
\t"example-node" [label="Example Node", shape=circle]
}"""
    self.assertTrue(os.path.isfile(filename),
                    "The file id_with_minus.dot could not be found.")
    generated = self.read_generated_output(filename)
    self.assertEqual(expected, generated,
                     "The generated file content is not the expected one")
    os.remove(filename)
class Group(ElementWithAttributes):
    """A directory-like container of Groups/DataSets backed by a treelib
    Tree, optionally linked to a directory on disk via self.path.

    Keys are slash-separated paths relative to this group.
    """

    def __init__(self):
        super(Group, self).__init__()
        self.type = DATA_DIR_TYPES.GROUP
        # Filesystem location when linked to disk; None while in-memory only.
        self.path = None
        self.tree = Tree()

    def __getitem__(self, item):
        """Look up a sub-group, dataset, or attribute by path.

        Raises KeyError for unknown paths and GroupError for an unloaded
        DataSet when this element is not linked to a File or Group.
        """
        if item not in self.tree:
            # Not a node: maybe "<node path>/<attribute name>".
            rsplit = item.rsplit("/", maxsplit=1)
            if len(rsplit) == 1:
                item_0 = self.tree.root
                key = rsplit[0]
            else:
                item_0, key = rsplit
            if item_0 in self.tree:
                node = self.tree[item_0]
                if (isinstance(node.data, ElementWithAttributes)
                        and key in node.data.attrs):
                    return node.data.attrs[
                        key]  # ### RETURN attribute value ###
            raise KeyError(f"{item} is not a valid key")
        node = self.tree[item]
        if isinstance(node.data, Group):
            # rebuild tree with reduced identifiers
            # (identifiers in self.tree carry the full path prefix `item`;
            # the sub-group's own tree uses identifiers relative to it)
            stree = self.tree.subtree(item)
            for n in stree.all_nodes_itr():
                if n.predecessor(stree.identifier) is None:
                    parent = None
                else:
                    parent = n.predecessor(stree.identifier).split(
                        item, maxsplit=1)[1]
                node.data.tree.create_node(n.tag,
                                           n.identifier.split(item,
                                                              maxsplit=1)[1],
                                           parent,
                                           data=n.data)
        elif isinstance(node.data, DataSet):
            if node.data.df.empty:
                if self.path is None:
                    raise GroupError(
                        f"{item} is not loaded yet and this element is not linked to a File or Group"
                    )
                # Lazy-load the dataframe from the linked directory.
                node.data.df = pd.read_parquet(self.path / item / DATA_FILE)
        return node.data

    def __setitem__(self, key, value):
        """Insert a Group/DataSet/Raw/Attribute under `key`, persisting it
        when this group is linked to disk."""
        if key in self.tree:
            raise KeyError(f"{key} already exists")
        rsplit = key.rsplit("/", maxsplit=1)
        if len(rsplit) == 1:
            item_0 = self.tree.root
            key_1 = rsplit[0]
        else:
            item_0, key_1 = rsplit
        if item_0 is not None and item_0 not in self.tree:
            raise KeyError(f"Parent key {item_0} does not exist")
        dd_type = None
        if isinstance(value, Group):
            dd_type = value.type
            # Re-key the incoming group's tree so every identifier carries
            # the new `key` prefix, then graft it under the new node.
            new_tree = Tree()
            for node in value.tree.all_nodes_itr():
                if node.parent is None:
                    parent = None
                else:
                    parent = key + "/" + node.parent
                new_tree.create_node(node.tag,
                                     key + "/" + node.identifier,
                                     parent=parent,
                                     data=node.data)
            value.tree = new_tree
            self.tree.create_node(tag=key_1,
                                  identifier=key,
                                  parent=item_0,
                                  data=value)
            self.tree.paste(key, new_tree)
        elif isinstance(value, DataSet):
            dd_type = DATA_DIR_TYPES.DATASET
            self.tree.create_node(tag=key_1,
                                  identifier=key,
                                  parent=item_0,
                                  data=value)
            if self.path is not None:
                value.df.to_parquet(self.path / key / DATA_FILE)
        elif isinstance(value, Raw):
            pass
        elif isinstance(value, Attribute):
            pass
        else:
            raise ValueError(f"{value} is not a valid type for DataDir")
        # write ddir and attributes file if self is linked
        if isinstance(value, ElementWithAttributes) and self.path is not None:
            (self.path / key).mkdir()
            _write_ddir_json(self.path / key, dd_type=dd_type)
            json.dump(value.attrs,
                      (self.path / key / ATTRIBUTES_FILE).open("w"),
                      indent=4)

    def link(self, path):
        # Bind this group to a filesystem directory for lazy load/persist.
        self.path = path
def Test1():
    """Build a small sample tree, show it, persist it with store(), and
    return it. The `data`/`data3` nested-dict literals are reference
    shapes only -- they are never used below."""
    tree = Tree()
    tree.create_node("NodeRoot", "root", data="root data", parent=None)  # root node
    tree.create_node("Branch1", "node1", data="node1 data", parent="root")
    tree.create_node("Branch2", "node2", parent="root")
    tree.create_node("Branch11", "node11", parent="node1")
    tree.create_node("Branch121", "node12", parent="node1")
    tree.create_node("Branch111", "node111", parent="node12")
    data = [{
        "name": "root",
        "value": 1,
        "children": [{
            "name": "root-child1",
            "value": 2
        }, {
            "name": "root-child2",
            "value": 3,
            "children": [{
                "name": "root-child2-child1",
                "value": 4
            }]
        }]
    }]
    data3 = [{
        'name': 'A',
        'children': [{
            'name': 'B',
            'children': [{
                'name': 'bar',
                'children': [{
                    'name': 'bar'
                }, {
                    'name': 'Bar'
                }]
            }, {
                'name': 'Bar'
            }]
        }, {
            'name': 'C'
        }]
    }]
    tree.show()
    # tree.
    # print(tree)
    store(tree, "test.dat")
    return tree
def use_hyp(word2syn, output, data):
    """Label word pairs with WordNet hypernym entailment (>, <) and
    sibling-alternation (|) relations.

    word2syn : word -> list of synset ids
    output   : list of result lines; appended to and returned
    data     : list of (word1, word2) pairs still unlabelled
    Returns (output, un_change) where un_change holds still-unlabelled pairs.
    """
    un_change = []
    # Hypernym tree rooted at the WordNet top synset.
    dic = Tree()
    dic.create_node("100001740", "100001740")
    add = -1
    # Re-scan the relation file until a full pass adds no new node:
    # children can appear in the file before their parents.
    while add != 0:
        add = 0
        # NOTE(review): the handle is re-opened each pass and never closed.
        f = open(datapath + "wn_hyp.pl", "r")
        while True:
            line = f.readline()
            if not line:
                break
            else:
                l, r = re.findall('\d+', line)
                try:
                    dic.create_node(l, l, parent=r)
                    add += 1
                except:
                    # parent not inserted yet, or duplicate -- retry next pass
                    pass
        print(dic.size())
    # entail[x] = every synset strictly below x in the hypernym tree.
    entail = defaultdict(list)
    for n in dic.all_nodes():
        for m in dic.subtree(n.tag).all_nodes():
            if m.tag != n.tag:
                entail[n.tag].append(m.tag)
    label = set()
    for d in data:
        d0 = d[0]
        d1 = d[1]
        # Normalize plurals before the synset lookup.
        if p.singular_noun(d[0]) != False:
            d0 = p.singular_noun(d[0])
        if p.singular_noun(d[1]) != False:
            d1 = p.singular_noun(d[1])
        for i in word2syn[d0]:
            for j in word2syn[d1]:
                if j in entail[i]:
                    if d[0] + "\t" + ">" + "\t" + d[1] not in output:
                        output += [d[0] + "\t" + ">" + "\t" + d[1]]
                    label.add(d)
                elif i in entail[j]:
                    if d[0] + "\t" + "<" + "\t" + d[1] not in output:
                        output += [d[0] + "\t" + "<" + "\t" + d[1]]
                    label.add(d)
        if d not in un_change and d not in label:
            un_change += [d]
    print("before single: " + str(len(data)) + " after: " + str(len(un_change)))
    output += ["\n"]
    del entail
    data = un_change
    del un_change
    un_change = []
    # alter[x] = siblings of x (same parent, excluding x and its parent).
    alter = defaultdict(list)
    for n in dic.all_nodes():
        for m in dic.siblings(n.tag):
            if m.tag != n.tag and n.bpointer != m.tag:
                alter[n.tag].append(m.tag)
    label = set()
    for d in data:
        d0 = d[0]
        d1 = d[1]
        if p.singular_noun(d[0]) != False:
            d0 = p.singular_noun(d[0])
        if p.singular_noun(d[1]) != False:
            d1 = p.singular_noun(d[1])
        for i in word2syn[d0]:
            for j in word2syn[d1]:
                if j in alter[i]:
                    if d[0] + "\t" + "|" + "\t" + d[1] not in output:
                        output += [d[0] + "\t" + "|" + "\t" + d[1]]
                    label.add(d)
                elif i in alter[j]:
                    if d[0] + "\t" + "|" + "\t" + d[1] not in output:
                        output += [d[0] + "\t" + "|" + "\t" + d[1]]
                    label.add(d)
        if d not in un_change and d not in label:
            un_change += [d]
    del alter
    print("before single: " + str(len(data)) + " after: " + str(len(un_change)))
    output += ["\n"]
    return output, un_change
class PythonASTTreeBasedStructureGenerator():
    """Parse Python source with plyplus and mirror its AST into a treelib
    Tree rooted at a synthetic 'program' node (identifier 0)."""

    def __init__(self):
        self.code = None
        self.python_grammar = plyplus.Grammar(
            plyplus.grammars.open('python.g'))
        self.node_collection = None
        self.program = None
        self.tree = Tree()
        self.tree.create_node(data="program", identifier=0, tag="program")
        self.has_been_generated = False

    def from_file(self, filepath):
        """Load the source code from a file; returns self for chaining."""
        file = open(filepath, "r")
        self.code = file.read()
        file.close()
        return self

    def from_code(self, str_code):
        """Use the given source string; returns self for chaining."""
        self.code = str_code
        return self

    def _generate(self):
        # Parse once and populate self.tree starting under the root (id 0).
        self._parse_code()
        self._fill_tree(self.program, 0, 0)
        self.has_been_generated = True

    def generate(self, as_copy=False):
        """Return the AST tree, building it on first use.

        as_copy: when True, return an independent copy of the tree.
        """
        if not self.has_been_generated:
            self._generate()
        if as_copy:
            return Tree(self.tree)
        return self.tree

    def _parse_code(self):
        """Parse self.code and keep only non-token STree nodes."""
        parsed_code = self.python_grammar.parse(self.code)
        tree_collection = parsed_code.select('*')
        # remove the end tokens
        new_list_of_nodes = []
        for i in tree_collection:
            if not isinstance(i, plyplus.plyplus.TokValue):
                new_list_of_nodes.append(i)
        self.node_collection = plyplus.strees.STreeCollection(
            new_list_of_nodes)
        self.program = self.node_collection[0]

    def _fill_tree(self, node, parent_id, current_id):
        """Recursively copy plyplus STree nodes into self.tree.

        Returns the highest identifier assigned so far, so siblings get
        unique, strictly increasing ids.
        """
        # print("node is", node, " parent id is", parent_id, "and current id is", current_id)
        if isinstance(node, plyplus.plyplus.TokValue):
            # print("token", node, "so we return")
            return current_id
        sons = node.named_tail  # dict
        for son_key, son_value in sons.items():
            for son in son_value:  # son being Stree
                # we don't take the tokens (called False)
                if son_key == False:
                    continue
                current_id += 1
                self.tree.create_node(data=son_key,
                                      parent=parent_id,
                                      identifier=current_id,
                                      tag=son_key)
                current_id = self._fill_tree(son, current_id, current_id)
        return current_id

    def print_tree(self):
        """Display the generated tree; returns self for chaining."""
        if not self.has_been_generated:
            self._generate()
        print(self.tree.show())
        return self

    def print_ast_as_image(self, filename="AST.png"):
        """Render the plyplus parse tree to an image via pydot."""
        if not self.has_been_generated:
            self._generate()
        self.program.to_png_with_pydot(filename)
        return self
# Example usage of treelib # # Author: chenxm # __author__ = 'chenxm' from treelib import Tree, Node ## Create the family tree tree = Tree() tree.create_node("Harry", "harry") # root node tree.create_node("Jane", "jane", parent="harry") tree.create_node("Bill", "bill", parent="harry") tree.create_node("Diane", "diane", parent="jane") tree.create_node("George", "george", parent="diane") tree.create_node("Mary", "mary", parent="diane") tree.create_node("Jill", "jill", parent="george") tree.create_node("Mark", "mark", parent="jane") print("#"*4 + "Breakdown of out family") tree.show(cmp=lambda x,y: cmp(x.tag, y.tag), key=None, reverse=True) #tree.show(key=lambda x: x.tag, reverse=False) tree.save2file("/home/chenxm/Desktop/tree.txt", idhidden=False) print('\n') print("#"*4 + "All family members in DEPTH mode") for node in tree.expand_tree(mode=Tree.DEPTH): print tree[node].tag print('\n')
def testUnjsonify():
    """Round-trip a tree through to_json/unjsonify and display the result."""
    source = Tree()
    source.create_node('home', 'home')
    source.create_node('phone', 'phone', parent='home')
    source.create_node('laptop', 'laptop', parent='home')
    source.create_node('screen', 'screen', parent='laptop')
    source.create_node(19, 19, parent='home')
    source.create_node((1, 2), (1, 2), parent='screen')
    unjsonify(source.to_json()).show()
from treelib import Tree

from apps.tree_tests.tree_node import TreeNode

# Build a small sample tree whose payloads are TreeNode objects.
tree = Tree()
tree.create_node('a', 'a', data=TreeNode('a', 'a', [5, 5]))
tree.create_node('b', 'b', parent='a', data=TreeNode('b', 'b', [5, 5]))
tree.create_node('c', 'c', parent='a', data=TreeNode('c', 'c', [5, 6]))
tree.create_node('d', 'd', parent='c', data=TreeNode('d', 'd'))
tree.create_node('e', 'e', parent='d', data=TreeNode('e', 'e'))

print(tree)

# 'b' is a leaf in the tree above, so this prints nothing.
for child in tree.children('b'):
    print(child.tag)
def to_tree(self):
    """ returns a TreeLib tree

    Each element of self becomes one tree node: the element itself is the
    tag, node_id the identifier, and node.parent the parent identifier.
    NOTE(review): assumes iteration yields parents before children,
    otherwise create_node would fail on a missing parent -- confirm.
    """
    tree = TreeLibTree()
    for node in self:
        tree.create_node(node, node.node_id, parent=node.parent)
    return tree
bodies = line.split(")") left = bodies[0] right=bodies[1].rstrip("\n") parent_check[right] = left parent_check_reverse[left] = right nodeids.append(left) nodeids.append(right) for nodeid in nodeids: if nodeid in parent_check: continue else: rootid = nodeid unique_ids = set(nodeids) tree.create_node(tag=rootid,identifier=rootid) while len(unique_ids) != len(tree.all_nodes()): for rightNode in list(parent_check.keys()): if tree.get_node(parent_check[rightNode]) is not None and tree.get_node(rightNode) is None: tree.create_node(tag=rightNode, identifier=rightNode, parent=tree.get_node(parent_check[rightNode])) for node in tree.all_nodes(): sum += tree.depth(node) print(sum) def intersection(lst1, lst2): lst3 = [value for value in lst1 if value in lst2]
def create_ast(self, filename):
    """
    Create an AST for a given OPcache file.

    Arguments:
        filename : The name of the file to parse

    Returns:
        A treelib Tree rooted at "script" with three children:
        "main_op_array", "function_table" and "class_table", each holding
        OPcode subtrees.
    """
    # Create parser — pick the parser matching the cache's word size
    if self.is_64_bit:
        opcache = opcache_parser_64.OPcacheParser(filename)
    else:
        opcache = opcache_parser.OPcacheParser(filename)

    # Create syntax tree skeleton
    ast = Tree()
    ast.create_node("script", "script")
    ast.create_node("main_op_array", "main_op_array", parent="script")
    ast.create_node("function_table", "function_table", parent="script")
    ast.create_node("class_table", "class_table", parent="script")

    # Get main structures
    main_op_array = opcache['script']['main_op_array']
    functions = opcache['script']['function_table']['buckets']
    classes = opcache['script']['class_table']['buckets']

    # Main OP array
    for idx, opcode in enumerate(main_op_array['opcodes']):
        opcode = OPcode(str(idx), opcode, main_op_array, opcache,
                        self.is_64_bit)
        ast.paste("main_op_array", opcode)

    # Function Table
    for function in functions:
        # Create function node
        function_name = function['key']['val']
        function_id = function_name + "_function"
        ast.create_node(function_name, function_id, parent="function_table")

        # Iterate over opcodes
        op_array = function['val']['op_array']
        for idx, opcode in enumerate(op_array['opcodes']):
            opcode = OPcode(str(idx), opcode, op_array, opcache,
                            self.is_64_bit)
            ast.paste(function_id, opcode)

    # Class Table
    for class_ in classes:
        # Check for real classes (pointer-typed buckets only)
        if class_['val']['u1']['type'] == IS_PTR:
            # Create class node
            class_name = class_['key']['val']
            class_id = class_name + "_class"
            ast.create_node(class_name, class_id, parent="class_table")

            # Class method table
            # NOTE(review): class_function_id is keyed by method name only —
            # two classes sharing a method name would collide; verify intent.
            for function in class_['val']['class']['function_table'][
                    'buckets']:
                # Create function node
                function_name = function['key']['val']
                class_function_id = function_name + "_class_function"
                ast.create_node(function_name, class_function_id,
                                parent=class_id)

                # Iterate over opcodes
                # BUG FIX: this OPcode call previously omitted
                # self.is_64_bit, unlike the two other OPcode constructions
                # above — class-method opcodes from 64-bit caches were
                # parsed with the wrong word size.
                for idx, opcode in enumerate(
                        function['val']['op_array']['opcodes']):
                    opcode = OPcode(str(idx), opcode,
                                    function['val']['op_array'], opcache,
                                    self.is_64_bit)
                    ast.paste(class_function_id, opcode)

    return ast
class Parser_analyzer:
    """
    LL(1) grammar analyzer for statements.

    NEED: expr, the various terminator symbols.
    NOTE: int_t is a special workaround for the otherwise unsolvable
    backtracking problem of
        A -> B int
        B -> int b | eps
    which occurs at the `int_t main()` position.
    """
    def __init__(self):
        self.Vn = []  # non-terminal symbols
        self.Vt = []  # terminal symbols
        self.table = None  # LL(1) predictive parsing table (rows: Vn, cols: Vt)
        self.stack_anls = []  # analysis (symbol) stack
        self.stack_toke = []  # token stack
        self.err_info = []  # accumulated error messages
        self.AST_Tree = Tree()
        self.AST_Tree_root = None
        self.parent_uid = None  # identifier of the most recently expanded non-terminal node
        self.node_parent_dict = None  # pending symbol -> list of parent node ids
        self.current_anal_scope = 0  # brace-nesting depth while parsing

    def load_analyzer(self, prod_path, ff_path):
        """Load productions and FIRST/FOLLOW sets from files and build the parsing table.

        :param prod_path: productions file; continuation alternatives start with '|'
        :param ff_path: tab-separated file: symbol, eps-flag, FIRST set, optional FOLLOW set
        """
        # Parse the production file into prod_set: head -> list of right-hand sides.
        prod_set = {}
        prod_set_ori = open(prod_path, 'r', encoding='utf-8').readlines()
        temp_prod = ''
        for item in prod_set_ori:
            item = item.strip()
            if item[0] != '|':
                # New production head: "<head> -> <body...>"
                temp = item.split(' ')
                temp_prod = temp[0]
                res = ''
                for ii in temp[2:]:
                    res += '{} '.format(ii)
                res = res.strip()
                prod_set[temp_prod] = []
                prod_set[temp_prod].append(res)
                if temp_prod not in self.Vn:
                    self.Vn.append(temp_prod)
            else:
                # Alternative body for the current head: "| <body...>"
                temp = item.split(' ')
                res = ''
                for ii in temp[1:]:
                    res += '{} '.format(ii)
                res = res.strip()
                prod_set[temp_prod].append(res)
        # Parse the FIRST/FOLLOW file.
        ff_set = {}
        ff_set_ori = open(ff_path, 'r', encoding='utf-8').readlines()
        for item in ff_set_ori:
            item = item.replace('\n', '')
            item = item.split('\t')
            end_symbol = item[0]
            eps_flag = item[1]
            fi_set = item[2].split(' ')
            if len(item) == 4:
                fo_set = item[3].split(' ')
            else:
                fo_set = []
            ff_set[end_symbol] = {
                'eps_flag': eps_flag,
                'fi_set': fi_set,
                'fo_set': fo_set
            }
        self.table = [[] for row in range(len(self.Vn))]  # predictive parsing table
        # Fill the table: for each non-terminal, place a production under every
        # terminal of its FIRST set (and 'eps' under its FOLLOW set if nullable).
        for item in self.Vn:
            item_prod = prod_set[item]
            item_ff = ff_set[item]
            if item_ff['eps_flag'] == 'true':
                item_ff['fi_set'].remove('eps')
            for non in item_ff['fi_set']:
                if non not in self.Vt:
                    # New terminal: grow every table row by one empty column.
                    self.Vt.append(non)
                    for n in range(len(self.Vn)):
                        self.table[n].append('')
                # Select which production to place at table[item][non].
                aim_prod = None
                aim2_prod = None  # fallback: an "eps <rest>" production (int_t workaround)
                for temp_prod in item_prod:
                    temp_shit = temp_prod.split(' ')
                    temp_first = temp_shit[0]
                    if temp_first == 'eps' and len(temp_shit) > 1:
                        aim2_prod = temp_prod
                    if non == temp_first:
                        # Terminal directly leads the production body.
                        aim_prod = temp_prod
                        break
                    elif temp_first in ff_set:
                        # Body starts with a non-terminal: check its FIRST set.
                        if non in ff_set[temp_first]['fi_set'] or ff_set[
                                temp_first]['eps_flag'] == 'true':
                            aim_prod = temp_prod
                            break
                if aim_prod is None:
                    aim_prod = aim2_prod
                self.table[self.Vn.index(item)][self.Vt.index(non)] = aim_prod
            if item_ff['eps_flag'] == 'true':
                # Nullable non-terminal: 'eps' entries under its FOLLOW set.
                for non in item_ff['fo_set']:
                    if non not in self.Vt:
                        self.Vt.append(non)
                        for n in range(len(self.Vn)):
                            self.table[n].append('')
                    self.table[self.Vn.index(item)][self.Vt.index(non)] = 'eps'

    def load_stack(self, token_list, start):
        """Prime the analysis and token stacks for a parse beginning at `start`."""
        self.stack_anls = []
        self.stack_anls.append('#')  # bottom-of-stack marker
        self.stack_anls.append(start)
        self.stack_toke = []
        self.stack_toke.append('#')
        temp = list(reversed(token_list))
        self.stack_toke.extend(temp)
        self.err_info = []
        self.node_parent_dict = {start: [None]}

    def table_show(self):
        """Render the predictive parsing table as a printable string."""
        res = ''
        # print(self.Vt)
        res += "{}\n".format(str(self.Vt))
        idx = 0
        for item in self.table:
            # print('{}'.format(self.Vn[idx]), end='\t')
            res += "{}\t".format(self.Vn[idx])
            idx2 = 0
            for jt in item:
                # print('\'{}\'({})'.format(jt, self.Vt[idx2]), end=' ')
                res += "'{}'({}) ".format(jt, self.Vt[idx2])
                idx2 += 1
            # print()
            res += '\n'
            idx += 1
        return res

    def ans_show(self):
        """Debug helper: print both stacks."""
        print(self.stack_anls)
        print(self.stack_toke)
        print()

    def creat_node(self, tag, parent, data):
        """Create an AST node (the first call creates the root); return its identifier."""
        if self.AST_Tree.size() == 0:
            node = self.AST_Tree.create_node(tag='{}'.format(tag), data=data)
            self.AST_Tree_root = node
        else:
            node = self.AST_Tree.create_node(tag='{}'.format(tag),
                                             parent=parent, data=data)
        return node.identifier

    def create_dotPic(self, root_dir):
        """Export the AST to Graphviz dot and render it as a PNG under `root_dir`."""
        # root_dir = './treePic'
        self.AST_Tree.to_graphviz(filename='{}/tree.dot'.format(root_dir))
        string = open('{}/tree.dot'.format(root_dir)).read()
        dot = graphviz.Source(string)
        dot.render('{}/tree'.format(root_dir), format='png')

    def run(self, log=False):
        """Run the LL(1) table-driven parse over the loaded stacks.

        :param log: when True, accumulate a step-by-step trace in the log string
        :return: (result summary string, trace log string)
        """
        anlsRes = ''
        anlsLog = ''
        toke = self.stack_toke.pop(-1)
        symbol = self.stack_anls.pop(-1)
        while symbol != '#':
            if symbol in [toke.tag, toke.type]:
                # Terminal match: refresh scope tracking
                if symbol == '{':
                    self.current_anal_scope += 1
                elif symbol == '}':
                    self.current_anal_scope -= 1
                else:
                    toke.set_scope(self.current_anal_scope)
                # Refresh literal value for numeric tokens
                if toke.type == 'num':
                    toke.set_value(toke.tag)
                # Create an AST leaf for the matched terminal
                self.creat_node(symbol, self.node_parent_dict[symbol][-1],
                                toke)
                self.node_parent_dict[symbol].pop(-1)
                if len(self.node_parent_dict[symbol]) == 0:
                    self.node_parent_dict.pop(symbol)
                toke = self.stack_toke.pop(-1)
                if log:
                    # print('\t*HIT: {}\t<-\t{}'.format(symbol, toke))
                    anlsLog += "\t*HIT: {}\t<-\t{}\n".format(symbol, toke)
                if toke == '#':
                    break
            elif symbol in self.Vn:
                # Non-terminal: look up the production in the table.
                if toke.type in ['var', 'num']:
                    # variables/numbers are looked up by token type, not tag
                    table_item = self.table[self.Vn.index(symbol)][
                        self.Vt.index(toke.type)]
                else:
                    table_item = self.table[self.Vn.index(symbol)][
                        self.Vt.index(toke.tag)]
                table_item = table_item.split(' ')
                if table_item[0] == '':
                    # Empty cell: syntax error — record and keep going.
                    # print('\t*ERROR: {}\t<-\t{}'.format(symbol, toke))
                    anlsLog += "\t*ERROR: {}\t<-\t{}\n".format(symbol, toke)
                    self.err_info.append(
                        "row: {}, col: {}, token: '{}' cont match '{}'\n".
                        format(toke.row, toke.col, toke, symbol))
                elif table_item[0] == 'eps':
                    # eps production: nothing pushed unless it is the special
                    # "eps <rest>" workaround form.
                    if len(table_item) > 1:
                        # push everything except the leading 'eps'
                        temp = list(reversed(table_item))[0:-1]
                        self.stack_anls.extend(temp)
                        # register parents for the newly pushed symbols
                        for item in temp:
                            if item not in self.node_parent_dict:
                                self.node_parent_dict[item] = []
                            self.node_parent_dict[item].append(self.parent_uid)
                else:
                    # Ordinary expansion: push the production body reversed.
                    temp = list(reversed(table_item))
                    self.stack_anls.extend(temp)
                    # Create the AST node for the expanded non-terminal.
                    self.parent_uid = self.creat_node(
                        symbol, self.node_parent_dict[symbol][-1], symbol)
                    self.node_parent_dict[symbol].pop(-1)
                    if len(self.node_parent_dict[symbol]) == 0:
                        self.node_parent_dict.pop(symbol)
                    # register parents for the newly pushed symbols
                    for item in temp:
                        if item not in self.node_parent_dict:
                            self.node_parent_dict[item] = []
                        self.node_parent_dict[item].append(self.parent_uid)
            if log:
                # print()
                # print("symb:\'{}\'----stack:{}".format(symbol, list(reversed(self.stack_anls))))
                # print("toke:{}----stack:{}".format(toke, list(reversed(self.stack_toke))))
                anlsLog += "\n"
                anlsLog += "symb:\'{}\'----stack:{}\n".format(
                    symbol, list(reversed(self.stack_anls)))
                anlsLog += "toke:{}----stack:{}\n".format(
                    toke, list(reversed(self.stack_toke)))
            symbol = self.stack_anls.pop(-1)
        self.node_parent_dict.clear()
        # self.ans_show()
        if len(self.err_info) == 0:
            # print('match compete!')
            anlsRes += "match compete!\n"
        for item in self.err_info:
            anlsRes += "{}".format(item)
        return anlsRes, anlsLog
class RIAC(AbstractTeacher):
    """Robust Intelligent Adaptive Curiosity teacher.

    Maintains a binary tree of task-space regions (treelib Tree); each region
    stores recent (reward, task) pairs and an Absolute Learning Progress (ALP)
    estimate. Regions are recursively split, and tasks are sampled from
    regions proportionally to their ALP.
    """
    def __init__(self, mins, maxs, seed, env_reward_lb, env_reward_ub,
                 max_region_size=200, alp_window_size=None,
                 nb_split_attempts=50, sampling_in_leaves_only=False,
                 min_region_size=None, min_dims_range_ratio=1 / 6,
                 discard_ratio=1 / 4):
        AbstractTeacher.__init__(self, mins, maxs, env_reward_lb,
                                 env_reward_ub, seed)

        # Maximal number of (task, reward) pairs a region can hold before splitting
        self.maxlen = max_region_size

        self.alp_window = self.maxlen if alp_window_size is None else alp_window_size

        # Initialize Regions' tree
        self.tree = Tree()
        self.regions_bounds = [Box(self.mins, self.maxs, dtype=np.float32)]
        self.regions_alp = [0.]
        self.tree.create_node('root', 'root',
                              data=Region(maxlen=self.maxlen,
                                          r_t_pairs=[
                                              deque(maxlen=self.maxlen + 1),
                                              deque(maxlen=self.maxlen + 1)
                                          ],
                                          bounds=self.regions_bounds[-1],
                                          alp=self.regions_alp[-1]))
        self.nb_dims = len(mins)
        self.nb_split_attempts = nb_split_attempts

        # Whether task sampling uses parent and child regions (False) or only child regions (True)
        self.sampling_in_leaves_only = sampling_in_leaves_only

        # Additional tricks to original RIAC, enforcing splitting rules

        # 1 - Minimum population required for both children when splitting --> set to 1 to cancel
        self.minlen = self.maxlen / 20 if min_region_size is None else min_region_size

        # 2 - minimum children region size (compared to initial range of each dimension)
        # Set min_dims_range_ratio to 1/np.inf to cancel
        self.dims_ranges = self.maxs - self.mins
        self.min_dims_range_ratio = min_dims_range_ratio

        # 3 - If after nb_split_attempts, no split is valid, flush oldest points of parent region
        # If 1- and 2- are canceled, this will be canceled since any split will be valid
        self.discard_ratio = discard_ratio

        # book-keeping
        self.sampled_tasks = []
        self.all_boxes = []
        self.all_alps = []
        self.update_nb = -1
        self.split_iterations = []

        self.hyperparams = locals()

    def compute_alp(self, sub_region):
        """Absolute Learning Progress of a region: |mean(old rewards) - mean(recent rewards)|.

        :param sub_region: [rewards deque, tasks deque] pair
        """
        if len(sub_region[0]) > 2:
            cp_window = min(len(sub_region[0]),
                            self.alp_window)  # not completely window
            half = int(cp_window / 2)
            # print(str(cp_window) + 'and' + str(half))
            first_half = np.array(sub_region[0])[-cp_window:-half]
            snd_half = np.array(sub_region[0])[-half:]
            diff = first_half.mean() - snd_half.mean()
            cp = np.abs(diff)
        else:
            cp = 0
        alp = np.abs(cp)
        return alp

    def split(self, nid):
        """Try nb_split_attempts random splits on region corresponding to node <nid>.

        Returns True when a valid split was committed to the tree; otherwise
        discards the oldest points of the region.
        """
        reg = self.tree.get_node(nid).data
        best_split_score = 0
        best_bounds = None
        best_sub_regions = None
        is_split = False
        for i in range(self.nb_split_attempts):
            sub_reg1 = [
                deque(maxlen=self.maxlen + 1),
                deque(maxlen=self.maxlen + 1)
            ]
            sub_reg2 = [
                deque(maxlen=self.maxlen + 1),
                deque(maxlen=self.maxlen + 1)
            ]

            # repeat until the two sub regions contain at least minlen of the mother region
            while len(sub_reg1[0]) < self.minlen or len(
                    sub_reg2[0]) < self.minlen:
                # decide on dimension
                dim = self.random_state.choice(range(self.nb_dims))
                threshold = reg.bounds.sample()[dim]
                bounds1 = Box(reg.bounds.low, reg.bounds.high,
                              dtype=np.float32)
                bounds1.high[dim] = threshold
                bounds2 = Box(reg.bounds.low, reg.bounds.high,
                              dtype=np.float32)
                bounds2.low[dim] = threshold
                bounds = [bounds1, bounds2]
                valid_bounds = True

                # reject children thinner than the allowed fraction of each dim's range
                if np.any(bounds1.high - bounds1.low <
                          self.dims_ranges * self.min_dims_range_ratio):
                    valid_bounds = False
                if np.any(bounds2.high - bounds2.low <
                          self.dims_ranges * self.min_dims_range_ratio):
                    valid_bounds = valid_bounds and False

                # perform split in sub regions
                sub_reg1 = [
                    deque(maxlen=self.maxlen + 1),
                    deque(maxlen=self.maxlen + 1)
                ]
                sub_reg2 = [
                    deque(maxlen=self.maxlen + 1),
                    deque(maxlen=self.maxlen + 1)
                ]
                for i, task in enumerate(reg.r_t_pairs[1]):
                    if bounds1.contains(task):
                        sub_reg1[1].append(task)
                        sub_reg1[0].append(reg.r_t_pairs[0][i])
                    else:
                        sub_reg2[1].append(task)
                        sub_reg2[0].append(reg.r_t_pairs[0][i])
                sub_regions = [sub_reg1, sub_reg2]

            # compute alp
            alp = [self.compute_alp(sub_reg1), self.compute_alp(sub_reg2)]

            # compute score
            # NOTE(review): len(sub_reg1) is always 2 (a list of two deques);
            # if the intent was population size this should be len(sub_reg1[0]).
            # Verify against the upstream RIAC implementation before changing.
            split_score = len(sub_reg1) * len(sub_reg2) * np.abs(alp[0] -
                                                                 alp[1])
            if split_score >= best_split_score and valid_bounds:
                is_split = True
                best_split_score = split_score
                best_sub_regions = sub_regions
                best_bounds = bounds

        if is_split:
            # add new nodes to tree
            # NOTE(review): `alp` here comes from the LAST attempt of the loop,
            # not necessarily the attempt that produced best_sub_regions — confirm.
            for i, (r_t_pairs,
                    bounds) in enumerate(zip(best_sub_regions, best_bounds)):
                self.tree.create_node(identifier=self.tree.size(),
                                      parent=nid,
                                      data=Region(self.maxlen,
                                                  r_t_pairs=r_t_pairs,
                                                  bounds=bounds,
                                                  alp=alp[i]))
        else:
            # No valid split found: flush the oldest discard_ratio points.
            assert len(reg.r_t_pairs[0]) == (self.maxlen + 1)
            reg.r_t_pairs[0] = deque(
                islice(reg.r_t_pairs[0],
                       int(self.maxlen * self.discard_ratio),
                       self.maxlen + 1))
            reg.r_t_pairs[1] = deque(
                islice(reg.r_t_pairs[1],
                       int(self.maxlen * self.discard_ratio),
                       self.maxlen + 1))
        return is_split

    def add_task_reward(self, node, task, reward):
        """Recursively record (task, reward) in every region containing the task."""
        reg = node.data
        nid = node.identifier
        if reg.bounds.contains(task):  # task falls within region
            self.nodes_to_recompute.append(nid)
            children = self.tree.children(nid)
            for n in children:  # if task in region, task is in one sub-region
                self.add_task_reward(n, task, reward)

            need_split = reg.add(task, reward, children == [])  # COPY ALL MODE
            if need_split:
                self.nodes_to_split.append(nid)

    def episodic_update(self, task, reward, is_success):
        """Record one episode's (task, reward), split/recompute regions as needed.

        `is_success` is accepted for interface compatibility but unused here.
        Returns (whether a new split happened, None).
        """
        self.update_nb += 1

        # Add new (task, reward) to regions nodes
        self.nodes_to_split = []
        self.nodes_to_recompute = []
        new_split = False
        root = self.tree.get_node('root')
        self.add_task_reward(
            root, task, reward)  # Will update self.nodes_to_split if needed
        assert len(self.nodes_to_split) <= 1

        # Split a node if needed
        need_split = len(self.nodes_to_split) == 1
        if need_split:
            new_split = self.split(self.nodes_to_split[0])  # Execute the split
            if new_split:
                # Update list of regions_bounds
                if self.sampling_in_leaves_only:
                    self.regions_bounds = [
                        n.data.bounds for n in self.tree.leaves()
                    ]
                else:
                    self.regions_bounds = [
                        n.data.bounds for n in self.tree.all_nodes()
                    ]

        # Recompute ALPs of modified nodes
        for nid in self.nodes_to_recompute:
            node = self.tree.get_node(nid)
            reg = node.data
            reg.alp = self.compute_alp(reg.r_t_pairs)

        # Collect regions data (regions' ALP and regions' (task, reward) pairs)
        all_nodes = self.tree.all_nodes(
        ) if not self.sampling_in_leaves_only else self.tree.leaves()
        self.regions_alp = []
        self.r_t_pairs = []
        for n in all_nodes:
            self.regions_alp.append(n.data.alp)
            self.r_t_pairs.append(n.data.r_t_pairs)

        # Book-keeping
        if new_split:
            self.all_boxes.append(copy.copy(self.regions_bounds))
            self.all_alps.append(copy.copy(self.regions_alp))
            self.split_iterations.append(self.update_nb)
        assert len(self.regions_alp) == len(self.regions_bounds)

        return new_split, None

    def sample_random_task(self):
        """Uniform sample from the whole task space (the root region)."""
        return self.regions_bounds[0].sample()  # First region is root region

    def sample_task(self):
        """Sample the next task using the three-mode RIAC strategy."""
        mode = self.random_state.rand()
        if mode < 0.1:  # "mode 3" (10%) -> sample on regions and then mutate lowest-performing task in region
            if len(self.sampled_tasks) == 0:
                self.sampled_tasks.append(self.sample_random_task())
            else:
                self.sampled_tasks.append(
                    self.non_exploratory_task_sampling()["task"])
        elif mode < 0.3:  # "mode 2" (20%) -> random task
            self.sampled_tasks.append(self.sample_random_task())
        else:  # "mode 1" (70%) -> proportional sampling on regions based on ALP and then random task in selected region
            region_id = proportional_choice(self.regions_alp,
                                            self.random_state,
                                            eps=0.0)
            self.sampled_tasks.append(self.regions_bounds[region_id].sample())

        return self.sampled_tasks[-1].astype(np.float32)

    def non_exploratory_task_sampling(self):
        """Pick an ALP-weighted region and mutate its lowest-reward task slightly."""
        # 1 - Sample region proportionally to its ALP
        region_id = proportional_choice(self.regions_alp,
                                        self.random_state,
                                        eps=0.0)

        # 2 - Retrieve (task, reward) pair with lowest reward
        worst_task_idx = np.argmin(self.r_t_pairs[region_id][0])

        # 3 - Mutate task by a small amount (using Gaussian centered on task, with 0.1 std)
        task = self.random_state.normal(
            self.r_t_pairs[region_id][1][worst_task_idx].copy(), 0.1)
        # clip to stay within region (add small epsilon to avoid falling in multiple regions)
        task = np.clip(task, self.regions_bounds[region_id].low + 1e-5,
                       self.regions_bounds[region_id].high - 1e-5)
        return {
            "task": task,
            "infos": {
                "bk_index": len(self.all_boxes) - 1,
                "task_infos": region_id
            }
        }

    def dump(self, dump_dict):
        """Add this teacher's book-keeping data to `dump_dict` and return it."""
        dump_dict['all_boxes'] = self.all_boxes
        dump_dict['split_iterations'] = self.split_iterations
        dump_dict['all_alps'] = self.all_alps
        # dump_dict['riac_params'] = self.hyperparams
        return dump_dict

    @property
    def nb_regions(self):
        # Number of currently tracked regions.
        return len(self.regions_bounds)

    @property
    def get_regions(self):
        # The list of region bounding boxes.
        return self.regions_bounds
def find_path(startpose, endpose, path_step):
    """Randomized tree search for a rocket trajectory from startpose to endpose.

    Expands a frontier of simulated poses, filtering out colliding/invalid
    states, until a pose matching endpose is found (returns the list of paths
    to all matching leaves) or the frontier is exhausted (returns []).
    Side effects: saves matplotlib plots and prints progress.

    Assumes module-level imports of random, math, numpy (np), matplotlib
    (plt), defaultdict, and the project's RocketSimulation / Vector2 /
    pose_dist / pose_eq / weigthed_pose_dist helpers — TODO confirm.
    """
    #tree DST used to store path solutions.
    #each path is uniquely denoted by its leaf pointer.
    soln_tree = Tree()
    root = soln_tree.create_node(data=startpose)  # root :)

    def path_from_leaf(node):
        # Walk predecessors from a leaf back to the root, then reverse.
        path = [node.data]
        while not node.is_root():
            predecessor = node.predecessor(soln_tree.identifier)
            path.append(soln_tree.get_node(predecessor).data)
            node = soln_tree.get_node(predecessor)
        path.reverse()
        return path

    frontier = [root]
    next_frontier = []
    pass_ct = 0
    #let rocket start upside down, but remove cases where it's
    #upside down once it's righted itself
    righted = False
    while frontier:
        pose_node = frontier.pop()
        current_pose = pose_node.data

        # Closure over the current pose_node; called immediately below, so
        # late binding is not an issue here.
        def create_node(data):
            return soln_tree.create_node(data=data, parent=pose_node)

        # ~0.1% chance: plot the partial path for debugging.
        if random.random() < (1 / 1000):
            path = path_from_leaf(pose_node)
            x = [s.x for s in path]
            y = [s.y for s in path]
            plt.plot(x, y, linewidth=2, color='blue')
            plt.savefig(f"plot{random.random()}.png")
            plt.close()

        #bruteforce with large set of P, G
        rs = RocketSimulation(current_pose.x, current_pose.y, current_pose.dx,
                              current_pose.dy, current_pose.theta,
                              current_pose.dtheta)
        next_states = [
            rs.clone().update_n(p, g, path_step)
            for p in np.arange(0, 1, 0.05) for g in np.arange(-0.2, 0.2, 0.05)
        ]

        def normalize_angle(angle):
            # Map any angle into (-pi, pi].
            angle = angle % (2 * math.pi)
            angle = (angle + (2 * math.pi)) % (2 * math.pi)
            if angle > math.pi:
                angle -= (2 * math.pi)
            return angle

        def collision_filter(pose):
            # Reject poses where any hull corner would be below ground (y < 0).
            L = 40
            W = 2
            s = math.sin(pose.theta)
            c = math.cos(pose.theta)
            outer_points = [
                Vector2(0, L / 2),
                Vector2(1.8 * W, -(L / 2) - W),
                Vector2(-1.8 * W, -(L / 2) - W)
            ]
            for p in outer_points:
                if ((p.x * s) + (p.y * c) + pose.y) < 0:
                    return False
            return True

        def orientation_filter(pose):
            # Once the rocket has righted itself, drop upside-down states.
            if abs(normalize_angle(pose.theta)) <= math.pi / 4:
                return True
            else:
                return not righted

        if not righted:
            for state in next_states:
                if abs(normalize_angle(state.theta)) <= math.pi / 4:
                    righted = True

        #filter states
        filters = [
            lambda pose: pose.y > 0,
            #lambda pose: pose_dist(startpose, endpose) > pose_dist(pose, endpose), #closer
            #lambda pose: abs(normalize_angle(pose.theta)) <= math.pi / 4, #upside-down bad
            collision_filter,
            orientation_filter
        ]
        next_states = list(
            filter(lambda x: all(f(x) for f in filters), next_states))
        correct_solns = list(
            filter(lambda pose: pose_eq(pose, endpose), next_states))
        if (correct_solns):
            print("Correct solutions found!!!")
            return [
                path_from_leaf(create_node(soln)) for soln in correct_solns
            ]
        next_states = sorted(next_states,
                             key=lambda x: pose_dist(x, endpose),
                             reverse=False)  #try promising cases first
        # occasional progress report (~0.2% of passes)
        if random.random() <= 1 / 500:
            print(
                f"Produced {len(next_states)} states during pass (t={pass_ct * path_step * 0.02})"
            )
            if len(next_states) != 0:
                print(
                    f"Closest distance: {pose_dist(next_states[0], endpose)}")
        if len(next_states) != 0:
            #always choose the best soln, also choose 2 random members
            next_frontier.append(create_node(next_states.pop()))
            #next_frontier.extend(
            #    [create_node(x) for x in random.sample(next_states[:30], 3)]
            #)
            next_frontier.extend([
                create_node(x)
                for x in random.sample(next_states, min(len(next_states), 2))
            ])
        if not frontier:
            # Current generation exhausted: advance to the next one.
            if not next_frontier:
                print("No next frontier! Exiting!")
            frontier = next_frontier
            next_frontier = []
            print(f"Round {pass_ct} completed. Produced {len(frontier)} states.")
            pass_ct += 1
            #generate summary plots
            if (pass_ct != 0) and (pass_ct % 15 == 0):
                all_paths = filter(lambda x: len(x) == pass_ct + 1,
                                   soln_tree.paths_to_leaves())
                for identifier_list in all_paths:
                    node_list = [
                        soln_tree.get_node(id) for id in identifier_list
                    ]
                    pose_list = [pose_node.data for pose_node in node_list]
                    x = [s.x for s in pose_list]
                    y = [s.y for s in pose_list]
                    plt.plot(x, y, linewidth=2, color='blue')
                plt.savefig(f"summary_plt{random.random()}.png")
                plt.close()
            #perform larger culling step in case of overfull frontier
            if len(frontier) > 4000:
                #Bucketization
                buckets = defaultdict(list)
                #bucket sizes
                bx, by, btheta = 2.5, 2.5, 0.5
                for f in frontier:
                    buckets[hash(
                        (math.floor(f.data.x / bx), math.floor(f.data.y / by),
                         math.floor(f.data.theta / btheta)))].append(f)
                #pick representatives from buckets
                new_frontier = []
                for bucket, nodes in buckets.items():
                    #pick ideal and random candidate
                    sorted_poses = sorted(nodes,
                                          key=lambda node: weigthed_pose_dist(
                                              node.data, endpose))
                    new_frontier.append(sorted_poses[0])
                    if len(sorted_poses) > 1:
                        #make sure same candidate isnt chosen twice
                        sorted_poses.pop(0)
                        new_frontier.extend(
                            random.sample(sorted_poses,
                                          min(len(sorted_poses), 2)))
                #re-add any exceptional cases
                #for node in sorted_poses:
                #    if node.data.y < 85:
                #        new_frontier.append(node)
                print(
                    f"Bucketization: removed {len(frontier) - len(new_frontier)}/{len(frontier)} elements"
                )
                frontier = new_frontier
        print(f"frontier: {frontier}\nnext_frontier: {next_frontier}")
    return []
class InteractiveServer:
    """Terminal interactive menu for a connected client.

    Lets the user browse/search granted assets and node trees (paginated) and
    proxy into a chosen asset. Asset and node lists are fetched asynchronously
    in background threads.
    """
    _sentinel = object()  # returned by dispatch() to signal "exit the loop"

    def __init__(self, client):
        self.client = client
        self.closed = False
        self._search_result = None
        self.nodes = None
        self.offset = 0
        self.limit = 100
        self.assets_list = []
        self.finish = False  # True once all asset pages have been fetched
        self.page = 1
        self.total_assets = 0
        self.total_count = 0  # total number of entries for the paginated display
        self.nodes_tree = None  # tree of nodes the user is granted
        self.get_user_assets_paging_async()
        self.get_user_nodes_async()

    @property
    def page_size(self):
        # Rows available for listing = terminal height minus header/footer.
        return self.client.request.meta['height'] - 8

    @property
    def search_result(self):
        if self._search_result:
            return self._search_result
        else:
            return []

    @search_result.setter
    def search_result(self, value):
        if not value:
            self._search_result = value
            return
        value = self.filter_system_users(value)
        self._search_result = value

    def display_logo(self):
        """Send the logo file to the client, skipping '#' comment lines."""
        logo_path = os.path.join(config['ROOT_PATH'], "logo.txt")
        if not os.path.isfile(logo_path):
            return
        with open(logo_path, 'rb') as f:
            for i in f:
                if i.decode('utf-8').startswith('#'):
                    continue
                self.client.send(i.decode('utf-8').replace('\n', '\r\n'))

    def display_banner(self):
        """Clear the screen and print the welcome header plus the menu."""
        self.client.send(char.CLEAR_CHAR)
        self.display_logo()
        header = _("\n{T}{T}{title} {user}, Welcome to use Jumpserver open source fortress system {end}{R}{R}")
        menus = [
            _("{T}1) Enter {green}ID{end} directly login or enter {green}part IP, Hostname, Comment{end} to search login(if unique).{R}"),
            _("{T}2) Enter {green}/{end} + {green}IP, Hostname{end} or {green}Comment {end} search, such as: /ip.{R}"),
            _("{T}3) Enter {green}p{end} to display the host you have permission.{R}"),
            _("{T}4) Enter {green}g{end} to display the node that you have permission.{R}"),
            _("{T}5) Enter {green}g{end} + {green}NodeID{end} to display the host under the node, such as g1.{R}"),
            _("{T}6) Enter {green}s{end} Chinese-english switch.{R}"),
            _("{T}7) Enter {green}h{end} help.{R}"),
            _("{T}0) Enter {green}q{end} exit.{R}")
        ]
        self.client.send(header.format(
            title="\033[1;32m", user=self.client.user, end="\033[0m",
            T='\t', R='\r\n\r'
        ))
        for menu in menus:
            self.client.send(menu.format(
                green="\033[32m", end="\033[0m",
                T='\t', R='\r\n\r'
            ))

    def dispatch(self, opt):
        """Route one line of user input to the matching action.

        Returns self._sentinel when the session should end.
        """
        if opt is None:
            return self._sentinel
        elif opt.startswith("/"):
            self.search_and_display(opt.lstrip("/"))
        elif opt in ['p', 'P', '']:
            self.display_assets()
        elif opt in ['g', 'G']:
            self.display_nodes_tree()
        elif opt.startswith("g") and opt.lstrip("g").isdigit():
            self.display_node_assets(int(opt.lstrip("g")))
        elif opt in ['q', 'Q', 'exit', 'quit']:
            return self._sentinel
        elif opt in ['s', 'S']:
            switch_lang()
            self.display_banner()
        elif opt in ['h', 'H']:
            self.display_banner()
        else:
            self.search_and_proxy(opt)

    def search_assets(self, q):
        """Search granted assets by query `q` (server-side until the local cache is complete)."""
        if not self.finish:
            # local cache incomplete: delegate the search to the API
            assets = app_service.get_search_user_granted_assets(self.client.user, q)
            return assets
        assets = self.assets_list
        result = []

        # empty query -> everything
        if q in ('', None):
            result = assets

        # exact attribute match: return it directly when unique
        if len(result) == 0:
            _result = [asset for asset in assets if is_obj_attr_eq(asset, q)]
            if len(_result) == 1:
                result = _result

        # finally fall back to fuzzy matching
        if len(result) == 0:
            result = [asset for asset in assets if is_obj_attr_has(asset, q)]
        return result

    def display_assets(self):
        """
        Display user all assets
        :return:
        """
        self.display_result_paging(self.assets_list)

    def display_nodes(self):
        """List the user's granted nodes as an aligned table."""
        if self.nodes is None:
            self.get_user_nodes()
        if len(self.nodes) == 0:
            self.client.send(warning(_("No")))
            return
        id_length = max(len(str(len(self.nodes))), 5)
        name_length = item_max_length(self.nodes, 15, key=lambda x: x.name)
        amount_length = item_max_length(self.nodes, 10,
                                        key=lambda x: x.assets_amount)
        size_list = [id_length, name_length, amount_length]
        fake_data = ['ID', _("Name"), _("Assets")]
        self.client.send(wr(title(format_with_zh(size_list, *fake_data))))
        for index, node in enumerate(self.nodes, 1):
            data = [index, node.name, node.assets_amount]
            self.client.send(wr(format_with_zh(size_list, *data)))
        self.client.send(wr(_("Total: {}").format(len(self.nodes)), before=1))

    def display_nodes_tree(self):
        """Render the granted-node tree to the client."""
        if self.nodes is None:
            self.get_user_nodes()
        if not self.nodes:
            self.client.send(wr(_('No Nodes'), before=0))
            return
        self.nodes_tree.show(key=lambda node: node.identifier)
        self.client.send(wr(title(_("Node: [ ID.Name(Asset amount) ]")), before=0))
        # NOTE(review): relies on treelib's private `_reader` attribute filled
        # by show() — fragile across treelib versions; verify.
        self.client.send(wr(self.nodes_tree._reader.replace('\n', '\r\n'), before=0))
        prompt = _("Tips: Enter g+NodeID to display the host under the node, such as g1")
        self.client.send(wr(title(prompt), before=1))

    def display_node_assets(self, _id):
        """Page through the assets granted under node number `_id` (1-based)."""
        if self.nodes is None:
            self.get_user_nodes()
        if _id > len(self.nodes) or _id <= 0:
            msg = wr(warning(_("There is no matched node, please re-enter")))
            self.client.send(msg)
            self.display_nodes_tree()
            return
        assets = self.nodes[_id - 1].assets_granted
        self.display_result_paging(assets)

    def display_search_result(self):
        """Render the current search_result page as an aligned, width-aware table."""
        sort_by = config["ASSET_LIST_SORT_BY"]
        self.search_result = sort_assets(self.search_result, sort_by)
        fake_data = [_("ID"), _("Hostname"), _("IP"), _("LoginAs")]
        id_length = max(len(str(len(self.search_result))), 4)
        hostname_length = item_max_length(self.search_result, 15,
                                          key=lambda x: x.hostname)
        sysuser_length = item_max_length(self.search_result,
                                         key=lambda x: x.system_users_name_list)
        size_list = [id_length, hostname_length, 16, sysuser_length]
        header_without_comment = format_with_zh(size_list, *fake_data)
        # comment column gets whatever width remains on the terminal line
        comment_length = max(
            self.client.request.meta["width"] -
            size_of_str_with_zh(header_without_comment) - 1,
            2
        )
        size_list.append(comment_length)
        fake_data.append(_("Comment"))
        self.client.send(wr(title(format_with_zh(size_list, *fake_data))))
        for index, asset in enumerate(self.search_result, 1):
            data = [
                index, asset.hostname, asset.ip,
                asset.system_users_name_list, asset.comment
            ]
            self.client.send(wr(format_with_zh(size_list, *data)))
        total_page = math.ceil(self.total_count/self.page_size)
        self.client.send(wr(title(_("Page: {}, Count: {}, Total Page: {}, Total Count: {}").format(
            self.page, len(self.search_result), total_page, self.total_count)), before=1)
        )

    def search_and_display(self, q):
        """Search assets with `q` and page through the result."""
        assets = self.search_assets(q)
        self.display_result_paging(assets)

    def get_user_nodes(self):
        """Fetch the user's granted nodes and (re)build the node tree."""
        self.nodes = app_service.get_user_asset_groups(self.client.user)
        self.sort_nodes()
        self.construct_nodes_tree()

    def sort_nodes(self):
        self.nodes = sorted(self.nodes, key=lambda node: node.key)

    def construct_nodes_tree(self):
        """Build a treelib Tree of granted nodes keyed by their ':'-separated keys."""
        self.nodes_tree = Tree()
        root = 'ROOT_ALL_ORG_NODE'
        self.nodes_tree.create_node(tag='', identifier=root, parent=None)
        for index, node in enumerate(self.nodes):
            tag = "{}.{}({})".format(index+1, node.name, node.assets_amount)
            key = node.key
            # parent key is everything before the last ':'; top-level nodes
            # hang off the synthetic root
            parent_key = key[:node.key.rfind(':')] or root
            self.nodes_tree.create_node(tag=tag, identifier=key, data=node,
                                        parent=parent_key)

    def get_user_nodes_async(self):
        thread = threading.Thread(target=self.get_user_nodes)
        thread.start()

    @staticmethod
    def filter_system_users(assets):
        """Keep only the highest-priority system user(s) on each asset (mutates assets)."""
        for asset in assets:
            system_users_granted = asset.system_users_granted
            high_priority = max([s.priority for s in system_users_granted]) \
                if system_users_granted else 1
            system_users_cleaned = [s for s in system_users_granted
                                    if s.priority == high_priority]
            asset.system_users_granted = system_users_cleaned
        return assets

    def get_user_assets_paging(self):
        """Background loop: fetch granted assets page by page into assets_list."""
        while not self.closed:
            assets, total = app_service.get_user_assets_paging(
                self.client.user, offset=self.offset, limit=self.limit
            )
            logger.info('Get user assets paging async: {}'.format(len(assets)))
            if not assets:
                logger.info('Get user assets paging async finished.')
                self.finish = True
                return
            if not self.total_assets:
                self.total_assets = total
                self.total_count = total
            self.assets_list.extend(assets)
            self.offset += self.limit

    def get_user_assets_paging_async(self):
        thread = threading.Thread(target=self.get_user_assets_paging)
        thread.start()

    def choose_system_user(self, system_users):
        """Prompt the user to choose a system user; returns None on 'q' or empty list."""
        if len(system_users) == 1:
            return system_users[0]
        elif len(system_users) == 0:
            return None
        while True:
            self.client.send(wr(_("Select a login:: "), after=1))
            self.display_system_users(system_users)
            opt = net_input(self.client, prompt="ID> ")
            if opt.isdigit() and len(system_users) > int(opt):
                return system_users[int(opt)]
            elif opt in ['q', 'Q']:
                return None
            else:
                # fall back to matching by name
                for system_user in system_users:
                    if system_user.name == opt:
                        return system_user

    def display_system_users(self, system_users):
        for index, system_user in enumerate(system_users):
            self.client.send(wr("{} {}".format(index, system_user.name)))

    def search_and_proxy(self, opt):
        """Search by `opt`; if exactly one asset matches, connect to it, else list matches."""
        assets = self.search_assets(opt)
        if assets and len(assets) == 1:
            asset = assets[0]
            self.search_result = None
            if asset.protocol == "rdp" or asset.platform.lower().startswith("windows"):
                self.client.send(warning(
                    _("Terminal does not support login rdp, "
                      "please use web terminal to access"))
                )
                return
            self.proxy(asset)
        else:
            self.display_result_paging(assets)

    def display_result_paging(self, result_list):
        """Paginate `result_list`, driving the page generator with user actions."""
        if result_list is self.assets_list:
            self.total_count = self.total_assets
        else:
            if len(result_list) == 0:
                return
            self.total_count = len(result_list)
        action = PAGE_DOWN
        gen_result = self.get_result_page_down_or_up(result_list)
        while True:
            try:
                page, result = gen_result.send(action)
            except TypeError:
                # generator not started yet: the first send must be next()
                try:
                    page, result = next(gen_result)
                except StopIteration:
                    logger.info('No Assets')
                    # self.display_banner()
                    self.client.send(wr(_("No Assets"), before=0))
                    return None
            except StopIteration:
                logger.info('Back display result paging.')
                # self.display_banner()
                return None
            self.display_result_of_page(page, result)
            action = self.get_user_action()

    def get_result_page_down_or_up(self, result_list):
        """Generator yielding (page number, page slice); receives PAGE_UP/PAGE_DOWN/BACK via send()."""
        left = 0
        page = 1
        page_up_size = 0  # remember the previous page's size
        while True:
            right = left + self.page_size
            result = result_list[left:right]
            if not result and (result_list is self.assets_list) and self.finish and self.total_assets == 0:
                # no granted assets at all
                return None, None
            elif not result and (result_list is self.assets_list) and self.finish:
                # the previous page was the last page
                left -= page_up_size
                page -= 1
                continue
            elif not result and (result_list is self.assets_list) and not self.finish:
                # more pages exist but are still loading; wait and retry
                time.sleep(1)
                continue
            elif not result and (result_list is not self.assets_list):
                # the previous page was the last page
                left -= page_up_size
                page -= 1
                continue
            else:
                # all other cases: hand the page back to the caller
                action = yield (page, result)
                if action == BACK:
                    return None, None
                elif action == PAGE_UP:
                    if page <= 1:
                        # already at the first page
                        page = 1
                        left = 0
                    else:
                        page -= 1
                        left -= self.page_size
                else:  # PAGE_DOWN
                    page += 1
                    left += len(result)
                    page_up_size = len(result)

    def display_result_of_page(self, page, result):
        """Clear the screen and show one page of results plus the paging prompt."""
        self.client.send(char.CLEAR_CHAR)
        self.page = page
        self.search_result = result
        self.display_search_result()
        self.display_prompt_of_page()

    def display_prompt_of_page(self):
        self.client.send(wr(_('Tips: Enter the asset ID and log directly into the asset.'), before=1))
        prompt_page_up = _("Page up: P/p")
        prompt_page_down = _("Page down: Enter|N/n")
        prompt_back = _("BACK: b/q")
        prompts = [prompt_page_up, prompt_page_down, prompt_back]
        prompt = '\t'.join(prompts)
        self.client.send(wr(prompt, before=1))

    def get_user_action(self):
        """Read one key of paging input and map it to PAGE_UP/BACK/PAGE_DOWN.

        A digit selecting a listed asset proxies into it, then returns BACK.
        """
        opt = net_input(self.client, prompt=':')
        if opt in ('p', 'P'):
            return PAGE_UP
        elif opt in ('b', 'q'):
            return BACK
        elif opt.isdigit() and self.search_result and 0 < int(opt) <= len(self.search_result):
            self.proxy(self.search_result[int(opt)-1])
            return BACK
        else:  # PAGE_DOWN
            return PAGE_DOWN

    def proxy(self, asset):
        """Open a proxied session to `asset` after choosing a system user."""
        system_user = self.choose_system_user(asset.system_users_granted)
        if system_user is None:
            self.client.send(_("No system user"))
            return
        forwarder = ProxyServer(self.client, asset, system_user)
        forwarder.proxy()

    def interact(self):
        """Main REPL: show banner, dispatch input until exit or socket error."""
        self.display_banner()
        while not self.closed:
            try:
                opt = net_input(self.client, prompt='Opt> ', before=1)
                rv = self.dispatch(opt)
                if rv is self._sentinel:
                    break
            except socket.error as e:
                logger.debug("Socket error: {}".format(e))
                break
        self.close()

    def interact_async(self):
        thread = threading.Thread(target=self.interact)
        thread.daemon = True
        thread.start()

    def close(self):
        # Flips `closed`, which also stops the background paging thread.
        logger.debug("Interactive server server close: {}".format(self))
        self.closed = True
def TreeSequenceToTreeClass(simulation, tree_event_sequence, is_AA_mutation_in_root_node=False): t1 = time.time() tree_size = len(tree_event_sequence.tree_sequence) tree_class_tree = Tree() root_id = 'Unknown' array_tree = simulation.GetTree() for i in range(tree_size): if array_tree[i] == -1: root_id = i tree_class_tree.create_node(root_id, root_id, data=None) # placeholder on root break # there can be only one root if root_id == 'Unknown': raise ValueError("There is no root in this tree") for i in range(tree_size): if i != root_id: tree_class_tree.create_node( i, i, parent=root_id, data=None) # placeholder on other places for i in range(tree_size): if i != root_id: tree_class_tree.move_node(i, array_tree[i]) for i in range(tree_size): noc = len(tree_class_tree.get_node(i).fpointer) # number of children ni = tree_event_sequence.tree_sequence[i].node_id iam = tree_event_sequence.tree_sequence[i].is_a_mutation on = tree_event_sequence.tree_sequence[i].old_nucleotyde nn = tree_event_sequence.tree_sequence[i].new_nucleotyde mc = tree_event_sequence.tree_sequence[i].mutation_cite tti = tree_event_sequence.tree_sequence[i].tree_time tty = tree_event_sequence.tree_sequence[i].tree_type if (i == root_id) and (is_AA_mutation_in_root_node == True): tree_class_tree.update_node(i, data=TreeEvent(is_a_mutation=True, number_of_children=noc, old_nucleotyde=0, new_nucleotyde=0, mutation_cite=0, tree_time=0, tree_type='coalescence', node_id=ni)) else: tree_event = TreeEvent(is_a_mutation=iam, number_of_children=noc, old_nucleotyde=on, new_nucleotyde=nn, mutation_cite=mc, tree_time=tti, tree_type=tty, node_id=ni) tree_class_tree.update_node(i, data=tree_event) t2 = time.time() print('Time spent on conversion to tree class = ', t2 - t1) return tree_class_tree
class FTPClient(QtWidgets.QLabel):
    """FTP connection class: builds a file tree of the server and
    supports recursive download/upload of files and directories."""
    _signal = pyqtSignal(str)

    def __del__(self):
        """Close the FTP connection on teardown.
        :return:
        """
        print("connect close")
        self.ftp.close()
        #self._signal.emit('Del')

    def __init__(self, host: str, username: str, password: str, port='21'):
        """Store host, port, username and password; the actual connection
        is made later by startConnect().
        :param host: host
        :param username: user name
        :param password: password
        :param port: port
        """
        print("init")
        super(FTPClient, self).__init__()
        self.host = host
        self.port = int(port)
        self.username = username
        self.password = password

    def startConnect(self):
        """Establish the FTP connection and build the initial file tree.
        :return:
        """
        self.nowDirName = 'root'
        # build the file tree and its root node
        self.tree = Tree()
        itemProject = QStandardItem('root')
        itemProject.setIcon(self.getIcon())
        self.tree.create_node('root', 'root', parent=None, data=itemProject)
        # connect to the FTP server; on success create root's children
        self.ftp_connect()
        self.createTree(self.ftp.nlst(), 'root')
        #print('pwd',self.ftp.pwd())
        # the commented-out lines below performed navigation inside the
        # remote file system and listed its contents
        #print('cwd0428',self.ftp.cwd('0428'))
        #print('nlst',self.ftp.nlst())
        #print('pwd',self.ftp.pwd())
        #print('cwd0428', self.ftp.cwd('Laser'))
        #print('nlst', self.ftp.nlst())
        #print('pwd', self.ftp.pwd())
        #self.createTree(self.ftp.nlst(), 'root/0428')
        self.tree.show()
        self._signal.emit("OK")  # signal: connection finished
        #print("EMIT OK")
        #print(self.tree.children('root'))
        #self.download_file('/readme.txt','G:/data_sun/readme.txt')

    def restartTree(self):
        # rebuild the tree from scratch starting at the server root
        print("刷新树")
        self.tree.remove_subtree('root')
        itemProject = QStandardItem('root')
        itemProject.setIcon(self.getIcon())
        self.tree.create_node('root', 'root', parent=None, data=itemProject)
        self.ftp.cwd('/')
        self.createTree(self.ftp.nlst(), 'root')

    def createTree(self, chiledList: list, parent: str) -> bool:
        """Build the file tree from the child listing and the parent name.
        :param chiledList: all files of the next directory level
        :param parent: name of the parent path
        :return: whether a subtree was created: 0 failed, 1 created
        """
        if self.tree.subtree(parent).depth() == 0:
            # subtree depth 0 means this node has not been expanded yet
            print("叶节点,开始创建文件子树")
        else:
            print("不是叶节点")
            return 0
        # create one node per entry; node ids are "<parent>/<name>" so that
        # identical file names under different folders stay unique
        for i in chiledList:
            itemProject = QStandardItem((parent + '/' + i))
            #print((parent+'/'+i),(parent+'/'+i).split('.'))
            if len((parent + '/' + i).split('.')) == 1:
                # no extension: treat as a folder and use the folder icon
                itemProject.setIcon(self.getIcon())
            else:
                itemProject.setIcon(
                    self.getIcon('.' + (parent + '/' + i).split('.')[-1]))
            self.tree.create_node(
                parent + '/' + i.encode('utf-8').decode('utf-8'),
                parent + '/' + i.encode('utf-8').decode('utf-8'),
                parent=parent,
                data=itemProject)
        return 1

    def ftp_connect(self):
        """Do the actual FTP connect and login.
        :return: None
        """
        self.ftp = FTP()
        # ftp.set_debuglevel(2)
        # connect to the host
        self.ftp.connect(self.host, self.port)
        # log in
        self.ftp.login(self.username, self.password)
        self.ftp.encoding = 'utf-8'
        print("log in success")

    def getIcon(self, extension='file'):
        """Return the OS default icon for a file extension.
        :param extension: file extension; defaults to a plain file
        :return: the matching icon
        """
        provider = QFileIconProvider()
        tmpFile = QTemporaryFile('./_aa' + extension)
        tmpFile.setAutoRemove(False)
        icon = provider.icon(QFileInfo('./_aa' + extension))
        if extension == 'file':
            # create a temporary file first, then return its icon
            fileInfo = QFileInfo("C:\\Users")
            fileIcon = QFileIconProvider()
            #print(fileInfo, fileIcon)
            icon = QIcon(fileIcon.icon(fileInfo))
            return icon
        return icon

    def download_file(self, remotepath: str, localpath: str):
        """Download a file from the remote FTP server to a local path.
        :param remotepath: remote path
        :param localpath: local path
        :return: None
        """
        remotepath = remotepath.replace('//', '/')
        localpath = localpath.replace('//', '/')
        if os.path.isdir(remotepath) or len(remotepath.split('.')) == 1:
            # no extension: treated as a directory
            self.download_dir(remotepath, localpath)
            return
        print("是文件")
        bufsize = 1024
        fp = open(localpath, 'wb')
        self.ftp.retrbinary('RETR ' + remotepath, fp.write, bufsize)
        self.ftp.set_debuglevel(0)
        fp.close()
        print("下载远程文件:", remotepath, "\t到本地路径:", localpath, "成功")

    def download_dir(self, remotedir: str, localdir: str):
        """Download a remote directory into a local directory, e.g.
        download_dir('/test','G:/ftpdata/test10') or
        download_dir('test','G:/ftpdata/test10'); the latter creates a test
        folder, the former would fail to create '/test' and so does not.
        :param remotedir: remote directory
        :param localdir: local directory
        :return:
        """
        try:
            # the directory name was appended to the local path earlier,
            # so the local folder must be created first
            os.makedirs(localdir)
        except OSError:
            print("本地文件已经存在,不进行新建")
            pass
        print("开始下载文件夹:从 ", remotedir, " 到 ", localdir)
        os.chdir(localdir)
        self.walk(remotedir, localdir)
        print("文件夹下载结束")

    def get_dirs_files(self):
        """List files and folders of the current remote directory.
        :return: (files in the current dir, folders in the current dir)
        """
        dir_res = []
        self.ftp.dir('.', dir_res.append)
        # '-' prefix marks plain files, 'd' marks directories in LIST output
        files = [f.split(None, 8)[-1] for f in dir_res if f.startswith('-')]
        dirs = [f.split(None, 8)[-1] for f in dir_res if f.startswith('d')]
        return (files, dirs)

    def walk(self, remotedir, localdir):
        """Recurse into a remote directory, transferring every file one by
        one until the directory is fully downloaded.
        :param remotedir: remote directory
        :param localdir: local directory
        :return:
        """
        print('Walking to', remotedir, os.getcwd())
        self.ftp.cwd(remotedir)
        try:
            os.mkdir(remotedir)
        except OSError:
            print("创建文件夹失败,文件夹可能已经存在")
            pass
        os.chdir(localdir)
        print("now dir", os.getcwd())
        ftp_curr_dir = self.ftp.pwd()
        print("local dir", localdir)
        files, dirs = self.get_dirs_files()
        print("FILES: ", files)
        print("DIRS: ", dirs)
        for f in files:
            print(remotedir, ':', f)
            outf = open(f, 'wb')
            try:
                self.ftp.retrbinary('RETR %s' % f, outf.write)
            finally:
                outf.close()
        for d in dirs:
            print("Dir:", d, ftp_curr_dir)
            os.chdir(localdir)
            #self.ftp.cwd(ftp_curr_dir)
            self.walk(d, os.path.join(localdir, d))
        # without this line the recursion only works one level deep
        self.ftp.cwd('..')

    def uploadFile(self, remotepath='./', localpath='./'):
        # upload a single local file to the remote path
        print("Upload", localpath, remotepath, os.path.isfile(localpath))
        if not os.path.isfile(localpath):
            return
        print('+++ upload %s to %s' % (localpath, remotepath))
        self.ftp.storbinary('STOR ' + remotepath, open(localpath, 'rb'))

    def upload_dir(self, remotedir='./', localdir='./'):
        '''
        Upload a local directory to the server.
        :param localdir:
        :param remotedir:
        :return:
        '''
        if not os.path.isdir(localdir):
            return
        print("Upload dir", remotedir, localdir)
        try:
            self.ftp.cwd(remotedir)
        except:
            # remote dir missing: create it, then enter it
            self.ftp.mkd(remotedir)
            self.ftp.cwd(remotedir)
            print("远程文件夹创建成功")
        for file in os.listdir(localdir):
            # src = os.path.join(localdir, file)
            src = localdir + '/' + file
            print(src)
            if os.path.isfile(src):
                print("is file")
                self.uploadFile(file, src)
            elif os.path.isdir(src):
                try:
                    self.ftp.mkd(file)
                except:
                    sys.stderr.write('the dir is exists %s' % file)
                self.upload_dir(file, src)
        self.ftp.cwd('..')

    def upload_file(self, remotepath: str, localpath: str):
        """Upload a local file to the server.
        :param remotepath: remote path
        :param localpath: local path
        :return: None
        """
        while '//' in remotepath:
            remotepath = remotepath.replace('//', '/')
        while '//' in localpath:
            localpath = localpath.replace('//', '/')
        print(remotepath, localpath)
        if os.path.isdir(remotepath) or len(remotepath.split('.')) == 1:
            # no extension: treated as a directory
            self.upload_dir(remotepath, localpath)
            return
        bufsize = 1024
        fp = open(localpath, 'rb')
        self.ftp.storbinary('STOR ' + remotepath, fp, bufsize)
        self.ftp.set_debuglevel(0)
        fp.close()
        print("上传本地文件:", localpath, "\t到远程:", remotepath, "成功")
def create(self, words_list, postags_list, arcs_list):
    """Build a dependency parse tree (at most 4 layers below the root).

    :param words_list: sentence tokens, in order
    :param postags_list: POS tag per token (parallel to words_list)
    :param arcs_list: 'head:relation' string per token; head is the 1-based
        index of the token's parent, 0 marks the sentence root (HED)
    :return: treelib.Tree whose node tags are "<index> <word>" and whose
        data is "<postag> <relation>"
    :raises ValueError: if no token has head 0
    """
    tree = Tree()

    # 1-based head index of every token
    heads = [int(arc.split(':')[0]) for arc in arcs_list]

    # the root (HED) token has head 0; if several match, the last one wins
    # (preserves the original behaviour)
    HED_id = None
    for i, head in enumerate(heads):
        if head == 0:
            HED_id = i
    if HED_id is None:
        raise ValueError('no root (head == 0) found in arcs_list')

    # Collect up to 4 breadth-first layers below the root.  `frontier` maps
    # a token index of the previous layer to that token's node identifier;
    # a token belongs to layer k+1 when its head lies in layer k.
    frontier = {HED_id: 'HED'}
    layers = []
    for _ in range(4):
        layer = []           # (token index, identifier, parent identifier)
        next_frontier = {}
        for i, head in enumerate(heads):
            parent_idx = head - 1
            if parent_idx in frontier:
                ident = 'node' + str(i)
                layer.append((i, ident, frontier[parent_idx]))
                next_frontier[i] = ident
        layers.append(layer)
        frontier = next_frontier

    # root node first
    if not tree.contains('HED'):
        tree.create_node(
            str(HED_id) + ' ' + words_list[HED_id],
            'HED',
            data=postags_list[HED_id] + ' ' + arcs_list[HED_id].split(':')[1])

    # then each layer top-down, so every parent exists before its children
    for layer in layers:
        for i, ident, parent in layer:
            tree.create_node(
                str(i) + ' ' + words_list[i],
                ident,
                parent=parent,
                data=postags_list[i] + ' ' + arcs_list[i].split(':')[1])

    return tree
def __init__(self): self.mfest = load_manifest("../chapters.yaml") self.books = {} self.chaps = {} for adef in self.mfest: for defheader, defs in adef.items(): if not defheader.startswith("BOOK_"): self.chaps[defheader] = defs else: self.books[defheader] = defs # # nested dict approach, not working very well ''' for title, bookchaps in self.books.items(): print ("BOOK: {title}".format(title=title)) print ("+" * 80) book = {title: bookchaps} pprint (book) print ("-" * 80) pprint (expand_def(book, self.chaps)) print ("*" * 80) #pprint(books) ''' # # tree approach, better self.treechap = {} for title, chap in self.chaps.items(): self.treechap[title] = create_tree({title: chap}) self.treebook = {} for title, book in self.books.items(): self.treebook[title] = create_tree({title: book}) for title, tree in self.treebook.items(): # tree.show() for node in tree.expand_tree(mode=Tree.DEPTH): # print ("+", node) realtag = node if type(realtag) is Node: realtag = node.tag if "|" in realtag: realtag = realtag.split("|")[1] if realtag.startswith("$ref:"): chapkey = realtag.split("$ref:")[1] newtree = Tree(tree=self.treechap[chapkey], deep=True) # move up its children to replace totally the root subtree = newtree.subtree(newtree.children(newtree.root)[0].tag) newtree = subtree for anode in tree.children(node): origtag = anode.tag if "|" in origtag: origtag = anode.tag.split("|")[1] # print (origtag) newtree.create_node(timestamp_node(origtag), origtag, parent=newtree.root, data=time()) # find parent node of the node to be replaced parent = tree.parent(node) # use the old timestamp data to preserve insertion order newtree.get_node(newtree.root).data = tree.get_node(node).data # remove old node tree.remove_subtree(node) # replace with new expanded node tree.paste(parent.identifier, newtree)
class AcquisitionChainIter(object):
    """Iterator over an acquisition chain.

    Builds a tree of device iterators mirroring the chain's device tree
    (root identifier is the string "root") and executes prepare/start/stop
    level by level with gevent.
    """

    def __init__(self, acquisition_chain, parallel_prepare=True):
        self.__sequence_index = -1
        self._parallel_prepare = parallel_prepare
        self.__acquisition_chain_ref = weakref.ref(acquisition_chain)

        # set all slaves into master
        for master in (x for x in acquisition_chain._tree.expand_tree()
                       if isinstance(x, AcquisitionMaster)):
            del master.slaves[:]
            master.slaves.extend(
                acquisition_chain._tree.get_node(master).fpointer)

        # create iterators tree; identifiers are the device iterators
        # themselves, except the root which is the plain string "root"
        self._tree = Tree()
        self._root_node = self._tree.create_node("acquisition chain", "root")
        device2iter = dict()
        for dev in acquisition_chain._tree.expand_tree():
            if not isinstance(dev, (AcquisitionDevice, AcquisitionMaster)):
                continue
            dev_node = acquisition_chain._tree.get_node(dev)
            parent = device2iter.get(dev_node.bpointer, "root")
            try:
                it = iter(dev)
            except TypeError:
                # device is not iterable: wrap it in a one-shot iterator
                one_shot = self.acquisition_chain._device2one_shot_flag.get(
                    dev, True)
                dev_iter = DeviceIterator(dev, one_shot)
            else:
                dev_iter = DeviceIteratorWrapper(it)
            device2iter[dev] = dev_iter
            self._tree.create_node(
                tag=dev.name, identifier=dev_iter, parent=parent)

    @property
    def acquisition_chain(self):
        # may return None once the chain has been garbage collected
        return self.__acquisition_chain_ref()

    def prepare(self, scan, scan_info):
        """Prepare the scan and all devices; presets only on first sequence."""
        preset_tasks = list()
        if self.__sequence_index == 0:
            preset_tasks.extend([
                gevent.spawn(preset.prepare)
                for preset in self.acquisition_chain._presets_list
            ])
        scan.prepare(scan_info, self.acquisition_chain._tree)
        self._execute(
            "_prepare", wait_between_levels=not self._parallel_prepare)
        if self.__sequence_index == 0:
            gevent.joinall(preset_tasks, raise_error=True)

    def start(self):
        """Start presets (first sequence only), then all devices."""
        if self.__sequence_index == 0:
            preset_tasks = [
                gevent.spawn(preset.start)
                for preset in self.acquisition_chain._presets_list
            ]
            gevent.joinall(preset_tasks, raise_error=True)
        self._execute("_start")

    def stop(self):
        """Stop all devices (master to slave), then the presets."""
        self._execute("stop", master_to_slave=True, wait_all_tasks=True)
        preset_tasks = [
            gevent.spawn(preset.stop)
            for preset in self.acquisition_chain._presets_list
        ]
        gevent.joinall(preset_tasks)  # wait to call all stop on preset
        gevent.joinall(preset_tasks, raise_error=True)

    def next(self):
        """Advance every device iterator; dispatch "end" on StopIteration.

        Fix: the original compared tree identifiers to the root with
        `is`/`is not 'root'` — identity comparison with a string literal only
        works thanks to CPython interning (SyntaxWarning on 3.8+); equality
        is used instead.  The non-root identifiers are iterator objects, for
        which `== 'root'` is always False, so behaviour is unchanged.
        """
        self.__sequence_index += 1
        gevent.joinall([
            gevent.spawn(dev_iter.wait_ready)
            for dev_iter in self._tree.expand_tree() if dev_iter != 'root'
        ], raise_error=True)
        try:
            if self.__sequence_index:
                for dev_iter in self._tree.expand_tree():
                    if dev_iter == 'root':
                        continue
                    dev_iter.next()
        except StopIteration:
            # should we stop all devices?
            for acq_dev_iter in (
                    x for x in self._tree.expand_tree()
                    if x != 'root' and isinstance(
                        x.device, (AcquisitionDevice, AcquisitionMaster))):
                if hasattr(acq_dev_iter, 'wait_reading'):
                    acq_dev_iter.wait_reading()
                dispatcher.send("end", acq_dev_iter.device)
            raise
        return self

    def _execute(self, func_name, master_to_slave=False,
                 wait_between_levels=True, wait_all_tasks=False):
        """Spawn `func_name` on every device, level by level.

        :param master_to_slave: iterate top-down instead of bottom-up
        :param wait_between_levels: join the tasks of a level before
            starting the next one
        :param wait_all_tasks: join without raising first, so every task
            runs to completion before errors are re-raised
        """
        tasks = list()
        prev_level = None
        if master_to_slave:
            devs = list(self._tree.expand_tree(mode=Tree.WIDTH))[1:]
        else:
            devs = reversed(list(self._tree.expand_tree(mode=Tree.WIDTH))[1:])
        for dev in devs:
            node = self._tree.get_node(dev)
            level = self._tree.depth(node)
            if wait_between_levels and prev_level != level:
                gevent.joinall(tasks, raise_error=True)
                tasks = list()
                prev_level = level
            func = getattr(dev, func_name)
            tasks.append(gevent.spawn(func))
        # ensure that all tasks are executed
        # (i.e: don't raise the first exception on stop)
        if wait_all_tasks:
            gevent.joinall(tasks)
        gevent.joinall(tasks, raise_error=True)
class InteractiveServer:
    """Interactive shell served to a connected client: shows the banner,
    searches/pages assets and nodes, and proxies the chosen asset."""

    _sentinel = object()           # returned by dispatch() to end the session
    _user_assets_cached = {}       # class-level cache: user id -> asset list

    def __init__(self, client):
        self.client = client
        self.closed = False
        self._results = None
        self.nodes = None
        self.assets = None
        self.get_user_assets_finished = False
        self.page = 1
        self.total_asset_count = 0  # total number of assets granted to the user
        self.total_count = 0  # total number of assets in the current paged view
        self.node_tree = None  # tree of granted nodes
        self.load_user_assets_from_cache()
        self.get_user_assets_and_update_async()
        self.get_user_nodes_async()

    @property
    def page_size(self):
        # configured page size, 'all' for no paging, else fit to terminal height
        _page_size = config['ASSET_LIST_PAGE_SIZE']
        if _page_size.isdigit():
            return int(_page_size)
        elif _page_size == 'all':
            return self.total_count
        else:
            return self.client.request.meta['height'] - 8

    @property
    def total_pages(self):
        return math.ceil(self.total_count / self.page_size)

    @property
    def need_paging(self):
        return config['ASSET_LIST_PAGE_SIZE'] != 'all'

    @property
    def results(self):
        # never None: an empty list when nothing has been displayed yet
        if self._results:
            return self._results
        else:
            return []

    @results.setter
    def results(self, value):
        self._results = value

    #
    # Display banner
    #

    def display_banner(self):
        """Clear the screen and print the logo, welcome header and menu."""
        self.client.send(char.CLEAR_CHAR)
        self.display_logo()
        header = _(
            "\n{T}{T}{title} {user}, Welcome to use Jumpserver open source fortress system {end}{R}{R}"
        )
        menu = [
            _("{T}1) Enter {green}ID{end} directly login or enter {green}part IP, Hostname, Comment{end} to search login(if unique).{R}"),
            _("{T}2) Enter {green}/{end} + {green}IP, Hostname{end} or {green}Comment {end} search, such as: /ip.{R}"),
            _("{T}3) Enter {green}p{end} to display the host you have permission.{R}"),
            _("{T}4) Enter {green}g{end} to display the node that you have permission.{R}"),
            _("{T}5) Enter {green}g{end} + {green}NodeID{end} to display the host under the node, such as g1.{R}"),
            _("{T}6) Enter {green}s{end} Chinese-english switch.{R}"),
            _("{T}7) Enter {green}h{end} help.{R}"),
            _("{T}8) Enter {green}r{end} to refresh your assets and nodes.{R}"),
            _("{T}0) Enter {green}q{end} exit.{R}")
        ]
        self.client.send_unicode(
            header.format(title="\033[1;32m",
                          user=self.client.user,
                          end="\033[0m",
                          T='\t',
                          R='\r\n\r'))
        for item in menu:
            self.client.send_unicode(
                item.format(green="\033[32m", end="\033[0m", T='\t',
                            R='\r\n\r'))

    def display_logo(self):
        # print logo.txt line by line, skipping comment lines
        logo_path = os.path.join(config['ROOT_PATH'], "logo.txt")
        if not os.path.isfile(logo_path):
            return
        with open(logo_path, 'rb') as f:
            for i in f:
                if i.decode('utf-8').startswith('#'):
                    continue
                self.client.send_unicode(
                    i.decode('utf-8').replace('\n', '\r\n'))

    def dispatch(self, opt):
        """Map one menu input to its handler; _sentinel means quit."""
        if opt is None:
            return self._sentinel
        elif opt.startswith("/"):
            self.search_and_display_assets(opt.lstrip("/"))
        elif opt in ['p', 'P', '']:
            self.display_assets()
        elif opt in ['g', 'G']:
            self.display_nodes_as_tree()
        elif opt.startswith("g") and opt.lstrip("g").isdigit():
            self.display_node_assets(int(opt.lstrip("g")))
        elif opt in ['q', 'Q', 'exit', 'quit']:
            return self._sentinel
        elif opt in ['s', 'S']:
            switch_lang()
            self.display_banner()
        elif opt in ['r', 'R']:
            self.refresh_assets_nodes()
            self.display_banner()
        elif opt in ['h', 'H']:
            self.display_banner()
        else:
            self.search_and_proxy_assets(opt)

    #
    # Search assets
    #

    def search_and_display_assets(self, q):
        assets = self.search_assets(q)
        self.display_assets_paging(assets)

    def search_and_proxy_assets(self, opt):
        """Proxy directly on a unique search hit, otherwise page the results."""
        assets = self.search_assets(opt)
        if assets and len(assets) == 1:
            asset = assets[0]
            # RDP/Windows assets cannot be proxied through this terminal
            if asset.protocol == "rdp" \
                    or asset.platform.lower().startswith("windows"):
                self.client.send_unicode(
                    warning(
                        _("Terminal does not support login rdp, "
                          "please use web terminal to access")))
                return
            self.proxy(asset)
        else:
            self.display_assets_paging(assets)

    def refresh_assets_nodes(self):
        self.get_user_assets_and_update_async()
        self.get_user_nodes_async()

    def wait_until_assets_load(self):
        # block until the async asset loading thread has produced something
        while self.assets is None and \
                self.get_user_assets_finished is False:
            time.sleep(0.2)

    def search_assets(self, q):
        """Search the user's assets: all / by id / exact match / fuzzy match."""
        self.wait_until_assets_load()
        result = []
        # everything
        if q in ('', None):
            result = self.assets
        # the input is a number: the user may be searching by unique ID
        elif q.isdigit() and self.results and \
                len(self.results) >= int(q):
            result = [self.results[int(q) - 1]]
        # if there is exactly one full match, return it directly
        if len(result) == 0:
            _result = [
                asset for asset in self.assets if is_obj_attr_eq(asset, q)
            ]
            if len(_result) == 1:
                result = _result
        # finally fall back to fuzzy matching
        if len(result) == 0:
            result = [
                asset for asset in self.assets if is_obj_attr_has(asset, q)
            ]
        return result

    #
    # Display assets
    #

    def display_assets(self):
        self.wait_until_assets_load()
        self.display_assets_paging(self.assets)

    def display_assets_paging(self, assets):
        """Drive the page generator: show a page, read the next action."""
        if len(assets) == 0:
            self.client.send_unicode(wr(_("No Assets"), before=0))
            return
        self.total_count = len(assets)
        action = None
        gen = self._page_generator(assets)
        while True:
            try:
                page, _assets = gen.send(action)
            except StopIteration as e:
                # the generator returns (page, assets) for the last page,
                # or (None, None) when there is nothing left to show
                if None not in e.value:
                    page, _assets = e.value
                    self.display_a_page_assets(page, _assets)
                break
            else:
                self.display_a_page_assets(page, _assets)
                self.display_page_bottom_prompt()
                action = self.get_user_action()

    def _page_generator(self, assets):
        """Yield (page, slice); send() the user's action to move around."""
        start, page = 0, 1
        while not self.client.closed:
            _assets = assets[start:start + self.page_size]
            # last page
            if page == self.total_pages:
                return page, _assets
            # hand out the page, receive the next action
            else:
                action = yield page, _assets
            # quit
            if action == BACK:
                break
            # no paging: leave page number and index unchanged
            elif not self.need_paging:
                continue
            # previous page
            elif action == PAGE_UP:
                if page <= 1:
                    page = 1
                    start = 0
                else:
                    page -= 1
                    start -= self.page_size
            # next page
            else:
                page += 1
                start += len(_assets)
        return None, None

    def display_a_page_assets(self, page, assets):
        """Render one page: sorted table with width-aware CJK formatting."""
        self.client.send(char.CLEAR_CHAR)
        self.page = page
        sort_by = config["ASSET_LIST_SORT_BY"]
        self.results = sort_assets(assets, sort_by)
        fake_data = [_("ID"), _("Hostname"), _("IP"), _("LoginAs")]
        id_length = max(len(str(len(self.results))), 4)
        hostname_length = item_max_length(self.results, 15,
                                          key=lambda x: x.hostname)
        sysuser_length = item_max_length(
            self.results, key=lambda x: x.system_users_name_list)
        size_list = [id_length, hostname_length, 16, sysuser_length]
        header_without_comment = format_with_zh(size_list, *fake_data)
        # the comment column takes whatever terminal width is left (min 2)
        comment_length = max(
            self.client.request.meta["width"] -
            size_of_str_with_zh(header_without_comment) - 1, 2)
        size_list.append(comment_length)
        fake_data.append(_("Comment"))
        self.client.send_unicode(
            wr(title(format_with_zh(size_list, *fake_data))))
        for index, asset in enumerate(self.results, 1):
            data = [
                index, asset.hostname, asset.ip,
                asset.system_users_name_list, asset.comment
            ]
            self.client.send_unicode(wr(format_with_zh(size_list, *data)))
        self.client.send_unicode(
            wr(title(
                _("Page: {}, Count: {}, Total Page: {}, Total Count: {}").
                format(self.page, len(self.results), self.total_pages,
                       self.total_count)),
               before=1))

    def display_page_bottom_prompt(self):
        """Print the paging key hints below the table."""
        msg = wr(
            _('Tips: Enter the asset ID and log directly into the asset.'),
            before=1)
        self.client.send_unicode(msg)
        prompt_page_up = _("Page up: P/p")
        prompt_page_down = _("Page down: Enter|N/n")
        prompt_back = _("BACK: b/q")
        prompts = [prompt_page_up, prompt_page_down, prompt_back]
        prompt = '\t'.join(prompts)
        self.client.send_unicode(wr(prompt, before=1))

    def get_user_action(self):
        """Read one input and translate it into PAGE_UP / PAGE_DOWN / BACK."""
        opt = net_input(self.client, prompt=':')
        if opt in ('p', 'P'):
            return PAGE_UP
        elif opt in ('b', 'q', None):
            return BACK
        elif opt and opt.isdigit() and self.results and 0 < int(opt) <= len(
                self.results):
            # a 1-based asset id was entered: connect, then drop back
            self.proxy(self.results[int(opt) - 1])
            return BACK
        else:
            return PAGE_DOWN

    #
    # Get assets
    #

    def load_user_assets_from_cache(self):
        assets = self.__class__._user_assets_cached.get(self.client.user.id)
        self.assets = assets
        if assets:
            self.total_asset_count = len(assets)

    def get_user_assets_and_update_async(self):
        # fetch assets from the app service without blocking the session
        thread = threading.Thread(target=self.get_user_assets_and_update)
        thread.start()

    def get_user_assets_and_update(self):
        assets = app_service.get_user_assets(self.client.user)
        assets = self.filter_system_users(assets)
        self.__class__._user_assets_cached[self.client.user.id] = assets
        self.load_user_assets_from_cache()
        self.get_user_assets_finished = True

    #
    # Nodes
    #

    def get_user_nodes_async(self):
        thread = threading.Thread(target=self.get_user_nodes)
        thread.start()

    def get_user_nodes(self):
        nodes = app_service.get_user_asset_groups(self.client.user)
        # sorting by key guarantees parents come before children
        nodes = sorted(nodes, key=lambda node: node.key)
        self.nodes = self.filter_system_users_of_assets_under_nodes(nodes)
        self._construct_node_tree()

    def filter_system_users_of_assets_under_nodes(self, nodes):
        for node in nodes:
            node.assets_granted = self.filter_system_users(node.assets_granted)
        return nodes

    def _construct_node_tree(self):
        """Build the display tree of granted nodes under a synthetic root."""
        self.node_tree = Tree()
        root = 'ROOT_ALL_ORG_NODE'
        self.node_tree.create_node(tag='', identifier=root, parent=None)
        for index, node in enumerate(self.nodes):
            tag = "{}.{}({})".format(index + 1, node.name, node.assets_amount)
            key = node.key
            # parent key is everything before the last ':' of the node key
            parent_key = key[:node.key.rfind(':')] or root
            self.node_tree.create_node(tag=tag, identifier=key, data=node,
                                       parent=parent_key)

    def display_nodes_as_tree(self):
        if self.nodes is None:
            self.get_user_nodes()
        if not self.nodes:
            self.client.send_unicode(wr(_('No Nodes'), before=0))
            return
        self.node_tree.show(key=lambda node: node.identifier)
        self.client.send_unicode(
            wr(title(_("Node: [ ID.Name(Asset amount) ]")), before=0))
        self.client.send_unicode(
            wr(self.node_tree._reader.replace('\n', '\r\n'), before=0))
        prompt = _(
            "Tips: Enter g+NodeID to display the host under the node, such as g1"
        )
        self.client.send_unicode(wr(title(prompt), before=1))

    def display_node_assets(self, _id):
        if self.nodes is None:
            self.get_user_nodes()
        if _id > len(self.nodes) or _id <= 0:
            msg = wr(warning(_("There is no matched node, please re-enter")))
            self.client.send_unicode(msg)
            self.display_nodes_as_tree()
            return
        assets = self.nodes[_id - 1].assets_granted
        self.display_assets_paging(assets)

    #
    # System users
    #

    @staticmethod
    def filter_system_users(assets):
        """Keep only the highest-priority system users of every asset."""
        for asset in assets:
            system_users_granted = asset.system_users_granted
            high_priority = max([s.priority for s in system_users_granted]) \
                if system_users_granted else 1
            system_users_cleaned = [
                s for s in system_users_granted
                if s.priority == high_priority
            ]
            asset.system_users_granted = system_users_cleaned
        return assets

    def choose_system_user(self, system_users):
        """Pick a system user: automatic on 0/1 candidates, else prompt."""
        if len(system_users) == 1:
            return system_users[0]
        elif len(system_users) == 0:
            return None
        while True:
            self.client.send_unicode(wr(_("Select a login:: "), after=1))
            self.display_system_users(system_users)
            opt = net_input(self.client, prompt="ID> ")
            if opt.isdigit() and len(system_users) > int(opt):
                return system_users[int(opt)]
            elif opt in ['q', 'Q']:
                return None
            else:
                # fall back to matching the system user by name
                for system_user in system_users:
                    if system_user.name == opt:
                        return system_user

    def display_system_users(self, system_users):
        for index, system_user in enumerate(system_users):
            self.client.send_unicode(
                wr("{} {}".format(index, system_user.name)))

    #
    # Proxy
    #

    def proxy(self, asset):
        """Open a proxied session to `asset` with a chosen system user."""
        system_user = self.choose_system_user(asset.system_users_granted)
        if system_user is None:
            self.client.send_unicode(_("No system user"))
            return
        forwarder = ProxyServer(self.client, asset, system_user)
        forwarder.proxy()

    #
    # Entrance
    #

    def interact(self):
        """Main REPL: read an option and dispatch until closed or socket error."""
        self.display_banner()
        while not self.closed:
            try:
                opt = net_input(self.client, prompt='Opt> ', before=1)
                rv = self.dispatch(opt)
                if rv is self._sentinel:
                    break
            except socket.error as e:
                logger.debug("Socket error: {}".format(e))
                break
        self.close()

    def close(self):
        logger.debug("Interactive server server close: {}".format(self))
        self.closed = True

    def interact_async(self):
        # currently unused
        thread = threading.Thread(target=self.interact)
        thread.daemon = True
        thread.start()
def fov_connect(fov_ins_array): def parent(edges, i): coords = np.where( edges == i ) edge = edges[ coords[0][0] ] if edge[0] == i: return edge[1] + 1 return edge[0] + 1 skels = kimimaro.skeletonize( fov_ins_array, teasar_params={ 'scale': 4, 'const': 500, # physical units 'pdrf_exponent': 4, 'pdrf_scale': 100000, 'soma_detection_threshold': 1100, # physical units 'soma_acceptance_threshold': 3500, # physical units 'soma_invalidation_scale': 1.0, 'soma_invalidation_const': 300, # physical units 'max_paths': None, # default None }, dust_threshold=50, anisotropy=(200,200,1000), # default True fix_branching=True, # default True fix_borders=True, # default True progress=True, # default False parallel=2, # <= 0 all cpu, 1 single process, 2+ multiprocess ) ends_dict = {} fov_ins_skel_array = np.zeros_like(fov_ins_array) ends_array = np.zeros_like(fov_ins_array) for label_ in skels: skel = skels[label_] coords = (skel.vertices / np.array([200, 200, 1000])).astype(int) fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label_ coords = coords.tolist() edges = skel.edges.tolist() ftree = Tree() cur_ = edges[0][0] ftree.create_node(cur_, cur_, data = coords[0]) cur_list = [cur_] while(len(edges) > 0 and len(cur_list) > 0): _cur_list = [] edges_ = edges[:] #print(cur_list) for cur_ in cur_list: next_inds = np.where(np.array(edges_) == cur_)[0] if len(next_inds) == 0:continue for next_ind in next_inds: edge_ = edges_[next_ind] edges.remove(edge_) #print(cur_, edge_) if edge_[0] == cur_: next_ = edge_[-1] else: next_ = edge_[0] _cur_list.append(next_) ftree.create_node(next_, next_, data = coords[next_], parent = cur_) edges_ = edges[:] cur_list = _cur_list ends = [x.data for x in ftree.leaves()] ends.append(coords[0]) ends_dict[label_] = ends ends_ = np.array(ends) ends_array[ends_[:, 0], ends_[:, 1], ends_[:, 2]] = 1 #ends_array = dilation(ends_array, ball(1)) return fov_ins_skel_array, ends_array, ends_dict
import selenium #Library for navigating through webpages from selenium import webdriver #Main driver used from treelib import Node, Tree #Tool implementing tree structures driver = webdriver.Chrome( 'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe') #Here the path for the webdriver of Chrome is given as parameter, or can be added on the path variable of the system driver.get('http://curlie.org/') tree = Tree() #Initialize the tree structure to save the categories categories = driver.find_elements_by_xpath( '//aside/div/h2[@class="top-cat"]/a') #Extract the main categories using a XPATH query #Here it return all the a elements that are in the hierarchy aside-div...with a h2 tag that has 'top-cat' as a class attribute urls = [] tree.create_node("Curlie Site", "curlie") #Create the root node, with "curlie" as ID for category in categories: #Loop through all the main categories tree.create_node(category.text, category.text, parent="curlie") #Create a node with as a name the name of the category, and as ID the name, and as parent the root node urls.append( (category.get_attribute("href"), category.text)) #Fill a list of couples of categorie's link and name for url in urls: #Loop through all the links saved previously driver.get( url[0] ) #Get the link of the category, and navigate through it using the webdriver parentId = url[1] #Get the name of the category categories = driver.find_elements_by_xpath( '//section[@class="children"]/div/div[@class="cat-item"]/a')
for child in children: add_child(parent, child[1]) fill_branch(child[1]) def get_paths(): paths = tree.paths_to_leaves() you = [p for p in paths if 'YOU' in p][0] san = [p for p in paths if 'SAN' in p][0] while you[1] == san[1]: you.pop(0) san.pop(0) san.pop(0) return you[-1::-1] + san root = 'COM' tree = Tree() tree.create_node(root, root) fill_branch(root) # tree.show() print(ORBITS) path = get_paths() print(path) print(len(path) - 3) if INPUT != []: print(INPUT)
print(','.join([tree[node].tag for node in tree.expand_tree()])) example("All family members (with identifiers) but Diane's sub-family:") tree.show(idhidden=False, filter=lambda x: x.identifier != 'diane') example("Let me introduce Diane family only:") sub_t = tree.subtree('diane') sub_t.show() example("Children of Diane:") for child in tree.is_branch('diane'): print(tree[child].tag) example("New members join Jill's family:") new_tree = Tree() new_tree.create_node("n1", 1) # root node new_tree.create_node("n2", 2, parent=1) new_tree.create_node("n3", 3, parent=1) tree.paste('bill', new_tree) tree.show() example("They leave after a while:") tree.remove_node(1) tree.show() example("Now Mary moves to live with grandfather Harry:") tree.move_node('mary', 'harry') tree.show() example("A big family for Mark to send message to the oldest Harry:") print(','.join([tree[node].tag for node in tree.rsearch('mark')]))
# Name-node service of a toy distributed file system: keeps the directory
# tree in memory and relays commands to registered data nodes over HTTP.
import os, sys
import shutil
from flask import Flask, request, jsonify, send_from_directory
from flask import Response
import requests
from treelib import Node, Tree
from time import sleep
import io
import json

File_system = Tree()             # in-memory directory tree
File_system.create_node("", "")  # root node with empty tag and identifier
DATA_NODES = []                  # (ip, port) pairs of known data nodes
down_servers = []                # data nodes that failed the last ping
api = Flask(__name__)


def broadcast_command(command, data=""):
    """Send `command` to every data node that is currently reachable.

    `command` is a URL path appended to each node's base address.  With the
    default empty `data` a GET is issued; otherwise a POST carrying `data`.
    `ping_servers` (defined elsewhere in this file — presumably it refreshes
    `down_servers`; confirm) is called first so dead nodes are skipped.
    """
    ping_servers()
    for (ip, port) in DATA_NODES:
        if (ip, port) not in down_servers:
            url = f"http://{ip}:{port}" + command
            if (data == ""):
                response = requests.get(url)
            else:
                response = requests.post(url, data=data)
            #api.logger.info(response.content)
if __name__ == '__main__':
    # Part labels that belong to this object.
    part1 = 'Bottom1'
    part2 = 'Front1'
    part3 = 'Top1'
    part4 = 'Ear0'
    # wrong parts for this object
    wpart1 = 'Bottom0'
    wpart2 = 'Front0'
    wpart3 = 'Top0'
    wpart4 = 'Ear1'

    # Hand-built parse tree: 'S' is the root rule node, S1-S4 sub-rules,
    # P0-P4 production nodes, and B/B0/F0/T0/E0 the part leaves.
    # ParseNode is a project type defined elsewhere in this file; the second
    # argument is presumably a rule/score id — confirm against its definition.
    t2 = Tree()
    t2.create_node(ParseNode('S', 7), identifier='S')
    t2.create_node(ParseNode('S', 4), parent='S', identifier='S1')
    t2.create_node(ParseNode('S', 4), parent='S', identifier='S2')
    t2.create_node(ParseNode('S', 5), parent='S', identifier='S3')
    t2.create_node(ParseNode('P', 0), identifier='P0', parent='S')
    t2.create_node(ParseNode('Body', ''), identifier='B', parent='P0')
    t2.create_node(ParseNode('P', 3), identifier='P1', parent='S1')
    t2.create_node(ParseNode('P', 1), identifier='P2', parent='S2')
    t2.create_node(ParseNode('P', 5), identifier='P3', parent='S3')
    t2.create_node(ParseNode('S', 4), identifier='S4', parent='S3')
    t2.create_node(ParseNode('P', 7), identifier='P4', parent='S4')
    # Leaves carrying the concrete part labels.
    t2.create_node(ParseNode(part1, ''), parent='P1', identifier='B0')
    t2.create_node(ParseNode(part2, ''), parent='P2', identifier='F0')
    t2.create_node(ParseNode(part3, ''), parent='P3', identifier='T0')
    t2.create_node(ParseNode(part4, ''), parent='P4', identifier='E0')
# NOTE(review): tail of a backtracking CSP solve() method whose def line lies
# outside this chunk; shown here at its original method-body indentation.
        next_var = self.csp.select_next_var(node_schedule.assignment)
        if next_var == None:
            return None  # no unassigned variable left: branch is exhausted
        # here when we expand the childs, we need to fix domain
        for child in node_schedule.expand_with_heuristic(
                self.csp, node_schedule.assignment, next_var):
            self.tree[node_schedule].count += 1
            self.tree.add_node(child, node_schedule)
            next_node = self.solve(child)  # recurse; result is discarded
        return None
        # var = self.csp.select_next_var(node_schedule.schedule_assign)
        # if not var: return None
        # value = self.csp.select_next_value(node_schedule.schedule_assign, var)


if __name__ == "__main__":
    # csp = CSPSchedule
    # print("put sad wings around me now")
    # bb = BacktrackSchedule()
    from treelib import Node, Tree
    tree = Tree()
    root = tree.create_node(1, 1, data={})  # create_node returns the Node
    node = NodeX("lul", 2, 2)  # NodeX is a project type defined elsewhere
    tree.add_node(node, root)
class RRT:
    """Rapidly-exploring Random Tree planner for a 12-DOF arm configuration.

    Grows a treelib Tree of configuration points from `current` toward
    `desir`, using multiprocessing workers (`goliniarpros`) to test straight
    connections to the goal, then extracts and replays a route via ROS.
    NOTE(review): configurations appear to be 1x12 row vectors in degrees
    (reshape(1, 12), * np.pi / 180) — confirm against the kinematics class.
    """

    def __init__(self, current, desir, step_size, kenamatic, world_size, obstacle=0):
        self.jobs = []           # worker Process handles
        self.size = world_size   # sampling range, grown over time up to 360
        # Desired start and end point
        self.current = current
        self.desir = desir
        # A matrix that holds all dots from both starts
        self.all_point = self.current
        # The trees that arrange the matrix
        self.tree = Tree()
        self.tree.create_node(0, 0, data=self.current)  # root node
        # Did we finish the search
        self.NOF = Value("i", 0)     # number of live worker processes
        self.statos = Value("i", 0)  # set to 1 when a worker reaches the goal
        # self.winindex = Value("i", 0)
        self.winindex = Manager().list()   # candidate winning node indices
        self.badindex = Manager().list()   # parents for near-collision points
        self.badpoint = Manager().list()   # near-collision points to re-add
        self.pool_point = Manager().list() # points workers try to connect
        self.pool_index = Manager().list() # their tree indices
        self.num_pool = 9
        # The number of dots already inserted into the tree
        self.NOP = 1
        # Step size
        self.step = step_size
        # Number of dimensions
        # self.NOD = len(current.T)
        self.t = time.time()
        self.obstacle = obstacle
        # The kinematics of the arms
        self.kin = kenamatic
        # Initial position of the arm base
        print("open new RRT model")

    def goliniar(self, point_a, point_b):
        """Walk the straight segment a->b; return (last good index, samples).

        Steps through midpoint() samples, skipping ahead proportionally to the
        clearance reported by configure_check; returns the index just before
        the first collision, or the last index if the segment is free.
        """
        mid_all_point = self.midpoint(point_a[0], point_b[0])
        i = 0
        dis = 1
        loops = len(mid_all_point)
        while i < loops:
            ans = self.kin.configure_check(mid_all_point[i].reshape(1, 12),
                                           self.obstacle)
            if ans == 0:
                return i - dis, mid_all_point
            else:
                # Larger clearance -> larger stride (at least 1 sample).
                dis = max(int(ans**2 / 4000), 1)
                i = i + dis
        return loops - 1, mid_all_point

    def goliniarpros(self, a):
        """Worker loop: try to connect each pooled point straight to the goal.

        Runs until some attempt succeeds; on success sets statos and records
        the winning index.  On failure keeps a near-collision point for
        re-insertion into the tree.
        NOTE(review): the for-loop variable `i` is overwritten by `i = 0`
        below, so the indexing of pool_point/pool_index after the inner while
        looks unintended — confirm before relying on this code.
        """
        steelrun = True
        while steelrun:
            for i in range(len(self.pool_point)):
                secsed = True
                allpoints = self.midpoint(self.pool_point[i], self.desir)
                i = 0
                loops = len(allpoints)
                while i < loops:
                    ans = self.kin.configure_check1(
                        allpoints[i].reshape(1, 12), self.obstacle_gpu)
                    if not ans:
                        secsed = False
                        if (i > 8):
                            # Keep a point just before the collision to grow from.
                            self.badpoint.append(allpoints[i - 8])
                            self.badindex.append(self.pool_index[i])
                        del self.pool_point[i]
                        del self.pool_index[i]
                        break
                    else:
                        i = i + max(int(ans**2 / 3000), 1)
                if secsed:
                    self.statos.value = 1
                    self.winindex.append(self.pool_index[i])
                    steelrun = False
                    print("tread {} die \n".format(self.pool_index[i]))
        self.NOF.value -= 1  # this worker is done

    def get_direction(self, tree, index):
        """Build the dense goal->start route through tree node `index`."""
        tree.show()
        #allrote = self.current
        if not index == 0:
            root = tree[index].predecessor(tree._identifier)
            good_route = tree[index].data
            # Climb to the root, prepending each ancestor's point.
            while (not root == 0 or root == None):
                point = np.array(tree[root].data)
                good_route = np.append(point, good_route, axis=0)
                root = tree[root].predecessor(tree._identifier)
            # Densify: goal -> last tree point -> ... -> start.
            allrote = self.midpoint(self.desir[0], good_route[-1])
            allrote = np.append(allrote[:-1],
                                self.midpoint(allrote[-1], good_route[0]),
                                axis=0)
            for i in range(1, len(good_route)):
                allrote = np.append(allrote[:-1],
                                    self.midpoint(good_route[i - 1],
                                                  good_route[i]),
                                    axis=0)
            allrote = np.append(allrote[:-1],
                                self.midpoint(allrote[-1], self.current[0]),
                                axis=0)
        else:
            # Goal is directly connected to the start configuration.
            allrote = self.midpoint(self.desir[0], self.current[0])
# =============================================================================
#         good_route = np.append(self.current, self.desir, axis=0)
#         for i in range(1, len(good_route)):
#             allrote = np.append(allrote, self.midpoint(good_route[i - 1], good_route[i]), axis=0)
# =============================================================================
        # allrote = allrote[1:, :]
        return np.flip(allrote, axis=0)

    # Extracting the path we found from the tree
    def get_direction1(self, tree, index):
        """Build the dense start->goal route through tree node `index`."""
        #tree.show()
        allrote = self.current
        if not index == 0:
            root = tree[index].predecessor(tree._identifier)
            good_route = tree[index].data
            # Climb to the root, prepending each ancestor's point.
            while (not root == 0 or root == None):
                point = np.array(tree[root].data)
                good_route = np.append(point, good_route, axis=0)
                root = tree[root].predecessor(tree._identifier)
            for i in range(1, len(good_route)):
                allrote = np.append(allrote[:-1],
                                    self.midpoint(good_route[i - 1],
                                                  good_route[i]),
                                    axis=0)
            # Append the final leg to the goal and smooth the first legs.
            allrote = np.append(allrote[1:],
                                self.midpoint(self.all_point[index],
                                              self.desir[0]),
                                axis=0)
            allrote = np.append(self.midpoint(allrote[0], allrote[1]),
                                allrote[1:, :],
                                axis=0)
            allrote = np.append(self.midpoint(self.current[0], allrote[0]),
                                allrote,
                                axis=0)
        else:
            # Goal reachable straight from the start: densify start->goal.
            good_route = np.append(self.current, self.desir, axis=0)
            for i in range(1, len(good_route)):
                allrote = np.append(allrote,
                                    self.midpoint(good_route[i - 1],
                                                  good_route[i]),
                                    axis=0)
        # allrote = allrote[1:, :]
        return allrote

    # Gets a vector and normalizes it to a desired step size
    def get_normelize_vec(self, vec):
        return (vec / np.sqrt(np.sum(pow(vec, 2)))) * self.step

    def midpoint(self, startp, endp):
        """Linearly interpolate start->end, ~1 sample per unit of max delta."""
        return np.linspace(startp, endp,
                           int(np.max(np.abs(startp - endp) + 1)))

    # Gets a vector point and returns the geometrically closest vector point
    def min_distance(self, newpoint, data_points):
        return np.argmin(
            np.sum(pow(np.subtract(newpoint, data_points), 2), axis=1))

    # Adds a point into the tree
    def add_to_tree(self, index, newpoint):
        self.tree.create_node(self.NOP, self.NOP, parent=index, data=newpoint)
        self.all_point = np.append(self.all_point, newpoint, axis=0)
        self.NOP += 1
        print("NOP : ", self.NOP)

    def bildwold(self):
        """Build the obstacle point cloud: two walls, a roof and a slab.

        Returns an (N, 3) array of obstacle points sampled on planes.
        """
        NOO = 200  # half-extent of the world
        a = 1
        p = 50     # samples per line
        obs = np.array(
            [np.ones(p) * NOO, np.linspace(-NOO, NOO, num=p), np.ones(p)]).T
        # Two vertical walls at x = +/-NOO, stacked layer by layer.
        for i in range(1, int(NOO * a)):
            obs = np.append(obs,
                            np.array([
                                np.ones(p) * NOO,
                                np.linspace(-NOO, NOO, num=p),
                                np.ones(p) * (i)
                            ]).T,
                            axis=0)
            obs = np.append(obs,
                            np.array([
                                np.ones(p) * -NOO,
                                np.linspace(-NOO, NOO, num=p),
                                np.ones(p) * (i)
                            ]).T,
                            axis=0)
        obs = np.append(obs,
                        np.array([
                            np.linspace(-NOO, NOO, num=p),
                            np.ones(p) * NOO,
                            np.ones(p)
                        ]).T,
                        axis=0)
        # Two walls at y = +/-NOO.
        for i in range(1, int(NOO * a)):
            obs = np.append(obs,
                            np.array([
                                np.linspace(-NOO, NOO, num=p),
                                np.ones(p) * NOO,
                                np.ones(p) * (i)
                            ]).T,
                            axis=0)
            obs = np.append(obs,
                            np.array([
                                np.linspace(-NOO, NOO, num=p),
                                np.ones(p) * -NOO,
                                np.ones(p) * (i)
                            ]).T,
                            axis=0)
        NOO = NOO * 3
        # Ground slab at z = 0 swept along y.
        for i in range(1, int(NOO * 2)):
            obs = np.append(obs,
                            np.array([
                                np.linspace(-NOO, NOO, num=p),
                                np.ones(p) * (NOO - i),
                                np.zeros(p)
                            ]).T,
                            axis=0)
        return obs

    def open_prosses(self):
        """Start one goliniarpros worker process and count it in NOF."""
        p = Process(target=self.goliniarpros, args=(self, ))
        self.jobs.append(p)
        p.start()
        self.NOF.value += 1

    # Adds a point in a random direction
    def add_point(self, newpoint):
        """Extend the tree one step from its nearest node toward `newpoint`."""
        index = self.min_distance(newpoint, self.all_point)  #V
        # Clip the sample to one step from the nearest existing point.
        newpoint = self.get_normelize_vec(
            np.subtract(newpoint, self.all_point[index])) + self.all_point[index]
        godindex, temppoints = self.goliniar(
            self.all_point[index].reshape(1, 12), newpoint)
        if (godindex > 0):
            # Insert the farthest collision-free sample and pool it for the
            # goal-connection workers.
            self.add_to_tree(index, temppoints[godindex].reshape(1, 12))
            self.pool_point.append(self.all_point[-1])
            self.pool_index.append(index)
        #print("number of processes that are currently running : {}".format(self.NOF.value))

    def improved_path1(self, allpoint):
        """Shorten the route by shortcutting around its midpoint repeatedly."""
        print("1")
        t = time.time()
        templen = 1000000
        # Repeat while each pass still removes more than 10 points.
        while (templen - len(allpoint) > 10):
            a = len(allpoint)
            mid = int(len(allpoint) / 2)
            Peza = 5  # half-width of the window around the midpoint
            index_a = mid + Peza
            index_b = mid - Peza
            beast = 0  # widest window found to be collision-free
            templen = len(allpoint)
            while (index_a <= len(allpoint) - 1 and index_b >= 0):
                good_index, temp_points = self.goliniar(
                    allpoint[index_a].reshape(1, 12),
                    allpoint[index_b].reshape(1, 12))
                if (good_index + 1 == len(temp_points)):
                    beast = Peza
                Peza += 5
                index_a = mid + Peza
                index_b = mid - Peza
            if (not beast == 0):
                # Splice: start segment + straight bridge + end segment.
                tempend = allpoint[(mid + beast - 1):, :]
                tempstart = allpoint[:(mid - beast + 1), :]
                if (not len(tempend)):
                    tempend = allpoint[-1, :].reshape((1, 12))
                if (not len(tempstart)):
                    # NOTE(review): assigns tempend, not tempstart — looks
                    # like a copy/paste slip; confirm intent.
                    tempend = allpoint[0, :].reshape((1, 12))
                tempmid = self.midpoint(tempstart[-1, :], tempend[0, :])
                temp = np.append(tempstart, tempmid[1:-1], axis=0)
                allpoint = np.append(temp, tempend, axis=0)
            print("old : {} \t new : {}".format(a, len(allpoint)))
        print(-t + time.time())
        return allpoint

    def improved_path2(self, allpoint):
        """Shorten the route by shortcutting around the 1/4, 2/4, 3/4 marks."""
        print("2")
        t = time.time()
        templen = 1000000
        for i in range(1, 4):
            mid = int(len(allpoint) / 4 * i)
            Peza = 5
            index_a = mid + Peza
            index_b = mid - Peza
            beast = 0
            templen = len(allpoint)
            # Widen the window until the straight shortcut collides.
            while (index_a <= len(allpoint) - 1 and index_b >= 0):
                good_index, temp_points = self.goliniar(
                    allpoint[index_a].reshape(1, 12),
                    allpoint[index_b].reshape(1, 12))
                if (good_index + 1 == len(temp_points)):
                    beast = Peza
                else:
                    break
                Peza += 5
                index_a = mid + Peza
                index_b = mid - Peza
            if (not beast == 0):
                tempend = allpoint[(mid + beast - 1):, :]
                tempstart = allpoint[:(mid - beast + 1), :]
                if (not len(tempend)):
                    tempend = allpoint[-1, :].reshape((1, 12))
                if (not len(tempstart)):
                    # NOTE(review): assigns tempend, not tempstart — confirm.
                    tempend = allpoint[0, :].reshape((1, 12))
                tempmid = self.midpoint(tempstart[-1, :], tempend[0, :])
                temp = np.append(tempstart, tempmid[1:-1], axis=0)
                allpoint = np.append(temp, tempend, axis=0)
# =============================================================================
#         i=len(allpoint)-5
#         while(i<0):
#             good_index, temp_points =self.goliniar(allpoint[i].reshape(1,12),allpoint[-1].reshape(1,12))
#             if (good_index+1==len(temp_points)):
#                 beast = i
#             else:
#                 break
#             i -= 5
#         if (not beast == 0):
#             tempmid = self.midpoint( allpoint[beast-1],allpoint[-1])
#             allpoint = np.append(allpoint[(beast-1):],tempmid, axis=0)
# =============================================================================
        print(-t + time.time())
        return allpoint

    def improved_path3(self, allpoint):
        """Shortcut from the start, from the end, then around the midpoint."""
        print("4")
        t = time.time()
        templen = 1000000
        a = len(allpoint)
        i = 5
        a = len(allpoint)
        # Longest collision-free straight prefix from the start.
        while (i < len(allpoint)):
            good_index, temp_points = self.goliniar(allpoint[0].reshape(1, 12),
                                                    allpoint[i].reshape(1, 12))
            if (good_index + 1 == len(temp_points)):
                beast = i
            else:
                break
            i += 5
        if (not beast == 0):
            tempmid = self.midpoint(allpoint[0], allpoint[beast - 1])
            allpoint = np.append(tempmid, allpoint[(beast - 1):], axis=0)
        i = len(allpoint) - 1
        print("old : {} \t new : {}".format(a, len(allpoint)))
        a = len(allpoint)
        # Longest collision-free straight suffix into the end.
        while (i > 0):
            good_index, temp_points = self.goliniar(
                allpoint[i].reshape(1, 12), allpoint[-1].reshape(1, 12))
            if (good_index + 1 == len(temp_points)):
                beast = i
            else:
                break
            i -= 5
        if (not beast == len(allpoint) - 1):
            tempmid = self.midpoint(allpoint[beast - 1], allpoint[-1])
            allpoint = np.append(allpoint[:(beast - 1)], tempmid, axis=0)
        print("old : {} \t new : {}".format(a, len(allpoint)))
        a = len(allpoint)
        # One midpoint-window shortcut pass (same scheme as improved_path1).
        mid = int(len(allpoint) / 2)
        Peza = 5
        index_a = mid + Peza
        index_b = mid - Peza
        beast = 0
        templen = len(allpoint)
        while (index_a <= len(allpoint) - 1 and index_b >= 0):
            good_index, temp_points = self.goliniar(
                allpoint[index_a].reshape(1, 12),
                allpoint[index_b].reshape(1, 12))
            if (good_index + 1 == len(temp_points)):
                beast = Peza
            Peza += 5
            index_a = mid + Peza
            index_b = mid - Peza
        if (not beast == 0):
            tempend = allpoint[(mid + beast - 1):, :]
            tempstart = allpoint[:(mid - beast + 1), :]
            if (not len(tempend)):
                tempend = allpoint[-1, :].reshape((1, 12))
            if (not len(tempstart)):
                # NOTE(review): assigns tempend, not tempstart — confirm.
                tempend = allpoint[0, :].reshape((1, 12))
            tempmid = self.midpoint(tempstart[-1, :], tempend[0, :])
            temp = np.append(tempstart, tempmid[1:-1], axis=0)
            allpoint = np.append(temp, tempend, axis=0)
        print("old : {} \t new : {}".format(a, len(allpoint)))
        print(-t + time.time())
        return allpoint

    def get_winindex(self):
        """Pick the winning node: minimal depth*step + distance-to-goal."""
        print(self.winindex)
        print("Extracting the route")
        dis = self.tree.depth(self.winindex[0]) * self.step + np.linalg.norm(
            self.tree[self.winindex[0]].data - self.desir)
        min_val = 99999
        for i in self.winindex:
            dis = self.tree.depth(i) * self.step + np.linalg.norm(
                self.tree[i].data - self.desir)
            print(i, "\t", self.tree.depth(i), "\t",
                  np.linalg.norm(self.tree[i].data - self.desir), '\t', dis)
            if (dis < min_val):
                min_val = dis
                win = i
        print(win)
        self.winindex = Manager().list()  # reset for the next search
        return win

    # The function that activates everything
    def let_the_magic_begin(self, ros_fun):
        """Run the RRT search, extract the route, and replay it via ros_fun."""
        # self.open_prosses(self.current[0],self.desir[0],self.NOP - 1)
        self.pool_point.append(self.current[0])
        self.pool_index.append(0)
        self.open_prosses()
        NOL = 0
        # Sample until some worker connects a pooled point to the goal.
        while (not self.statos.value):
            if (not NOL % 10):
                # Gradually widen the sampling range, capped at 360.
                self.size += 10
                self.size = min(self.size, 360)
            newpoint = (np.random.rand(1, 12) * self.kin.limit * self.size -
                        self.kin.offset)
            self.add_point(newpoint)
            if (len(self.badindex)):
                # Re-insert near-collision points reported by the workers.
                for i in range(len(self.badindex)):
                    self.add_to_tree(self.badindex[0],
                                     self.badpoint[0].reshape(1, 12))
                    del self.badindex[0]
                    del self.badpoint[0]
            NOL += 1
        print("Waiting for all processes to die")
        for job in self.jobs:
            job.join()
        best_point_index = self.get_winindex()
        allrote = self.get_direction1(self.tree, best_point_index)
        print(time.time() - self.t)
        input("preace any key to improved path\n")
        if (best_point_index == 0):
            print("no need to improve path")
            allrote1 = allrote
        else:
            # Validate every route sample against the obstacle model.
            for i in allrote:
                print(
                    self.kin.configure_check1(i.reshape(1, 12),
                                              self.obstacle_gpu))
                if not (self.kin.configure_check1(i.reshape(1, 12),
                                                  self.obstacle_gpu)):
                    print("faild")
# =============================================================================
#
#             allrote2=self.improved_path1(allrote)
#             for i in allrote2:
#                 if not (self.kin.configure_check1(i.reshape(1,12),self.obstacle_gpu)):
#                     print("faild")
#
#             allrote3=self.improved_path2(allrote)
#             for i in allrote3:
#                 if not (self.kin.configure_check1(i.reshape(1,12),self.obstacle_gpu)):
#                     print("faild")
#
#             allrote4=self.improved_path3(allrote)
#             for i in allrote4:
#                 if not (self.kin.configure_check1(i.reshape(1,12),self.obstacle_gpu)):
#                     print(0)
#                     print("faild")
# =============================================================================
# =============================================================================
#
#
#         allrote2=self.improved_path(allrote)
#         print("old -> : {}".format(len(allrote)))
#         print("1 -> : {} imp -> {}".format(len(allrote2),len(allrote)-len(allrote2)))
#         print("2 -> : {} imp -> {}".format(len(allrote3),len(allrote)-len(allrote3)))
#         print("3 -> : {} imp -> {}".format(len(allrote4),len(allrote)-len(allrote4)))
# =============================================================================
        input("preace any key to run the simulation\n")
        ros_fun.send_to_arm(allrote * np.pi / 180)
        input("preace any key to run the simulation\n")
        ros_fun.send_to_arm(np.flip(allrote, axis=0) * np.pi / 180)

    def run_serce(self, ros_fun, isinvers, invers_pos):
        """Entry point: upload obstacles to GPU, resolve goal, run the search."""
        self.obstacle_gpu = th.from_numpy(self.obstacle).float().to('cuda')
        if (isinvers):
            # Convert a Cartesian goal to a joint configuration first.
            self.desir = self.kin.invers_arms_cfg(invers_pos)
        self.t = time.time()
        if self.kin.configure_check1(self.desir, self.obstacle_gpu):
            self.let_the_magic_begin(ros_fun)
        else:
            print("can't go to disre location")
# Build a treelib Tree of DOM nodes from JSON records and dump it to disk.
from treelib import Node, Tree
import json


def build_dom_tree(data):
    """Build a treelib Tree of DOM nodes from parsed JSON records.

    `data` maps arbitrary keys to lists of records; each record carries an
    'id', a 'tag' string and a list of 'parent' ids.  The tree root has
    identifier '0' and data '<html></html>'.

    NOTE(review): a record with more than one parent would create the same
    identifier twice, which treelib rejects (DuplicatedNodeIdError) —
    confirm 'parent' lists always hold exactly one id.

    :param data: dict of lists of node records, as loaded from jsondata.txt
    :return: the populated treelib Tree
    """
    dom = Tree()
    dom.create_node(identifier='0', data='<html></html>')  # synthetic root
    for records in data.values():
        for record in records:
            node_id = str(record['id'])
            tag = str(record['tag'])
            for parent_id in record['parent']:
                dom.create_node(identifier=node_id, parent=str(parent_id),
                                data=tag)
    return dom


with open('jsondata.txt') as json_file:
    data = json.load(json_file)

tree = build_dom_tree(data)
tree.show()

x = tree.to_json()
print(x)
tree.save2file('tree.txt', data_property=True)
x0=ig2)['x'][0] ig2 = 1 / ig2 a = ig1 + n0 * 0.5 - 1 from scipy import stats gg = gamma.cdf(lamda, ig1, scale=1 / ig2) g = stats.invgamma.cdf(var, ig1, scale=ig2) mumu = (y.min() + y.max()) * 0.5 / m sigma_mu = (y.max() - m * mumu) / (k * sqrt(m)) var_mu = sigma_mu**2 tau = 1 / var_mu taumu = tau * mumu DataTypes = df0.dtypes.map(lambda x: x.kind) #%% tree = Tree() root = tree.create_node('0', 0, data=df0) root.xvar = get_xvar(df0) root.var = None tree.w2 = [] tree.leaf = [0] ProbDefault = array([2.5, 2.5, 4]).cumsum() / 9 ## tree = trueTree T = 1250 burn = 250 trees = [deepcopy(tree) for i in range(m)] MM = pd.DataFrame(index=df0.index, columns=range(m), data=mumu) Yhat = zeros((n0, T)) Depth_mu = zeros(T) tdic = [None for i in range(m * T)] #tdic = [tdic.copy() for i in range(T)]