def expand_tree(self, tree, x, y, depth):
    """Recursively expand every child branch of ``tree`` in place.

    Args:
        tree: parent Node whose ``children`` map branch keys to index arrays
            into ``x``/``y``.
        x (ndarray): feature matrix for the full training set.
        y (ndarray): class labels aligned with ``x``.
        depth (int): depth assigned to the children created here.
    """
    # Instances that reached the parent node; used as the fallback population
    # when a split leaves a branch empty.
    considered_insts = tree.get_instances()
    for key, val in tree.children.items():
        if len(set(y[val])) == 1:
            # If there is only one class in this subset, set the child terminal
            # value to this class (y[val[0]] — any element would do, all equal).
            tree.children[key] = Node(terminal_value=y[val[0]], depth=depth)
        elif len(val) == 0:
            # If the split left this branch empty, set the terminal value to
            # the majority class of the parent subset
            labels, counts = np.unique(y[considered_insts], return_counts=True)
            terminal_value = labels[np.argmax(counts)]
            tree.children[key] = Node(terminal_value=terminal_value, depth=depth)
        elif self.min_samples_leaf > len(val) or depth == self.max_depth:
            # If the number of samples at this leaf is lower than the minimum
            # or maximum depth has been reached, set the terminal value to the
            # majority class in the branch subset
            labels, counts = np.unique(y[val], return_counts=True)
            terminal_value = labels[np.argmax(counts)]
            tree.children[key] = Node(terminal_value=terminal_value, depth=depth)
        else:
            # Otherwise, find the best partition for this leaf and expand the subtree
            tree.children[key] = self.find_best_partition(
                x[val, :], y[val], tree.avail_attrs, depth)
            self.expand_tree(tree.children[key], x[val], y[val], depth + 1)
    return
def _process_break(self, action):
    """Handle a break action: push one node per action label, then advance
    the buffer to the next token (if any remain)."""
    # Two fresh nodes, one for each label the action carries.
    self.stack.extend((Node(action.label), Node(action.label2)))
    # Consume the current buffer slot.
    self.buffer_indices.pop(0)
    if self.buffer_indices:
        self.current_token_index = self.buffer_indices[0]
def _make_named_entity(self, concept, literals):
    """Build a named-entity subtree: a concept node with a :wiki child
    (literals joined by underscores) and a :name child holding op1..opN."""
    joined = "_".join(literals)
    entity = Node(concept)
    entity.add_child(Node(joined, tag=joined), "wiki")
    name = Node("name")
    for position, literal in enumerate(literals, start=1):
        name.add_child(Node(literal, tag=literal), "op{}".format(position))
    entity.add_child(name, "name")
    return entity
def test_amr_print_with_polarity():
    """amr_print should render a :polarity '-' child inline and number
    nested nodes d1, d1_1, ... by path (whitespace-insensitive compare)."""
    b: Node = Node('bad')
    neg: Node = Node('-')
    i: Node = Node('imitate-01')
    c: Node = Node('country')
    d: Node = Node('develop-02')
    m: Node = Node('most')
    w: Node = Node('world')
    r: Node = Node('real-04')
    # Wire the tree: bad is the root with polarity, ARG1, ARG1-of children.
    b.add_child(neg, 'polarity')
    b.add_child(i, 'ARG1')
    b.add_child(r, 'ARG1-of')
    i.add_child(c, 'ARG1')
    c.add_child(d, 'ARG1-of')
    c.add_child(w, 'location')
    d.add_child(m, 'degree')
    generated_amr_str = b.amr_print()
    # Strip all whitespace so the comparison ignores layout differences.
    generated_amr_no_spaces = ''.join(generated_amr_str.split())
    expected_amr_str = """( d1 / bad :polarity - :ARG1 ( d1_1 / imitate-01 :ARG1 ( d1_1_1 / country :ARG1-of ( d1_1_1_1 / develop-02 :degree ( d1_1_1_1_1 / most ) ) :location ( d1_1_1_2 / world ) ) ) :ARG1-of ( d1_2 / real-04 ) )"""
    expected_amr_no_spaces = ''.join(expected_amr_str.split())
    assert generated_amr_no_spaces == expected_amr_no_spaces, \
        'expected \n' + expected_amr_str + '\ngot\n' + generated_amr_str + '\n'
def test_amr_print_with_reentrancy():
    """amr_print_with_reentrancy should emit a variable reference (bare 'w')
    for repeated children instead of re-expanding them; equivalence is
    checked via smatch rather than string comparison."""
    r: Node = Node('receive-01')
    w: Node = Node('we')
    t: Node = Node('thing')
    r2: Node = Node('remind-01')
    p: Node = Node('pay-01')
    h: Node = Node('hospital')
    n: Node = Node('now')
    a: Node = Node('already')
    # 'w' is attached in three places — this is the re-entrancy under test.
    r.add_child(w, 'ARG0')
    r.add_child(t, 'ARG1')
    r.add_child(h, 'ARG2')
    r.add_child(n, 'time')
    r.add_child(a, 'time')
    t.add_child(r2, 'ARG0-of')
    r2.add_child(p, 'ARG1')
    r2.add_child(w, 'ARG2')
    p.add_child(w, 'ARG0')
    generated_amr_str = r.amr_print_with_reentrancy()
    expected_amr_str = """(r / receive-01~e.4 :ARG0 (w / we~e.0) :ARG1 (t / thing~e.7 :ARG0-of~e.7 (r2 / remind-01~e.7 :ARG1 (p / pay-01~e.6 :ARG0 w) :ARG2 w)) :ARG2~e.8 (h / hospital~e.10) :time (n / now~e.2) :time (a / already~e.3))"""
    # smatch == 1 means the two AMR graphs are structurally identical.
    smatch = calculate_smatch(generated_amr_str, expected_amr_str)
    assert smatch == 1
def setUp(self):
    """Build the fixture tree: node 1 at the root, 2/3 below it, 4/5 under 2,
    6/7 under 3, and 8/9 under 4."""
    self.lowestCommonAncestor = LowestCommonAncestor()
    self.one = Node(1)
    self.two, self.three = Node(2, self.one), Node(3, self.one)
    self.four, self.five = Node(4, self.two), Node(5, self.two)
    self.six, self.seven = Node(6, self.three), Node(7, self.three)
    self.eight, self.nine = Node(8, self.four), Node(9, self.four)
def create_node(self, node_id="anode"):
    """Create, persist, and return a fresh idle Node with the given id."""
    fresh = Node(id=node_id)
    fresh.lastContact = datetime.now()
    fresh.status = Node.Status.IDLE
    # Commit immediately so the node is visible to subsequent queries.
    db.session.add(fresh)
    db.session.commit()
    return fresh
def add_new_car(self, name, lane, intention):
    """Register a new car in the follow graph and return the cars it must follow.

    Performs a BFS from the current leaf cars: a car whose path crosses the
    new car's (lane, intention) is followed directly (and its own followers
    are covered transitively); otherwise its follow list is explored instead.
    """
    from utils.utils import cars_cross_path
    graph = self.get_graph()
    leaf_cars = self.get_leaf_cars()
    follow_list = list()
    visited = set()
    # list of nodes — BFS frontier, seeded with the current leaves
    to_visit = [graph[car] for car in leaf_cars]  # starts checking the leaves
    while to_visit:
        node = to_visit.pop(0)
        if node.get_name() not in visited:
            visited.add(node.get_name())
            if cars_cross_path(lane, intention, node.get_lane(), node.get_intention()):
                follow_list.append(node.get_name())
                # Cars this node already follows are covered transitively,
                # so mark them visited without following them directly.
                for car in node.get_follow_list():
                    visited.add(car)
            else:
                # If it doesn't collide then add the follow list of the node
                # to to_visit list
                to_visit = to_visit + [graph[car] for car in graph[node.get_name()].get_follow_list()]
    # Followed cars stop being leaves; the new car becomes a leaf.
    for car in follow_list:
        if car in self.get_leaf_cars():
            leaf_cars.remove(car)
    leaf_cars.add(name)
    # Guard: never let the car follow itself.
    if name == self.get_name() and name in follow_list:
        follow_list.remove(name)
    graph[name] = Node(name=name, follow_list=follow_list, lane=lane, intention=intention)
    return follow_list
def __init__(self, mode, values):
    """Build a BST from ``values`` when mode is GenMode.BST.

    Args:
        mode: generation mode; only GenMode.BST triggers tree construction.
        values: iterable of node values, or None.

    The original left ``self.root_node`` unset for non-BST modes (or
    values=None) and raised IndexError on an empty values list; both are
    now handled by defaulting the root to None.
    """
    self.root_node = None
    if mode == GenMode.BST and values is not None:
        leaves = [Node(val=value) for value in values]
        if not leaves:
            return
        root = leaves[0]
        self.root_node = root
        # Insert the remaining values one at a time under the fixed root.
        for leaf in leaves[1:]:
            self.build_bst(root, leaf)
def _update_nodes(self):
    """Rebuild the node graph from the current devices: locate the hub,
    compute edges, hop counts, and relation types, then publish the result.
    Errors are logged (with the failing line number) rather than raised."""
    try:
        nodes = [Node(device) for device in self._devices]
        hub = self._get_hub(nodes)
        # Without a hub (or its neighbor table) the topology can't be built.
        if hub is None or hub.neighbors is None:
            _LOGGER.error("No hub found")
            return
        for node in nodes:
            self._update_edges(node, nodes)
        # Propagate hop counts outward until a pass makes no progress.
        for hop in range(len(nodes)):
            if not self._update_hop(hop, nodes):
                break
        for node in nodes:
            self._update_relation_type(node, nodes)
        self._nodes = nodes
    except Exception as ex:
        line = sys.exc_info()[2].tb_lineno
        _LOGGER.error(
            f"Failed to update nodes due to error: {ex} [LN: {line}]")
def find_best_partition(self, x, y, avail_attrs, depth):
    """Pick the best of F randomly chosen attributes and build a split node.

    Args:
        x (ndarray): feature matrix for this subset.
        y (ndarray): class labels for this subset.
        avail_attrs (list): attribute indices still available for splitting.
        depth (int): depth of the node being created.

    Returns:
        Node: split node with one child per value of the chosen attribute.
    """
    # Randomly select F features from the available ones
    np.random.shuffle(avail_attrs)
    feat_ixs = avail_attrs[:self.n_features]
    # Seed with the first candidate so best_feat_ix is always bound.
    # The original used a -100 floor, which raised UnboundLocalError
    # whenever every score was <= -100.
    best_feat_ix = feat_ixs[0]
    best_score = -np.inf
    for feat_ix in feat_ixs:
        score = self.compute_score(x[:, feat_ix], y)
        if score > best_score:
            best_feat_ix = feat_ix
            best_score = score
    # Annotate this feature as selected in the tree creation
    # (to measure the feature importance in the forest).
    self.feat_selected[best_feat_ix] = 1
    # Remove the attribute from the list of available attributes
    avail_attrs = [attr for attr in avail_attrs if attr != best_feat_ix]
    # Create the Node and add a child per value of the selected attribute
    out_node = Node(attribute=best_feat_ix, avail_attrs=avail_attrs,
                    depth=depth, children={})
    for val in self.attr_vals[best_feat_ix]:
        out_node.add_child(val, np.argwhere(x[:, best_feat_ix] == val)[:, 0])
    return out_node
def getSingleResult(self, results):
    """Build a Node from the result set, or return None when it is empty.

    The original fell through with ``node`` unbound (NameError) when
    ``results`` was empty; this makes the empty case return None explicitly.
    """
    node = None
    for result in results:
        node = Node(result['_id'], result['name'], result['timestamp'],
                    result['desc'], result['logEntryRef'], result['logCreator'],
                    "", result['icon'], result['source'], result['visible'])
    return node
def fit_tree(self, X, y, depth=0): """Fit a decision tree with recursive splitting on nodes. Args: X (ndarray): training set without class labels. y (ndarray) : class labels for training set. depth (int) : starting depth of decision tree. Returns: tree (Node): root node of learned decision tree. """ # Get number of training observations in current node with each class label 0 and 1. class_distribution = [np.sum(y == i) for i in range(self.n_classes)] # Instantiate node to grow the decision tree. tree = Node(n=y.size, class_distribution=class_distribution, gini_index=_gini(y, self.n_classes)) # Perform recursive splitting to max depth. if depth < self.max_depth: gini_index, split_index = self.get_split(X, y) # Get indices for data and class labels to go to the left child, send the rest to the right child. if split_index is not None: index_left = (X[:, split_index] == 1) X_left, y_left = X[index_left], y[index_left] X_right, y_right = X[~index_left], y[~index_left] tree.gini_index = gini_index tree.feature_index = split_index depth += 1 tree.left = self.fit_tree(X_left, y_left, depth=depth) tree.right = self.fit_tree(X_right, y_right, depth=depth) return tree
def addCollection(self, values, parent):
    """Serialize ``values`` under ``parent`` as an RDF collection — a
    first/rest linked list of cells terminated by nil. Nested lists become
    nested collections rooted at a fresh blank node."""
    for i in range(0, len(values)):
        # 'first' points at the item itself, or at a blank node for a
        # nested list which is then serialized recursively.
        pred = Node(EOGraph.IRI, EOGraph.firstType)
        if(type(values[i]) == type([])):
            obj = Node(EOGraph.blankNode)
            self.addTriple(parent, pred, obj)
            self.addCollection(values[i], obj)
        else:
            obj = Literal(values[i])
            self.addTriple(parent, pred, obj)
        # 'rest' points at the next list cell, or at nil for the last item.
        pred = Node(EOGraph.IRI, EOGraph.restType)
        if(i == len(values) - 1):
            obj = Node(EOGraph.IRI, EOGraph.nilType)
        else:
            obj = Node(EOGraph.blankNode)
        self.addTriple(parent, pred, obj)
        # Advance: the new cell becomes the subject of the next iteration.
        parent = obj
def add():
    """GET renders the add-node form; POST creates the node and redirects
    to its detail page with a success flash."""
    if request.method == "GET":
        return render_template("node/add.html")
    elif request.method == "POST":
        # Renamed local to avoid shadowing the builtin id().
        node_id = request.form["id"]
        node = Node(node_id)
        db.session.add(node)
        db.session.commit()
        flash("Node '%s' added successfully" % node.id)
        return redirect(url_for(".show", id=node.id))
def test_amr_print_simple():
    """amr_print on a small three-level tree should emit path-numbered
    variables (d1, d1_1, ...); compared with all whitespace stripped."""
    r: Node = Node('recommend-01')
    a: Node = Node('advocate-01')
    i: Node = Node('it')
    v: Node = Node('vigorous')
    r.add_child(a, 'ARG1')
    a.add_child(i, 'ARG1')
    a.add_child(v, 'manner')
    generated_amr_str = r.amr_print()
    # Whitespace-insensitive comparison: layout is not under test.
    generated_amr_no_spaces = ''.join(generated_amr_str.split())
    expected_amr_str = """( d1 / recommend-01 :ARG1 ( d1_1 / advocate-01 :ARG1 ( d1_1_1 / it ) :manner ( d1_1_2 / vigorous ) ) )"""
    expected_amr_no_spaces = ''.join(expected_amr_str.split())
    assert generated_amr_no_spaces == expected_amr_no_spaces, \
        'expected \n' + expected_amr_str + '\ngot\n' + generated_amr_str + '\n'
def m_node(m_driver, p_driver_get_usage):
    """Fixture: a Node backed by the mocked driver, with get_usage patched
    to report 30.0 total and per-volume usage [1.0, 1.0, 1.0]."""
    p_driver_get_usage.return_value = 30.0, [1.0, 1.0, 1.0]
    node_conf = {
        'volume_path': '/volumes',
        'conf_path': '/etc/cobalt.conf',
        'max_fill': 0.8,
        'conf': {
            'name': 'test-node',
            'labels': ['ssd'],
        },
    }
    return Node(node_conf, m_driver)
def node(driver):
    """Fixture: a Node using /mnt as its volume path, wired to ``driver``."""
    return Node(
        {
            'conf_path': '/etc/cobalt.conf',
            'volume_path': '/mnt',
            'max_fill': 0.8,
            'conf': {
                'name': 'test-node',
                'labels': ['ssd'],
            },
        },
        driver,
    )
def addEoTriples(self, graphDefinition, values, types, parent=None):
    """Recursively emit triples for ``graphDefinition``, reading subject
    values from ``values`` and type annotations from ``types``.

    Dict-valued keys become nested objects (IRI node when the definition
    declares an "id", blank node otherwise); list values become RDF
    collections; scalars become literal objects.
    """
    for key in graphDefinition:
        pred = Node(EOGraph.IRI, EOGraph.schema.format(key))
        if(type(graphDefinition[key]) == type({})):
            # Nested object: choose IRI vs blank node depending on whether
            # the definition carries an "id" field.
            nestedGraphObject = graphDefinition[key]
            node = None
            if("id" in nestedGraphObject):
                # Missing id values fall back to an empty IRI string.
                idValue = values[key] if key in values.keys() else ""
                node = Node(EOGraph.IRI, idValue)
            else:
                node = Node(EOGraph.blankNode)
            if(key in types.keys()):
                # Attach an explicit type triple for annotated keys.
                typeValue = types[key]
                self.addTriple(node, Node(EOGraph.IRI, EOGraph.fnType),
                               Node(EOGraph.IRI, EOGraph.schema.format(typeValue)))
            if(parent != None):
                self.addTriple(parent, pred, node)
            self.addEoTriples(nestedGraphObject, values, types, node)
        else:
            subj = parent
            # Skip the id pseudo-field and keys with no (truthy) value.
            if(key == "id" or key not in values or not values[key]):
                continue
            if(type(values[key]) == type([])):
                # List values serialize as an RDF collection under a blank node.
                obj = Node(EOGraph.blankNode)
                self.addTriple(parent, pred, obj)
                self.addCollection(values[key], obj)
            else:
                obj = Literal(values[key])
                self.addTriple(subj, pred, obj)
def add_starting_nodes(self):
    """Seed the solver stack with candidate assignments for the first word.

    LeastPossibilites picks the slot with the fewest dictionary candidates;
    MostIntersects picks the slot (whose value starts with '1') crossing the
    most other words. Either way, one node is pushed per candidate word.
    """
    if self.starting_solver == StartingSolver.LeastPossibilites:
        fewest = 1000000  # sentinel above any realistic dictionary bucket size
        initial_node = Node(self.gb.words_list)
        # Find the word with minimum possibilities in dictionary.
        for word in initial_node.words_list:
            count = len(self.collection.dic_len_list[word.length])
            if count < fewest:
                fewest = count
                initial_node.active_word = word
        # Bug fix: the original passed the value as a second print() argument
        # next to a '{}' placeholder, printing the braces literally.
        print("initial node active word length: {}".format(
            initial_node.active_word.length))
        self._push_candidates(initial_node)
    elif self.starting_solver == StartingSolver.MostIntersects:
        new_word = None
        most = 0  # renamed from 'max' to avoid shadowing the builtin
        initial_node = Node(self.gb.words_list)
        for word in initial_node.words_list:
            if word.value[0] == '1' and len(word.crossword_at_with) > most:
                most = len(word.crossword_at_with)
                new_word = word
                print('max=', word.length)
        initial_node.active_word = new_word
        self._push_candidates(initial_node)

def _push_candidates(self, initial_node):
    """Push one successor node per dictionary word whose length matches the
    active word's slot (shared tail of both starting strategies)."""
    possible_words = self.collection.dic_len_list[
        initial_node.active_word.length]
    for w in possible_words:
        newnode = self.new_node_with_new_word(initial_node,
                                              initial_node.active_word, w)
        self.stack_push(newnode)
def __init__(self, machine_manager, volume_manager, context):
    """Wire the agent: a BTRFS driver and Node built from context['node'],
    the two managers, and timing/error knobs read from ``context``."""
    node_context = context['node']
    self._driver = BTRFSDriver(node_context['volume_path'])
    self._node = Node(node_context, self._driver)

    self._machine_manager = machine_manager
    self._volume_manager = volume_manager

    # Runtime state, populated once the loops start.
    self._from_etcd = None
    self._work = []
    self._volume_loop = None
    self._machine_loop = None
    self._started = False

    # Tunables from the surrounding context.
    self._max_error_count = context['max_error_count']
    self._error_timeout = context['max_error_timeout']
    self._delay = context['agent_ttl']
    self._watch_timeout = context['watch_timeout']
def _process_shift(self, action):
    """Handle a shift action: build a node for the current token (plain
    concept, named entity, or date entity), push it on the stack, and
    advance the buffer.

    Entity metadata queues are consumed front-first, so each shift over an
    entity pops its matching metadata entry.
    """
    current_token = self.tokens[self.current_token_index]
    if self.parser_parameters.with_reattach and self._is_named_entity():
        # Named entity: concept label comes from gold data or from the
        # token itself, literals from the queued metadata.
        if self.parser_parameters.with_gold_concept_labels:
            node = self._make_named_entity(action.label, self.named_entity_metadata[0][1])
        else:
            node = self._make_named_entity(self.index_word_map[current_token], self.named_entity_metadata[0][1])
        self.named_entity_metadata.pop(0)
    elif self.parser_parameters.with_reattach and self._is_date_entity():
        # Date entity: relations and quantities both come from the metadata.
        node = self._make_date_entity(self.date_entity_metadata[0][1], self.date_entity_metadata[0][2])
        self.date_entity_metadata.pop(0)
    else:
        # Plain concept node.
        if self.parser_parameters.with_gold_concept_labels:
            node = Node(action.label)
        else:
            node = Node(self.index_word_map[current_token])
    self.stack.append(node)
    # Consume the buffer slot and move to the next token, if any.
    self.buffer_indices.pop(0)
    if len(self.buffer_indices) != 0:
        self.current_token_index = self.buffer_indices[0]
def main():
    """
    Main process for blockchain node.
    """
    logging.basicConfig(level=logging.INFO)
    logging.info('\nStarting node server...')

    # Unpack network file
    with open("./photoblocks/network.json") as f:
        network = json.load(f)

    # Test local db connection — block until Redis answers a PING.
    db = redis.Redis(host='redis', port=6379)
    while True:
        try:
            if db.execute_command('PING'):
                logging.info(f'\nConnected to the local database server.')
                break
            else:
                continue
        except Exception as e:
            # Keep retrying on any connection error; this loop spins until
            # Redis is reachable.
            logging.error(f'\n{e}')
            continue

    # Construct Node object
    logging.info('\nCreating Node data structure from configuration...')
    node = Node(network["local"])
    logging.info(f'\nNode data structure created.')

    # Start a node socket client as a daemon thread.
    logging.info('\nStarting node socket client on background thread...')
    thread = threading.Thread(target=ClientSock, args=(network, node))
    logging.info('\nThread process created.')
    thread.daemon = True
    thread.start()
    logging.info('\nThread process started.')
    # Give the client a head start before the server comes up.
    time.sleep(5)

    # Start node socket server as a daemon thread.
    # NOTE(review): serversock is started with no args while ClientSock
    # takes (network, node) — confirm the server reads shared state elsewhere.
    logging.info('\nStarting node socket server on background thread...')
    thread = threading.Thread(target=serversock, args=())
    logging.info('\nThread process created.')
    thread.daemon = True
    thread.start()
    logging.info('\nThread process started.')

    # Keep the main thread alive so the daemon threads keep running.
    while True:
        time.sleep(5)
def _parse_nodes(self):
    """Read the node worksheet and append one Node per data row; a missing
    color column falls back to a randomly generated color."""
    sheet = self.workbook.sheet_by_name(NODE_SHEET)
    for row_index, row in enumerate(sheet.get_rows()):
        if row_index == 0:
            # skip header row
            continue
        label, size = row[0].value, row[1].value
        try:
            color = row[2].value
        except IndexError:
            color = random_colors_generator()
        self.nodes.append(Node(label=label, size=size, color=color))
async def update_node(self, hostname: str, ip_address: str, acquire: bool = False) -> None: """Updates the given node or creates a new one if it does not yet exist. :param str hostname: Hostname of the node :param str ip_address: IP Address of the node :param bool acquire: True, if the node should get acquired """ # search by hostname since IPs can change node = self.config.node_repository.get_node_by_hostname(hostname) # create or update the node if node is None: node = Node(name=hostname, online=True, ip_address=ip_address, hostname=hostname) await self.config.node_repository.add_node(node) self.log(node, 'added to registry') elif node.ip_address != ip_address: node.ip_address = ip_address await self.config.node_repository.call_listeners() self.log(node, 'ip address has changed') # update node state if node.online is False: node.online = True self.log(node, 'is online') await self.config.node_repository.call_listeners() await self.config.room_repository.call_listeners() # acquire node if necessary if node.room is not None: if node.acquired is False: if node.ip_address != self.master_ip: self.master.send_acquisition(node.ip_address) node.acquired = True self.log(node, 'acquired') elif acquire and node.ip_address != self.master_ip: # re-acquire if announcement is received again (e.g. in case of a restart) self.master.send_acquisition(node.ip_address) self.log(node, 're-acquired')
def fit_tree(self, X, y, weights, depth=0): """Fit a decision tree with recursive splitting on nodes, takes additional weight argument for AdaBoost. Args: X (ndarray): training set without class labels. y (ndarray): class labels for training set. weights (ndarray): weights for each training instance. depth (int): starting depth of decision tree. Returns: tree (Node): root node of learned decision tree. """ # Get sum of weights from each class for the class distribution in current node. D = weights class_weights = [np.sum(D * (y == i)) for i in range(self.n_classes)] # Instantiate node to grow the decision tree. tree = Node(n=y.size, class_distribution=class_weights, gini_index=_gini(y, self.n_classes, weights=D)) # Perform recursive splitting to max depth. if depth < self.max_depth: gini_index, split_index = self.get_split(X, y, weights=D) # Get indices for data, class labels, and weights to go to the left child, send the rest to the right child. if split_index is not None: index_left = (X[:, split_index] == 1) X_left, y_left, D_left = X[index_left], y[index_left], D[ index_left] X_right, y_right, D_right = X[~index_left], y[~index_left], D[ ~index_left] tree.gini_index = gini_index tree.feature_index = split_index depth += 1 tree.left = self.fit_tree(X_left, y_left, weights=D_left, depth=depth) tree.right = self.fit_tree(X_right, y_right, weights=D_right, depth=depth) return tree
def get_nodes():
    """Poll every configured device for LLDP neighbor details and populate
    the module-level ``nodes`` and ``links`` maps.

    One Node is recorded per previously unseen neighbor (keyed by its
    short lowercase name) and one Link per device<->neighbor interface pair.

    Removed from the original: an unused ``threads`` list and a dead
    ``return_value = ""`` initializer.
    """
    username = config['AUTH']['username']
    password = config['AUTH']['password']
    for device in json.loads(config['TARGETS']['devices']):
        # NOTE(review): an executor created per device and immediately waited
        # on is effectively sequential; kept as-is to avoid introducing
        # concurrent writes to the shared nodes/links dicts.
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(run_cmd, device, "cisco_ios",
                                     username, password,
                                     "show lldp neighbors detail")
            return_value = future.result()
        # Parse the JSON reply into attribute-accessible objects.
        neighbors = json.loads(return_value,
                               object_hook=lambda d: SimpleNamespace(**d))
        for i in neighbors:
            # Short neighbor name: hostname portion, lowercased.
            nb = i.neighbor.split(".")[0].lower()
            if nb not in nodes:
                nodes[nb] = Node(i.neighbor, i.chassis_id, i.management_ip,
                                 i.capabilities)
            if i.local_interface != "" and i.neighbor_interface != "":
                links[device + "<->" + nb] = Link([
                    Interface(i.local_interface, "", "", "", "", device),
                    Interface(i.neighbor_interface, "", "", "", "", nb)
                ])
def wibednode(id):
    """Handle a check-in request from node ``id``: validate the payload,
    create or update the node record, and collect experiment/firmware/
    command/restore instructions into the JSON reply.

    Any failure rolls back the DB session and is reported to the node in
    the ``errors`` field rather than as an HTTP error.
    """
    logging.debug('Node request from node with id %s', id)
    output = {}
    logging.debug('NODE REQUEST: %s', request.get_json(force=True))
    try:
        input = json.loads(request.data)
        validateInput(input)
        # Attempt to get existing node from id
        node = Node.query.get(id)
        # If it doesn't exist, create a new one
        if not node:
            if Node.Status(input["status"]) == Node.Status.IDLE:
                # Unknown node claiming to be idle: tell it to reinitialize
                # instead of registering it.
                output["reinit"] = {}
                return jsonify(**output)
            else:
                node = Node(id)
                db.session.add(node)
        # Update node fields based on input
        updateNode(node, input)
        # Each handler may add instructions for the node into ``output``.
        handleExperimentData(node, input, output)
        handleFirmwareUpgrade(node, input, output)
        handleCommands(node, input, output)
        handleSendRestore(node, input, output)
    except Exception as e:
        # Boundary handler: report the error back to the node and undo any
        # partial DB changes.
        logging.debug('Exception joining node: %s', e)
        output["errors"] = [str(e)]
        db.session.rollback()
    logging.debug('SERVER REPLY: %s', output)
    return jsonify(**output)
def _make_date_entity(self, date_relations, quantities):
    """Build a date-entity node with one child per paired
    (relation, quantity); pairing stops at the shorter sequence."""
    root = Node("date-entity")
    for relation, amount in zip(date_relations, quantities):
        root.add_child(Node(amount, amount), relation)
    return root
def test_amr_print_with_literal():
    """amr_print should render literal-valued nodes (Node(None, 'Iran')) as
    quoted strings; structural equivalence is checked via smatch."""
    r: Node = Node('realize-01')
    neg: Node = Node('-')
    i: Node = Node('i')
    t2: Node = Node('threaten-01')
    c2: Node = Node('country')
    # Literal nodes: first arg None, second is the literal text.
    iran1: Node = Node(None, 'Iran')
    n: Node = Node('name')
    iran2: Node = Node(None, 'Iran')
    h: Node = Node('huge')
    s2: Node = Node('such')
    c: Node = Node('cause-01')
    a: Node = Node('amr-unknown')
    s: Node = Node('simple')
    r.add_child(neg, 'polarity')
    r.add_child(i, 'ARG0')
    r.add_child(t2, 'ARG1')
    r.add_child(s, 'manner')
    t2.add_child(c2, 'ARG0')
    t2.add_child(h, 'mod')
    t2.add_child(c, 'ARG1-of')
    c2.add_child(iran1, 'wiki')
    c2.add_child(n, 'name')
    n.add_child(iran2, 'op1')
    h.add_child(s2, 'degree')
    c.add_child(a, 'ARG0')
    generated_amr_str = r.amr_print()
    expected_amr_str = """(r / realize-01 :polarity~e.3 -~e.3 :ARG0 (i / i~e.0) :ARG1 (t2 / threaten-01~e.11 :ARG0 (c2 / country :wiki "Iran" :name (n / name :op1 "Iran"~e.6)) :mod (h / huge~e.10 :degree (s2 / such~e.8)) :ARG1-of (c / cause-01~e.5 :ARG0~e.5 (a / amr-unknown~e.5))) :manner (s / simple~e.1))"""
    # smatch == 1 means the generated and expected graphs match exactly.
    smatch = calculate_smatch(generated_amr_str, expected_amr_str)
    assert smatch == 1