def start_with_graph():
    global dag
    dag = DAG()
    dag.from_dict({'a': ['b', 'c'],
                   'b': ['d'],
                   'c': ['d'],
                   'd': []})

def do_end_of_sfc(self, line):
    """End of SFC."""
    self.exactly_n_args(line.split(), 0)
    if not self.haveAssignedDir or self.targetDir == '':
        print("Please use assign_dir first to indicate the work directory")
    else:
        # Reject anything that is not a valid DAG.
        if self.dag.validate()[0] is False:
            print("You have constructed an SFC with a non-DAG structure; rejecting it")
        else:
            topoOrder = self.dag.topological_sort()
            topoOrder_str = ",".join(topoOrder)
            # Write the SFC request out as a text file.
            cmd = "echo \"%s\" >> %s/sfc%d.txt" % (
                topoOrder_str, self.targetDir, self.sfcNumber)
            # Python 3: subprocess.getstatusoutput replaces the old
            # Python 2 commands module (requires `import subprocess`).
            status, output = subprocess.getstatusoutput(cmd)
            if status != 0:
                print("Cannot create SFC request")
                print(output)
                return
            self.dag = DAG()
            self.sfcNumber += 1

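# For the fixture graph used in other snippets here
# ({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}), one valid
# topological order is a, b, c, d, so the command above would append a
# line like the following to <targetDir>/sfc<N>.txt (the exact order
# depends on how the DAG implementation breaks ties):
#
#     a,b,c,d
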
def getPrograms(input, output):
    states = [i for i in range(len(output) + 1)]
    delta = getDelta(states, input, output)
    start = 0
    accept = len(output)
    dag = DAG(states, delta, start, accept)
    return dag

def generate_dag(optimal_indvidual, stage_name, num_nodes):
    # Create the node names for the graph, e.g. "s1_1", "s1_2", ...
    nodes = np.empty((0), dtype=np.str)
    for n in range(1, (num_nodes + 1)):
        nodes = np.append(nodes, ''.join([stage_name, "_", str(n)]))

    # Initialize the directed acyclic graph (DAG) and add nodes to it.
    dag = DAG()
    for n in nodes:
        dag.add_node(n)

    # Split the best individual found via the GA to identify vertex
    # connections, then connect them in the DAG.
    edges = np.split(optimal_indvidual, np.cumsum(range(num_nodes - 1)))[1:]
    v2 = 2
    for e in edges:
        v1 = 1
        for i in e:
            if i:
                dag.add_edge(''.join([stage_name, "_", str(v1)]),
                             ''.join([stage_name, "_", str(v2)]))
            v1 += 1
        v2 += 1

    # Delete nodes not connected to any other node from the DAG.
    for n in nodes:
        if len(dag.predecessors(n)) == 0 and len(dag.downstream(n)) == 0:
            dag.delete_node(n)
            nodes = np.delete(nodes, np.where(nodes == n)[0][0])

    return dag, nodes

def validate_dependency_debug(graph):
    assert graph, "Graph is empty, please check!!"
    # convert_to_std_list_graph(graph)
    # dag.from_dict(graph)
    is_valid = False
    dag = DAG()
    # noinspection PyBroadException
    try:
        for key, value in graph.items():
            dag.add_node(key)
        for key, value in graph.items():
            for v in value:
                if v != '':  # an empty string means "no dependency"
                    dag.add_edge(key, v)
    except KeyError as e:
        logger.log_error("KeyError while adding to dag, msg is {}".format(e))
    except DAGValidationError as e:
        logger.log_error(
            "DAGValidationError while adding to dag, msg is {}, please check the dependency relationship"
            .format(e))
    except Exception as e:
        logger.log_error("Exception while adding to dag, msg is {}".format(e))
    else:
        is_valid = True
    assert is_valid, "===> key(testcase): {}, value(dependent): {}".format(
        key, value)

def __init__(self, hps=None):
    """Init HyperparameterSpace."""
    self._hyperparameters = OrderedDict()
    self._condition_dict = OrderedDict()
    self._forbidden_list = []
    self._hp_count = 0
    self._dag = DAG()
    if hps is not None:
        self._hps2ds(hps)

def test_all_downstreams_pass_graph():
    dag2 = DAG()
    dag2.from_dict({'a': ['c'], 'b': ['d'], 'c': ['d'], 'd': []})
    assert dag.all_downstreams('a', dag2.graph) == ['c', 'd']
    assert dag.all_downstreams('b', dag2.graph) == ['d']
    assert dag.all_downstreams('d', dag2.graph) == []

def test_from_dict_at_instantiation():
    temp_dict = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}
    dag = DAG(temp_dict)
    assert dag.graph == {
        'a': set(['b', 'c']),
        'b': set('d'),
        'c': set('d'),
        'd': set()
    }

def get_bags(data):
    dag = DAG()
    for line in data:
        bag, contains = line.split(" bags contain ")
        for c in contains.split(","):
            m = RE_CONTAIN.match(c.strip())
            if m:
                dag.add_edge(bag, m.group(2), int(m.group(1)))
    return dag

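# RE_CONTAIN is not defined in the snippet above. A plausible definition,
# assuming input in the Advent of Code 2020 day 7 style
# ("light red bags contain 1 bright white bag, 2 muted yellow bags."),
# where group(1) is the count and group(2) is the inner bag colour:
import re

RE_CONTAIN = re.compile(r"(\d+) (\w+ \w+) bags?")
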
def test_prune_dag_should_remove_nodes_preceding_given_node(self):
    dag = DAG()
    dag.add_edges(sources=['b', 'c'], target='a')
    dag.add_edges(sources=['a'], target='d')
    dag.add_edges(sources=['d'], target='e')
    self.assertCountEqual(['c', 'b', 'a', 'd', 'e'], dag.to_string())
    dag.prune_upto('d')
    self.assertCountEqual(['d', 'e'], dag.to_string())

def test_find_not_reachable_with_some_reachable_reversed_order(self):
    dag = DAG()
    dag.add_edges(sources=['b', 'c'], target='a')
    dag.add_edges(sources=['d'], target='e')
    # The first argument is the shares sent by a node. They are sent in
    # the order d, e, b, c. We assume that if there is a path from c to
    # a, then there is a path d -> e -> b -> c, and therefore return an
    # empty list.
    self.assertCountEqual(
        dag.find_not_reachable(['d', 'e', 'b', 'c'], 'a'), [])

def test_add_edge(self):
    dag = DAG()
    dag.add_edges(sources=['b', 'c'], target='a')
    self.assertCountEqual(dag.heads(), ['a'])
    self.assertTrue(dag.has('a'))
    self.assertTrue(dag.has('b'))
    self.assertTrue(dag.has('c'))
    self.assertFalse(dag.has('d'))
    self.assertTrue(dag.has_path('b', 'a'))
    self.assertFalse(dag.has_path('a', 'b'))

def inital_graph():
    dag = DAG()
    # dag.from_dict({0: [2],
    #                1: [2, 3],
    #                2: [4],
    #                3: [4, 5],
    #                4: [],
    #                5: []})
    dag.from_dict({0: [],
                   1: [],
                   2: [],
                   3: [0, 2],
                   4: [0],
                   5: [0, 2, 3]})
    return dag

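# A minimal usage sketch, assuming the py-dag-style API used throughout
# these snippets (from_dict maps each node to its downstream nodes):
dag = inital_graph()
print(dag.ind_nodes())          # independent nodes: [1, 4, 5]
print(dag.topological_sort())   # one valid order, e.g. [1, 4, 5, 3, 2, 0]
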
def test_prune_should_remove_nodes_preceding_given_hash(self):
    dag = DAG()
    dag.add_edges(sources=['b', 'c'], target='a')
    dag.add_edges(sources=['a'], target='d')
    dag.add_edges(sources=['d'], target='e')
    env = simpy.Environment()
    node = Node(env=env, name='node 1')
    node.dag = dag
    node.blocks_received = ['c', 'b', 'a', 'd', 'e']
    self.assertCountEqual(['c', 'b', 'a', 'd', 'e'], dag.to_string())
    node._prune()
    self.assertCountEqual(['d', 'e'], node.dag.to_string())

def create_random_dag(node_names, node_user_map):
    """
    Randomly generates a DAG graph.

    Start off by creating a list that represents the hierarchy. Each
    element in the list is another list showing the nodes in that level.
    The number of nodes in each level is random. Then use this hierarchy
    to create nodes and edges between nodes. Edges are created by
    randomly selecting a node in the previous level as a parent.

    Args:
        node_names (list): list of all the nodes to be used
        node_user_map (dict): use node name as a key to get all the users for node

    Returns:
        graph (DAG): returns a randomly generated DAG object
    """
    # The number of nodes to create is the length of the node_names list.
    node_num = len(node_names)
    hierarchy = []
    curr_num_of_nodes = 0
    hierarchy.append([curr_num_of_nodes])
    curr_num_of_nodes += 1

    # Create a hierarchy for the nodes.
    while curr_num_of_nodes < node_num:
        nodes_to_create = random.choice(
            list(range(curr_num_of_nodes, node_num)))
        level = [i for i in range(curr_num_of_nodes, nodes_to_create + 1)]
        curr_num_of_nodes += len(level)
        hierarchy.append(level)

    # Create an empty graph object without passing in an input matrix.
    graph = DAG(node_names, node_user_map)

    # Use the hierarchy to create the nodes and edges.
    for level in range(len(hierarchy)):
        if level == 0:
            graph.add_node("Node 0", node_user_map["Node 0"])
        else:
            for num in hierarchy[level]:
                curr_node_name = f"Node {num}"
                graph.add_node(curr_node_name, node_user_map[curr_node_name])
                parent_level = level - 1
                # Randomly choose a node one level up in the hierarchy
                # as the parent.
                parent_node_num = random.choice(hierarchy[parent_level])
                parent_node_name = f"Node {parent_node_num}"
                graph.add_edge(parent_node_name, curr_node_name)

    # for node in graph.node_list:
    #     print(f"node: {node}, edges: {graph.node_list[node].edges.keys()}")

    return graph

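# A minimal usage sketch. The DAG(node_names, node_user_map) constructor
# and the "Node <i>" naming come from the function above; the user lists
# here are illustrative:
node_names = [f"Node {i}" for i in range(5)]
node_user_map = {name: [f"user_{i}"] for i, name in enumerate(node_names)}
graph = create_random_dag(node_names, node_user_map)
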
def __init__(self, *, env, name):
    self.seq_no = 0
    self.env = env
    self.name = name
    self.out_pipe = BroadcastPipe(env=env, sender=self.name)
    self.in_pipe = simpy.Store(self.env)
    self.neighbours = set()
    self.dag = DAG()
    # Required to help prune the dag for large simulations.
    self.blocks_received = []
    self.shares_sent = []
    # Track num_shares_sent separately, as shares_sent is pruned when a
    # block is received.
    self.num_shares_sent = 0
    self.shares_not_rewarded = {}
    self.num_blocks = 0

def __init__(self, desc=None):
    """Init SearchSpace."""
    super(SearchSpace, self).__init__()
    if desc is None:
        desc = SearchSpaceConfig().to_json()
    for name, item in desc.items():
        self.__setattr__(name, item)
        self.__setitem__(name, item)
    self._params = OrderedDict()
    self._condition_dict = OrderedDict()
    self._forbidden_list = []
    self._hp_count = 0
    self._dag = DAG()
    if desc is not None:
        self.form_desc(desc)

def __init__(self, desc=None):
    """Init SearchSpace."""
    super(SearchSpace, self).__init__()
    if desc is None:
        desc = SearchSpaceConfig().to_dict()
        if desc.type is not None:
            desc = ClassFactory.get_cls(ClassType.SEARCHSPACE, desc.type).get_space(desc)
    for name, item in desc.items():
        self.__setattr__(name, item)
        self.__setitem__(name, item)
    self._params = OrderedDict()
    self._condition_dict = OrderedDict()
    self._forbidden_list = []
    self._hp_count = 0
    self._dag = DAG()
    if desc is not None:
        self.form_desc(desc)

def __init__(self, data):
    self.reactions = collections.defaultdict(list)
    pattern = r'(\d+ \w+)'
    self.dag = DAG('FUEL')
    self.dag.AddNode('ORE', 1)
    for line in data:
        chems = re.findall(pattern, line)
        # Only one result for these inputs.
        result = chems.pop().split(' ')  # result = (X, node_name)
        self.dag.AddNode(result[1], result[0])
        reactors = dict([
            (n, int(x)) for x, n in [p.strip().split(' ') for p in chems]
        ])
        value = (int(result[0]), reactors)
        self.reactions[result[1]] = value
    for k in self.reactions:
        self.dag.AddEdges(k, self.reactions[k][1])

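# A worked example of the parsing above, assuming Advent of Code 2019
# day 14 style input. For the line "7 A, 1 E => 1 FUEL":
#   re.findall(r'(\d+ \w+)', line)  -> ['7 A', '1 E', '1 FUEL']
#   result   = ['1', 'FUEL']         (the last token is the output)
#   reactors = {'A': 7, 'E': 1}      (the remaining tokens are the inputs)
#   self.reactions['FUEL'] = (1, {'A': 7, 'E': 1})
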
def generate_dag(optimal_indvidual, stage_name, num_nodes):
    # optimal_indvidual is this stage's binary string.
    # Create nodes for the graph; for stage s1 the nodes are named
    # s1_1, s1_2, ...
    nodes = np.empty((0), dtype=np.str)
    for n in range(1, (num_nodes + 1)):
        nodes = np.append(nodes, ''.join([stage_name, "_", str(n)]))

    # Initialize the directed acyclic graph (DAG) and add all nodes to it.
    dag = DAG()
    for n in nodes:
        dag.add_node(n)

    # Split the best individual found via the GA to identify vertex
    # connections and connect them in the DAG.
    # np.cumsum is the cumulative sum: np.cumsum([0, 1, 2, 3]) returns
    # [0, 1, 3, 6]. Here, with e.g. 4 nodes the binary string has length
    # 6 and is split into s[:0], s[0:1], s[1:3], s[3:6], i.e. the
    # substring of incoming connections for each node; the empty first
    # segment (s[:0]) is dropped by the trailing [1:].
    edges = np.split(optimal_indvidual, np.cumsum(range(num_nodes - 1)))[1:]
    v2 = 2
    # Iterate over the connection pattern of every node.
    for e in edges:
        v1 = 1
        # Iterate over this node's binary substring; whenever a bit is 1,
        # add the corresponding edge to the dag. (A for loop in place of
        # v1 would read better here.)
        for i in e:
            if i:
                dag.add_edge(''.join([stage_name, "_", str(v1)]),
                             ''.join([stage_name, "_", str(v2)]))
            v1 += 1
        v2 += 1

    # Delete isolated nodes (not connected to any other node) from the DAG.
    for n in nodes:
        if len(dag.predecessors(n)) == 0 and len(dag.downstream(n)) == 0:
            dag.delete_node(n)
            nodes = np.delete(nodes, np.where(nodes == n)[0][0])

    return dag, nodes

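# A concrete run of the splitting logic above (illustrative values):
import numpy as np

ind = np.array([1, 0, 1, 1, 0, 1])  # num_nodes = 4
print(np.split(ind, np.cumsum(range(3)))[1:])
# -> [array([1]), array([0, 1]), array([1, 0, 1])]
# Segment k (0-based) encodes the incoming edges of node k+2 from nodes
# 1..k+1, so this individual yields edges 1->2, 2->3, 1->4, 3->4.
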
def __init__(self, input_file):
    self.cfg = self._read(input_file)
    self.info = self.cfg['pipeline']
    self.owner = self.info['owner']
    self.basename = self.info['basename']
    self.version = self.info['version']
    self.dag = DAG()
    self.stages = {}
    for name in self.info['stages']:
        self.stages[name] = self.load_stage(name)
        self.dag.add_node(name)
    for name in self.info['stages']:
        stage_info = self.cfg[name]
        for parent in stage_info['depends-on']:
            self.dag.add_edge(parent, name)

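# The configuration shape the constructor above assumes, reconstructed
# from the keys it reads (the stage names and field values here are
# illustrative):
cfg = {
    'pipeline': {
        'owner': 'alice',
        'basename': 'demo',
        'version': '1.0',
        'stages': ['extract', 'transform'],
    },
    'extract': {'depends-on': []},
    'transform': {'depends-on': ['extract']},
}
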
def ops2dag(merged_ops):
    """Load ops dict into dag."""
    dag = DAG()
    # Map each op's output tensor name to the op that produces it.
    outs = {
        op['outputs'].name: op
        for name, op in merged_ops.items() if op['outputs'] is not None
    }
    for name, node in merged_ops.items():
        inps = node['inputs']
        pre_node_name = 'root'
        dag.add_node_if_not_exists(name)
        if inps is not None:
            for inp in inps:
                pre_node = outs.get(inp.name)
                if pre_node is not None:
                    pre_node_name = pre_node.op_name
                dag.add_edge(pre_node_name, name)
        else:
            dag.add_edge(pre_node_name, name)
    return dag

def __init__(self, host='127.0.0.1', port=5000, analytics=False,
             light_client=False, own_key=None):
    """
    Client that interacts with other peers in the network.

    light_client=True disables peering and storing peer information.
    """
    self.host = host
    self.port = port
    self.is_shutdown = False
    self.state = StateDB()
    self.keypair = Keypair.from_genesis_file(read_genesis_state())
    if own_key:
        self.keypair = own_key
    self.dag = DAG()
    self.peers = Peers(port)
    self.FIXED_PEERS = (('127.0.0.1', 5000), ('127.0.0.1', 5001),
                        ('127.0.0.1', 5003))
    self.sessions = {}
    self.logger = logging.getLogger('main')
    self.is_light_client = light_client
    self.lock = Lock()
    self.broadcast_executor = ThreadPoolExecutor(max_workers=8)
    self.tx_executor = ThreadPoolExecutor(max_workers=8)
    self.query_executor = ThreadPoolExecutor(max_workers=4)
    self.analytics_enabled = analytics
    self.analytics_doc_id = None
    self.metrics_lock = Lock()
    self.collect_metrics = False
    self.metrics_start = None
    self.metrics_end = None
    self.transactions_count = 0
    self.txn_insert_times = {}

def ops2dag(merged_ops):
    """Load ops dict into dag."""
    dag = DAG()
    dot = DagGraphVisual()
    dot.node(name='root', label='root')
    # Map each op's output tensor name to the op that produces it.
    outs = {op['outputs'].name: op for name, op in merged_ops.items()
            if op['outputs'] is not None}
    # Remap fused Conv2D output names to the corresponding BiasAdd outputs.
    outs = {k.replace('Conv2D:0', 'BiasAdd:0'): v for k, v in outs.items()}
    for name, node in merged_ops.items():
        inps = node['inputs']
        pre_node_name = 'root'
        dag.add_node_if_not_exists(name)
        dot.node(name=name, label=name)
        if inps is not None:
            for inp in inps:
                pre_node = outs.get(inp.name)
                if pre_node is not None:
                    pre_node_name = pre_node.op_name
                dag.add_edge(pre_node_name, name)
                dot.edge(pre_node_name, name)
        else:
            dag.add_edge(pre_node_name, name)
            dot.edge(pre_node_name, name)
    dot.show()
    return dag

def fetch_resources_by_topic(worksheets):
    """Smooth out into a nice JSON-able data structure.

    { "deadbeef":
        { "id": "deadbeef"
        , "subtopics":
            { "fa1afe1":
                { "id": "fa1afe1"
                , "topic_id": "deadbeef"
                , "dag":
                    { "names": ["fadedfad"]
                    , "vertices": [{"incomingNames": "feeddeaf"}]
                    }
                , "resources":
                    { "fadedfad":
                        { "id": "fadedfad"
                        , "topic_id": "deadbeef"
                        , "subtopic_id": "fa1afe1"
                        , "etc": "stuff"
                        }
                    }
                }
            }
        }
    }
    """
    topics = {}
    for topic_id, csvurl in worksheets:
        topic = {}
        topic['id'] = topic_id
        topic['subtopics'] = subtopics = defaultdict(lambda: defaultdict(dict))

        raw = _get(csvurl)
        reader = csv.reader(io.StringIO(raw))
        headers = next(reader)
        for row in reader:
            resource = dict(zip(headers, row))
            resource['id'] = resource['uid']
            resource['topic_id'] = topic_id
            subtopic = subtopics[resource['subtopic_id']]
            subtopic['resources'][resource['id']] = resource
            if 'dag' not in subtopic:
                # First time seeing it. Populate!
                subtopic['dag'] = DAG()
                subtopic['id'] = resource['subtopic_id']
                subtopic['topic_id'] = topic_id

        # Populate DAGs.
        # ==============
        # We have to do this in a second loop so that we can tell whether
        # before_this and after_this are in fact in the same subtopic as a
        # given resource. The base data is not clean on this point.

        for subtopic in subtopics.values():
            for resource in subtopic['resources'].values():
                # Relax the py-dag API to be more like the js DAG we had.
                d = subtopic['dag']
                add_node = lambda node: d.add_node(node) if node and node not in d.graph else None
                add_edge = lambda a, b: d.add_edge(a, b) if a and b and a != b else None

                add_node(resource['id'])
                if resource['before_this'] in subtopic['resources']:
                    add_node(resource['before_this'])
                    add_edge(resource['before_this'], resource['id'])
                if resource['after_this'] in subtopic['resources']:
                    add_node(resource['after_this'])
                    add_edge(resource['id'], resource['after_this'])

        # Convert DAGs to the format that the JavaScript expects.
        for subtopic in subtopics.values():
            dag = subtopic['dag']
            subtopic['dag'] = {
                "names": dag.topological_sort(),
                "vertices": {k: {"incomingNames": list(dag.graph[k])} for k in dag.graph}
            }

        topics[topic_id] = topic
    return topics

def DAG_Attack(model, test_dataset, args):
    # Hyperparameters for DAG.
    num_iterations = 20
    gamma = 0.5
    num = 15

    gpu = args.gpu

    # Set the device configuration.
    device_ids = []
    if gpu == 'gpu':
        if not torch.cuda.is_available():
            print("No cuda available")
            raise SystemExit
        device = torch.device(args.device1)
        device_ids.append(args.device1)
        if args.device2 != -1:
            device_ids.append(args.device2)
        if args.device3 != -1:
            device_ids.append(args.device3)
        if args.device4 != -1:
            device_ids.append(args.device4)
    else:
        device = torch.device("cpu")

    if len(device_ids) > 1:
        model = nn.DataParallel(model, device_ids=device_ids)

    model = model.to(device)

    adversarial_examples = []
    for batch_idx in range(len(test_dataset)):
        image, label = test_dataset.__getitem__(batch_idx)
        image = image.unsqueeze(0)
        pure_label = label.squeeze(0).numpy()

        image, label = image.clone().detach().requires_grad_(True).float(), label.clone().detach().float()
        image, label = image.to(device), label.to(device)

        # Change labels from [batch_size, height, width] to
        # [batch_size, num_classes, height, width].
        label_oh = make_one_hot(label.long(), n_classes, device)

        if args.attacks == 'DAG_A':
            adv_target = torch.zeros_like(label_oh)
        elif args.attacks == 'DAG_B':
            adv_target = generate_target_swap(label_oh.cpu().numpy())
            adv_target = torch.from_numpy(adv_target).float()
        elif args.attacks == 'DAG_C':
            # Choose one random particular class, excluding the
            # background class (0).
            unique_label = torch.unique(label)
            target_class = int(random.choice(unique_label[1:]).item())
            adv_target = generate_target(label_oh.cpu().numpy(), target_class=target_class)
            adv_target = torch.from_numpy(adv_target).float()
        else:
            print("Wrong adversarial attack type: must be DAG_A, DAG_B, or DAG_C")
            raise SystemExit

        adv_target = adv_target.to(device)

        _, _, _, _, _, image_iteration = DAG(model=model,
                                             image=image,
                                             ground_truth=label_oh,
                                             adv_target=adv_target,
                                             num_iterations=num_iterations,
                                             gamma=gamma,
                                             no_background=True,
                                             background_class=0,
                                             device=device,
                                             verbose=False)

        if len(image_iteration) >= 1:
            adversarial_examples.append([image_iteration[-1], pure_label])

        del image_iteration

    print('total {} {} images are generated'.format(len(adversarial_examples), args.attacks))
    return adversarial_examples

def blank_setup():
    global dag
    dag = DAG()

# -*- coding: utf-8 -*-
from dag import DAG

dag = DAG()
dag.from_dict({'a': ['b', 'c'],
               'b': ['d'],
               'c': ['d'],
               'd': [],
               'e': ['d']})
val = dag.all_downstreams('a')
ind_node = dag.ind_nodes()
print(ind_node)

def test_find_not_reachable_with_some_reachable(self):
    dag = DAG()
    dag.add_edges(sources=['b', 'c'], target='a')
    dag.add_edges(sources=['d'], target='e')
    self.assertCountEqual(
        dag.find_not_reachable(['b', 'c', 'd', 'e'], 'a'), ['d', 'e'])

def validate_dependency(graph):
    assert graph, "Graph is empty, please check!!"
    dag = DAG()
    is_valid, msg = dag.validate(graph)
    assert is_valid, msg

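# A minimal usage sketch, assuming DAG.validate(graph) returns an
# (is_valid, msg) tuple as the function above implies:
validate_dependency({'tc_b': ['tc_a'], 'tc_a': []})        # passes
validate_dependency({'tc_a': ['tc_b'], 'tc_b': ['tc_a']})  # AssertionError: cycle
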