Example #1
    def test_graphrunner_breadth_first(self) -> None:
        """Test code for GraphRunner, with the breadth-first mode."""
        graph = Graph()
        self.create_graph(graph)

        kwargs: Dict[str, List[str]] = {'backward': [], 'forward': []}
        runner = TestRunner(graph, depth_first=False, lazy=False)
        runner.run(**kwargs)

        lst1 = ['output', 'conv4', 'input3', 'conv3', 'input2', 'conv2', 'conv1', 'weight2', 'input1', 'weight1']
        self.assertEqual(kwargs['backward'], lst1,
                         'backward traversal failed in breadth-first mode.')

        lst2 = ['input3', 'input2', 'input1', 'weight1', 'weight2',
                'conv4', 'conv3', 'conv1', 'conv2', 'output']
        self.assertEqual(kwargs['forward'], lst2, 'forward traversal failed in breadth-first mode.')

        self.assertEqual(runner.message, [
            'start running.',
            'conv4: backward process',
            'conv3: backward process',
            'conv2: backward process',
            'conv1: backward process',
            'conv4: forward process',
            'conv3: forward process',
            'conv1: forward process',
            'conv2: forward process',
            'finished running.',
        ])

        print("GraphRunner bradth-first mode test passed!")
Example #2
def breadth_first_search(nodes, edges):
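    # Run a breadth-first search rooted at every node in turn; reset() is
    # assumed to restore node/edge bookkeeping between runs.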
    for node in nodes:
        root = node
        graph = Graph(root, nodes, edges, False)
        breadth_search = bfs(graph)
        breadth_search.search()
        reset(nodes, edges)
Example #3
    def make_simple_model(self) -> Model:
        graph = Graph()

        # two inputs
        x = Input(
            'input',
            [1, 5, 5, 3],
            Float32(),
        )

        w = Constant(
            'weight',
            Float32(),
            np.zeros([1, 2, 2, 3]),
            dimension_format='NHWC',
        )

        # Conv
        conv = Conv('conv', [1, 4, 4, 1],
                    Float32(), {
                        'X': x,
                        'W': w
                    },
                    kernel_shape=[2, 2])

        # One output
        y = Output('output', [1, 4, 4, 1], Float32(), {'input': conv})

        # add ops to the graph
        graph.add_op_and_inputs(y)
        model = Model()
        model.graph = graph
        return model
Example #4
def test_can_remove_edge():
    graph = Graph()
    graph.addNode("start")
    graph.addNode("end")
    graph.addEdge("start", "end")
    output = graph.removeEdge("start", "end")
    assert output == True
Example #5
    def create_expected_graph(data: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [1, 5, 5, 3], Float32())

        # constant and internal nodes
        w = Constant('weight', Float32(), data)
        q = QTZ_binary_mean_scaling('qtz1', [1, 2, 2, 3], Float32(),
                                    {'input': w})

        # Conv
        conv = Conv('conv', [1, 4, 4, 3],
                    Float32(), {
                        'X': x,
                        'W': q
                    },
                    kernel_shape=[2, 2])

        # One output
        rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})
        y = Output(
            'output',
            [1, 48],
            Float32(),
            {'input': rs},
        )

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #6
    def create_sample_graph(data1: np.ndarray, data2: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [1, 5, 5, 3], Float32())

        # Conv1
        w1 = Constant('weight1', Float32(), data1)
        conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

        # activation quantizer
        s1 = Constant('aq_const1', Float32(), np.array(1))
        s2 = Constant('aq_const2', Float32(), np.array(2))
        aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': conv1, 'Y': s1, 'Z': s2})

        # Conv2
        w2 = Constant('weight2', Float32(), data2)
        kq = QTZ_binary_mean_scaling('kqtz1', [1, 2, 2, 3], Float32(), {'input': w2})
        conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq, 'W': kq}, kernel_shape=[2, 2])
        conv2.a_quantizer = [aq]
        conv2.quantizer = kq

        # One output
        y = Output('output', [1, 3, 3, 3], Float32(), {'input': conv2})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #7
    def create_sample_graph(data1: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [1, 5, 5, 3], Float32())

        # Conv1
        w1 = Constant('weight1', Float32(), data1)
        conv1 = Conv('conv1', [1, 4, 4, 3],
                     QUANTIZED_PACKED(), {
                         'X': x,
                         'W': w1
                     },
                     kernel_shape=[2, 2])
        conv1.is_quantized = True

        pool1 = SpaceToDepth('s2d', [1, 2, 2, 12], Float32(), {'input': conv1})

        # One output
        y = Output('output', [1, 2, 2, 12], Float32(), {'input': pool1})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #8
def flow_network(nodes, edges):
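    # Build a directed flow network from the first node to the second-to-last
    # node, then compute its maximum flow with the Edmonds-Karp algorithm.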
    root = nodes[0]
    destination = nodes[len(nodes) - 2]

    graph = Graph(root, destination, nodes, edges, True)
    edmonds_karp = Edmonds_Karp(graph)
    edmonds_karp.find_maximum_flow()
Example #9
def test_can_find_edge():
    graph = Graph()
    graph.addNode("start")
    graph.addNode("end")
    graph.addEdge("start", "end")
    output = graph.findEdge("start", "end")
    assert output == True
Example #10
def floyd_warshall(nodes, edges):
    for node in nodes:
        print("root-> {} {}".format(node.get_sequence(), node.get_rotulo()))
        root = node
        graph = Graph(root, nodes, edges, True)
        floyd_warshall = FloydWarshall(graph)
        floyd_warshall.search()
        reset(nodes, edges)
Example #11
def djikstra(nodes, edges):
    for node in nodes:
        print("root-> {} {}".format(node.get_sequence(), node.get_rotulo()))
        root = node
        graph = Graph(root, nodes, edges, True)
        spf = SPF(graph)
        spf.search_shortest_path()
        reset(nodes, edges)
Example #12
def eulerian_path_finder(nodes, edges):
    for node in nodes:
        root = node
        graph = Graph(root, nodes, edges, True)
        eulerian_path_search = EulerianPath(graph)
        eulerian_path_search.search()
        reset(nodes, edges)

        print("#####")
Example #13
    def __init__(self):
        """
        switches: a dictionary that maps switch names to components
        hosts: a dictionary that maps host names to components
        """
        self.name = None
        self.home = None
        self.handle = None
        self.switches = {}
        self.hosts = {}
        self.links = {}
        super(NetworkDriver, self).__init__()
        self.graph = Graph()
Example #14
    def create_sample_graph() -> Graph:
        graph = Graph()

        x = Input('placeholder', [2], Float32())

        s1 = Constant('potato_1', Float32(), np.array([1, 2]))
        s2 = Constant('potato_2', Float32(), np.array([1, 3]))
        add1 = Add('potatoes', [2], Float32(), {'A': s1, 'B': s2})
        add2 = Add('more_potatoes', [2], Float32(), {'A': x, 'B': add1})

        # One output
        y = Output('output', [2], Float32(), {'input': add2})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #15
    def create_sample_graph(data1: np.ndarray, data2: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [1, 5, 5, 3], Float32())

        # Conv1
        w1 = Constant('weight1', Float32(), data1)
        conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

        # activation quantizer
        s1 = Constant('aq_const1', Int32(), np.array([2], dtype=np.int32))
        s2 = Constant('aq_const2', Float32(), np.array([2.0], dtype=np.float32))
        aq1 = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': conv1, 'Y': s1, 'Z': s2})

        # Conv2
        w2 = Constant('weight2', Float32(), data2)
        kq = QTZ_binary_mean_scaling('kqtz1', [1, 2, 2, 3], Float32(), {'input': w2})
        conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq1, 'W': kq}, kernel_shape=[2, 2])
        conv2.a_quantizer = [aq1]
        conv2.quantizer = kq
        conv2.is_quantized = True

        sc = Constant('bn_scale', Float32(), np.random.rand(3))
        be = Constant('bn_b', Float32(), np.random.rand(3))
        mu = Constant('bn_mu', Float32(), np.random.rand(3))
        va = Constant('bn_var', Float32(), np.random.rand(3))
        bn = BatchNormalization('bn', [1, 3, 3, 3], Float32(), {'X': conv2,
                                                                'scale': sc,
                                                                'B': be,
                                                                'mean': mu,
                                                                'var': va})

        # activation quantizer
        s3 = Constant('aq_const3', Int32(), np.array([2], dtype=np.int32))
        s4 = Constant('aq_const4', Float32(), np.array([2.0], dtype=np.float32))
        aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {'X': bn, 'Y': s3, 'Z': s4})

        # One output
        y = Output('output', [1, 3, 3, 3], Float32(), {'input': aq2})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #16
    def create_sample_graph_2(data1: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [1, 5, 5, 3], Float32())

        # Conv1
        w1 = Constant('weight1', Float32(), data1)
        conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

        s1 = Constant('const1', Float32(), np.zeros([1, 4, 4, 3]))
        add1 = Add('add', [1, 4, 4, 3], Float32(), {'A': conv1, 'B': s1})

        y = Output('output', [1, 4, 4, 3], Float32(), {'input': add1})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #17
    def create_transposed_graph(self, data: np.ndarray) -> Graph:
        graph = Graph()
        data = data.transpose([3, 2, 1, 0])

        # input
        x = Input('placeholder', [1, 5, 5, 3],
                  Float32(),
                  dimension_format='NHWC')

        # constant and internal nodes
        w = Constant('weight', Float32(), data, dimension_format='NHWC')

        i = Identity('identity1', [1, 2, 2, 3],
                     Float32(), {'input': w},
                     dimension_format='NHWC')

        q = QTZ_binary_mean_scaling('qtz1', [1, 2, 2, 3],
                                    Float32(), {'input': i},
                                    dimension_format='NHWC')

        # Conv
        conv = Conv('conv', [1, 4, 4, 3],
                    Float32(), {
                        'X': x,
                        'W': q
                    },
                    kernel_shape=[2, 2],
                    dimension_format='NHWC')

        rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})

        # One output
        y = Output(
            'output',
            [1, 48],
            Float32(),
            {'input': rs},
        )

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #18
    def test_graph_conv(self) -> None:
        """Test code for making a simple graph with Conv."""
        graph = Graph()

        # two inputs
        x = Input(
            'input',
            [1, 5, 5, 3],
            Float32(),
        )

        w = Constant('weight', Float32(), np.zeros([1, 2, 2, 3]))

        # Conv
        conv = Conv(
            'conv',
            [1, 4, 4, 3],
            Float32(),
            {
                'X': x,
                'W': w
            },  # you can get these keys by 'Conv.input_names'
            kernel_shape=[2, 2])

        # One output
        y = Output(
            'output',
            [1, 4, 4, 3],
            Float32(),
            {'input': conv}  # you can get this key by 'Output.input_names'
        )

        # add ops to the graph
        graph.add_op(x)
        graph.add_op(w)
        graph.add_op(conv)
        graph.add_op(y)

        self.assertTrue(graph.check_nodes(),
                        "All inputs of operators must match their outputs.")
        print("Graph test passed!")
Example #19
    def colour(self):
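        # Dynamic programming over the power set of vertices: X[s] is taken to
        # be the minimum number of colours needed for subset s, obtained by
        # peeling off one maximal independent set (one colour class) at a time.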
        power_set = self.graph.power_set()
        X = self.init_colour_array(power_set)

        for s in power_set:
            if s == 0:
                continue
            Xs = INFINITY

            nodes = self.get_nodes_in_set(power_set[s])
            edges = self.get_nodes_edges(nodes)
            g = Graph(nodes[0], nodes[len(nodes) - 1], nodes, edges, False)
            I = g.maximal_independent_sets()

            for independent_set in I:
                i = self.find_index(power_set, nodes, independent_set)
                Xi = X[i] + 1
                if Xi < Xs:
                    Xs = Xi
                    X[s] = Xi

        print("Número minimo de colorações: {}".format(X[len(X) - 1]))
Example #20
    def create_sample_graph(data: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input('placeholder', [3, 5, 5, 1],
                  Float32(),
                  dimension_format='CWHN')

        # constant and internal nodes
        w = Constant('weight', Float32(), data, dimension_format='CWHN')
        i1 = Identity('identity1', [3, 2, 2, 1],
                      Float32(), {'input': w},
                      dimension_format='CWHN')
        q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 1],
                                    Float32(), {'input': i1},
                                    dimension_format='CWHN')

        # Conv
        conv = Conv('conv', [3, 4, 4, 1],
                    Float32(), {
                        'X': x,
                        'W': q
                    },
                    kernel_shape=[2, 2],
                    dimension_format='CWHN')

        # One output
        rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})
        y = Output(
            'output',
            [1, 48],
            Float32(),
            {'input': rs},
        )

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph
Example #21
def generate_sv_single_video(context):
    try:
        detection_file, args = context
        output_pb_file = os.path.join(args.output_path, os.path.basename(detection_file).split('.')[0] + '.mp4.cut.pb')
        output_json_file = os.path.join(args.output_path, os.path.basename(detection_file).split('.')[0] + '.mp4.cut.mp4.final.reduced.json')
        cname = "single_view"
        moving_cameras = ["MOT17-05", "MOT17-06", "MOT17-07", "MOT17-10", "MOT17-11", "MOT17-12", "MOT17-13", "MOT17-14"]
        ############################################################
        # load and set configs
        video_name = os.path.basename(detection_file).split('-')[0] +  '-' + os.path.basename(detection_file).split('-')[1]
        #video_name = os.path.basename(detection_file).split('_')[0]
        if video_name in moving_cameras:
            config_file = os.path.join(args.config_path, "single_view_online_moving.json")
        else:
            config_file = os.path.join(args.config_path, "single_view_online_static.json")
        with open(config_file, 'r') as f:
            configs = json.loads(f.read())
        ############################################################
        # load data
        nodes = load_detections_to_nodes(detection_file,
                cname,
                configs["detection_confidence"],
                args.start_frame,
                args.start_frame + args.num_frames_process - 1, args.do_augmentation)

        opts = {} 
        graph = Graph(nodes,
                create_affinities(configs["affinity"], opts),
                cname)
        
        engine = create_algorithm(configs["algorithm"])

        output = engine(graph, cname)
        save_nodes_online_pbs(output, cname, output_pb_file)
        save_nodes_to_json(output, cname, output_json_file)
    except Exception as e:
        mlog.info(e)
        sys.exit()
Example #22
def main():

    parser = argparse.ArgumentParser(description="DoSTect allows detection of SYN flooding attacks with parametric/non-parametric CUSUM change point detection")
    
    # Create an exclusive group: in this group only one parameter can be used at time
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument('-i', '--interface', action='store', dest="interface",
                        help="Network interface from which to perform live capture",
                        metavar="INTERFACE",
                        type=lambda x: is_valid_interface(parser, x))

    source_group.add_argument('-f', '--file', action='store', dest="file",
                        help="Packet capture file", metavar="FILE .pcap/.pcapng",
                        type=lambda x: is_valid_capture(parser, x))

    parser.add_argument('-s', '--slice', dest='interval', action='store',default=5.0,
                        help="Specify duration of time interval observation in seconds (default: 5)")
   
    parser.add_argument("-p", "--parametric",  action='store', dest="param",type=bool, nargs='?',
                        const=True, default=False,
                        help="Flag to set CUSUM Parametric mode")

    parser.add_argument("-g", '--graph',  action='store', dest="graph",type=bool, nargs='?',
                        const=True, default=False,
                        help="Activate influxDB data sender: requires --interface")

    parser.add_argument('-t', '--threshold', action='store', dest="threshold",
                        help="Threshold detection value for CUSUM Parametric mode", type=float)
    
    parser.add_argument('-a', '--address', action='store', dest="address",
                        help=" IPv4 address of attacked machine for PCAP capture: requires --file", type=str)
    
    parser.add_argument("-v", "--verbose",  action='store', dest="verbose",type=bool, nargs='?',
                        const=True, default=False,
                        help="Flag to set verbose output mode")
    
    # Arguments parser
    args = parser.parse_args()

    # Check that the slice interval can be cast to int()
    try:
        int(args.interval)
    except (TypeError, ValueError):
        parser.error("%s is not a valid integer time interval!" % str(args.interval))


    # Check if graph mode and file capture both selected
    if args.graph and args.file is not None:
        parser.error("--graph unable to start with --file [FILE .pcap/.pcapng]")

    # Check file && localaddr dependency
    if (args.file and args.address is None) or (args.interface and args.address is not None):
        parser.error("--pcap requires --address [ADDRESS].")
    
    elif args.file is not None:
        # Check address format
        try:
            ipaddress.IPv4Address(args.address)
        except ipaddress.AddressValueError:
            parser.error("%s is not an IPv4 address!" % str(args.address))

    # Initialize to default value if None
    if args.threshold is None:
        args.threshold = 5.0

    # Initialize to Graph module if -g mode
    plot = None
    if args.graph:
        try:
            plot = Graph(os.path.join(os.path.dirname(__file__), 'config/influxdb/config.ini'))
        except:
            utils.colors(7,0,"[Graph startup] - Error while connecting to influxdb instance: check your influxd service!", 12)
            sys.exit(1)

    # Set TERM for curses color support
    if os.getenv("TERM") is None:
        os.environ['TERM'] = "xterm-256color"

    # Start live capture if file is None (-i [INTERFACE] mode)
    if args.file is None:
        analyzer = LiveCatcher(
            source=str(args.interface),
            plot=plot,
            parametric=args.param,
            time_interval=int(args.interval),
            threshold=float(args.threshold),
            verbose=bool(args.verbose)
        )
    else:
        # Start analyzer from PCAP capture (-f [FILE] mode)
        analyzer = OfflineCatcher(
            source=str(args.file),
            ipv4_address=str(args.address),
            parametric=args.param,
            time_interval=int(args.interval),
            threshold=float(args.threshold),
            verbose=bool(args.verbose)
        )

    def sigint_handler(signum, frame):

        if args.graph:
            plot.stop_writing_thread()
        
        print_statistics()


        exit(0)

    def print_statistics():
        utils.colors(0,0,"                                                        ",5)
        utils.colors(0,0,"Status: monitoring ended",7)
        utils.colors(9,0,"Total intervals:           " + str(analyzer.get_total_intervals()),3)
        utils.colors(10,0,"Anomalous intervals count: " + str(analyzer.get_anomalous_intervals_count()),3)
        utils.colors(12,0,"Max volume reached:        " + str(analyzer.get_max_volume()),3)
        utils.colors(13,0,"Mean volume reached:       " + str(analyzer.get_mean_volume()),3)

        start_time = analyzer.get_time_start()
        end_time =  analyzer.get_time_end()

        if args.file is None and start_time != 0 and end_time != 0:
            utils.colors(14,0,"Attack start detected at:       " + str(datetime.fromtimestamp(start_time)),12)
            utils.colors(15,0,"End attack detected at:         " + str(datetime.fromtimestamp(end_time)),12)

    # Register handler for SIGINT
    signal.signal(signal.SIGINT, sigint_handler)
    
    try:
        # Start analyzer
        analyzer.start()
    except (KeyboardInterrupt, SystemExit):
        sys.exit()

    print_statistics()
Example #23
File: tf.py  Project: ki-lm/blueoil
    def make_graph(cls, tf_mp) -> Graph:
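        # `tf_mp` is presumably a TensorFlow model proto; the Importer walks it
        # and adds every node it finds to a fresh Graph.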
        importer = Importer(tf_mp)
        graph = Graph()

        importer.add_all_nodes(graph)
        return graph
Example #24
def makeRelationMatrix():
    # The opening of this snippet was truncated in the source; the matrix
    # initialisation below is reconstructed from how `matrix` is used.
    matrix = [[0 for x in range(0, len(users) + 1)]
              for y in range(0,
                             len(users) + 1)]
    for i in range(0, len(users)):
        matrix[0][i + 1] = users[i]
        matrix[i + 1][0] = users[i]

    for group, topics in json_data.items():
        for topicId, title in topics[0].items():
            for comment in title[0]['comments']:
                for userId, comments in comment.items():
                    for i in range(0, len(users) + 1):
                        if matrix[i][0] == userId:
                            matrix[i][i] += 1
    return matrix


def printMatrix(matrix):
    for i in range(0, len(users) + 1):
        for j in range(0, len(users) + 1):
            print(matrix[i][j], end=" ")
        print()


json_data = json.load(open('data/opiates_data.json'))
myCommentGraph = Graph()
users = []
parseJSON()
users.sort()
matrix = makeRelationMatrix()

printMatrix(matrix)
Example #25
    def create_quantized_graph2(self, data1: np.ndarray, data2: np.ndarray,
                                data3: np.ndarray) -> Tuple[Graph, np.float32, np.float32]:
        graph = Graph()

        # input
        x = Input(
            'placeholder',
            [1, 5, 5, 3],
            Float32(),
        )

        # constant and internal nodes
        scaling1, qdata1 = self.binary_mean_scaling(data1)
        w = Constant('weight', Float32(), qdata1 * scaling1)

        q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 3], Float32(),
                                    {'input': w})

        # Conv
        conv1 = Conv('conv1', [1, 4, 4, 3],
                     Float32(), {
                         'X': x,
                         'W': w
                     },
                     kernel_shape=[2, 2])

        s1 = Constant('aq_const1', Float32(), np.array(1))

        s2 = Constant('aq_const2', Float32(), np.array(2))

        aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3],
                                       QUANTIZED_NOT_PACKED(), {
                                           'X': conv1,
                                           'Y': s1,
                                           'Z': s2
                                       })

        from modules.packer import Packer
        packer = Packer(1, 32)
        scaling2, qdata2 = self.binary_mean_scaling(data2)
        w2 = Constant('weight2',
                      Uint32(),
                      packer.run(qdata2),
                      packed=True,
                      actual_shape=[3, 2, 2, 3])

        q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Float32(),
                                     {'input': w2})
        q2.scaling_factor = scaling2

        conv2 = Conv(
            'conv2',
            [1, 3, 3, 3],
            Float32(),
            {
                'X': aq,
                'W': w2
            },
            kernel_shape=[2, 2],
            quantized=True,
        )
        conv2.quantizer = q2

        scaling3, qdata3 = self.binary_mean_scaling(data3)
        w3 = Constant('weight3',
                      Uint32(),
                      packer.run(qdata3),
                      packed=True,
                      actual_shape=[3, 2, 2, 3])

        q3 = QTZ_binary_mean_scaling('qtz3', [3, 2, 2, 3], Float32(),
                                     {'input': w3})
        q3.scaling_factor = scaling3

        conv3 = Conv('conv3', [1, 3, 3, 3],
                     Float32(), {
                         'X': aq,
                         'W': w3
                     },
                     kernel_shape=[2, 2],
                     quantized=True)
        conv3.quantizer = q3

        y1 = Output('output1', [1, 3, 3, 3], Float32(), {'input': conv2})

        y2 = Output('output2', [1, 3, 3, 3], Float32(), {'input': conv3})

        # add ops to the graph
        graph.add_op_and_inputs(y1)
        graph.add_op_and_inputs(y2)

        return graph, scaling2, scaling3
Example #26
    def create_sample_graph3(self, data1: np.ndarray, data2: np.ndarray,
                             data3: np.ndarray) -> Graph:
        graph = Graph()

        # input
        x = Input(
            'placeholder',
            [1, 5, 5, 3],
            Float32(),
        )

        # constant and internal nodes
        w = Constant('weight', Float32(), data1)

        q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 3], Float32(),
                                    {'input': w})

        # Conv
        conv1 = Conv('conv1', [1, 4, 4, 3],
                     Float32(), {
                         'X': x,
                         'W': q
                     },
                     kernel_shape=[2, 2])

        i2 = Identity('identity2', [1, 4, 4, 3], Float32(), {'input': conv1})

        s1 = Constant('aq_const1', Float32(), np.array(1))

        s2 = Constant('aq_const2', Float32(), np.array(2))

        aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {
            'X': i2,
            'Y': s1,
            'Z': s2
        })

        w2 = Constant('weight2', Float32(), data2)

        q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Float32(),
                                     {'input': w2})

        conv2 = Conv('conv2', [1, 3, 3, 3],
                     Float32(), {
                         'X': aq,
                         'W': q2
                     },
                     kernel_shape=[2, 2])

        w3 = Constant('weight3', Float32(), data3)

        q3 = QTZ_binary_mean_scaling('qtz3', [3, 2, 2, 3], Float32(),
                                     {'input': w3})

        conv3 = Conv('conv3', [1, 3, 3, 3],
                     Float32(), {
                         'X': aq,
                         'W': q3
                     },
                     kernel_shape=[2, 2])

        y1 = Output('output1', [1, 3, 3, 3], Float32(), {'input': conv2})

        y2 = Output('output2', [1, 3, 3, 3], Float32(), {'input': conv3})

        # add ops to the graph
        graph.add_op_and_inputs(y1)
        graph.add_op_and_inputs(y2)

        return graph
Example #27
    def create_quantized_graph(self, data: np.ndarray, data2: np.ndarray, data3: np.ndarray) \
            -> Tuple[Graph, np.float32, np.float32]:
        graph = Graph()

        # two inputs
        x = Input(
            'placeholder',
            [1, 5, 5, 3],
            Float32(),
        )

        from modules.packer import Packer
        packer = Packer(1, 32)
        data = data.transpose([3, 2, 1, 0])
        scaling, qdata = self.binary_mean_scaling(data)
        shape = list(data.shape)
        w = Constant(
            'weight',
            Float32(),
            qdata * scaling,
        )

        q = QTZ_binary_mean_scaling('qtz1', shape, Float32(), {'input': w})
        q.scaling_factor = scaling

        # Conv
        conv1 = Conv(
            'conv1',
            [1, 4, 4, 3],
            Float32(),
            {
                'X': x,
                'W': w
            },
            kernel_shape=[2, 2],
        )

        s1 = Constant('aq_const1', Float32(), np.array(1))

        s2 = Constant('aq_const2', Float32(), np.array(2))

        aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3],
                                       QUANTIZED_NOT_PACKED(), {
                                           'X': conv1,
                                           'Y': s1,
                                           'Z': s2
                                       })

        dummy = Transpose('dummy', [1, 4, 4, 3],
                          QUANTIZED_NOT_PACKED(), {'data': aq},
                          perm=[0, 1, 2, 3])

        scaling2, qdata2 = self.binary_mean_scaling(data2)
        w2 = Constant('weight2',
                      Uint32(),
                      packer.run(qdata2),
                      packed=True,
                      actual_shape=[3, 2, 2, 3])

        # quantizer connected to conv2 as 'conv2.quantizer'
        q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Uint32(),
                                     {'input': w2})
        q2.scaling_factor = scaling2

        conv2 = Conv('conv2', [1, 3, 3, 3],
                     Float32(), {
                         'X': dummy,
                         'W': w2
                     },
                     kernel_shape=[2, 2],
                     quantized=True)
        conv2.quantizer = q2

        s3 = Constant('aq_const1', Float32(), np.array(1))

        s4 = Constant('aq_const2', Float32(), np.array(2))

        aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {
            'X': conv2,
            'Y': s3,
            'Z': s4
        })

        w3 = Constant('weight3', Float32(), data3)

        conv3 = Conv('conv3', [1, 2, 2, 3],
                     Float32(), {
                         'X': aq2,
                         'W': w3
                     },
                     kernel_shape=[2, 2])

        # One output
        y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv3})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph, scaling, scaling2
Example #28
    def CASE0(self, main):
        """
        Startup sequence:
        apply cell <name>
        git pull
        mvn clean install
        onos-package
        onos-verify-cell
        onos-uninstall
        onos-install
        onos-start-cli
        Set IPv6 cfg parameters for Neighbor Discovery
        start event scheduler
        start event listener
        """
        import time
        from threading import Lock, Condition
        from core.graph import Graph
        from tests.CHOTestMonkey.dependencies.elements.ONOSElement import Controller
        from tests.CHOTestMonkey.dependencies.EventGenerator import EventGenerator
        from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduler

        gitPull = main.params['TEST']['autoPull']
        onosPackage = main.params['TEST']['package']
        gitBranch = main.params['TEST']['branch']
        karafTimeout = main.params['TEST']['karafCliTimeout']
        main.enableIPv6 = main.params['TEST']['IPv6']
        main.enableIPv6 = True if main.enableIPv6 == "on" else False
        main.caseSleep = int(main.params['TEST']['caseSleep'])
        main.numCtrls = main.params['TEST']['numCtrl']
        main.controllers = []
        for i in range(1, int(main.numCtrls) + 1):
            newController = Controller(i)
            newController.setCLI(getattr(main, 'ONOScli' + str(i)))
            main.controllers.append(newController)
        main.devices = []
        main.links = []
        main.hosts = []
        main.intents = []
        main.enabledEvents = {}
        for eventName in main.params['EVENT'].keys():
            if main.params['EVENT'][eventName]['status'] == 'on':
                main.enabledEvents[int(
                    main.params['EVENT'][eventName]['typeIndex'])] = eventName
        print(main.enabledEvents)
        main.graph = Graph()
        main.eventScheduler = EventScheduler()
        main.eventGenerator = EventGenerator()
        main.variableLock = Lock()
        main.mininetLock = Lock()
        main.ONOSbenchLock = Lock()
        main.threadID = 0
        main.eventID = 0
        main.caseResult = main.TRUE

        main.case("Set up test environment")
        main.log.report("Set up test environment")
        main.log.report("_______________________")

        main.step("Apply Cell environment for ONOS")
        if (main.onoscell):
            cellName = main.onoscell
            cellResult = main.ONOSbench.setCell(cellName)
            utilities.assert_equals(expect=main.TRUE,
                                    actual=cellResult,
                                    onpass="******",
                                    onfail="Test step FAIL")
        else:
            main.log.error(
                "Please provide onoscell option at TestON CLI to run CHO tests"
            )
            main.log.error(
                "Example: ~/TestON/bin/cli.py run CHOTestMonkey onoscell <cellName>"
            )
            main.cleanup()
            main.exit()

        main.step("Git checkout and pull " + gitBranch)
        if gitPull == 'on':
            checkoutResult = main.ONOSbench.gitCheckout(gitBranch)
            pullResult = main.ONOSbench.gitPull()
            cpResult = (checkoutResult and pullResult)
        else:
            checkoutResult = main.TRUE
            pullResult = main.TRUE
            main.log.info(
                "Skipped git checkout and pull as they are disabled in params file"
            )
            cpResult = (checkoutResult and pullResult)
        utilities.assert_equals(expect=main.TRUE,
                                actual=cpResult,
                                onpass="******",
                                onfail="Test step FAIL")

        main.step("mvn clean & install")
        if gitPull == 'on':
            mvnResult = main.ONOSbench.cleanInstall()
        else:
            mvnResult = main.TRUE
            main.log.info(
                "Skipped mvn clean install as it is disabled in params file")
        utilities.assert_equals(expect=main.TRUE,
                                actual=mvnResult,
                                onpass="******",
                                onfail="Test step FAIL")
        main.ONOSbench.getVersion(report=True)

        main.step("Create ONOS package")
        if onosPackage == 'on':
            packageResult = main.ONOSbench.buckBuild()
        else:
            packageResult = main.TRUE
            main.log.info(
                "Skipped onos package as it is disabled in params file")
        utilities.assert_equals(expect=main.TRUE,
                                actual=packageResult,
                                onpass="******",
                                onfail="Test step FAIL")

        main.step("Uninstall ONOS package on all Nodes")
        uninstallResult = main.TRUE
        for i in range(int(main.numCtrls)):
            main.log.info("Uninstalling package on ONOS Node IP: " +
                          main.onosIPs[i])
            uResult = main.ONOSbench.onosUninstall(main.onosIPs[i])
            utilities.assert_equals(expect=main.TRUE,
                                    actual=uResult,
                                    onpass="******",
                                    onfail="Test step FAIL")
            uninstallResult = (uninstallResult and uResult)

        main.step("Install ONOS package on all Nodes")
        installResult = main.TRUE
        for i in range(int(main.numCtrls)):
            main.log.info("Installing package on ONOS Node IP: " +
                          main.onosIPs[i])
            iResult = main.ONOSbench.onosInstall(node=main.onosIPs[i])
            utilities.assert_equals(expect=main.TRUE,
                                    actual=iResult,
                                    onpass="******",
                                    onfail="Test step FAIL")
            installResult = (installResult and iResult)

        main.step("Start ONOS CLI on all nodes")
        cliResult = main.TRUE
        startCliResult = main.TRUE
        pool = []
        for controller in main.controllers:
            t = main.Thread(target=controller.startCLI,
                            threadID=main.threadID,
                            name="startOnosCli",
                            args=[])
            pool.append(t)
            t.start()
            main.threadID = main.threadID + 1
        for t in pool:
            t.join()
            startCliResult = startCliResult and t.result
        if not startCliResult:
            main.log.info("ONOS CLI did not start up properly")
            main.cleanup()
            main.exit()
        else:
            main.log.info("Successful CLI startup")
            startCliResult = main.TRUE
        utilities.assert_equals(expect=main.TRUE,
                                actual=startCliResult,
                                onpass="******",
                                onfail="Test step FAIL")

        main.step("Set IPv6 cfg parameters for Neighbor Discovery")
        setIPv6CfgSleep = int(main.params['TEST']['setIPv6CfgSleep'])
        if main.enableIPv6:
            time.sleep(setIPv6CfgSleep)
            cfgResult1 = main.controllers[0].CLI.setCfg(
                "org.onosproject.incubator.net.neighbour.impl.NeighbourResolutionManager",
                "ndpEnabled", "true")
            time.sleep(setIPv6CfgSleep)
            cfgResult2 = main.controllers[0].CLI.setCfg(
                "org.onosproject.provider.host.impl.HostLocationProvider",
                "ipv6NeighborDiscovery", "true")
        else:
            main.log.info(
                "Skipped setting IPv6 cfg parameters as it is disabled in params file"
            )
            cfgResult1 = main.TRUE
            cfgResult2 = main.TRUE
        cfgResult = cfgResult1 and cfgResult2
        utilities.assert_equals(
            expect=main.TRUE,
            actual=cfgResult,
            onpass="******",
            onfail="Failed to cfg set ipv6NeighborDiscovery")

        main.step("Start a thread for the scheduler")
        t = main.Thread(target=main.eventScheduler.startScheduler,
                        threadID=main.threadID,
                        name="startScheduler",
                        args=[])
        t.start()
        stepResult = main.TRUE
        with main.variableLock:
            main.threadID = main.threadID + 1

        utilities.assert_equals(expect=main.TRUE,
                                actual=stepResult,
                                onpass="******",
                                onfail="Test step FAIL")

        main.step(
            "Start a thread to listen to and handle network, ONOS and application events"
        )
        t = main.Thread(target=main.eventGenerator.startListener,
                        threadID=main.threadID,
                        name="startListener",
                        args=[])
        t.start()
        with main.variableLock:
            main.threadID = main.threadID + 1

        caseResult = installResult and uninstallResult and startCliResult and cfgResult
        utilities.assert_equals(expect=main.TRUE,
                                actual=caseResult,
                                onpass="******",
                                onfail="Set up test environment FAIL")
Example #29
    def CASE0(self, main):
        """
        Startup sequence:
        apply cell <name>
        git pull
        onos-package
        onos-verify-cell
        onos-uninstall
        onos-install
        onos-start-cli
        Set IPv6 cfg parameters for Neighbor Discovery
        start event scheduler
        start event listener
        """
        import time
        from threading import Lock, Condition
        from core.graph import Graph
        from tests.CHOTestMonkey.dependencies.elements.ONOSElement import Controller
        from tests.CHOTestMonkey.dependencies.EventGenerator import EventGenerator
        from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduler

        try:
            from tests.dependencies.ONOSSetup import ONOSSetup
            main.testSetUp = ONOSSetup()
        except ImportError:
            main.log.error("ONOSSetup not found exiting the test")
            main.cleanAndExit()
        main.testSetUp.envSetupDescription()

        try:
            onosPackage = main.params['TEST']['package']
            karafTimeout = main.params['TEST']['karafCliTimeout']
            main.enableIPv6 = main.params['TEST']['IPv6']
            main.enableIPv6 = True if main.enableIPv6 == "on" else False
            main.caseSleep = int(main.params['TEST']['caseSleep'])
            main.onosCell = main.params['ENV']['cellName']
            main.apps = main.params['ENV']['cellApps']
            main.controllers = []

            main.devices = []
            main.links = []
            main.hosts = []
            main.intents = []
            main.enabledEvents = {}
            for eventName in main.params['EVENT'].keys():
                if main.params['EVENT'][eventName]['status'] == 'on':
                    main.enabledEvents[int(main.params['EVENT'][eventName]
                                           ['typeIndex'])] = eventName
            print(main.enabledEvents)
            main.graph = Graph()
            main.eventScheduler = EventScheduler()
            main.eventGenerator = EventGenerator()
            main.variableLock = Lock()
            main.mininetLock = Lock()
            main.ONOSbenchLock = Lock()
            main.threadID = 0
            main.eventID = 0
            main.caseResult = main.TRUE
            stepResult = main.testSetUp.envSetup()
        except Exception as e:
            main.testSetUp.envSetupException(e)

        main.testSetUp.evnSetupConclusion(stepResult)

        setupResult = main.testSetUp.ONOSSetUp(main.Cluster,
                                               cellName=main.onosCell)
        for i in range(1, main.Cluster.numCtrls + 1):
            newController = Controller(i)
            newController.setCLI(main.Cluster.active(i - 1).CLI)
            main.controllers.append(newController)

        main.step("Set IPv6 cfg parameters for Neighbor Discovery")
        setIPv6CfgSleep = int(main.params['TEST']['setIPv6CfgSleep'])
        if main.enableIPv6:
            time.sleep(setIPv6CfgSleep)
            cfgResult1 = main.controllers[0].CLI.setCfg(
                "org.onosproject.net.neighbour.impl.NeighbourResolutionManager",
                "ndpEnabled", "true")
            time.sleep(setIPv6CfgSleep)
            cfgResult2 = main.controllers[0].CLI.setCfg(
                "org.onosproject.provider.host.impl.HostLocationProvider",
                "requestIpv6ND", "true")
        else:
            main.log.info(
                "Skipped setting IPv6 cfg parameters as it is disabled in params file"
            )
            cfgResult1 = main.TRUE
            cfgResult2 = main.TRUE
        cfgResult = cfgResult1 and cfgResult2
        utilities.assert_equals(
            expect=main.TRUE,
            actual=cfgResult,
            onpass="******",
            onfail="Failed to cfg set ipv6NeighborDiscovery")

        main.step("Start a thread for the scheduler")
        t = main.Thread(target=main.eventScheduler.startScheduler,
                        threadID=main.threadID,
                        name="startScheduler",
                        args=[])
        t.start()
        stepResult = main.TRUE
        with main.variableLock:
            main.threadID = main.threadID + 1

        utilities.assert_equals(expect=main.TRUE,
                                actual=stepResult,
                                onpass="******",
                                onfail="Test step FAIL")

        main.step(
            "Start a thread to listen to and handle network, ONOS and application events"
        )
        t = main.Thread(target=main.eventGenerator.startListener,
                        threadID=main.threadID,
                        name="startListener",
                        args=[])
        t.start()
        with main.variableLock:
            main.threadID = main.threadID + 1

        caseResult = setupResult and cfgResult
        utilities.assert_equals(expect=main.TRUE,
                                actual=caseResult,
                                onpass="******",
                                onfail="Set up test environment FAIL")
Example #30
    def create_precompute_graph(self, data1: np.ndarray, data2: np.ndarray,
                                data3: np.ndarray) -> Graph:
        graph = Graph()

        # two inputs
        x = Input(
            'placeholder',
            [1, 5, 5, 3],
            Float32(),
        )

        scaling1, qdata = self.binary_mean_scaling(
            data1.transpose([3, 2, 1, 0]))
        w = Constant('weight', Float32(), qdata * scaling1)

        # Conv
        conv1 = Conv('conv1', [1, 4, 4, 3],
                     Float32(), {
                         'X': x,
                         'W': w
                     },
                     kernel_shape=[2, 2])

        s1 = Constant('aq_const1', Float32(), np.array(1))

        s2 = Constant('aq_const2', Float32(), np.array(2))

        aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {
            'X': conv1,
            'Y': s1,
            'Z': s2
        })

        dummy = Transpose('dummy', [1, 4, 4, 3],
                          Float32(), {'data': aq},
                          perm=[0, 1, 2, 3])

        scaling2, qdata2 = self.binary_mean_scaling(data2)
        w2 = Constant('weight2', Float32(), qdata2 * scaling2)

        conv2 = Conv('conv2', [1, 3, 3, 3],
                     Float32(), {
                         'X': dummy,
                         'W': w2
                     },
                     kernel_shape=[2, 2])

        s3 = Constant('aq_const1', Float32(), np.array(1))

        s4 = Constant('aq_const2', Float32(), np.array(2))

        aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {
            'X': conv2,
            'Y': s3,
            'Z': s4
        })

        w3 = Constant('weight3', Float32(), data3)

        conv3 = Conv('conv3', [1, 2, 2, 3],
                     Float32(), {
                         'X': aq2,
                         'W': w3
                     },
                     kernel_shape=[2, 2])

        # One output
        y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv3})

        # add ops to the graph
        graph.add_op_and_inputs(y)

        return graph