Code example #1
def buildGraph(pbfile, opmap):
    gdef = tf.GraphDef()
    gdef.ParseFromString(open(pbfile, 'rb').read())
    open('frozen.txt', 'w').write(str(gdef))

    def extract(name):
        n = name
        # return n
        n = re.sub(r'^\^', '', n)
        n = re.sub(r':\d+$', '', n)
        return n

    nodedict = OrderedDict()
    for node in gdef.node:
        nd = MyGraph.MyNode()
        nd.name = node.name
        if node.op in opmap:
            nd.op = opmap[node.op]
        else:
            nd.op = node.op
        nd.input = node.input
        nd.attr = node.attr
        nd.input_norm = [extract(i) for i in node.input]
        nodedict[extract(node.name)] = nd
    mygraph = MyGraph(nodedict)
    mygraph.type = 'tf'
    return mygraph
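
A minimal usage sketch for the function above (assumptions only: a TensorFlow 1.x-style GraphDef API, a frozen graph file named model.pb, and an opmap whose entries are hypothetical op-name aliases):

# Hypothetical call; 'model.pb' and the opmap contents are placeholders.
opmap = {'FusedBatchNorm': 'BatchNorm'}   # rename selected TF ops on import
mygraph = buildGraph('model.pb', opmap)   # also dumps a text form of the graph to frozen.txt
print(mygraph.type)                       # -> 'tf'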
Code example #2
File: main.py  Project: neveroldmilk/dijkstra
def main():
    g = MyGraph(100)
    graph = g.generate()
    # graph = g.static()
    d = DFS()
    v = d.dfs(g.graph, 1)
    print(v)
    g.render()
Code example #3
    def __init__(self, log, splines='ortho'):
        self.log = log
        self.tl = self.get_TL_set()  # all tasks in the log
        self.ti = self.get_TI_set()  # tasks that appear at least once as the first task of a case
        self.to = self.get_TO_set()  # tasks that appear at least once as the last task of a case
        self.ds = self.direct_succession()  # direct successors a > b
        self.cs = self.causality()  # causality a -> b <=> a > b and b /> a
        self.pr = self.parallel()  # parallelism: a > b and b > a
        self.ind = self.choice()  # no direct succession: a # b and b # a
        self.xl = self.get_XL_set()  # potential task connections (a->b or a->(b#c) or (b#c)->d)
        self.yl = self.get_YL_set()  # subset of XL: drop a->b and a->c if some a->(b#c) exists; drop b->c and b->d if some (b#c)->d exists
        self.G = MyGraph(splines)
        self.inv_cs = self.inv_causality()  # inverse causality
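
To make the relations in the comments concrete, here is a hypothetical two-trace log and the relations the constructor would derive from it:

# log = [['a', 'b', 'c'], ['a', 'c', 'b']]
# direct succession (>): a>b, b>c, a>c, c>b
# causality (->):        a->b, a->c   (b>c and c>b cancel each other out)
# parallelism (||):      b||c
# choice (#):            none, since every pair is directly related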
Code example #4
    def create_graph(self, filename='graph'):
        if self.log is None:
            print(
                "ERROR: Read log file and build model before creating graph.")
            return
        elif self.TL_set is None:
            print("ERROR: Build model before creating graph.")
            return
        causality = self.causalities_dict
        parallel_events = self.parallel_tasks_set
        inv_causality = self.inv_causalities_dict
        # The code below is adapted from https://ai.ia.agh.edu.pl/pl:dydaktyka:dss:lab03
        # with some modifications.

        G = MyGraph()

        # adding split gateways based on causality
        for event in causality.keys():
            if len(causality[event]) > 1:
                if tuple(causality[event]) in parallel_events:
                    G.add_and_split_gateway(event, causality[event])
                else:
                    G.add_xor_split_gateway(event, causality[event])

        # adding merge gateways based on inverted causality
        for event in inv_causality.keys():
            if len(inv_causality[event]) > 1:
                if tuple(inv_causality[event]) in parallel_events:
                    G.add_and_merge_gateway(inv_causality[event], event)
                else:
                    G.add_xor_merge_gateway(inv_causality[event], event)
            elif len(inv_causality[event]) == 1:
                source = list(inv_causality[event])[0]
                G.edge(source, event)

        # adding start event
        G.add_event("start")
        if len(self.TI_set) > 1:
            if tuple(self.TI_set) in parallel_events:
                G.add_and_split_gateway("start", self.TI_set)
            else:
                G.add_xor_split_gateway("start", self.TI_set)
        else:
            G.edge("start", list(self.TI_set)[0])

        # adding end event
        G.add_event("end")
        if len(self.TO_set) > 1:
            if tuple(self.TO_set) in parallel_events:
                G.add_and_merge_gateway(self.TO_set, "end")
            else:
                G.add_xor_merge_gateway(self.TO_set, "end")
        else:
            G.edge(list(self.TO_set)[0], "end")

        # G.format = 'svg'
        G.render('graphs/' + filename)
        G.view('graphs/' + filename)
Code example #5
File: darknet2ncnn.py  Project: nihui/gen-ncnn-models
def buildGraph(config_path, weights_path):

    unique_config_file = unique_config_sections(config_path)
    cfg_parser = configparser.ConfigParser()
    cfg_parser.read_file(unique_config_file)

    weights_file = open(weights_path, 'rb')
    # read out major, minor, revision, net.seen
    readfile(weights_file, (4*4), 'head')

    mydict = OrderedDict()
    # record the output of the original layer
    mylist = []

    count = 4
    import queue
    for _section in cfg_parser.sections():

      sec_q = queue.Queue(0)
      sec_q.put(cfg_parser[_section])

      while not sec_q.empty():
        sec = sec_q.get()
        section = sec.name
        print('Parsing section {}'.format(section))

        # this section may also be a generated subsection (pushed onto sec_q)
        if section.startswith('activation') or section.endswith('activation'):
            activation = sec.get('activation', fallback = 'logistic')
            if activation == 'linear':
                # identity activation: nothing to add
                pass
            elif activation == 'leaky' or activation == 'relu':
                node = MyGraph.MyNode()
                node.name = section
                node.op = 'Leaky'
                # leaky keeps a 0.1 slope for negative inputs; relu clamps them to 0
                node.slope = 0.1 if activation == 'leaky' else 0

                node.input = [prev_output]
                node.input_norm = node.input
                #node.attr = []
                mydict[node.name] = node
                prev_output = node.name
                # prev_layer_filters no change
            else:
                raise ValueError(
                    'Unknown activation function `{}` in section {}'.format(
                    activation, section))
            if section.startswith('activation'):
                mylist.append(section)

        elif re.match(r'^(convolutional|depthwise|groupwise)_\d+$', section):
            if section.startswith('convolutional'):
                conv = 'conv'
                filters = sec.getint('filters', fallback = 1)
                groups = 1
                op = 'Conv2D'
            elif section.startswith('depthwise'):
                conv = 'dconv'
                filters = prev_layer_filters
                multiplier = sec.getint('multiplier', fallback = 1)
                assert multiplier == 1
                groups = filters
                op = 'DepthwiseConv2dNative'
            elif section.startswith('groupwise'):
                conv = 'gconv'
                filters = sec.getint('filters', fallback=1)
                groups = sec.getint('groups', fallback = 1)
                op = 'DepthwiseConv2dNative'

            size = sec.getint('size', fallback = 1)
            stride = sec.getint('stride', fallback = 1)
            pad = sec.getint('pad', fallback = 0)
            padding = sec.getint('padding', fallback = 0)
            activation = sec.get('activation', fallback = 'logistic')
            batch_normalize = sec.getint('batch_normalize', 0)

            # padding='same' is equivalent to Darknet pad=1
            # padding = 'same' if pad == 1 else 'valid'
            if pad:
                padding = size//2

            # Setting weights.
            # Darknet serializes convolutional weights as:
            # [bias/beta, [gamma, mean, variance], conv_weights]
            #prev_layer_shape = prev_layer.shape

            # TODO: This assumes channel last dim_ordering.
            if conv == 'conv':
                weights_shape = (size, size, prev_layer_filters, filters)
                idx_tf2darknet = [0, 1, 2, 3]

            elif conv == 'dconv':
                weights_shape = (size, size, filters)
                idx_tf2darknet = [0, 1, 2]

            elif conv == 'gconv':
                weights_shape = (size, size, prev_layer_filters//groups, filters//groups, groups)
                idx_tf2darknet = [0, 1, 2, 3, 4]

            idxmap = {x: i for i, x in enumerate(idx_tf2darknet)}
            idx_dartnet2tf = [idxmap[i] for i in range(len(idxmap))]
            weights_size = np.product(weights_shape)

            print('  ' + conv, 'bn' if batch_normalize else '  ', activation, weights_shape)

            conv_bias = np.ndarray(
                shape=(filters, ),
                dtype=np.float32,
                buffer=readfile(weights_file, (filters * 4), section+'-bias'))
            count += filters

            if batch_normalize:
                bn_weights = np.ndarray(
                    shape=(3, filters),
                    dtype=np.float32,
                    buffer=readfile(weights_file, (filters * 12), section+'-batchnorm'))
                count += 3 * filters

                # TODO: Keras BatchNormalization mistakenly refers to var
                # as std.
                bn_weight_list = [
                    bn_weights[0],  # scale gamma
                    conv_bias,  # shift beta
                    bn_weights[1],  # running mean
                    bn_weights[2]  # running var
                ]

            conv_weights = np.ndarray(
                shape=[weights_shape[i] for i in idx_tf2darknet],
                dtype=np.float32,
                buffer=readfile(weights_file, (weights_size * 4), section+'-weights'))
            count += weights_size

            # DarkNet conv_weights are serialized Caffe-style:
            # (out_dim, in_dim, height, width)
            # We would like to set these to Tensorflow order:
            # (height, width, in_dim, out_dim)
            # TODO: Add check for Theano dim ordering.
            #print("the darknet shape is ", conv_weights.shape)
            conv_weights = np.transpose(conv_weights, idx_dartnet2tf)
            #print("the tf shape is ", conv_weights.shape)
            conv_weights = [conv_weights] if batch_normalize else [
                conv_weights, conv_bias
            ]

            # Create nodes
            #conv_layer = np.zeros([1, 1, filters], dtype = np.float32)
            node = MyGraph.MyNode()
            node.name = section
            node.op = op
            node.input = [prev_output]
            node.input_norm = node.input
            node.kernel = conv_weights[0]
            node.padding = padding
            node.strides = [1,stride,stride,1]
            node.groups = groups
            node.filters = filters
            mydict[node.name] = node
            prev_output = node.name
            prev_layer_filters = filters

            if batch_normalize:
                node = MyGraph.MyNode()
                node.name = section + '_batch_normalize'
                node.op = 'FusedBatchNorm'
                node.input = [prev_output]
                node.input_norm = node.input
                #node.attr = []
                node.gamma = bn_weights[0]
                node.beta = conv_bias
                node.mean = bn_weights[1]
                node.variance = bn_weights[2]
                mydict[node.name] = node
                prev_output = node.name
                # prev_layer_filters no change
            else:
                node = MyGraph.MyNode()
                node.name = section + '_bias'
                node.op = 'BiasAdd'
                node.input = [prev_output]
                node.input_norm = node.input
                #node.attr = []
                node.bias = conv_bias
                mydict[node.name] = node
                prev_output = node.name

            if activation == 'linear':
                mylist.append(prev_output)
            else:
                tmp_parser = configparser.ConfigParser()
                name = section + '_activation'
                tmp_parser.add_section(name)
                tmp_parser.set(name, 'activation', activation)
                sec_q.put(tmp_parser[name])
                mylist.append(name)

        elif section.startswith('shuffle'):
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'Shuffle'
            node.input = [prev_output]
            node.input_norm = node.input

            node.groups = int(cfg_parser[section]['groups'])
            mydict[node.name] = node
            prev_output = node.name
            mylist.append(section)


        elif re.match(r'^(pooling|maxpool|avgpool)_\d+$', section):
            node = MyGraph.MyNode()
            node.stride = sec.getint('stride', fallback = 1)
            node.size = sec.getint('size', node.stride)
            node.padding = sec.getint('padding', fallback = (node.size-1)//2)

            if section.startswith('pooling'):
                node.mode = str(cfg_parser[section]['mode'])
                node.global_pooling = 0
            elif section.startswith('maxpool'):
                node.mode = 'max'
                node.global_pooling = 0
            elif section.startswith('avgpool'):
                node.mode = 'avg'
                node.global_pooling = 1
           
            node.name = section
            node.op = 'Pooling'
            node.input = [prev_output]
            node.input_norm = node.input
            mydict[node.name] = node
            prev_output = node.name
            #print('pooling ', vars(node))
            mylist.append(section)

        elif section.startswith('route'):
            ids = [int(i) for i in cfg_parser[section]['layers'].split(',')]
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'NCNNConcat'
            node.input = [mylist[i] for i in ids]
            #print('mylist is ', mylist, 'the ids is ', ids, 'node input is ', node.input)
            node.input_norm = node.input
            node.axis = 0
            node.filters = sum([getFilters(mydict, mylist[i]) for i in ids])
            mydict[node.name] = node
            prev_output = node.name
            mylist.append(section)
            prev_layer_filters = node.filters

        elif section.startswith('reorg'):
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'DarknetReorg'
            node.input = [prev_output]
            node.stride = sec.getint('stride', fallback = 1)
            node.input_norm = node.input
            node.filters = getFilters(mydict, node.input[0]) * node.stride * node.stride
            mydict[node.name] = node
            prev_output = node.name
            mylist.append(section)
            prev_layer_filters = node.filters

        elif re.match(r'^(shortcut)_\d+$', section):
            activation = sec.get('activation', fallback = 'logistic')
            from_ = sec.getint('from')


            node = MyGraph.MyNode()
            node.name = section
            node.op = 'BinaryOp'
            node.op_type = 0
            node.input = [prev_output, mylist[from_]]
            #print('mylist is ', mylist, 'the from_ is ', from_, 'node input is ', node.input)
            node.input_norm = node.input
            mydict[node.name] = node
            prev_output = node.name

            if activation == 'linear':
                mylist.append(prev_output)
            else:
                tmp_parser = configparser.ConfigParser()
                name = section + '_activation'
                tmp_parser.add_section(name)
                tmp_parser.set(name, 'activation', activation)
                sec_q.put(tmp_parser[name])
                # NOTE: this section has relative reference
                mylist.append(name)


        elif section.startswith('connected'):
            activation = sec.get('activation', fallback='linear')
            filters = sec.getint('output', 2)

            bias_data = np.ndarray(
                shape=[filters],
                dtype=np.float32,
                buffer=readfile(weights_file, (filters * 4), section+'-bias'))

            fc_data = np.ndarray(
                shape=[prev_layer_filters, filters],
                dtype=np.float32,
                buffer=readfile(weights_file, (prev_layer_filters * filters * 4), section+'-weight'))

            node = MyGraph.MyNode()
            node.name = section
            node.op = 'MatMul'
            node.input = [prev_output]
            node.input_norm = node.input
            node.multiplier = fc_data
            mydict[node.name] = node
            prev_output = node.name
            prev_layer_filters = filters

            node = MyGraph.MyNode()
            node.name = section + '_bias'
            node.op = 'BiasAdd'
            node.input = [prev_output]
            node.input_norm = node.input
            # node.attr = []
            node.bias = bias_data
            mydict[node.name] = node
            prev_output = node.name


            if activation == 'linear':
                mylist.append(prev_output)
            else:
                tmp_parser = configparser.ConfigParser()
                name = section + '_activation'
                tmp_parser.add_section(name)
                tmp_parser.set(name, 'activation', activation)
                sec_q.put(tmp_parser[name])
                mylist.append(name)

        elif section.startswith('net'):
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'DarknetNet'
            node.input = []
            node.input_norm = []

            node.width = int(cfg_parser['net_0']['width'])
            node.height = int(cfg_parser['net_0']['height'])
            node.channels = int(cfg_parser['net_0']['channels'])
            node.filters = node.channels
            # print(vars(node))
            # node.attr = []
            mydict[node.name] = node
            # start here
            prev_output = node.name
            prev_layer_filters = node.channels
            mylist.append(section)

        elif section.startswith('region'):
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'DarknetRegion'
            node.input = [prev_output]
            node.input_norm = node.input

            node.classes = int(cfg_parser[section]['classes'])
            node.num = int(cfg_parser[section]['num'])
            node.softmax = int(cfg_parser[section]['softmax'])
            node.anchors = [float(i) for i in re.split(r',', cfg_parser[section]['anchors'])]

            #print(vars(node))
            #node.attr = []
            mydict[node.name] = node
            prev_output = node.name
            mylist.append(section)

        elif section.startswith('softmax'):
            node = MyGraph.MyNode()
            node.name = section
            node.op = 'Softmax'
            node.input = [prev_output]
            node.input_norm = node.input

            mydict[node.name] = node
            prev_output = node.name
            mylist.append(section)
            pass

        elif section.startswith('cost'):
            pass  # Configs not currently handled during model definition.

        else:
            raise ValueError(
                'Unsupported section header type: {}'.format(section))
        print('  out filters ', prev_layer_filters)
    print('loaded {} bytes in weights file'.format(count*4))

    mygraph = MyGraph(mydict)
    mygraph.type = 'darknet'
    return mygraph
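
A hedged usage sketch for the Darknet loader above (the .cfg/.weights file names are placeholders; unique_config_sections, readfile, getFilters and MyGraph come from the same project):

# Hypothetical paths to a YOLO-style Darknet model.
graph = buildGraph('yolov2.cfg', 'yolov2.weights')
print(graph.type)   # -> 'darknet'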
Code example #6
def main():
    g = MyGraph()
    g.add_vertices(8)
    for i, c in enumerate(['r', 's', 't', 'u', 'v', 'w', 'x', 'y']):
        g.attributes[i]['name'] = c

    g.add_edge(0, 1)
    g.add_edge(0, 4)
    g.add_edge(1, 5)
    g.add_edge(2, 5)
    g.add_edge(2, 6)
    g.add_edge(2, 3)
    g.add_edge(3, 6)
    g.add_edge(3, 7)
    g.add_edge(5, 6)
    g.add_edge(6, 7)
    BFS(g, 1)
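
The example above assumes an adjacency-list MyGraph with add_vertices, add_edge and a per-vertex attributes list, plus a BFS(graph, source) helper. A minimal sketch that makes the snippet runnable (the project's real classes may differ):

from collections import deque

class MyGraph:
    """Undirected graph stored as adjacency lists, indexed by integer vertex id."""
    def __init__(self):
        self.adj = []
        self.attributes = []

    def add_vertices(self, n):
        for _ in range(n):
            self.adj.append([])
            self.attributes.append({})

    def add_edge(self, u, v):
        self.adj[u].append(v)
        self.adj[v].append(u)


def BFS(g, source):
    """Breadth-first search printing vertices in visit order by their 'name' attribute."""
    visited = [False] * len(g.adj)
    visited[source] = True
    queue = deque([source])
    while queue:
        u = queue.popleft()
        print(g.attributes[u].get('name', u))
        for v in g.adj[u]:
            if not visited[v]:
                visited[v] = True
                queue.append(v)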
Code example #7
File: Ui_main.py  Project: macoding1994/ZBQDD
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralWidget = QtWidgets.QWidget(MainWindow)
        self.centralWidget.setObjectName("centralWidget")
        self.gridLayout_4 = QtWidgets.QGridLayout(self.centralWidget)
        self.gridLayout_4.setObjectName("gridLayout_4")
        self.groupBox = QtWidgets.QGroupBox(self.centralWidget)
        self.groupBox.setMinimumSize(QtCore.QSize(750, 450))
        self.groupBox.setObjectName("groupBox")
        self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
        self.gridLayout.setObjectName("gridLayout")
        self.widget = MyGraph(self.groupBox)
        self.widget.setObjectName("widget")
        self.gridLayout.addWidget(self.widget, 0, 0, 1, 1)
        self.gridLayout_4.addWidget(self.groupBox, 0, 0, 1, 2)
        self.groupBox_3 = QtWidgets.QGroupBox(self.centralWidget)
        self.groupBox_3.setObjectName("groupBox_3")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox_3)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.label_2 = QtWidgets.QLabel(self.groupBox_3)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.label_2.setFont(font)
        self.label_2.setAlignment(QtCore.Qt.AlignCenter)
        self.label_2.setObjectName("label_2")
        self.gridLayout_3.addWidget(self.label_2, 0, 0, 1, 1)
        self.label_4 = QtWidgets.QLabel(self.groupBox_3)
        font = QtGui.QFont()
        font.setPointSize(15)
        self.label_4.setFont(font)
        self.label_4.setText("")
        self.label_4.setObjectName("label_4")
        self.gridLayout_3.addWidget(self.label_4, 0, 1, 1, 1)
        self.gridLayout_4.addWidget(self.groupBox_3, 1, 0, 1, 1)
        self.groupBox_2 = QtWidgets.QGroupBox(self.centralWidget)
        self.groupBox_2.setObjectName("groupBox_2")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.lineEdit = QtWidgets.QLineEdit(self.groupBox_2)
        self.lineEdit.setReadOnly(True)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout_2.addWidget(self.lineEdit, 1, 1, 1, 2)
        self.comboBox = QtWidgets.QComboBox(self.groupBox_2)
        self.comboBox.setObjectName("comboBox")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.gridLayout_2.addWidget(self.comboBox, 0, 1, 1, 2)
        self.label = QtWidgets.QLabel(self.groupBox_2)
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 1, 0, 1, 1)
        self.label_3 = QtWidgets.QLabel(self.groupBox_2)
        self.label_3.setObjectName("label_3")
        self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 1)
        self.pushButton = QtWidgets.QPushButton(self.groupBox_2)
        self.pushButton.setObjectName("pushButton")
        self.gridLayout_2.addWidget(self.pushButton, 2, 0, 1, 1)
        self.pushButton_2 = QtWidgets.QPushButton(self.groupBox_2)
        self.pushButton_2.setObjectName("pushButton_2")
        self.gridLayout_2.addWidget(self.pushButton_2, 2, 1, 1, 2)
        self.gridLayout_4.addWidget(self.groupBox_2, 1, 1, 1, 1)
        MainWindow.setCentralWidget(self.centralWidget)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "中科飞龙数据展示软件"))
        self.groupBox.setTitle(_translate("MainWindow", "画图"))
        self.groupBox_3.setTitle(_translate("MainWindow", "显示"))
        self.label_2.setText(_translate("MainWindow", "总不确定度:"))
        self.groupBox_2.setTitle(_translate("MainWindow", "按钮"))
        self.comboBox.setItemText(0, _translate("MainWindow", "1"))
        self.comboBox.setItemText(1, _translate("MainWindow", "3"))
        self.label.setText(_translate("MainWindow", "文件路径:"))
        self.label_3.setText(_translate("MainWindow", "结构调整:"))
        self.pushButton.setText(_translate("MainWindow", "预览"))
        self.pushButton_2.setText(_translate("MainWindow", "执行"))
Code example #8
from graph import MyGraph

g = MyGraph()
g.addVertex("tokyo")
g.addVertex("dallas")
g.addVertex("aspen")
g.addVertex("los angeles")
g.addVertex("hong kong")
g.addEdge("dallas", "tokyo")
g.addEdge("dallas", "aspen")
g.addEdge("hong kong", "tokyo")
g.addEdge("hong kong", "dallas")
g.addEdge("los angeles", "hong kong")
g.addEdge("los angeles", "aspen")
#g.removeEdge("dallas", "aspen")
print(g.adjacencyList)
g.removeVertex("hong kong")
print(g.adjacencyList)
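
This script assumes a graph.MyGraph exposing addVertex, addEdge, removeEdge, removeVertex and an adjacencyList dict. A minimal sketch compatible with the calls above (the real graph.py may differ):

class MyGraph:
    """Undirected graph keyed by vertex name, stored as an adjacency-list dict."""
    def __init__(self):
        self.adjacencyList = {}

    def addVertex(self, name):
        self.adjacencyList.setdefault(name, [])

    def addEdge(self, a, b):
        self.adjacencyList[a].append(b)
        self.adjacencyList[b].append(a)

    def removeEdge(self, a, b):
        self.adjacencyList[a].remove(b)
        self.adjacencyList[b].remove(a)

    def removeVertex(self, name):
        # drop the vertex and any references to it in its neighbours' lists
        for neighbour in self.adjacencyList.pop(name, []):
            self.adjacencyList[neighbour].remove(name)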
Code example #9
    def create_graph(self, filename='graph'):
        G = MyGraph()
        causality = self.causality
        parallel_events = self.parallel_events
        inv_causality = self.inv_causality
        start_set_events = self.start_events
        end_set_events = self.end_events

        # adding split gateways based on causality
        for event in causality:
            if len(causality[event]) > 1:
                if tuple(causality[event]) in parallel_events:
                    G.add_and_split_gateway(event, causality[event])
                else:
                    G.add_xor_split_gateway(event, causality[event])

        # adding merge gateways based on inverted causality
        for event in inv_causality:
            if len(inv_causality[event]) > 1:
                if tuple(inv_causality[event]) in parallel_events:
                    G.add_and_merge_gateway(inv_causality[event], event)
                else:
                    G.add_xor_merge_gateway(inv_causality[event], event)
            elif len(inv_causality[event]) == 1:
                source = list(inv_causality[event])[0]
                G.edge(source, event)

        # adding start event
        G.add_event("start")
        if len(start_set_events) > 1:
            if tuple(start_set_events) in parallel_events:
                G.add_and_split_gateway("start", start_set_events)
            else:
                G.add_xor_split_gateway("start", start_set_events)
        else:
            G.edge("start", list(start_set_events)[0])

        # adding end event
        G.add_event("end")
        if len(end_set_events) > 1:
            if tuple(end_set_events) in parallel_events:
                G.add_and_merge_gateway(end_set_events, "end")
            else:
                G.add_xor_merge_gateway(end_set_events, "end")
        else:
            G.edge(list(end_set_events)[0], "end")

        G.render('../graphs/' + filename, view=True)
Code example #10
class Alpha():
    def __init__(self, log, splines='ortho'):
        self.log = log
        self.tl = self.get_TL_set()  # all tasks in the log
        self.ti = self.get_TI_set()  # tasks that appear at least once as the first task of a case
        self.to = self.get_TO_set()  # tasks that appear at least once as the last task of a case
        self.ds = self.direct_succession()  # direct successors a > b
        self.cs = self.causality()  # causality a -> b <=> a > b and b /> a
        self.pr = self.parallel()  # parallelism: a > b and b > a
        self.ind = self.choice()  # no direct succession: a # b and b # a
        self.xl = self.get_XL_set()  # potential task connections (a->b or a->(b#c) or (b#c)->d)
        self.yl = self.get_YL_set()  # subset of XL: drop a->b and a->c if some a->(b#c) exists; drop b->c and b->d if some (b#c)->d exists
        self.G = MyGraph(splines)
        self.inv_cs = self.inv_causality()  # inverse causality

    def __str__(self):
        alpha_sets = []
        alpha_sets.append("TL set: {}".format(self.tl))
        alpha_sets.append("TI set: {}".format(self.ti))
        alpha_sets.append("TO set: {}".format(self.to))
        alpha_sets.append("XL set: {}".format(self.xl))
        alpha_sets.append("YL set: {}".format(self.yl))
        return '\n'.join(alpha_sets)

    def get_TL_set(self):
        tl = set()
        for item in self.log:
            for i in item:
                tl.add(i)
        return tl

    def get_TI_set(self):
        ti = set()
        for item in self.log:
            ti.add(item[0])
        return ti

    def get_TO_set(self):
        to = set()
        for item in self.log:
            to.add(item[-1])
        return to

    def get_XL_set(self):
        xl = set()
        subsets = itertools.chain.from_iterable(
            itertools.combinations(self.tl, r) for r in range(1, len(self.tl) + 1))
        independent_a_or_b = [
            a_or_b for a_or_b in subsets
            if self.__is_ind_set(a_or_b, self.ind)
        ]
        for a, b in itertools.product(independent_a_or_b, independent_a_or_b):
            if self.__is_cs_set((a, b), self.cs):
                xl.add((a, b))
        return xl

    def __is_ind_set(self, s, ind):
        if len(s) == 1:
            return True
        else:
            s_all = itertools.combinations(s, 2)
            for pair in s_all:
                if pair not in ind:
                    return False
            return True

    def __is_cs_set(self, s, cs):
        set_a, set_b = s[0], s[1]
        s_all = itertools.product(set_a, set_b)
        for pair in s_all:
            if pair not in cs:
                return False
        return True

    def get_YL_set(self):
        yl = copy.deepcopy(self.xl)
        s_all = itertools.combinations(yl, 2)
        for pair in s_all:
            if self.__issubset(pair[0], pair[1]):
                yl.discard(pair[0])
            elif self.__issubset(pair[1], pair[0]):
                yl.discard(pair[1])

        # remove self-loops
        self_loop = set()
        for pair in self.pr:
            if pair == pair[::-1]:  # if we find a pair like (b, b), add b to the self-loop set
                self_loop.add(pair[0])

        to_be_deleted = set()
        for pair in yl:
            if self.__contains(pair, self_loop):
                to_be_deleted.add(pair)
        for pair in to_be_deleted:
            yl.discard(pair)
        return yl

    def __issubset(self, a, b):
        if set(a[0]).issubset(b[0]) and set(a[1]).issubset(b[1]):
            return True
        return False

    def __contains(self, a, b):
        # return True if nested tuple "a" contains any letter in set "b"
        # e.g. __contains((('a',), ('b',)), ('b', 'c')) -> True
        return any(j == i[0] for i in a for j in b)

    def get_footprint(self):
        footprint = []
        footprint.append("All transitions: {}".format(self.tl))
        footprint.append("Direct succession: {}".format(self.ds))
        footprint.append("Causality: {}".format(self.cs))
        footprint.append("Parallel: {}".format(self.pr))
        footprint.append("Choice: {}".format(self.ind))
        return '\n'.join(footprint)

    def generate_footprint(self, txtfile='footprint.txt'):
        with open(txtfile, 'w') as f:
            f.write(self.get_footprint())

    def direct_succession(self):
        # x > y
        ds = set()
        for trace in self.log:
            for x, y in zip(trace, trace[1:]):
                ds.add((x, y))
        return ds

    def causality(self):
        # x -> y
        cs = {}
        for pair in self.ds:
            if pair[::-1] not in self.ds:
                if pair[0] in cs.keys():
                    cs[pair[0]].append(pair[1])
                else:
                    cs[pair[0]] = [pair[1]]
        return cs

    def inv_causality(self):
        # only for causality cases that have exactly one successor
        inv_cs = {}
        for key, values in self.cs.items():
            if len(values) == 1:
                if values[0] in inv_cs.keys():
                    inv_cs[values[0]].append(key)
                else:
                    inv_cs[values[0]] = [key]
        return inv_cs

    def parallel(self):
        # (x || y) & (y || x)
        pr = set()
        for pair in self.ds:
            if pair[::-1] in self.ds:
                pr.add(pair)
        return pr

    def choice(self):
        # (x # y) & (y # x)
        ind = set()  # ind is the abbreviation of independent
        all_permutations = itertools.permutations(self.tl, 2)
        '''for pair in all_permutations:
            if pair not in self.cs and pair[::-1] not in self.cs and pair not in self.pr:
                ind.add(pair)'''
        for pair in all_permutations:
            if pair not in self.ds and pair[::-1] not in self.ds:
                ind.add(pair)
        return ind

    def set_contain(self, s, value):
        for i in s:
            for j in i:
                if j == value:
                    return True

        return False

    def create_graph(self, filename='graph', view=False, l1l=None):
        # adding split gateways based on causality
        for event in self.cs:
            if len(self.cs[event]) > 1:
                if tuple(self.cs[event]) in self.pr:
                    self.G.add_and_split_gateway(event, self.cs[event])
                #elif tuple(self.cs[event]) in self.ind:
                #    self.G.add_xor_split_gateway(event,self.cs[event])
                else:
                    if l1l is not None and event in l1l:
                        temp_cs = self.cs[event]
                        temp_cs.append(event)
                        self.G.add_xor_split_gateway(event, temp_cs)
                    else:
                        self.G.add_xor_split_gateway(event, self.cs[event])

        # adding merge gateways based on inverted causality
        for event in self.inv_cs:
            if len(self.inv_cs[event]) > 1:
                if tuple(self.inv_cs[event]) in self.pr:
                    self.G.add_and_merge_gateway(self.inv_cs[event], event)
                else:
                    if l1l is not None and any(elem in self.inv_cs[event]
                                               for elem in l1l):
                        temp_event = [event]
                        for i in self.inv_cs[event]:
                            temp_event.append(i)
                        self.G.add_xor_merge_split_gateway(
                            self.inv_cs[event], temp_event)
                    else:
                        self.G.add_xor_merge_gateway(self.inv_cs[event], event)
            elif len(self.inv_cs[event]) == 1:
                if l1l is not None and self.inv_cs[event][0] in l1l:
                    temp_inv_cs = [event]
                    temp_inv_cs.append(self.inv_cs[event][0])
                    self.G.add_xor_split_gateway(self.inv_cs[event][0],
                                                 temp_inv_cs)
                else:
                    source = list(self.inv_cs[event])[0]
                    self.G.edge(source, event)

        # adding start event
        self.G.add_event("start")
        if len(self.ti) > 1:
            if tuple(self.ti) in self.pr:
                self.G.add_and_split_gateway("start", self.ti)
            else:
                self.G.add_xor_split_gateway("start", self.ti)
        else:
            self.G.edge("start", list(self.ti)[0])

        # adding end event
        self.G.add_event("end")
        if len(self.to) > 1:
            if tuple(self.to) in self.pr:
                self.G.add_and_merge_gateway(self.to, "end")
            else:
                self.G.add_xor_merge_gateway(self.to, "end")
        else:
            self.G.edge(list(self.to)[0], "end")

        self.G.render('../graphs/' + filename, view=view)

        return self.G
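
A hedged usage sketch for the Alpha class above (the toy event log is made up; rendering goes through the project's MyGraph wrapper, so Graphviz must be available):

# Toy log: two traces over the tasks a, b, c, d.
log = [('a', 'b', 'd'), ('a', 'c', 'd')]
alpha = Alpha(log)
print(alpha.get_footprint())      # direct succession, causality, parallel, choice
alpha.create_graph('toy_model')   # writes ../graphs/toy_model via Graphviz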