Example #1
def parse(edges_path, source_header='source', target_header='target', weight_header='weight',
          edge_limit=None) -> Graph:
    """
    Parse graph from input file
    :param edges_path: input file
    :param source_header: header for source node
    :param target_header: header for target node
    :param weight_header: header for weight
    :param edge_limit: limit the number of edges to parse from file
    :return: Graph object
    """
    # ToDo: parse by streaming to avoid loading a large graph into memory
    # ToDo: parse nodes and edges properties
    # ToDo: validate inputs

    name = get_name_from_path(edges_path)
    edges = list()

    with open(edges_path, mode='r') as edges_file:
        edges_reader = csv.DictReader(edges_file, delimiter=',')
        for i, row in enumerate(edges_reader):

            if edge_limit is not None and i >= edge_limit:
                break

            edges.append((
                int(row[source_header]),
                int(row[target_header]),
                float(row[weight_header])
            ))

    graph = Graph(name)
    graph.add_edges(edges)
    return graph
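For orientation, here is a minimal standalone sketch of the same CSV-parsing pattern. The file contents, column names, and temporary path below are hypothetical, and the project's Graph and get_name_from_path helpers are omitted so the snippet runs on its own:

import csv
import tempfile

# Hypothetical edge list in the column layout parse() expects.
csv_text = "source,target,weight\n1,2,0.5\n2,3,1.25\n"

with tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False) as tmp:
    tmp.write(csv_text)
    edges_path = tmp.name

# Same DictReader pattern as parse(), minus the project-specific Graph object.
edges = []
with open(edges_path, mode='r') as edges_file:
    for row in csv.DictReader(edges_file, delimiter=','):
        edges.append((int(row['source']), int(row['target']), float(row['weight'])))

print(edges)  # [(1, 2, 0.5), (2, 3, 1.25)]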
Example #2
 def test_3_generalisation_sort_order(self):
     # START AGAIN - more tests, ensure that child nodes which themselves have children are prioritised
     
     # C2,C --|> B
     # B,B2 --|> A
     g = Graph()
     c = GraphNode('C', 0, 0, 200, 200)
     c2 = GraphNode('C2', 0, 0, 200, 200)
     b = GraphNode('B', 0, 0, 200, 200)
     b2 = GraphNode('B2', 0, 0, 200, 200)
     a = GraphNode('A', 0, 0, 200, 200)
     # add out of order
     g.AddNode(b2)
     g.AddNode(b)
     g.AddNode(c)
     g.AddNode(c2)
     g.AddNode(a)
     g.AddEdge(c, b)['uml_edge_type'] = 'generalisation'
     g.AddEdge(c2, b)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b2, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b, a)['uml_edge_type'] = 'generalisation'
     nodelist_normal = [node.id for node in g.nodes]
     nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
     nodelist_sorted_expected = ['A', 'B', 'B2', 'C', 'C2']
     nodelist_sorted_expected2 = ['A', 'B', 'B2', 'C2', 'C']
     #print "nodelist_normal", nodelist_normal
     #print "nodelist_sorted_expected", nodelist_sorted_expected
     #print "nodelist_sorted", nodelist_sorted
     assert nodelist_sorted_expected == nodelist_sorted or \
             nodelist_sorted_expected2 == nodelist_sorted
Example #3
def train(model_config):
    print(list_config(model_config))
    data = TrainData(model_config)
    graph = Graph(True, model_config, data)
    graph.create_model_multigpu()
    with tf.train.MonitoredTrainingSession(
        checkpoint_dir=model_config.logdir,
        save_checkpoint_secs=model_config.save_model_secs,
        config=get_session_config(),
    ) as sess:
    # with tf.Session(config=get_session_config()) as sess:
    #     sess.run(tf.initialize_all_variables())
        ckpt = tf.train.get_checkpoint_state(model_config.logdir)
        if ckpt:
            graph.saver.restore(sess, ckpt.model_checkpoint_path)
        perplexitys = []
        start_time = datetime.now()
        previous_step = 0
        while True:
            input_feed, _, _ = get_feed(graph.objs, data, model_config, True)
            fetches = [graph.train_op, graph.increment_global_step, graph.global_step,
                       graph.perplexity]
            _, _, step, perplexity = sess.run(fetches, input_feed)
            perplexitys.append(perplexity)

            if (step - previous_step) > model_config.model_print_freq:
                end_time = datetime.now()
                time_span = end_time - start_time
                start_time = end_time
                print('Perplexity:\t%f at step %d using %s.' % (perplexity, step, time_span))
                perplexitys.clear()
                previous_step = step
Example #4
    def setUp(self):
        # Hyperparameters
        num_nodes = 100
        num_edges = 500
        weight_max = 100

        self.edges = []
        self.nodes = [Node()]

        # Build connected nodes
        for _ in range(num_nodes - 1):
            new = Node()
            self.nodes.append(new)
            old = choice(self.nodes)

            edge = Edge(randint(0, weight_max))
            self.edges.append(edge)
            old.connect_to(new, edge)

        # Add random connections
        missing_connections = [
            (n1, n2) for n1 in self.nodes
            for n2 in list(set(self.nodes) - set(n1.get_neighbors()))
        ]
        for i in range(num_edges - (num_nodes - 1)):
            a, b = missing_connections.pop(
                randint(0,
                        len(missing_connections) - 1))
            edge = Edge(randint(0, weight_max))
            self.edges.append(edge)
            a.connect_to(b, edge)

        self.edges = list(set(self.edges))
        self.graph = Graph(self.nodes)
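The construction above is a useful pattern on its own: attaching every new node to a randomly chosen earlier node guarantees a connected graph before the extra random edges are sprinkled in. A self-contained sketch of the same idea, with made-up sizes and plain adjacency sets standing in for the Node/Edge classes:

import random

# Hypothetical sizes; the setUp above uses 100 nodes and 500 edges.
num_nodes, num_edges, weight_max = 10, 20, 100

adjacency = {0: set()}
edges = []

# Spanning-tree phase: every new node attaches to some earlier node.
for new in range(1, num_nodes):
    old = random.choice(list(adjacency))
    adjacency[new] = {old}
    adjacency[old].add(new)
    edges.append((old, new, random.randint(0, weight_max)))

# Extra edges between nodes that are not yet neighbours.
while len(edges) < num_edges:
    a, b = random.sample(range(num_nodes), 2)
    if b not in adjacency[a]:
        adjacency[a].add(b)
        adjacency[b].add(a)
        edges.append((a, b, random.randint(0, weight_max)))

print(len(edges), "edges over", num_nodes, "connected nodes")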
Example #5
    def getGraph(self):
        if self.graph is None:

            self.graph = Graph()
            self.graph.initFromLists(self.pageNumber, self.node_rk_generator(),
                                     self.edge_generator())
        return self.graph
Example #6
def test():
    it = data.get_data_iter()
    graph = Graph(is_train=False)
    tf.reset_default_graph()
    graph.create_model()

    sv = tf.train.Supervisor(logdir=get_path(args.logdir),
                             global_step=graph.global_step)
    sess = sv.PrepareSession()
    losses = []
    scores = 0
    total = 0
    while True:
        input_feed, gt_last_events, effective_batch_size, is_end = get_data(
            graph.inputs_ph, it)
        fetches = [graph.last_event, graph.loss, graph.global_step]
        last_events, loss, step = sess.run(fetches, input_feed)

        for i in range(effective_batch_size):
            losses.append(loss)

            total += 1
            if last_events[i] == gt_last_events[i]:
                scores += 1

        if is_end:
            break

    file = 'Accuracy%sLoss%s.txt' % (scores / total, np.mean(losses))
    output = 'Accuracy:\t%s Loss:\t%s' % (scores / total, np.mean(losses))
    if not os.path.exists(get_path(args.resultdir)):
        os.mkdir(get_path(args.resultdir))
    f = open(get_path(args.resultdir) + file, mode='w', encoding='utf-8')
    f.write(output)
    f.close()
Example #7
    def test_7_generalisation_multiple_inhertitance(self):
        # START AGAIN - more tests, check multiple inheritance trees
        
        # See 'python-in/testmodule08_multiple_inheritance.py'
        # for another related unit test
        
        # F --|> M
        # F --|> S
        g = Graph()
        f = GraphNode('F', 0, 0, 200, 200)
        m = GraphNode('M', 0, 0, 200, 200)
        s = GraphNode('S', 0, 0, 200, 200)
        g.AddEdge(f, m)['uml_edge_type'] = 'generalisation'
        g.AddEdge(f, s)['uml_edge_type'] = 'generalisation'

        nodelist_normal = [node.id for node in g.nodes]
        #print "nodelist_normal", nodelist_normal
    
        nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected = ['M', 'F', 'S']
        assert nodelist_sorted_expected == nodelist_sorted, nodelist_sorted
    
        #print "nodelist_sorted_expected", nodelist_sorted_expected
        #print "nodelist_sorted", nodelist_sorted
    
        nodelist_sorted_annotated = [(node.id, annotation) for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected_annotated = [('M', 'root'), ('F', 'root'), ('S', 'root')]
        assert nodelist_sorted_expected_annotated == nodelist_sorted_annotated, nodelist_sorted_annotated
Example #8
    def test_7_generalisation_multiple_inhertitance(self):
        # START AGAIN - more tests, check multiple inheritance trees

        # See 'python-in/testmodule08_multiple_inheritance.py'
        # for another related unit test

        # F --|> M
        # F --|> S
        g = Graph()
        f = GraphNode('F', 0, 0, 200, 200)
        m = GraphNode('M', 0, 0, 200, 200)
        s = GraphNode('S', 0, 0, 200, 200)
        g.AddEdge(f, m)['uml_edge_type'] = 'generalisation'
        g.AddEdge(f, s)['uml_edge_type'] = 'generalisation'

        nodelist_normal = [node.id for node in g.nodes]
        #print "nodelist_normal", nodelist_normal

        nodelist_sorted = [
            node.id for node, annotation in g.nodes_sorted_by_generalisation
        ]
        nodelist_sorted_expected = ['M', 'F', 'S']
        assert nodelist_sorted_expected == nodelist_sorted, nodelist_sorted

        #print "nodelist_sorted_expected", nodelist_sorted_expected
        #print "nodelist_sorted", nodelist_sorted

        nodelist_sorted_annotated = [
            (node.id, annotation)
            for node, annotation in g.nodes_sorted_by_generalisation
        ]
        nodelist_sorted_expected_annotated = [('M', 'root'), ('F', 'root'),
                                              ('S', 'root')]
        assert nodelist_sorted_expected_annotated == nodelist_sorted_annotated, nodelist_sorted_annotated
Example #9
    def setUp(self):

        class FakeGui:
            def stateofthenation(self, recalibrate=False, auto_resize_canvas=True):
                pass

        self.g = Graph()
        self.overlap_remover = OverlapRemoval(self.g, margin=5, gui=FakeGui())
Example #10
    def __init__(self,
                 geo_hiddens,
                 rnn_type,
                 rnn_hiddens,
                 graph_type,
                 graph,
                 input_dim,
                 output_dim,
                 use_sampling,
                 cl_decay_steps,
                 prefix=None):
        super(Seq2Seq, self).__init__(prefix=prefix)

        # initialize encoder
        with self.name_scope():
            encoder_cells = []
            encoder_graphs = []
            for i, hidden_size in enumerate(rnn_hiddens):
                pre_hidden_size = input_dim if i == 0 else rnn_hiddens[i - 1]
                c = RNNCell.create(rnn_type[i],
                                   pre_hidden_size,
                                   hidden_size,
                                   prefix='encoder_c%d_' % i)
                g = Graph.create_graphs(
                    'None' if i == len(rnn_hiddens) - 1 else graph_type[i],
                    graph,
                    hidden_size,
                    prefix='encoder_g%d_' % i)
                encoder_cells.append(c)
                encoder_graphs.append(g)
        self.encoder = Encoder(encoder_cells, encoder_graphs)

        # initialize decoder
        with self.name_scope():
            decoder_cells = []
            decoder_graphs = []
            for i, hidden_size in enumerate(rnn_hiddens):
                pre_hidden_size = input_dim if i == 0 else rnn_hiddens[i - 1]
                c = RNNCell.create(rnn_type[i],
                                   pre_hidden_size,
                                   hidden_size,
                                   prefix='decoder_c%d_' % i)
                g = Graph.create_graphs(graph_type[i],
                                        graph,
                                        hidden_size,
                                        prefix='decoder_g%d_' % i)
                decoder_cells.append(c)
                decoder_graphs.append(g)
        self.decoder = Decoder(decoder_cells, decoder_graphs, input_dim,
                               output_dim, use_sampling, cl_decay_steps)

        # initialize geo encoder network
        self.geo_encoder = MLP(geo_hiddens,
                               act_type='relu',
                               out_act=True,
                               prefix='geo_encoder_')
Example #11
 def graph_list(cls, user_name, type, is_published=True):
     if is_published:
         graphs = Graph.select().filter(Graph.type == type,
                                        Graph.is_published).order_by(
                                            Graph.created_at.desc()).all()
     else:
         graphs = Graph.select().filter(
             Graph.type == type, Graph.user_name == user_name).order_by(
                 Graph.created_at.desc()).all()
     return [graph.get_json() for graph in graphs]
Example #12
def predict_from_checkpoint(model_config, ckpt):
    """
    Make prediction using transformer, return list of InstancePred.

    :param model_config:
    :param ckpt:
    :return:
    """
    # Eval only uses single GPU
    assert model_config.num_gpus == 1

    train_data = TrainData(model_config)
    graph = Graph(False, model_config, train_data)
    tf.reset_default_graph()
    graph.create_model_multigpu()
    sess = tf.train.MonitoredTrainingSession(config=get_session_config())

    try:
        graph.saver.restore(sess, ckpt)
    except tf.errors.NotFoundError:
        # Partial restore
        import tensorflow.contrib.slim as slim
        var_list = slim.get_variables_to_restore()
        available_vars = {}
        reader = tf.train.NewCheckpointReader(ckpt)
        var_dict = {var.op.name: var for var in var_list}
        for var in var_dict:
            if reader.has_tensor(var):
                var_ckpt = reader.get_tensor(var)
                var_cur = var_dict[var]
                if any([
                        var_cur.shape[i] != var_ckpt.shape[i]
                        for i in range(len(var_ckpt.shape))
                ]):
                    print('Variable %s missing due to shape.' % var)
                else:
                    available_vars[var] = var_dict[var]
            else:
                print('Variable %s missing.' % var)

        partial_restore_ckpt = slim.assign_from_checkpoint_fn(
            ckpt,
            available_vars,
            ignore_missing_vars=True,
            reshape_variables=False)
        partial_restore_ckpt(sess)

    # instance_collections = evaluate_on_testsets(sess, graph, train_data)
    evaluate_and_write_to_disk(sess,
                               graph,
                               model_config,
                               train_data,
                               output_file_path=model_config.logdir)
Example #13
    def setUp(self):
        self.graph = Graph()
        v1 = self.graph.add_actor_vertex("actor1", {'age':38})
        v2 = self.graph.add_actor_vertex("actor2", {'age':26})
        v3 = self.graph.add_actor_vertex("actor3", {'age':10})
        v4 = self.graph.add_movie_vertex("movie1", {'year':2017, 'gross':17000, 'lang': ["English"]})
        v5 = self.graph.add_movie_vertex("movie2", {'year':2017, 'gross':170000, 'country': ["China"]})

        self.graph.add_edge("actor1", "movie1", 3)
        self.graph.add_edge("actor2", "movie1", 4)
        self.graph.add_edge("actor3", "movie2", 5)
        self.graph.add_edge("actor3", "movie3", 6)
Example #14
class OverlapTests(unittest.TestCase):
    def setUp(self):
        class FakeGui:
            def stateofthenation(self,
                                 recalibrate=False,
                                 auto_resize_canvas=True):
                pass

        self.g = Graph()
        self.overlap_remover = OverlapRemoval(self.g, margin=5, gui=FakeGui())

    def tearDown(self):
        pass

    def testStress1(self):

        for i in range(10):
            self.g.LoadGraphFromStrings(TEST_GRAPH5_STRESS)
            print i,
            were_all_overlaps_removed = self.overlap_remover.RemoveOverlaps()
            self.assertTrue(were_all_overlaps_removed)

            self.g.Clear()

    def testStress2_InitialBoot(self):
        """
        This is the slowest stress test because it runs the spring layout several times.
        """

        from layout.layout_spring import GraphLayoutSpring
        from layout.coordinate_mapper import CoordinateMapper

        self.g.LoadGraphFromStrings(
            GRAPH_INITIALBOOT)  # load the scenario ourselves

        layouter = GraphLayoutSpring(self.g)
        coordmapper = CoordinateMapper(self.g, (800, 800))

        def AllToLayoutCoords():
            coordmapper.AllToLayoutCoords()

        def AllToWorldCoords():
            coordmapper.AllToWorldCoords()

        for i in range(8):
            print i,

            AllToLayoutCoords()
            layouter.layout(keep_current_positions=False)
            AllToWorldCoords()

            were_all_overlaps_removed = self.overlap_remover.RemoveOverlaps()
            self.assertTrue(were_all_overlaps_removed)
Example #15
class TestGraph(TestCase):
    def setUp(self):
        init_db()
        self.graph = Graph(db_session)

    def tearDown(self):
        Base.metadata.drop_all(bind=engine)

    @classmethod
    def tearDownClass(cls):
        db_session.remove()

    def test_add(self):
        self.graph.add(actor_item)
        self.assertEqual(self.graph.get_actor("a").wiki_page, actor_item.get("wiki_page"))
Example #16
 def get_share_img(cls, user_name, id):
     graph = Graph.select().get(id)
     if graph is None:
         raise ServerException(msg='图片不存在')  # "image does not exist"
     if not graph.is_published and user_name != graph.user_name:
         raise ServerException(msg='该图未发布')  # "this graph has not been published"
     return graph
Example #17
    def test_4(self):
        """
        Upgrade 1.1 to 1.1
        """
        g = Graph()
        filedata = """
# PynSource Version 1.1
{'type':'meta', 'info1':'Lorem ipsum dolor sit amet, consectetur adipiscing elit.'}
{'type':'umlshape', 'id':'UmlShapeCanvas', 'x':237, 'y':65, 'width':226, 'height':781, 'attrs':'scrollStepX|scrollStepY|classnametoshape|log|frame|save_gdi|working|font1|font2|umlworkspace|layout|coordmapper|layouter|overlap_remover', 'meths':'__init__|AllToLayoutCoords|AllToWorldCoords|onKeyPress|CmdInsertNewumlshape|CmdZapShape|Clear|ConvertParseModelToUmlModel|BuildEdgeModel|Go|CreateUmlShape|newRegion|CreateUmlEdge|OnWheelZoom|ChangeScale|stage1|stage2|stateofthenation|stateofthespring|RedrawEverything|ReLayout|LayoutAndPositionShapes|setSize|DecorateShape|createumlshapeShape|AdjustShapePosition|Redraw222|get_umlboxshapes|OnDestroy|OnLeftClick|DeselectAllShapes'}
{'type':'umlshape', 'id':'Log', 'x':1089, 'y':222, 'width':82, 'height':67, 'attrs':'', 'meths':'WriteText'}
{'type':'edge', 'id':'UmlShapeCanvas_to_MainApp', 'source':'UmlShapeCanvas', 'target':'MainApp', 'uml_edge_type':'composition'}
    """
        model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.1
        self.assertTrue(
            g.persistence.UpgradeToLatestFileFormatVersion(filedata))
        #print g.persistence.filedata_list

        assert g.persistence.filedata_list[0] == "# PynSource Version 1.1", g.persistence.filedata_list[0]
        assert "meta" in g.persistence.filedata_list[1], g.persistence.filedata_list[1]
        assert g.persistence.ori_file_version == 1.1

        # now check type node has been converted to type umlshape
        data = eval(g.persistence.filedata_list[2])
        self.assertEquals('umlshape', data.get('type'))
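These persistence tests all hinge on the '# PynSource Version x.y' header line at the top of filedata. As an illustration only (this is not PynSource's can_I_read or UpgradeToLatestFileFormatVersion logic), detecting the version from such a string could look like the sketch below, where the 0.9 default mirrors the 'no version number' files exercised in the later tests:

# Hypothetical helper, for illustration only.
def detect_file_version(filedata, default=0.9):
    for line in filedata.splitlines():
        line = line.strip()
        if not line:
            continue
        if line.startswith("# PynSource Version"):
            return float(line.rsplit(None, 1)[-1])
        # First non-blank line is already shape/edge data: no version header.
        return default
    return default

assert detect_file_version("# PynSource Version 1.1\n{'type':'meta'}") == 1.1
assert detect_file_version("\n{'type':'node', 'id':'c'}") == 0.9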
Example #18
 def query(cls, id):
     graph = Graph.select().get(id)
     data = QiniuService.get_doc(graph.data_url)
     graph_data = json.loads(data)
     result = graph.get_json()
     result['graph_data'] = graph_data
     return result
Example #19
    def test_3(self):
        """
        Upgrade 1.0 to 1.1
        """
        g = Graph()
        filedata = """
# PynSource Version 1.0
{'type':'node', 'id':'UmlShapeCanvas', 'x':237, 'y':65, 'width':226, 'height':781, 'attrs':'scrollStepX|scrollStepY|classnametoshape|log|frame|save_gdi|working|font1|font2|umlworkspace|layout|coordmapper|layouter|overlap_remover', 'meths':'__init__|AllToLayoutCoords|AllToWorldCoords|onKeyPress|CmdInsertNewNode|CmdZapShape|Clear|ConvertParseModelToUmlModel|BuildEdgeModel|Go|CreateUmlShape|newRegion|CreateUmlEdge|OnWheelZoom|ChangeScale|stage1|stage2|stateofthenation|stateofthespring|RedrawEverything|ReLayout|LayoutAndPositionShapes|setSize|DecorateShape|createNodeShape|AdjustShapePosition|Redraw222|get_umlboxshapes|OnDestroy|OnLeftClick|DeselectAllShapes'}
{'type':'node', 'id':'Log', 'x':1089, 'y':222, 'width':82, 'height':67, 'attrs':'', 'meths':'WriteText'}
{'type':'node', 'id':'MainApp', 'x':788, 'y':217, 'width':234, 'height':717, 'attrs':'log|andyapptitle|frame|notebook|umlwin|yuml|asciiart|multiText|popupmenu|next_menu_id|printData|box|canvas|preview', 'meths':'OnInit|OnResizeFrame|OnRightButtonMenu|OnBuildGraphFromUmlWorkspace|OnSaveGraphToConsole|OnSaveGraph|OnLoadGraphFromText|OnLoadGraph|LoadGraph|OnTabPageChanged|InitMenus|Add|FileImport|FileImport2|FileImport3|FileNew|FilePrint|OnAbout|OnVisitWebsite|OnCheckForUpdates|OnHelp|OnDeleteNode|OnLayout|OnRefreshUmlWindow|MessageBox|OnButton|OnCloseFrame'}
{'type':'node', 'id':'MyEvtHandler', 'x':10, 'y':173, 'width':170, 'height':285, 'attrs':'log|frame|shapecanvas|popupmenu', 'meths':'__init__|UpdateStatusBar|OnLeftClick|_SelectNodeNow|OnEndDragLeft|OnSizingEndDragLeft|OnMovePost|OnPopupItemSelected|OnRightClick|RightClickDeleteNode'}
{'type':'edge', 'id':'UmlShapeCanvas_to_MainApp', 'source':'UmlShapeCanvas', 'target':'MainApp', 'uml_edge_type':'composition'}
{'type':'edge', 'id':'ImageViewer_to_MainApp', 'source':'ImageViewer', 'target':'MainApp', 'uml_edge_type':'composition'}
{'type':'edge', 'id':'UmlShapeCanvas_to_MainApp', 'source':'UmlShapeCanvas', 'target':'MainApp', 'uml_edge_type':'composition'}
    """
        model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.1
        self.assertTrue(g.persistence.UpgradeToLatestFileFormatVersion(filedata))
        #print g.persistence.filedata_list

        assert g.persistence.filedata_list[0] == "# PynSource Version 1.1", g.persistence.filedata_list[0]
        assert "meta" in g.persistence.filedata_list[1], g.persistence.filedata_list[1]
        assert g.persistence.ori_file_version == 1.0
        
        # now check type node has been converted to type umlshape 
        data = eval(g.persistence.filedata_list[2])
        self.assertEquals('umlshape', data.get('type'))
Example #20
    def test_connect(self):
        graph = Graph()
        self.assertEqual([], graph.get_nodes())
        self.assertEqual([], graph.get_edges())

        n1, n2, e = Node(), Node(), Edge()
        graph.connect(n1, n2, e)
        self.assertEqual(set((n1, n2)), set(graph.get_nodes()))
        self.assertEqual([e], graph.get_edges())
Example #21
    def __init__(self, agent, kb, env):
        super(GraphNeuralSession, self).__init__(agent, kb, env)
        self.graph = GraphBatch([Graph(kb)])

        self.utterances = None
        self.context = None
        self.graph_data = None
        self.init_checklists = None
Example #22
    def test_2_generalisation_sort_order(self):
        # C --|> B --|> A
        g = Graph()
        c = GraphNode('C', 0, 0, 200, 200)
        b = GraphNode('B', 0, 0, 200, 200) # parent of C
        a = GraphNode('A', 0, 0, 200, 200) # parent of B
        # add out of order
        g.AddNode(b)
        g.AddNode(c)
        g.AddNode(a)
        g.AddEdge(c, b)['uml_edge_type'] = 'generalisation'
        g.AddEdge(b, a)['uml_edge_type'] = 'generalisation'
        nodelist_normal = [node.id for node in g.nodes]
        nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected = ['A', 'B', 'C']
        #print "nodelist_normal", nodelist_normal
        #print "nodelist_sorted_expected", nodelist_sorted_expected
        #print "nodelist_sorted", nodelist_sorted
        assert nodelist_sorted_expected == nodelist_sorted

        # D --|> C --|> B --|> A
        d = GraphNode('D', 0, 0, 200, 200)
        g.AddNode(d)
        g.AddEdge(d, c)['uml_edge_type'] = 'generalisation'
        nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected = ['A', 'B', 'C', 'D']
        assert nodelist_sorted_expected == nodelist_sorted
    
        # E node not connected to anything
        e = GraphNode('E', 0, 0, 200, 200)
        g.AddNode(e)
        nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected = ['A', 'B', 'C', 'D', 'E']
        assert nodelist_sorted_expected == nodelist_sorted

        # D --|> C --|> B --|> A
        # E
        # C2 --|> B
        c2 = GraphNode('C2', 0, 0, 200, 200)
        g.AddNode(c2)
        g.AddEdge(c2, b)['uml_edge_type'] = 'generalisation'
        nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
        nodelist_sorted_expected = ['A', 'B', 'C', 'C2', 'D', 'E']
        assert nodelist_sorted_expected == nodelist_sorted
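The property these assertions rely on is that every superclass appears before its subclasses. A small standalone checker (an illustration, not the nodes_sorted_by_generalisation algorithm itself) makes that explicit for the single-inheritance edges built in this test:

# Hypothetical helper: does an ordering place every parent before its children?
def parents_come_first(order, child_to_parent):
    position = {name: i for i, name in enumerate(order)}
    return all(position[parent] < position[child]
               for child, parent in child_to_parent.items())

# Generalisation edges from this test: C --|> B --|> A, D --|> C, C2 --|> B.
edges = {'C': 'B', 'B': 'A', 'D': 'C', 'C2': 'B'}
print(parents_come_first(['A', 'B', 'C', 'C2', 'D', 'E'], edges))  # True
print(parents_come_first(['C', 'A', 'B', 'C2', 'D', 'E'], edges))  # False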
Example #23
 def test_6(self):
     """
     Empty file
     """
     g = Graph()
     filedata = """
 """
     model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.1
     self.assertFalse(g.persistence.can_I_read(filedata)[0])
     self.assertFalse(g.persistence.UpgradeToLatestFileFormatVersion(filedata))
Example #24
 def delete(cls, user_name, id):
     graph = Graph.select().get(id)
     assert graph is not None
     assert graph.user_name == user_name
     assert QiniuService.delete_file(
         bucket_name=config.QI_NIU.get('doc_bucket_name'),
         file_name=graph.data_key)
     assert QiniuService.delete_file(
         bucket_name=config.QI_NIU.get('img_bucket_name'),
         file_name=graph.img_key)
     graph.delete()
     return True
Example #25
 def test_4_generalisation_sort_order(self):
     # START AGAIN - more tests, ensure that child nodes which themselves have children are prioritised,
     # and furthermore that children with the most descendants are prioritised even more.
     
     # B,B1 --|> A
     # C --|> B
     # D --|> C
     # C2 --|> B1
     g = Graph()
     c = GraphNode('C', 0, 0, 200, 200)
     b = GraphNode('B', 0, 0, 200, 200)
     b1 = GraphNode('B1', 0, 0, 200, 200)
     a = GraphNode('A', 0, 0, 200, 200)
     c2 = GraphNode('C2', 0, 0, 200, 200)
     d = GraphNode('D', 0, 0, 200, 200)
     # add out of order
     g.AddNode(b1)
     g.AddNode(b)
     g.AddNode(a)
     g.AddNode(c)
     g.AddNode(c2)
     g.AddNode(d)
     g.AddEdge(c2, b1)['uml_edge_type'] = 'generalisation'
     g.AddEdge(d, c)['uml_edge_type'] = 'generalisation'
     g.AddEdge(c, b)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b1, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b, a)['uml_edge_type'] = 'generalisation'
     nodelist_normal = [node.id for node in g.nodes]
     nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
     nodelist_sorted_expected = ['A', 'B', 'B1', 'C', 'D', 'C2']
     #print "nodelist_normal", nodelist_normal
     #print "nodelist_sorted_expected", nodelist_sorted_expected
     #print "nodelist_sorted", nodelist_sorted
     assert nodelist_sorted_expected == nodelist_sorted
Example #26
def train():
    graph = Graph(is_train=True)
    graph.create_model()

    sv = tf.train.Supervisor(logdir=get_path(args.logdir),
                             global_step=graph.global_step,
                             saver=graph.saver,
                             save_model_secs=600)
    sess = sv.PrepareSession()
    losses = []
    while True:
        input_feed = get_data(graph.inputs_ph)
        fetches = [graph.train_op, graph.loss, graph.global_step]
        _, loss, step = sess.run(fetches, input_feed)
        losses.append(loss)

        if step % 100 == 0:
            print('Loss\t%s.' % np.mean(losses))
            losses = []

            from model.test import test
            test()
Example #27
def main():
    """Draw a sample graph"""
    nodes = [1, 2, 3, 4, 5]
    edges = [(1, 2), (1, 3), (1, 5)]
    graph = Graph('Visualizer Sample')
    graph.add_nodes(nodes)
    graph.add_edges(edges)
    draw(graph)
Example #28
 def save_graph(cls, user_name, id, type, title, data, img):
     if data:
         data = json.dumps(data)
     if title is None:
         title = 'Untitled'
     if id is not None:
         graph: Graph = Graph.select().get(id)
         graph.type = type
         graph.title = title
         data_key = md5(title + data)
         if graph.data_key != data_key:
             data_url = QiniuService.upload_doc(data, graph.data_key,
                                                data_key)
             graph.data_key = data_key
             graph.data_url = data_url
         img_key = md5(img)
         if graph.img_key != img_key:
             img_url = QiniuService.upload_img(img, graph.img_key, img_key)
             graph.img_key = img_key
             graph.img_url = img_url
             img_info = QiniuService.get_img_info(img_url)
             graph.width = img_info.get('width')
             graph.height = img_info.get('height')
             graph.size = img_info.get('size')
             graph.format = img_info.get('format')
             graph.color_model = img_info.get('colorModel')
         Graph.update(graph)
         return id
     else:
         data_key = md5(title + data + datetime.now().timestamp().__str__())
         data_url = QiniuService.upload_doc(data, file_name=data_key)
         img_key = md5(img + datetime.now().timestamp().__str__())
         img_url = QiniuService.upload_img(img, file_name=img_key)
         img_info = QiniuService.get_img_info(img_url)
         graph = Graph(user_name=user_name,
                       title=title,
                       data_key=data_key,
                       data_url=data_url,
                       type=type,
                       img_key=img_key,
                       img_url=img_url,
                       width=img_info.get('width'),
                       height=img_info.get('height'),
                       size=img_info.get('size'),
                       format=img_info.get('format'),
                       color_model=img_info.get('colorModel'))
         graph.insert()
         return Graph.select().filter(Graph.data_key == data_key).one().id
Example #29
    def test_5(self):
        """
        Upgrade 1.1 to 1.0 - simulate loading a newer file format into an older version of the app
        Cannot read. (a bit strict, I know - but gui allows for forcing the read - see filemgmt.py)
        """
        g = Graph()
        filedata = """
# PynSource Version 1.1
{'type':'meta', 'info1':'Lorem ipsum dolor sit amet, consectetur adipiscing elit.'}
{'type':'umlshape', 'id':'UmlShapeCanvas', 'x':237, 'y':65, 'width':226, 'height':781, 'attrs':'scrollStepX|scrollStepY|classnametoshape|log|frame|save_gdi|working|font1|font2|umlworkspace|layout|coordmapper|layouter|overlap_remover', 'meths':'__init__|AllToLayoutCoords|AllToWorldCoords|onKeyPress|CmdInsertNewumlshape|CmdZapShape|Clear|ConvertParseModelToUmlModel|BuildEdgeModel|Go|CreateUmlShape|newRegion|CreateUmlEdge|OnWheelZoom|ChangeScale|stage1|stage2|stateofthenation|stateofthespring|RedrawEverything|ReLayout|LayoutAndPositionShapes|setSize|DecorateShape|createumlshapeShape|AdjustShapePosition|Redraw222|get_umlboxshapes|OnDestroy|OnLeftClick|DeselectAllShapes'}
{'type':'umlshape', 'id':'Log', 'x':1089, 'y':222, 'width':82, 'height':67, 'attrs':'', 'meths':'WriteText'}
{'type':'edge', 'id':'UmlShapeCanvas_to_MainApp', 'source':'UmlShapeCanvas', 'target':'MainApp', 'uml_edge_type':'composition'}
    """
        model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.0
        self.assertFalse(g.persistence.UpgradeToLatestFileFormatVersion(filedata))
        self.assertFalse(g.persistence.can_I_read(filedata)[0])
Example #30
    def layout(self, keep_current_positions=False, optimise=True):
        if len(self.graph.nodes) == 0:
            print "Layout aborted - nothing to lay out."
            return

        if not keep_current_positions:
            self.layoutPrepare()

        if self.gui:
            self.gui.kill_layout = False  # initialise

        memento1 = self.graph.GetMementoOfLayoutPoints()
        break_pending = 0

        for i in range(0, self.iterations):
            self.layoutIteration()

            if i % 100 == 0:  # i%50==0:
                if self.gui:
                    # This is the only time layoutCalcBounds() needs to be called explicitly,
                    # since we are in the middle of a layout and about to visualise.
                    self.layoutCalcBounds()
                    self.gui.stateofthenation(
                        recalibrate=True,
                        auto_resize_canvas=False)  # refresh gui

                    if self.gui.kill_layout:
                        print "Layout aborted early, due to user interrupt"
                        break

            if i % 20 == 0:
                if optimise:
                    memento2 = self.graph.GetMementoOfLayoutPoints()
                    if Graph.MementosEqual(memento1, memento2, 0.01):
                        break_pending += 1
                        #print "!",
                        if break_pending > 5:
                            #print "break"
                            break
                    else:
                        break_pending = 0
                        #print ".",
                    memento1 = memento2

        #print
        self.layoutCalcBounds()
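The memento comparison every 20 iterations is an early-stopping check: once the layout points stop moving (within a tolerance) for several consecutive checks, the loop bails out before exhausting self.iterations. The same idea in isolation, as a sketch with hypothetical names and a toy workload:

# Standalone sketch of the convergence check used above (names are made up).
def run_until_stable(step, snapshot, iterations=1000, tol=0.01, patience=5):
    previous = snapshot()
    stable_checks = 0
    for i in range(iterations):
        step()
        if i % 20 == 0:
            current = snapshot()
            if all(abs(a - b) <= tol for a, b in zip(previous, current)):
                stable_checks += 1
                if stable_checks > patience:
                    break  # positions have settled; stop early
            else:
                stable_checks = 0
            previous = current
    return i

# Toy "layout": a single coordinate decaying towards zero.
state = [10.0]
stopped_at = run_until_stable(lambda: state.__setitem__(0, state[0] * 0.5),
                              lambda: tuple(state))
print("stopped after iteration", stopped_at)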
Example #31
    def initialize_graph(self):

        # Get neural network
        self.network = self._get_neural_network()

        # Build computational graph
        self.graph = Graph(network=self.network,
                           save_path=self.save_path,
                           max_to_keep=self.max_to_keep)

        # Start session
        self.sess = tf.Session()

        # Initialize global variables
        self.sess.run(self.graph.init_global)

        # Save network object
        self._pickle_network()
Example #32
    def test_2(self):
        """
        Upgrade 0.9 (no version number) to 1.1
        """
        g = Graph()
        filedata = """
{'type':'node', 'id':'c', 'x':230, 'y':174, 'width':60, 'height':120}
{'type':'node', 'id':'c1', 'x':130, 'y':174, 'width':60, 'height':120}
{'type':'edge', 'id':'c_to_c1', 'source':'c', 'target':'c1'}
    """
        model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.1
        self.assertTrue(g.persistence.UpgradeToLatestFileFormatVersion(filedata))
        #print g.persistence.filedata_list

        assert g.persistence.filedata_list[0] == "# PynSource Version 1.1", g.persistence.filedata_list[0]
        assert "meta" in g.persistence.filedata_list[1], g.persistence.filedata_list[1]
        assert g.persistence.ori_file_version == 0.9
        
        # now check type node has been converted to type umlshape 
        data = eval(g.persistence.filedata_list[2])
        self.assertEquals('umlshape', data.get('type'))
Example #33
    def test_1(self):
        """
        Upgrade 0.9 (no version number) to 1.0
        """
        g = Graph()
        
        filedata = """
{'type':'node', 'id':'c', 'x':230, 'y':174, 'width':60, 'height':120}
{'type':'node', 'id':'c1', 'x':130, 'y':174, 'width':60, 'height':120}
{'type':'edge', 'id':'c_to_c1', 'source':'c', 'target':'c1'}
    """
        #g.Clear()
        assert len(g.nodes) == 0
        #assert g.GraphToString().strip() == ""
        
        model.graph_persistence.PERSISTENCE_CURRENT_VERSION = 1.0
        #g.LoadGraphFromStrings(filedata)
        self.assertTrue(g.persistence.UpgradeToLatestFileFormatVersion(filedata))
        #print g.persistence.filedata_list
        
        assert g.persistence.filedata_list[0] == "# PynSource Version 1.0", g.persistence.filedata_list[0]
        assert g.persistence.ori_file_version == 0.9
Example #34
    def __init__(self,
                 in_channels=2,
                 in_planes=128,
                 edge_importance_weighting=True):
        super().__init__()

        graph = Graph()
        A = torch.tensor(graph.A, dtype=torch.float32, requires_grad=False)
        self.register_buffer('A', A)

        # Networks.
        spatial_kernel_size = A.size(0)
        temporal_kernel_size = 9
        kernel_size = (temporal_kernel_size, spatial_kernel_size)

        self.data_bn = nn.BatchNorm1d(in_channels * A.size(1))
        self.st_gcn_networks = nn.ModuleList((
            st_gcn(in_channels, 64, kernel_size, 1, residual=False),
            st_gcn(64, 64, kernel_size, 1),
            st_gcn(64, 64, kernel_size, 1),
            st_gcn(64, 64, kernel_size, 1),
            st_gcn(64, 128, kernel_size, 2),
            st_gcn(128, 128, kernel_size, 1),
            st_gcn(128, 128, kernel_size, 1),
            st_gcn(128, 256, kernel_size, 2),
            st_gcn(256, 256, kernel_size, 1),
            st_gcn(256, 256, kernel_size, 1),
        ))

        if edge_importance_weighting:
            self.edge_importance = nn.ParameterList([
                nn.Parameter(torch.ones(A.size()))
                for i in self.st_gcn_networks
            ])
        else:
            self.edge_importance = [1] * len(self.st_gcn_networks)

        self.cls = nn.Conv2d(256, in_planes, kernel_size=1)
Example #35
 def test_6_generalisation_sort_order(self):
     # START AGAIN - more tests, check stranger trees
 
     # B,D,F --|> A
     # H --|> C --|> B
     # E --|> D
     g = Graph()
     a = GraphNode('A', 0, 0, 200, 200)
     b = GraphNode('B', 0, 0, 200, 200)
     c = GraphNode('C', 0, 0, 200, 200)
     d = GraphNode('D', 0, 0, 200, 200)
     e = GraphNode('E', 0, 0, 200, 200)
     f = GraphNode('F', 0, 0, 200, 200)
     h = GraphNode('H', 0, 0, 200, 200)
     # add out of order
     g.AddNode(f)
     g.AddNode(b)
     g.AddNode(a)
     g.AddNode(h)
     g.AddNode(c)
     g.AddNode(e)
     g.AddNode(d)
     g.AddEdge(b, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(d, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(f, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(h, c)['uml_edge_type'] = 'generalisation'
     g.AddEdge(c, b)['uml_edge_type'] = 'generalisation'
     g.AddEdge(e, d)['uml_edge_type'] = 'generalisation'
     nodelist_normal = [node.id for node in g.nodes]
 
     nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
     nodelist_sorted_expected = ['A', 'B', 'D', 'F', 'C', 'H', 'E']
     assert nodelist_sorted_expected == nodelist_sorted
 
     nodelist_sorted_annotated = [(node.id, annotation) for node,annotation in g.nodes_sorted_by_generalisation]
     nodelist_sorted_expected_annotated = [('A', 'root'), ('B', 'fc'), ('D', 'tab'), ('F', 'tab'), ('C', 'fc'), ('H', 'fc'), ('E', 'root')]
     assert nodelist_sorted_expected_annotated == nodelist_sorted_annotated
Example #36
 def test_5_generalisation_sort_order(self):
     # START AGAIN - more tests, check stranger trees, though the algorithm
     # is proving pretty smart, prioritising children who have children to the left
 
     # B,B1,C,K --|> A
     # D --|> C
     g = Graph()
     a = GraphNode('A', 0, 0, 200, 200)
     b = GraphNode('B', 0, 0, 200, 200)
     b1 = GraphNode('B1', 0, 0, 200, 200)
     c = GraphNode('C', 0, 0, 200, 200)
     k = GraphNode('K', 0, 0, 200, 200)
     d = GraphNode('D', 0, 0, 200, 200)
     # add out of order
     g.AddNode(b1)
     g.AddNode(b)
     g.AddNode(a)
     g.AddNode(c)
     g.AddNode(k)
     g.AddNode(d)
     g.AddEdge(k, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(d, c)['uml_edge_type'] = 'generalisation'
     g.AddEdge(c, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b1, a)['uml_edge_type'] = 'generalisation'
     g.AddEdge(b, a)['uml_edge_type'] = 'generalisation'
     nodelist_normal = [node.id for node in g.nodes]
     nodelist_sorted = [node.id for node,annotation in g.nodes_sorted_by_generalisation]
     #print "nodelist_normal", nodelist_normal
     #print "nodelist_sorted_expected", nodelist_sorted_expected
     #print "nodelist_sorted", nodelist_sorted
     assert nodelist_sorted[0] == 'A'
     assert nodelist_sorted[1] == 'C'
     assert nodelist_sorted[-1] == 'D'
 
     nodelist_sorted_annotated = [(node.id, annotation) for node,annotation in g.nodes_sorted_by_generalisation]
     assert nodelist_sorted_annotated[0] == ('A', 'root')
     assert nodelist_sorted_annotated[1] == ('C', 'fc')
     assert nodelist_sorted_annotated[-1] == ('D', 'fc')
     assert ('K', 'tab') in nodelist_sorted_annotated
     assert ('B', 'tab') in nodelist_sorted_annotated
     assert ('B1', 'tab') in nodelist_sorted_annotated
Example #37
 def __setstate__(self, state):
     (self.hyperparameters, self.parameters, self.trainer) = state
     self.graph = Graph(self.hyperparameters, self.parameters)
Example #38
 def __init__(self, hyperparameters):
     self.hyperparameters = hyperparameters
     self.parameters = Parameters(self.hyperparameters)
     self.trainer = Trainer()
     self.graph = Graph(self.hyperparameters, self.parameters)
Example #39
    def test_8_multiple_inhertitance_render(self):
        # F --|> M
        # F --|> S
        g = Graph()
        f = GraphNode('F', 0, 0, 200, 200)
        m = GraphNode('M', 0, 0, 200, 200)
        s = GraphNode('S', 0, 0, 200, 200)
        g.AddEdge(f, m)['uml_edge_type'] = 'generalisation'
        g.AddEdge(f, s)['uml_edge_type'] = 'generalisation'
        nodelist_normal = [node.id for node in g.nodes]

        """
        Custom ordering allows us to bypass the graph 'nodes_sorted_by_generalisation'
        algorithm which might either be crashing or have unwanted ordering results.
        Thus we can experiment with how different experimental orderings will render.
        """
        mycustom_ordering = [(m, 'root'), (s, 'root'), (f, 'root')]

        from ascii_uml.layout_ascii import model_to_ascii_builder

        m = model_to_ascii_builder()
        s = m.main(g, nodes_annotated_and_sorted=mycustom_ordering)

        expected_s = """
+---+
| M |
+---+
                      
                      
                      
+---+       [ S ][ M ]
| S |        .        
+---+       /_\       
             |        
             |        
            +---+     
            | F |     
            +---+     
        """

        def remove_blank_lines(str):
            return os.linesep.join([s for s in str.splitlines() if s.strip()])
            
        # remove blank lines, since different margins and paddings in ascii uml layout
        # could cause difference
        expected_s = remove_blank_lines(expected_s)
        s = remove_blank_lines(s)
        
        #print
        #print "*"*88
        #print expected_s
        #print "*"*88
        #print s
        #print "*"*88
        
        if s.strip() != expected_s.strip():
            # Write to file
            with open('logs/test_8_out_actual_.txt','w') as f: f.write(s)
            with open('logs/test_8_out_expected.txt','w') as f: f.write(expected_s)

            import difflib
            # delta = difflib.ndiff(s.strip(), expected_s.strip()) # this will always emit something, a visual of the original with changes.
            delta = difflib.unified_diff(s.strip(), expected_s.strip(), n=0,
                                fromfile='actual', tofile='expected')
            diff_s = ''.join(delta)
            print diff_s
        
        assert s.strip() == expected_s.strip()
Example #40
class Model:
    def __init__(self, hyperparameters):
        self.hyperparameters = hyperparameters
        self.parameters = Parameters(self.hyperparameters)
        self.trainer = Trainer()
        self.graph = Graph(self.hyperparameters, self.parameters)

    def __getstate__(self):
        return (self.hyperparameters, self.parameters, self.trainer)

    def __setstate__(self, state):
        (self.hyperparameters, self.parameters, self.trainer) = state
        self.graph = Graph(self.hyperparameters, self.parameters)

    def embed(self, window):
        seq = [self.parameters.embeddings[word] for word in window]
        return numpy.dstack([numpy.resize(s, (1, s.size, 1)) for s in seq])

    def embeds(self, sequences):
        return numpy.vstack([self.embed(seq) for seq in sequences])

    def corrupt_example(self, e):
        import copy, random
        e = copy.deepcopy(e)
        pos = - self.hyperparameters.window_size // 2
        mid = e[pos]
        while e[pos] == mid: e[pos] = random.randint(0, self.hyperparameters.curriculum_size - 1)
        pr = 1. / self.hyperparameters.curriculum_size
        weight = 1. / pr
        return e, numpy.float32(weight)

    def corrupt_examples(self, correct_sequences):
        return zip(*[self.corrupt_example(e) for e in correct_sequences])

    def train(self, correct_sequences):
        noise_sequences, weights = self.corrupt_examples(correct_sequences)
        for w in weights: assert w == weights[0]
        learning_rate = self.hyperparameters.learning_rate

        r = self.graph.train(self.embeds(correct_sequences), self.embeds(noise_sequences), numpy.float32(learning_rate * weights[0]))

        correct_inputs_gradient, noise_inputs_gradient, losses, correct_scores, noise_scores = r

        to_normalize = set()
        for example in range(len(correct_sequences)):
            correct_sequence = correct_sequences[example]
            noise_sequence = noise_sequences[example]
            loss, correct_score, noise_score = losses[example], correct_scores[example], noise_scores[example]
            import pdb
            pdb.set_trace()

            correct_input_gradient = correct_inputs_gradient[example]
            noise_input_gradient = noise_inputs_gradient[example]

            # self.trainer.update(numpy.sum(loss), correct_score, noise_score)

            for w in weights: assert w == weights[0]
            embedding_learning_rate = self.hyperparameters.embedding_learning_rate * weights[0]
            if numpy.sum(loss) == 0:
                for di in correct_input_gradient + noise_input_gradient:
                    assert (di == 0).all()
            else:
                for (i, di) in zip(correct_sequence, correct_input_gradient.T):
                    self.parameters.embeddings[i] -= 1.0 * embedding_learning_rate * di
                    to_normalize.add(i)
                for (i, di) in zip(noise_sequence, noise_input_gradient.T):
                    self.parameters.embeddings[i] -= 1.0 * embedding_learning_rate * di
                    to_normalize.add(i)

            self.parameters.normalize(list(to_normalize))

    def predict(self, sequence):
        (score) = self.graph.predict(self.embed(sequence))
        return score

    def verbose_predict(self, sequence):
        (score, prehidden) = self.graph.verbose_predict(self.embed(sequence))
        return score, prehidden

    def validate(self, sequence):
        import copy
        corrupt_sequence = copy.copy(sequence)
        rank = 1
        correct_score = self.predict(sequence)
        mid = self.hyperparameters.window_size // 2

        for i in range(self.hyperparameters.curriculum_size - 1):
            if i == sequence[mid]: continue
            corrupt_sequence[mid] = i
            corrupt_score = self.predict(corrupt_sequence)
            rank += (correct_score <= corrupt_score)

        return rank
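validate() scores the true centre word against every alternative and reports where it ranks; the rank update on the last line of the loop is just a count of corrupt candidates that score at least as well. The core of that computation, isolated into a hypothetical helper:

# Illustration only: rank of the correct score among corrupt scores,
# mirroring rank += (correct_score <= corrupt_score) above.
def rank_of_correct(correct_score, corrupt_scores):
    return 1 + sum(correct_score <= s for s in corrupt_scores)

print(rank_of_correct(0.9, [0.2, 0.95, 0.5]))  # 2: one candidate scored at least as high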