Example #1
import os.path as osp

from torch_geometric.datasets import TUDataset

from Esme.graph.generativemodel import sbms
# name_conversion and torch_geometric_2nx are project-local helpers defined elsewhere


def load_tugraphs(graph='mutag', labels_only=False):
    # load synthetic SBM graphs when the name has the form 'syn<scale>'
    if graph[:3] == 'syn':
        scale = int(graph[3:])
        n = 750
        p, q = 0.5, 0.1  # intra- / inter-community edge probabilities
        p_, q_ = 0.4, 0.2  # unused here
        gs1 = sbms(n=n, n1=100 * scale, n2=50 * scale, p=p, q=q)
        gs2 = sbms(n=n, n1=75 * scale, n2=75 * scale, p=p, q=q)
        graphs = gs2 + gs1
        labels = [1] * n + [2] * n
        return graphs, labels

    # otherwise load a TU benchmark dataset and convert it to networkx graphs
    graph = name_conversion(graph)
    path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', graph)
    dataset = TUDataset(path, name=graph)
    graphs, labels = torch_geometric_2nx(dataset, labels_only)
    return graphs, labels
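The sbms generator itself is not shown in these snippets. A minimal sketch of what it presumably produces, built on networkx's stochastic block model generator; the name sbms_sketch and the exact return type are assumptions, not part of the Esme library:

import networkx as nx

def sbms_sketch(n, n1, n2, p, q, seed=None):
    # Hypothetical stand-in for Esme.graph.generativemodel.sbms: n two-block SBM
    # graphs with block sizes n1 and n2, intra-block edge probability p and
    # inter-block edge probability q.
    graphs = []
    for i in range(n):
        g = nx.stochastic_block_model(
            sizes=[n1, n2],
            p=[[p, q], [q, p]],
            seed=None if seed is None else seed + i)
        graphs.append(g)
    return graphs

# e.g. ten graphs with communities of 100 and 50 nodes
gs = sbms_sketch(n=10, n1=100, n2=50, p=0.5, q=0.1, seed=0)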
Example #2
""" sbm graph classification """

from Esme.dgms.fil import nodefeat
from Esme.graph.function import fil_strategy
from Esme.graph.generativemodel import sbms
from networkx.linalg.algebraicconnectivity import fiedler_vector

if __name__ == '__main__':
    n = 1
    p, q = 0.5, 0.1
    gs = sbms(n=n, n1=100, n2=50, p=p, q=q)
    for i in range(len(gs)):
        g = gs[i]
        # lapfeat = nodefeat(g, 'fiedler', norm=True)
        nodefeat = fiedler_vector(g, normalized=False)  # np.ndarray
        nodefeat = nodefeat.reshape(len(g), 1)

        gs[i] = fil_strategy(g, nodefeat, method='node', viz_flag=False)

    print('Finish computing lapfeat')
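The Fiedler vector (the eigenvector for the second-smallest Laplacian eigenvalue) is a natural node feature here because its sign pattern roughly separates the two planted blocks. A self-contained check of that property using plain networkx and numpy; the block sizes mirror the snippet above and everything else is illustrative:

import numpy as np
import networkx as nx
from networkx.linalg.algebraicconnectivity import fiedler_vector

# one two-block SBM graph with the same sizes as the snippet above
g = nx.stochastic_block_model(sizes=[100, 50], p=[[0.5, 0.1], [0.1, 0.5]], seed=0)

vec = fiedler_vector(g, normalized=False)  # np.ndarray, one value per node
feat = vec.reshape(len(g), 1)              # column vector, as the filtration expects

# nodes 0..99 are block 0 and 100..149 are block 1; the mean sign per block
# should be close to -1 on one block and +1 on the other
print(np.sign(vec[:100]).mean(), np.sign(vec[100:]).mean())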
Example #3
parser.add_argument("--q",
                    default=0.1,
                    type=float,
                    help='The probability between communities')

if __name__ == '__main__':
    # sys.argv = ['graph/2sbm_gc.py']
    args = parser.parse_args()
    print(args)
    n = 100
    p, q = 0.5, args.q
    p_, q_ = 0.4, 0.2
    fil_method = args.fil_method
    zigzag = (fil_method == 'combined')
    edge_kwargs = {'h': 0.3, 'edgefunc': 'edge_prob'}
    gs1 = sbms(n=n, n1=100, n2=50, p=p, q=q)
    gs2 = sbms(n=n, n1=75, n2=75, p=p, q=q)
    # gs3 = sbms(n=n, n1=75, n2=75, p=p_, q=q_)
    # gs3 = sbms(n=n, n1=50, n2=50, p=p, q=q)
    gs = gs2 + gs1
    labels = [1] * n + [2] * n

    plt.title('p: %s, q: %s' % (p, q))
    for i in range(len(gs)):
        g = gs[i]
        lp = LaplacianEigenmaps(d=1)
        lp.learn_embedding(g, weight='weight')
        lapfeat = lp.get_embedding()  # lapfeat is an array

        # viz = True if i%100==0 else False
        # plt.subplot(2, 1, 1+i//100)
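The snippet above references parser, args.fil_method, plt, sbms, and LaplacianEigenmaps without showing where they are defined. A minimal reconstruction of the missing setup, to be read together with the --q argument shown at the top of the example; the --fil_method argument and its default are assumptions, and LaplacianEigenmaps is presumably a project-level wrapper whose import is not guessed here.

# Hypothetical setup assumed by the snippet above (everything except --q,
# which already appears in the original).
import argparse

import matplotlib.pyplot as plt

from Esme.graph.generativemodel import sbms
# LaplacianEigenmaps is presumably a project wrapper; its import is not shown
# in the original and is not reconstructed here.

parser = argparse.ArgumentParser(description='2-class SBM graph classification')
parser.add_argument("--fil_method",
                    default='node',
                    type=str,
                    help='Filtration method; "combined" enables zigzag (assumed)')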