Example no. 1
def test_sharing():
    data = Frame(create_test_data())
    f1 = FrameRef(data, index=toindex([0, 1, 2, 3]))
    f2 = FrameRef(data, index=toindex([2, 3, 4, 5, 6]))
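    # f1 and f2 are two views over the same underlying Frame; they overlap on global rows 2-3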
    # test read
    for k, v in f1.items():
        assert U.allclose(data[k].data[0:4], v)
    for k, v in f2.items():
        assert U.allclose(data[k].data[2:7], v)
    f2_a1 = f2['a1'].data
    # test write
    # updates to rows owned only by this ref should not be seen by the other ref.
    f1[Index(th.tensor([0, 1]))] = {
        'a1': th.zeros([2, D]),
        'a2': th.zeros([2, D]),
        'a3': th.zeros([2, D]),
    }
    assert U.allclose(f2['a1'], f2_a1)
    # updates to the shared rows should be seen by the other ref.
    f1[Index(th.tensor([2, 3]))] = {
        'a1': th.ones([2, D]),
        'a2': th.ones([2, D]),
        'a3': th.ones([2, D]),
    }
    f2_a1[0:2] = th.ones([2, D])
    assert U.allclose(f2['a1'], f2_a1)
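All of the frame tests in these examples rely on the same scaffolding: module-level constants N and D, a create_test_data() helper that builds three N x D columns named 'a1', 'a2' and 'a3', and DGL's internal Frame/FrameRef/Index/toindex utilities. That setup is not shown in any of the snippets, so the sketch below is only a reconstruction from how the tests use it (import paths follow older DGL releases, and the value of D is an arbitrary choice); it is not the original fixture code.

# Assumed test scaffolding (a reconstruction from how the examples use it; not the original fixture).
import torch as th
from dgl.frame import Frame, FrameRef   # internal DGL frame storage (older releases)
from dgl.utils import Index, toindex    # internal DGL index helpers (older releases)

N = 10   # number of rows; matches the length-10 gradient checks in test_row2
D = 5    # feature width; arbitrary positive value

def create_test_data(grad=False):
    # Three N x D float columns; made differentiable when grad=True.
    data = {}
    for name in ('a1', 'a2', 'a3'):
        col = th.randn(N, D)
        if grad:
            col.requires_grad_(True)
        data[name] = col
    return data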
Example no. 2
def test_slicing():
    data = Frame(create_test_data(grad=True))
    f1 = FrameRef(data, index=toindex(slice(1, 5)))
    f2 = FrameRef(data, index=toindex(slice(3, 8)))
    # test read
    for k, v in f1.items():
        assert U.allclose(data[k].data[1:5], v)
    f2_a1 = f2['a1'].data
    # test write
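    # rows 0-1 of f1 map to global rows 1-2, outside f2's view (global rows 3-7),
    # so this write should not be visible when reading through f2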
    f1[Index(th.tensor([0, 1]))] = {
        'a1': th.zeros([2, D]),
        'a2': th.zeros([2, D]),
        'a3': th.zeros([2, D]),
    }
    assert U.allclose(f2['a1'], f2_a1)

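    # rows 2-3 of f1 map to global rows 3-4, i.e. rows 0-1 of f2,
    # so this write is visible through f2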
    f1[Index(th.tensor([2, 3]))] = {
        'a1': th.ones([2, D]),
        'a2': th.ones([2, D]),
        'a3': th.ones([2, D]),
    }
    f2_a1[0:2] = 1
    assert U.allclose(f2['a1'], f2_a1)

    f1[toindex(slice(2, 4))] = {
        'a1': th.zeros([2, D]),
        'a2': th.zeros([2, D]),
        'a3': th.zeros([2, D]),
    }
    f2_a1[0:2] = 0
    assert U.allclose(f2['a1'], f2_a1)
Example no. 3
def test_slicing():
    data = Frame(create_test_data(grad=True))
    f1 = FrameRef(data, index=toindex(slice(1, 5)))
    f2 = FrameRef(data, index=toindex(slice(3, 8)))
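    # f1 views global rows 1-4 and f2 views global rows 3-7; they overlap on rows 3-4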
    # test read
    for k, v in f1.items():
        assert F.allclose(F.narrow_row(data[k].data, 1, 5), v)
    f2_a1 = f2['a1']  # is a tensor
    # test write
    f1[Index(F.tensor([0, 1]))] = {
        'a1': F.zeros([2, D]),
        'a2': F.zeros([2, D]),
        'a3': F.zeros([2, D]),
    }
    assert F.allclose(f2['a1'], f2_a1)

    f1[Index(F.tensor([2, 3]))] = {
        'a1': F.ones([2, D]),
        'a2': F.ones([2, D]),
        'a3': F.ones([2, D]),
    }
    F.narrow_row_set(f2_a1, 0, 2, 1)
    assert F.allclose(f2['a1'], f2_a1)

    f1[toindex(slice(2, 4))] = {
        'a1': F.zeros([2, D]),
        'a2': F.zeros([2, D]),
        'a3': F.zeros([2, D]),
    }
    F.narrow_row_set(f2_a1, 0, 2, 0)
    assert F.allclose(f2['a1'], f2_a1)
Example no. 4
def test_sharing():
    data = Frame(create_test_data())
    f1 = FrameRef(data, index=toindex([0, 1, 2, 3]))
    f2 = FrameRef(data, index=toindex([2, 3, 4, 5, 6]))
    # test read
    for k, v in f1.items():
        assert F.allclose(F.narrow_row(data[k].data, 0, 4), v)
    for k, v in f2.items():
        assert F.allclose(F.narrow_row(data[k].data, 2, 7), v)
    f2_a1 = f2['a1']
    # test write
    # updates to rows owned only by this ref should not be seen by the other ref.
    f1[Index(F.tensor([0, 1]))] = {
        'a1': F.zeros([2, D]),
        'a2': F.zeros([2, D]),
        'a3': F.zeros([2, D]),
    }
    assert F.allclose(f2['a1'], f2_a1)
    # updates to the shared rows should be seen by the other ref.
    f1[Index(F.tensor([2, 3]))] = {
        'a1': F.ones([2, D]),
        'a2': F.ones([2, D]),
        'a3': F.ones([2, D]),
    }
    F.narrow_row_set(f2_a1, 0, 2, F.ones([2, D]))
    assert F.allclose(f2['a1'], f2_a1)
Example no. 5
def test_row1():
    # test row getter/setter
    data = create_test_data()
    f = FrameRef(Frame(data))

    # getter
    # test non-duplicate keys
    rowid = Index(F.tensor([0, 2]))
    rows = f[rowid]
    for k, v in rows.items():
        assert tuple(F.shape(v)) == (len(rowid), D)
        assert F.allclose(v, F.gather_row(data[k], F.tensor(rowid.tousertensor())))
    # test duplicate keys
    rowid = Index(F.tensor([8, 2, 2, 1]))
    rows = f[rowid]
    for k, v in rows.items():
        assert tuple(F.shape(v)) == (len(rowid), D)
        assert F.allclose(v, F.gather_row(data[k], F.tensor(rowid.tousertensor())))

    # setter
    rowid = Index(F.tensor([0, 2, 4]))
    vals = {'a1' : F.zeros((len(rowid), D)),
            'a2' : F.zeros((len(rowid), D)),
            'a3' : F.zeros((len(rowid), D)),
            }
    f[rowid] = vals
    for k, v in f[rowid].items():
        assert F.allclose(v, F.zeros((len(rowid), D)))

    # setting rows with new column should raise error with error initializer
    f.set_initializer(lambda shape, dtype : assert_(False))
    def failed_update_rows():
        vals['a4'] = F.ones((len(rowid), D))
        f[rowid] = vals
    assert check_fail(failed_update_rows)
Example no. 6
def test_row4():
    # test updating rows of an empty frame that has a preset num_rows
    f = FrameRef(Frame(num_rows=5))
    rowid = Index(th.tensor([0, 2, 4]))
    f[rowid] = {'h': th.ones((3, 2))}
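    # rows 1 and 3 were never written, so the new column 'h' falls back to the default zero initializer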
    ans = th.zeros((5, 2))
    ans[th.tensor([0, 2, 4])] = th.ones((3, 2))
    assert U.allclose(f['h'], ans)
Example no. 7
def test_row2():
    # test row getter/setter autograd compatibility
    data = create_test_data(grad=True)
    f = FrameRef(Frame(data))

    with F.record_grad():
        # getter
        c1 = f['a1']
        # test non-duplicate keys
        rowid = Index(F.tensor([0, 2]))
        rows = f[rowid]
        y = rows['a1']
        F.backward(y, F.ones((len(rowid), D)))
    assert F.allclose(
        F.grad(c1)[:, 0], F.tensor([1., 0., 1., 0., 0., 0., 0., 0., 0., 0.]))

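    # re-attach gradients so the next check starts from a clean gradient state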
    f['a1'] = F.attach_grad(f['a1'])
    with F.record_grad():
        c1 = f['a1']
        # test duplicate keys
        rowid = Index(F.tensor([8, 2, 2, 1]))
        rows = f[rowid]
        y = rows['a1']
        F.backward(y, F.ones((len(rowid), D)))
    assert F.allclose(
        F.grad(c1)[:, 0], F.tensor([0., 1., 2., 0., 0., 0., 0., 0., 1., 0.]))

    f['a1'] = F.attach_grad(f['a1'])
    with F.record_grad():
        # setter
        c1 = f['a1']
        rowid = Index(F.tensor([0, 2, 4]))
        vals = {
            'a1': F.attach_grad(F.zeros((len(rowid), D))),
            'a2': F.attach_grad(F.zeros((len(rowid), D))),
            'a3': F.attach_grad(F.zeros((len(rowid), D))),
        }
        f[rowid] = vals
        c11 = f['a1']
        F.backward(c11, F.ones((N, D)))
    assert F.allclose(
        F.grad(c1)[:, 0], F.tensor([0., 1., 0., 1., 0., 1., 1., 1., 1., 1.]))
    assert F.allclose(F.grad(vals['a1']), F.ones((len(rowid), D)))
    assert F.is_no_grad(vals['a2'])
Example no. 8
def test_add_rows():
    data = Frame()
    f1 = FrameRef(data)
    f1.add_rows(4)
    x = th.randn(1, 4)
    f1[Index(th.tensor([0]))] = {'x': x}
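    # only row 0 of the new column 'x' was written; rows 1-3 default to zeros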
    ans = th.cat([x, th.zeros(3, 4)])
    assert U.allclose(f1['x'], ans)
    f1.add_rows(4)
    f1[toindex(slice(4, 8))] = {'x': th.ones(4, 4), 'y': th.ones(4, 5)}
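    # 'y' is a brand-new column, so its first 4 (unwritten) rows default to zeros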
    ans = th.cat([ans, th.ones(4, 4)])
    assert U.allclose(f1['x'], ans)
    ans = th.cat([th.zeros(4, 5), th.ones(4, 5)])
    assert U.allclose(f1['y'], ans)
Example no. 9
def test_add_rows():
    data = Frame()
    f1 = FrameRef(data)
    f1.add_rows(4)
    x = F.randn((1, 4))
    f1[Index(F.tensor([0]))] = {'x': x}
    ans = F.cat([x, F.zeros((3, 4))], 0)
    assert F.allclose(f1['x'], ans)
    f1.add_rows(4)
    f1[toindex(slice(4, 8))] = {'x': F.ones((4, 4)), 'y': F.ones((4, 5))}
    ans = F.cat([ans, F.ones((4, 4))], 0)
    assert F.allclose(f1['x'], ans)
    ans = F.cat([F.zeros((4, 5)), F.ones((4, 5))], 0)
    assert F.allclose(f1['y'], ans)
Example no. 10
def test_row2():
    # test row getter/setter autograd compatibility
    data = create_test_data(grad=True)
    f = FrameRef(Frame(data))

    # getter
    c1 = f['a1']
    # test non-duplicate keys
    rowid = Index(th.tensor([0, 2]))
    rows = f[rowid]
    rows['a1'].backward(th.ones((len(rowid), D)))
    assert U.allclose(c1.grad[:, 0],
                      th.tensor([1., 0., 1., 0., 0., 0., 0., 0., 0., 0.]))
    c1.grad.data.zero_()
    # test duplicate keys
    rowid = Index(th.tensor([8, 2, 2, 1]))
    rows = f[rowid]
    rows['a1'].backward(th.ones((len(rowid), D)))
    assert U.allclose(c1.grad[:, 0],
                      th.tensor([0., 1., 2., 0., 0., 0., 0., 0., 1., 0.]))
    c1.grad.data.zero_()

    # setter
    c1 = f['a1']
    rowid = Index(th.tensor([0, 2, 4]))
    vals = {
        'a1': Variable(th.zeros((len(rowid), D)), requires_grad=True),
        'a2': Variable(th.zeros((len(rowid), D)), requires_grad=True),
        'a3': Variable(th.zeros((len(rowid), D)), requires_grad=True),
    }
    f[rowid] = vals
    c11 = f['a1']
    c11.backward(th.ones((N, D)))
    assert U.allclose(c1.grad[:, 0],
                      th.tensor([0., 1., 0., 1., 0., 1., 1., 1., 1., 1.]))
    assert U.allclose(vals['a1'].grad, th.ones((len(rowid), D)))
    assert vals['a2'].grad is None
Example no. 11
    def __init__(self, name, self_loop, device, degree_as_nlabel=False, line_graph=False):
        """Initialize the dataset."""

        self.name = name  # MUTAG
        self.ds_name = 'nig'
        self.extract_dir = self._download()
        self.file = self._file_path()

        self.device = device

        self.self_loop = self_loop
        self.line_graph = line_graph

        self.graphs = []
        self.labels = []

        # relabel
        self.glabel_dict = {}
        self.nlabel_dict = {}
        self.elabel_dict = {}
        self.ndegree_dict = {}

        # global counts
        self.N = 0  # total number of graphs
        self.n = 0  # total number of nodes
        self.m = 0  # total number of edges

        # global number of classes
        self.gclasses = 0
        self.nclasses = 0
        self.eclasses = 0
        self.dim_nfeats = 0

        # flags
        self.degree_as_nlabel = degree_as_nlabel
        self.nattrs_flag = False
        self.nlabels_flag = False
        self.verbosity = False

        # calc all values
        self._load()

        self._deg = []
        self._edges = []

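        # helper: per-node in-degrees as an (N, 1) float column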
        self.in_degrees = lambda g: g.in_degrees(
                Index(np.arange(0, g.number_of_nodes()))).unsqueeze(1).float()

        self._adj = []

        if self.line_graph:
            self._lgs = []
            self._lgs_deg = []
            self._lgs_edges = []
            self._pm = []
            self._pd = []
        
        self._preprocess()

        if self.degree_as_nlabel:
            self._deg_as_feature()
        elif self.name == 'PROTEINS' or self.name == 'PTC' or self.name == 'MUTAG':
            self._attr_as_feature()
Example no. 12
def extract_deg_adj(graph_list):
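    """Return per-node in-degree column vectors and adjacency matrices for a list of graphs."""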
    in_degrees = lambda g: g.in_degrees(
        Index(np.arange(0, g.number_of_nodes()))).unsqueeze(1).float()
    degs = [in_degrees(g) for g in graph_list]
    adjs = [g.adjacency_matrix() for g in graph_list]
    return degs, adjs