Example #1
import numpy as np
import graphtools
import m_phate


def test_m_phate(n_jobs):
    # create fake data
    n_time_steps = 50
    n_points = 20
    n_dim = 10
    n_pca = 5
    np.random.seed(42)
    data = np.cumsum(np.random.normal(
        0, 1, (n_time_steps, n_points, n_dim)), axis=0)

    # embedding
    m_phate_op = m_phate.M_PHATE(n_jobs=n_jobs, verbose=0, n_pca=n_pca)
    m_phate_data = m_phate_op.fit_transform(data)

    assert m_phate_data.shape[0] == n_points * n_time_steps
    assert m_phate_data.shape[1] == 2

    # setting parameters to their current values should leave the cached graph intact
    m_phate_op.set_params(intraslice_knn=m_phate_op.intraslice_knn)
    assert isinstance(m_phate_op.graph, graphtools.base.BaseGraph)
    m_phate_op.set_params(interslice_knn=m_phate_op.interslice_knn)
    assert isinstance(m_phate_op.graph, graphtools.base.BaseGraph)
    m_phate_op.set_params(n_svd=m_phate_op.n_svd)
    assert isinstance(m_phate_op.graph, graphtools.base.BaseGraph)

    # changing a graph parameter should invalidate (reset) the cached graph
    G = m_phate_op.graph
    m_phate_op.set_params(intraslice_knn=m_phate_op.intraslice_knn+1)
    assert m_phate_op.graph is None
    m_phate_op.graph = G
    m_phate_op.set_params(interslice_knn=m_phate_op.interslice_knn+1)
    assert m_phate_op.graph is None
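
Example #1 embeds a synthetic (n_time_steps, n_points, n_dim) tensor but only checks output shapes and graph caching. For reference, a minimal end-to-end sketch of the same API, with a scatter plot colored by time step, is shown below; the time-major row ordering is an assumption based on the shape assertion above and on the `epoch = np.repeat(np.arange(n), m)` bookkeeping in the later examples.

import numpy as np
import matplotlib.pyplot as plt
import m_phate

# synthetic trace: (time steps, points, features), as in the test above
np.random.seed(42)
trace = np.cumsum(np.random.normal(0, 1, (50, 20, 10)), axis=0)

# embed every (time step, point) pair into 2D
m_phate_op = m_phate.M_PHATE(verbose=0)
embedding = m_phate_op.fit_transform(trace)  # (50 * 20, 2), per the shape assertions above

# assumed ordering: rows are time-major (all points at t=0, then t=1, ...)
time = np.repeat(np.arange(trace.shape[0]), trace.shape[1])
plt.scatter(embedding[:, 0], embedding[:, 1], c=time, s=5)
plt.colorbar(label="time step")
plt.show()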
Example #2
import numpy as np
import m_phate
import m_phate.kernel
import m_phate.utils


def test_multislice_kernel(intraslice_knn):
    # create fake data
    n_time_steps = 50
    n_points = 20
    n_dim = 10
    np.random.seed(42)
    data = np.cumsum(np.random.normal(
        0, 1, (n_time_steps, n_points, n_dim)), axis=0)
    kernel = m_phate.kernel.multislice_kernel(m_phate.utils.normalize(data), 
                                              intraslice_knn=intraslice_knn,
                                              decay=None)

    nnz = 0
    # intraslice kernel
    for t in range(n_time_steps):
        subkernel = kernel[t*n_points:(t+1)*n_points][:,t*n_points:(t+1)*n_points]
        assert subkernel.sum() == n_points * (intraslice_knn+1)
        nnz += subkernel.nnz

    # interslice kernel
    for i in range(n_points):
        subkernel = kernel[i::n_points][:,i::n_points]
        assert subkernel.nnz == n_time_steps ** 2
        nnz += subkernel.nnz

    # diagonal is double counted
    nnz -= kernel.shape[0]
    # everything else should be zero
    assert nnz == kernel.nnz

    # check this passes through phate op
    m_phate_op = m_phate.M_PHATE(intraslice_knn=intraslice_knn,
                                 decay=None, verbose=0)
    m_phate_data = m_phate_op.fit_transform(data)

    # threshold: drop tiny values so the comparison matches the (thresholded) graph kernel
    kernel.data[kernel.data < 1e-4] = 0

    assert m_phate_data.shape[0] == n_points * n_time_steps
    assert m_phate_data.shape[1] == 2
    np.testing.assert_allclose((m_phate_op.graph.kernel - kernel).data, 0,
                               rtol=0, atol=1e-14)
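
The assertions above rely on the row/column layout of the multislice kernel: row t * n_points + i corresponds to point i at time step t, so each intraslice kernel is a contiguous n_points x n_points block and each interslice kernel is recovered by striding with step n_points. A small sketch extracting those two blocks directly, under the same layout assumption (the tiny block sizes here are arbitrary):

import numpy as np
import m_phate.kernel
import m_phate.utils

# tiny synthetic trace: 10 time steps, 8 points, 5 features
np.random.seed(0)
data = np.cumsum(np.random.normal(0, 1, (10, 8, 5)), axis=0)
n_time_steps, n_points, _ = data.shape

kernel = m_phate.kernel.multislice_kernel(
    m_phate.utils.normalize(data), intraslice_knn=3, decay=None)

# intraslice block for time step t: rows/columns t*n_points ... (t+1)*n_points - 1
t = 1
intra = kernel[t * n_points:(t + 1) * n_points][:, t * n_points:(t + 1) * n_points]
print(intra.toarray())  # kNN kernel within time step t

# interslice block for point i: every n_points-th row/column starting at i
i = 2
inter = kernel[i::n_points][:, i::n_points]
print(inter.toarray())  # dense n_time_steps x n_time_steps block linking point i across time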
Example #3
    # trace has shape (epochs, hidden units, trace samples)
    n = trace.shape[0]  # number of epochs (time steps)
    m = trace.shape[1]  # number of hidden units (points)
    neuron_ids = np.tile(np.arange(m), n)
    layer_ids = np.tile(data['layer'], n)
    epoch = np.repeat(np.arange(n), m)
    # the trace samples are 10 examples of each of the 10 digits
    digit_ids = np.repeat(np.arange(10), 10)
    # per-digit activity: L2 norm of each unit's activations over that digit's samples
    digit_activity = np.array([
        np.sqrt(np.sum(trace[:, :, digit_ids == digit]**2, axis=2))
        for digit in np.unique(digit_ids)
    ])
    # label each (epoch, unit) pair with the digit it responds to most strongly
    most_active_digit = np.argmax(digit_activity, axis=0).flatten()

    if filename in out:
        m_phate_data = out[filename]['phate']
    else:
        m_phate_op = m_phate.M_PHATE()
        m_phate_data = m_phate_op.fit_transform(trace)

    out[filename] = {
        'phate': m_phate_data,
        'epoch': epoch,
        'most_active_digit': most_active_digit,
        'layer_ids': layer_ids,
        'loss': loss,
        'val_loss': val_loss,
        'digit_activity': digit_activity
    }

plt.rc('font', size=14)
filenames = [
    'dropout', 'kernel_l1', 'kernel_l2', 'vanilla', 'activity_l1',
Example #4
        n = trace.shape[0]
        m = trace.shape[1]
        neuron_ids = np.tile(np.arange(m), n)
        layer_ids = np.tile(data['layer'], n)
        epoch = np.repeat(np.arange(n) + n_skip, m)
        digit_ids = np.repeat(np.arange(10), 10)
        digit_activity = np.array([
            np.sqrt(np.sum(trace[:, :, digit_ids == digit]**2, axis=2))
            for digit in np.unique(digit_ids)
        ])
        most_active_digit = np.argmax(digit_activity, axis=0).flatten()

        if filename in out:
            m_phate_data = out[filename]['phate']
        else:
            m_phate_op = m_phate.M_PHATE(interslice_knn=12, n_jobs=20)
            m_phate_data = m_phate_op.fit_transform(trace)

        out[filename] = {
            'phate': m_phate_data,
            'epoch': epoch,
            'most_active_digit': most_active_digit,
            'neuron_ids': neuron_ids,
            'layer_ids': layer_ids,
            'loss': loss,
            'val_loss': val_loss,
            'val_accuracy': val_acc,
            'task': np.repeat(data['task'][0, n_skip::n_step], m),
            'digit_activity': digit_activity
        }
    except Exception as e:
Example #5
phate_naive = phate_naive_op.fit_transform(trace_flat)
tasklogger.log_complete("PHATE")
tasklogger.log_start("DM")
dm_naive = m_phate.kernel.DM(phate_naive_op.graph)
tasklogger.log_complete("DM")
tasklogger.log_start("t-SNE")
tsne_naive = TSNE().fit_transform(trace_flat)
tasklogger.log_complete("t-SNE")
tasklogger.log_start("ISOMAP")
isomap_naive = Isomap().fit_transform(trace_flat)
tasklogger.log_complete("ISOMAP")
tasklogger.log_complete("Naive DR")

tasklogger.log_start("Multislice DR")
tasklogger.log_start("M-PHATE")
m_phate_op = m_phate.M_PHATE(verbose=0)
m_phate_data = m_phate_op.fit_transform(trace)
tasklogger.log_complete("M-PHATE")
tasklogger.log_start("DM")
dm_ms = m_phate.kernel.DM(m_phate_op.graph)
tasklogger.log_complete("DM")

geodesic_file = os.path.expanduser(
    "data/classifier_{}_geodesic.npy".format(dataset))
# the expensive geodesic computation is disabled here; flip to True to recompute and cache to geodesic_file
if False:
    tasklogger.log_start("geodesic distances")
    tasklogger.log_warning(
        "Warning: geodesic distance calculation will take a long time.")
    D_geo = m_phate_op.graph.shortest_path(distance='affinity')
    tasklogger.log_complete("geodesic distances")
    np.save(geodesic_file, D_geo)
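
In this script `trace_flat` is defined outside the excerpt; for the naive baselines the three-dimensional trace is presumably flattened so that every (time step, point) pair becomes one row, matching the row count of the multislice embedding. A hedged sketch of that setup follows; the reshape and the construction of `phate_naive_op` are assumptions, not shown in the source. (TSNE and Isomap in the excerpt are presumably the sklearn.manifold estimators.)

import numpy as np
import phate

# synthetic stand-in for the network trace: (time steps, points, features)
np.random.seed(42)
trace = np.cumsum(np.random.normal(0, 1, (50, 20, 10)), axis=0)
n_time_steps, n_points, n_dim = trace.shape

# assumed definition of trace_flat: one row per (time step, point) pair,
# a (n_time_steps * n_points, n_dim) matrix suitable for PHATE / t-SNE / Isomap
trace_flat = trace.reshape(n_time_steps * n_points, n_dim)

# assumed construction of the naive PHATE operator used in the excerpt
phate_naive_op = phate.PHATE(verbose=0)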
Example #6
    n = trace.shape[0]
    m = trace.shape[1]
    neuron_ids = np.tile(np.arange(m), n)
    layer_ids = np.tile(data['layer'], n)
    epoch = np.repeat(np.arange(n), m)
    digit_ids = np.repeat(np.arange(10), 10)
    digit_activity = np.array([
        np.sqrt(np.sum(trace[:, :, digit_ids == digit]**2, axis=2))
        for digit in np.unique(digit_ids)
    ])
    most_active_digit = np.argmax(digit_activity, axis=0).flatten()

    if filename in out:
        m_phate_data = out[filename]['phate']
    else:
        m_phate_op = m_phate.M_PHATE(n_jobs=20)
        m_phate_data = m_phate_op.fit_transform(trace)

    out[filename] = {
        'phate': m_phate_data,
        'epoch': epoch,
        'most_active_digit': most_active_digit,
        'layer_ids': layer_ids,
        'loss': loss,
        'val_loss': val_loss,
        'digit_activity': digit_activity
    }

plt.rc('font', size=14)
filenames = [
    'dropout', 'kernel_l1', 'kernel_l2', 'vanilla', 'activity_l1',
Example #7
        n = trace.shape[0]
        m = trace.shape[1]
        neuron_ids = np.tile(np.arange(m), n)
        layer_ids = np.tile(data['layer'], n)
        epoch = np.repeat(np.arange(n) + n_skip, m)
        digit_ids = np.repeat(np.arange(10), 10)
        digit_activity = np.array([
            np.sqrt(np.sum(trace[:, :, digit_ids == digit]**2, axis=2))
            for digit in np.unique(digit_ids)
        ])
        most_active_digit = np.argmax(digit_activity, axis=0).flatten()

        if filename in out:
            m_phate_data = out[filename]['phate']
        else:
            m_phate_op = m_phate.M_PHATE(interslice_knn=12)
            m_phate_data = m_phate_op.fit_transform(trace)

        out[filename] = {
            'phate': m_phate_data,
            'epoch': epoch,
            'most_active_digit': most_active_digit,
            'neuron_ids': neuron_ids,
            'layer_ids': layer_ids,
            'loss': loss,
            'val_loss': val_loss,
            'val_accuracy': val_acc,
            'task': np.repeat(data['task'][0, n_skip::n_step], m),
            'digit_activity': digit_activity
        }
    except Exception as e: