def callback_recovery(loc):
    """Record recovery metrics between the current and generating dictionaries.

    Invoked by the learner with its ``locals()`` dict *loc*; appends one
    value per metric list on the tracking object ``loc['dict_obj']``.
    """
    tracker = loc['dict_obj']
    current = loc['dictionary']
    reference = tracker.generating_dict
    # Wasserstein (EMD) distances, one list per metric.
    for store, metric in ((tracker.wc, 'chordal'), (tracker.wfs, 'fubinistudy')):
        store.append(emd(current, reference, metric, scale=True))
    # Hausdorff distances for the same two metrics.
    for store, metric in ((tracker.hc, 'chordal'), (tracker.hfs, 'fubinistudy')):
        store.append(hausdorff(current, reference, metric, scale=True))
    # NOTE: beta_dist takes (reference, current) argument order.
    tracker.bd.append(beta_dist(reference, current))
    # Detection rates at two thresholds.
    tracker.dr99.append(detection_rate(current, reference, 0.99))
    tracker.dr97.append(detection_rate(current, reference, 0.97))
def callback_recovery(loc):
    """Append dictionary-recovery metrics to the tracking object in *loc*."""
    d = loc["dict_obj"]
    learned, truth = loc["dictionary"], d.generating_dict

    def scaled(fn, metric):
        # Helper: scaled distance between learned and ground-truth dictionaries.
        return fn(learned, truth, metric, scale=True)

    d.wc.append(scaled(emd, "chordal"))
    d.wfs.append(scaled(emd, "fubinistudy"))
    d.hc.append(scaled(hausdorff, "chordal"))
    d.hfs.append(scaled(hausdorff, "fubinistudy"))
    # NOTE: beta_dist takes (truth, learned) argument order.
    d.bd.append(beta_dist(truth, learned))
    d.dr99.append(detection_rate(learned, truth, 0.99))
    d.dr97.append(detection_rate(learned, truth, 0.97))
Example #3
0
def test_inhomogeneous_dims():
    """Metrics tolerant of mixed dims give 0 distance; strict ones raise ValueError."""
    cols = arange(n_dims)
    for metric in ('chordal_principal_angles', 'binetcauchy', 'geodesic'):
        for keep in range(n_dims, 0, -1):
            # Compare dm against a column-truncated copy of itself.
            truncated = [atoms[:, cols[:keep]] for atoms in dm]
            assert_almost_equal(0., emd(dm, truncated, metric, scale=True))
            assert_almost_equal(0., hausdorff(dm, truncated, metric, scale=True))
    for metric in ("chordal", "fubinistudy", "frobenius"):
        # These metrics reject dictionaries with mismatched dimensions.
        assert_raises(ValueError, emd, dm, [atoms[:, :-1] for atoms in dm], metric)
        assert_raises(ValueError, hausdorff, dm, [atoms[:, :-1] for atoms in dm], metric)
Example #4
0
def callback_distance(loc):
    """Learner callback: periodically log Wasserstein distance, detection
    rate and objective cost on the tracker ``loc['dict_obj']``.

    *loc* is the caller's ``locals()``; reads ``ii``, ``iter_offset``,
    ``n_batches``, ``dictionary`` and ``current_cost`` from it.
    """
    ii, iter_offset = loc['ii'], loc['iter_offset']
    n_batches = loc['n_batches']
    # BUG FIX: use floor division. Under Python 3, true division yields a
    # float, so np.mod(..., n_iter) == 0 only held when ii - iter_offset was
    # an exact multiple of n_batches, silently skipping most logging points.
    if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
        # Compute distance only every n_iter iterations, as in previous case
        d = loc['dict_obj']
        d.wasserstein.append(emd(loc['dictionary'], d.generating_dict,
                                 'chordal', scale=True))
        d.detect_rate.append(detection_rate(loc['dictionary'],
                                            d.generating_dict, 0.99))
        d.objective_error.append(loc['current_cost'])
def callback_distance(loc):
    """Learner callback: periodically log Wasserstein distance, detection
    rate and objective cost on the tracker ``loc['dict_obj']``.

    *loc* is the caller's ``locals()``; reads ``ii``, ``iter_offset``,
    ``n_batches``, ``dictionary`` and ``current_cost`` from it.
    """
    ii, iter_offset = loc['ii'], loc['iter_offset']
    n_batches = loc['n_batches']
    # BUG FIX: use floor division. Under Python 3, true division yields a
    # float, so np.mod(..., n_iter) == 0 only held when ii - iter_offset was
    # an exact multiple of n_batches, silently skipping most logging points.
    if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
        # Compute distance only every n_iter iterations, as in previous case
        d = loc['dict_obj']
        d.wasserstein.append(emd(loc['dictionary'], d.generating_dict,
                                 'chordal', scale=True))
        d.detection_rate.append(detectionRate(loc['dictionary'],
                                              d.generating_dict, 0.99))
        d.objective_error.append(loc['current_cost'])
Example #6
0
def callback_distance(loc):
    """Learner callback: periodically log Wasserstein distance, detection
    rate and objective cost on the tracker ``loc["dict_obj"]``.

    *loc* is the caller's ``locals()``; reads ``ii``, ``iter_offset``,
    ``n_batches``, ``dictionary`` and ``current_cost`` from it.
    """
    ii, iter_offset = loc["ii"], loc["iter_offset"]
    n_batches = loc["n_batches"]
    # BUG FIX: use floor division. Under Python 3, true division yields a
    # float, so np.mod(..., n_iter) == 0 only held when ii - iter_offset was
    # an exact multiple of n_batches, silently skipping most logging points.
    if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
        # Compute distance only every n_iter iterations, as in previous case
        d = loc["dict_obj"]
        d.wasserstein.append(
            emd(loc["dictionary"], d.generating_dict, "chordal", scale=True))
        d.detect_rate.append(
            detection_rate(loc["dictionary"], d.generating_dict, 0.99))
        d.objective_error.append(loc["current_cost"])
def callback_recovery(loc):
    """Log every distance metric between the current and generating dicts.

    The tracker ``loc['dict_obj']`` carries one list per metric, named
    ``w<suffix>`` (Wasserstein/EMD) and ``h<suffix>`` (Hausdorff), plus
    detection-rate lists ``dr99``/``dr97``.
    """
    d = loc['dict_obj']
    learned = loc['dictionary']
    truth = d.generating_dict
    metric_slots = (('c', 'chordal'),
                    ('fs', 'fubinistudy'),
                    ('cpa', 'chordal_principal_angles'),
                    ('bc', 'binetcauchy'),
                    ('g', 'geodesic'),
                    ('fb', 'frobenius'))
    # Wasserstein distances first, then Hausdorff, matching the list names.
    for suffix, metric in metric_slots:
        getattr(d, 'w' + suffix).append(
            emd(learned, truth, metric, scale=True))
    for suffix, metric in metric_slots:
        getattr(d, 'h' + suffix).append(
            hausdorff(learned, truth, metric, scale=True))
    # Detection rates at the 0.99 and 0.97 thresholds.
    d.dr99.append(detectionRate(learned, truth, 0.99))
    d.dr97.append(detectionRate(learned, truth, 0.97))
Example #8
0
def callback_recovery(loc):
    """Append all recovery distances and detection rates to the tracker."""
    d = loc["dict_obj"]
    current, target = loc["dictionary"], d.generating_dict
    # Attribute suffix -> metric name; insertion order matches the original
    # append order (wc, wfs, wcpa, wbc, wg, wfb, then the h* lists).
    metrics = {
        "c": "chordal",
        "fs": "fubinistudy",
        "cpa": "chordal_principal_angles",
        "bc": "binetcauchy",
        "g": "geodesic",
        "fb": "frobenius",
    }
    for prefix, fn in (("w", emd), ("h", hausdorff)):
        for short, metric in metrics.items():
            getattr(d, prefix + short).append(
                fn(current, target, metric, scale=True))
    # Detection rates at the 0.99 and 0.97 thresholds.
    for attr, threshold in (("dr99", 0.99), ("dr97", 0.97)):
        getattr(d, attr).append(detection_rate(current, target, threshold))
Example #9
0
def test_inhomogeneous_dims():
    """Mixed-dimension dictionaries: tolerant metrics give 0, strict ones raise."""
    column_order = arange(n_dims)
    for metric in ('chordal_principal_angles', 'binetcauchy', 'geodesic'):
        for width in range(n_dims, 0, -1):
            # dm compared with a copy keeping only the first `width` columns.
            sub = [kernel[:, column_order[:width]] for kernel in dm]
            assert_almost_equal(0., emd(dm, sub, metric, scale=True))
            assert_almost_equal(0., hausdorff(dm, sub, metric, scale=True))
    for metric in ("chordal", "fubinistudy", "frobenius"):
        # These metrics require matching dimensions and must raise.
        assert_raises(ValueError, emd, dm,
                      [kernel[:, :-1] for kernel in dm], metric)
        assert_raises(ValueError, hausdorff, dm,
                      [kernel[:, :-1] for kernel in dm], metric)
Example #10
0
# Configure the online multivariate dictionary learner.
learned_dict = MiniBatchMultivariateDictLearning(
    n_kernels=n_kernels, batch_size=batch_size, n_iter=n_iter,
    n_nonzero_coefs=n_nonzero_coefs, n_jobs=n_jobs,
    learning_rate=learning_rate, kernel_init_len=kernel_init_len,
    verbose=1, dict_init=dict_init, random_state=rng_global)

# Update learned dictionary at each iteration and compute a distance
# with the generating dictionary
for _ in range(max_iter):
    learned_dict = learned_dict.partial_fit(X)
    # Detection rate at the 0.99 threshold.
    detect_rate.append(
        detection_rate(learned_dict.kernels_, generating_dict, 0.99))
    # Wasserstein (EMD) distance under the chordal metric.
    wasserstein.append(
        emd(learned_dict.kernels_, generating_dict, 'chordal', scale=True))
    # Objective error reported by the learner.
    objective_error.append(learned_dict.error_.sum())

plot_univariate(array(objective_error), array(detect_rate),
                array(wasserstein), n_iter, 'univariate-case')
# Another possibility is to rely on a callback function such as 
def callback_distance(loc):
    """Learner callback: log the chordal Wasserstein distance every
    ``n_iter`` effective iterations.

    *loc* is the caller's ``locals()``; reads ``ii``, ``iter_offset``,
    ``n_batches`` and ``dictionary`` from it.
    """
    ii, iter_offset = loc['ii'], loc['iter_offset']
    n_batches = loc['n_batches']
    # BUG FIX: use floor division. Under Python 3, true division yields a
    # float, so np.mod(..., n_iter) == 0 only held when ii - iter_offset was
    # an exact multiple of n_batches, silently skipping most logging points.
    if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
        # Compute distance only every n_iter iterations, as in previous case
        d = loc['dict_obj']
        d.wasserstein.append(emd(loc['dictionary'], d.generating_dict,
                                 'chordal', scale=True))
Example #11
0
# Configure the online multivariate dictionary learner.
learned_dict = MiniBatchMultivariateDictLearning(
    n_kernels=n_kernels, batch_size=batch_size, n_iter=n_iter,
    n_nonzero_coefs=n_nonzero_coefs, n_jobs=n_jobs,
    learning_rate=learning_rate, kernel_init_len=kernel_init_len,
    verbose=1, dict_init=dict_init, random_state=rng_global)

# Update learned dictionary at each iteration and compute a distance
# with the generating dictionary
for _ in range(max_iter):
    learned_dict = learned_dict.partial_fit(X)
    # Detection rate at the 0.99 threshold.
    detection_rate.append(
        detectionRate(learned_dict.kernels_, generating_dict, 0.99))
    # Wasserstein (EMD) distance under the chordal metric.
    wasserstein.append(
        emd(learned_dict.kernels_, generating_dict, 'chordal', scale=True))
    # Objective error reported by the learner.
    objective_error.append(learned_dict.error_.sum())

plot_multivariate(array(objective_error), array(detection_rate),
                  100. - array(wasserstein), n_iter, 'multivariate-case')
# Another possibility is to rely on a callback function such as 
def callback_distance(loc):
    """Learner callback: log the chordal Wasserstein distance every
    ``n_iter`` effective iterations.

    *loc* is the caller's ``locals()``; reads ``ii``, ``iter_offset``,
    ``n_batches`` and ``dictionary`` from it.
    """
    ii, iter_offset = loc['ii'], loc['iter_offset']
    n_batches = loc['n_batches']
    # BUG FIX: use floor division. Under Python 3, true division yields a
    # float, so np.mod(..., n_iter) == 0 only held when ii - iter_offset was
    # an exact multiple of n_batches, silently skipping most logging points.
    if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
        # Compute distance only every n_iter iterations, as in previous case
        d = loc['dict_obj']
        d.wasserstein.append(emd(loc['dictionary'], d.generating_dict,
                                 'chordal', scale=True))
Example #12
0
    kernel_init_len=kernel_init_len,
    verbose=1,
    dict_init=dict_init,
    random_state=rng_global,
)

# Refine the dictionary online and, at each step, record metrics against
# the generating dictionary.
for _step in range(max_iter):
    learned_dict = learned_dict.partial_fit(X)
    # Detection rate at the 0.99 threshold.
    detect_rate.append(detection_rate(learned_dict.kernels_,
                                      generating_dict, 0.99))
    # Wasserstein (EMD) distance under the chordal metric.
    wasserstein.append(emd(learned_dict.kernels_, generating_dict,
                           "chordal", scale=True))
    # Objective error reported by the learner.
    objective_error.append(learned_dict.error_.sum())

plot_univariate(array(objective_error), array(detect_rate),
                array(wasserstein), n_iter, "univariate-case")


# Another possibility is to rely on a callback function such as
def callback_distance(loc):
    ii, iter_offset = loc["ii"], loc["iter_offset"]