# ---------------------------------------------------------------------------
# NOTE(review): this chunk arrived with all whitespace collapsed onto one
# physical line; the layout below is a reconstruction.  The first section
# (the `loc[...]` bookkeeping) reads like the *interior* of the
# `callback_distance` function referenced further down -- its `def` line is
# outside this chunk, so confirm the intended indentation against the full
# file before merging.
# ---------------------------------------------------------------------------

# -- callback interior: record convergence metrics on the dict object -------
n_batches = loc['n_batches']
# BUGFIX: use floor division.  Under Python 3, `/` on two ints yields a
# float, so `np.mod(..., n_iter) == 0` was almost never true and the metric
# recording below was silently skipped.  `//` restores the Python-2-era
# integer semantics: record once every `n_iter` passes over the batches.
if np.mod((ii - iter_offset) // int(n_batches), n_iter) == 0:
    # Compute distance only every 5 iterations, as in previous case
    d = loc['dict_obj']
    d.wasserstein.append(
        emd(loc['dictionary'], d.generating_dict, 'chordal', scale=True))
    d.detect_rate.append(
        detection_rate(loc['dictionary'], d.generating_dict, 0.99))
    d.objective_error.append(loc['current_cost'])

# -- script section: re-learn the dictionary with the recording callback ----
# reinitializing the random generator
learned_dict2 = MiniBatchMultivariateDictLearning(
    n_kernels=n_kernels, batch_size=batch_size, n_iter=max_iter * n_iter,
    n_nonzero_coefs=n_nonzero_coefs, callback=callback_distance,
    n_jobs=n_jobs, learning_rate=learning_rate,
    kernel_init_len=kernel_init_len, verbose=1,
    dict_init=dict_init, random_state=rng_global)

# Attributes the callback reads/appends to while `fit` runs.
learned_dict2.generating_dict = list(generating_dict)
learned_dict2.wasserstein = []
learned_dict2.detect_rate = []
learned_dict2.objective_error = []

learned_dict2 = learned_dict2.fit(X)

# Plot the recorded objective error, detection rate and Wasserstein
# distance curves gathered by the callback during fitting.
plot_univariate(array(learned_dict2.objective_error),
                array(learned_dict2.detect_rate),
                array(learned_dict2.wasserstein),
                n_iter=1, figname='univariate-case-callback')