def plot_benchmark1():
    """Plot various quantities obtained for varying values of alpha.

    Reads a cached ground truth and the per-alpha estimation outputs from
    ``cache_dir``, then plots — as functions of the regularization parameter
    alpha — the objective, the log-likelihood, the penalized log-likelihood,
    the estimated sparsity (with the true sparsity as a horizontal line) and
    the distance to the ground-truth precisions.

    Relies on module-level helpers (``get_cache_dir``, ``get_ground_truth``,
    ``empirical_covariances``, ``iter_outputs``, ``group_sparse_scores``,
    ``distance``, ``plot``) and on the module-level ``output_dir``.
    """
    parameters = dict(n_var=200,
                      n_tasks=5,
                      density=0.15,

                      tol=1e-2,
#                      max_iter=50,
                      min_samples=100,
                      max_samples=150)

    cache_dir = get_cache_dir(parameters, output_dir=output_dir)
    gt = get_ground_truth(cache_dir)
    # Stack per-task precision matrices into one (n_var, n_var, n_tasks) array.
    gt['precisions'] = np.dstack(gt['precisions'])

    # NOTE(review): emp_covs is never used below; only the (normalized)
    # sample counts are needed for the scores.
    emp_covs, n_samples = empirical_covariances(gt['signals'])
    n_samples /= n_samples.sum()  # normalize to per-task weights

    alpha = []
    objective = []
    log_likelihood = []
    ll_penalized = []
    sparsity = []
    kl = []

    # Invert each ground-truth precision matrix to get the true covariances,
    # which group_sparse_scores expects.
    true_covs = np.empty(gt['precisions'].shape)
    for k in range(gt['precisions'].shape[-1]):
        true_covs[..., k] = np.linalg.inv(gt['precisions'][..., k])

    for out in iter_outputs(cache_dir):
        alpha.append(out['alpha'])
        # Last objective value, sign-flipped for plotting.
        objective.append(- out['objective'][-1])
        ll, llpen = group_sparse_scores(out['precisions'],
                                        n_samples, true_covs, out['alpha'])
        log_likelihood.append(ll)
        ll_penalized.append(llpen)
        # Fraction of non-zero coefficients in the first task's precision.
        sparsity.append(1. * (out['precisions'][..., 0] != 0).sum()
                        / out['precisions'].shape[0] ** 2)
        kl.append(distance(out['precisions'], gt['precisions']))

    gt["true_sparsity"] = (1. * (gt['precisions'][..., 0] != 0).sum()
                           / gt['precisions'].shape[0] ** 2)
    title = (("n_var: {n_var}, n_tasks: {n_tasks}, "
             + "true sparsity: {true_sparsity:.2f} "
             + "\ntol: {tol:.2e} samples: {min_samples}-{max_samples}").format(
                 true_sparsity=gt["true_sparsity"],
                 **parameters))

    plot(alpha, objective, label="objective", title=title)
    plot(alpha, log_likelihood, label="log-likelihood", new_figure=False)
    plot(alpha, ll_penalized, label="penalized L-L", new_figure=False)

    plot(alpha, sparsity, label="sparsity", title=title)
    # Horizontal reference line at the ground-truth sparsity level.
    pl.hlines(gt["true_sparsity"], min(alpha), max(alpha))

    plot(alpha, kl, label="distance", title=title)
    pl.show()
    # NOTE(review): an orphaned "second-order sensibility" section followed
    # here, referencing names (last_m, last_m_inv, derivative, l) that are
    # never defined in this function and would have raised NameError at
    # runtime.  It looked like a leftover paste from another function and
    # has been removed — restore it in its proper context if still needed.

if __name__ == "__main__":
    # Cache location; also read (as a module-level global) by the plotting
    # helpers in this file.
    output_dir = "_gsc_sensitivity"
    # Smaller alternative problem size, kept for reference:
    ## parameters = {"n_var": 10, "n_tasks": 40, "density": 0.1,
    ##                "tol": 1e-4, "alpha": 0.02}
    parameters = dict(n_var=100, n_tasks=40, density=0.1,
                      tol=1e-2, alpha=0.02)

    compute_stats(get_cache_dir(parameters, output_dir))