Example #1
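Every snippet in this listing calls a `discrete_cmap` helper that is never shown. A minimal sketch, assuming the usual resampled-colormap approach (plus the imports the snippets rely on):

import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns


def discrete_cmap(N, base_cmap=None):
    # Resample a named matplotlib colormap down to N discrete colors.
    # Calling the result with np.linspace(0, 1, N) yields N distinct
    # colors, which is how the snippets below use it.
    return plt.get_cmap(base_cmap, N)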
algorithms = ['random', 'bayesian', 'grad_search']  # inferred from the dict_title keys below
current_palette = sns.color_palette("colorblind")  # as defined in Examples #3 and #5

dict_title = {}
dict_title['random'] = 'Random-search'
dict_title['bayesian'] = 'Bayesian'
dict_title['grad_search'] = '1st order method'

plt.close('all')
fig, axarr = plt.subplots(1,
                          len(algorithms),
                          sharex=True,
                          sharey=True,
                          figsize=[10.67, 3])

objs_full = np.load("results/objs_grid_search100.npy", allow_pickle=True)
log_alphas_full = np.load("results/log_alphas_grid_search100.npy",
                          allow_pickle=True)

cmap = discrete_cmap(10, 'Reds')
c = np.linspace(1, 10, 10)

for i, algorithm in enumerate(algorithms):
    objs = np.load("results/objs_%s.npy" % algorithm, allow_pickle=True)
    log_alphas = np.load("results/log_alphas_%s.npy" % algorithm,
                         allow_pickle=True)

    axarr[i].plot(log_alphas_full,
                  objs_full / objs_full[0],
                  color=current_palette[0],
                  zorder=1)
    pcm = axarr[i].scatter(log_alphas,
                           objs / objs_full[0],
                           c=c,
                           cmap=cmap,
                           marker='x',
                           clip_on=False)  # closing args assumed, mirroring the scatter in Example #5
Example #2
dict_colors = {  # colormap per optimizer; earlier entries omitted in the original snippet
    'adam': 'Reds'
}

fig, ax = plt.subplots(figsize=(8, 3))
ax.plot(alphas / alphas[0], objs, color=current_palette[0])
ax.plot(alphas / alphas[0],
        objs,
        'bo',
        label='0th-order method (grid search)',
        color=current_palette[1])

for optimizer_name in optimizer_names:
    monitor = monitors[optimizer_name]
    p_alphas_grad = np.array(monitor.alphas) / alpha_max
    objs_grad = np.array(monitor.objs)
    cmap = discrete_cmap(len(p_alphas_grad), dict_colors[optimizer_name])
    ax.scatter(p_alphas_grad,
               objs_grad,
               label=optimizer_name,
               marker='X',
               color=cmap(np.linspace(0, 1, len(p_alphas_grad))),
               zorder=10)

ax.set_xlabel(r"$\lambda / \lambda_{\max}$")
ax.set_ylabel(r"$ \sum_i^n \log \left ( 1 + e^{-y_i^{\rm{val}} X_i^{\rm{val}} "
              r"\hat \beta^{(\lambda)} } \right ) $")

ax.set_xscale("log")
plt.tick_params(width=5)
plt.legend()
plt.tight_layout()
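The loop above reads `monitors[optimizer_name]`, a dict of per-optimizer monitor objects built earlier in the script. A minimal sketch of that setup, reusing the `grad_search` call shown in Example #4 (the `Monitor` class and `dict_optimizers` mapping are assumptions, not shown in the original):

optimizer_names = ['gradient_descent', 'adam']

monitors = {}
for optimizer_name in optimizer_names:
    monitor = Monitor()  # assumed recorder exposing .alphas and .objs
    optimizer = dict_optimizers[optimizer_name]  # hypothetical name -> optimizer map
    grad_search(algo, criterion, model, optimizer, X, y,
                alpha0=alpha0, monitor=monitor)
    monitors[optimizer_name] = monitor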
Example #3
##############################################################################
# Plot results
# ------------
objs = reg.mse_path_.mean(axis=1)

p_alphas_grad = np.array(monitor_grad.alphas) / alpha_max
objs_grad = np.array(monitor_grad.objs)

print(f"Time for grid search: {t_sk:.2f} s")
print(f"Time for grad search (sparse-ho): {t_grad_search:.2f} s")

print(f'Minimum outer criterion value with grid search: {objs.min():.5f}')
print(f'Minimum outer criterion value with grad search: {objs_grad.min():.5f}')

current_palette = sns.color_palette("colorblind")
cmap = discrete_cmap(len(objs_grad), 'Greens')

fig, ax = plt.subplots(figsize=(5, 3))
ax.plot(alphas / alphas[0], objs, color=current_palette[0])
ax.plot(alphas / alphas[0],
        objs,
        'bo',
        label='0th-order method (grid search)',
        color=current_palette[1])
ax.scatter(p_alphas_grad,
           objs_grad,
           label='1st-order method',
           marker='X',
           color=cmap(np.linspace(0, 1, len(objs_grad))),
           s=40,
           zorder=40)
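Example #3's `reg.mse_path_` suggests the grid-search baseline is scikit-learn's `LassoCV`. A minimal sketch of that baseline under the λ/λ_max parametrization used in the plots (the grid size and range are assumptions):

import numpy as np
from sklearn.linear_model import LassoCV

# Largest useful Lasso penalty: for alpha >= alpha_max the solution is all zeros.
alpha_max = np.max(np.abs(X.T @ y)) / len(y)
alphas = alpha_max * np.geomspace(1, 1e-4, 100)

reg = LassoCV(alphas=alphas, cv=5).fit(X, y)
# reg.mse_path_ has shape (n_alphas, n_folds); the snippet averages over folds.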
Example #4
t_grad_search = -time.time()  # timing start, matching the += time.time() below
grad_search(algo,  # call head reconstructed from the identical call in Example #5
            criterion,
            model,
            optimizer,
            X,
            y,
            alpha0=alpha0,
            monitor=monitor)
t_grad_search += time.time()
monitor.alphas = np.array(monitor.alphas)

print("Time grid search %f" % t_grid_search)
print("Time grad-search %f" % t_grad_search)
print("Minimum grid search %0.3e" % results.min())
print("Minimum grad search %0.3e" % np.array(monitor.objs).min())

##############################################################################
# Plot results
# ------------

cmap = discrete_cmap(n_outer, 'Reds')
X, Y = np.meshgrid(alphas_l1 / alpha_max, alphas_l2 / alpha_max)
fig, ax = plt.subplots(1, 1)
cp = ax.contour(X, Y, results.T, levels=40)
ax.scatter(X,
           Y,
           s=10,
           c="orange",
           marker="o",
           label="$0$th order (grid search)",
           clip_on=False)
ax.scatter(monitor.alphas[:, 0] / alpha_max,
           monitor.alphas[:, 1] / alpha_max,
           s=40,
           color=cmap(np.linspace(0, 1, n_outer)),
           zorder=10,
           marker="X",
           label="$1$st order")  # closing args taken from the matching call in Example #5
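Example #4 contours a `results` array over the 2-D (`alphas_l1`, `alphas_l2`) grid. A sketch of how a grid search could fill it, matching the timing pattern used for `t_grad_search` (the `held_out_loss` helper is hypothetical):

t_grid_search = -time.time()
results = np.zeros((len(alphas_l1), len(alphas_l2)))
for i, alpha_l1 in enumerate(alphas_l1):
    for j, alpha_l2 in enumerate(alphas_l2):
        # hypothetical helper: fit an elastic net at (alpha_l1, alpha_l2)
        # and return the held-out criterion value
        results[i, j] = held_out_loss(X, y, alpha_l1, alpha_l2)
t_grid_search += time.time()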
Example #5

t_grad_search = time.time() - t0

print('sparse-ho finished')
print(f"Time to compute grad search: {t_grad_search:.2f} s")


p_alphas_grad = np.array(monitor_grad.alphas) / alpha_max

objs_grad = np.array(monitor_grad.objs)

current_palette = sns.color_palette("colorblind")

fig = plt.figure(figsize=(5, 3))
cmap = discrete_cmap(len(p_alphas_grad), "Greens")

plt.plot(alphas / alphas[0], objs, color=current_palette[0])
plt.plot(
    alphas / alphas[0], objs, 'bo',
    label='0th-order method (grid search)', color=current_palette[1])
plt.scatter(
    p_alphas_grad, objs_grad, label='1st-order method',
    marker='X', color=cmap(np.linspace(0, 1, len(objs_grad))), zorder=10)
plt.xlabel(r"$\lambda / \lambda_{\max}$")
plt.ylabel(
    r"$ \sum_i^n \log \left ( 1 + e^{-y_i^{\rm{val}} X_i^{\rm{val}} "
    r"\hat \beta^{(\lambda)} } \right ) $")

plt.xscale("log")
plt.tick_params(width=5)
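# `round_down`, used just below, is not defined anywhere in these snippets;
# a plausible stand-in (an assumption) that floors each value at the given
# number of decimals:
def round_down(a, decimals):
    factor = 10 ** decimals
    return np.floor(np.asarray(a) * factor) / factor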
levels = np.geomspace(min_grid, objs_full.max(), num=40)
levels = round_down(levels, 2)

plt.figure()
plt.contourf(X, Y, Z, levels=levels)  # use the levels computed above
plt.show()  # assumed intent; a bare plt.plot() here would be a no-op

for i, algorithm in enumerate(algorithms):
    objs = np.load("results/%s_objs_%s_enet.npy" % (dataset, algorithm),
                   allow_pickle=True)
    log_alphas = np.load("results/%s_log_alphas_%s_enet.npy" %
                         (dataset, algorithm),
                         allow_pickle=True)
    assert objs.min() >= min_grid
    cmap = discrete_cmap(len(objs), 'Reds')
    c = np.linspace(1, len(objs), len(objs))
    cs = axarr[i].contourf(X, Y, Z.T, levels=levels, cmap='viridis')
    pcm = axarr[i].scatter(log_alphas[:, 0],
                           log_alphas[:, 1],
                           c=c,
                           marker='x',
                           cmap=cmap,
                           clip_on=False)

    axarr[i].set_title(dict_title[algorithm])
    axarr[i].set_xlabel(r"$\lambda_1 - \lambda_{\max}$")
    print(objs.min())

cba = fig.colorbar(pcm, ax=axarr[3], ticks=[1, 5, 10, 15, 20, 25])
cba.set_label('Iterations')
grad_search(
    algo, criterion, model, optimizer, X, y, alpha0=alpha0,
    monitor=monitor)
t_grad_search += time.time()
monitor.alphas = np.array(monitor.alphas)

print("Time grid search %f" % t_grid_search)
print("Time grad-search %f" % t_grad_search)
print("Minimum grid search %0.3e" % results.min())
print("Minimum grad search %0.3e" % np.array(monitor.objs).min())

##############################################################################
# Plot results
# ------------

cmap = discrete_cmap(n_outer, 'Greens')
X, Y = np.meshgrid(alphas_l1 / alpha_max, alphas_l2 / alpha_max)
fig, ax = plt.subplots(1, 1)
cp = ax.contourf(X, Y, results.T)
ax.scatter(
    X, Y, s=10, c="orange", marker="o", label="$0$th order (grid search)",
    clip_on=False)
ax.scatter(
    monitor.alphas[:, 0] / alpha_max, monitor.alphas[:, 1] / alpha_max,
    s=40, color=cmap(np.linspace(0, 1, n_outer)), zorder=10,
    marker="X", label="$1$st order")
ax.set_xlim(X.min(), X.max())
ax.set_xlabel("L1 regularization")
ax.set_ylabel("L2 regularization")
ax.set_ylim(Y.min(), Y.max())
ax.set_title("Elastic net held-out prediction loss on the test set")
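The labels set in the two `ax.scatter` calls above only appear once a legend is drawn; a typical closing for this figure (not part of the original snippet):

ax.legend(loc='best')
plt.tight_layout()
plt.show()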