# ---------------------------------------------------------------------
# Example 1
# ---------------------------------------------------------------------
# ======================================================================
# STEP 5: After verifying that your implementation of
#  utils.autoencoder_cost_and_grad is correct, You can start training your
#  autoencoder, using scipy.optimize.minimize L-BFGS-B.

if RUN_STEP_5_TRAIN_AUTOENCODER:
    # Initialize the flat parameter vector theta for the autoencoder.
    theta = utils.initialize(hidden_size, visible_size)

    print(
        "\nRunning scipy.optimize.minimize on {0} parameters, over {1} training patches_train"
        .format(theta.shape[0], patches_train.shape[1]))
    start_time = datetime.datetime.now()
    print("    START TIME {0}".format(
        utils.get_pretty_time_string(start_time)))
    # Profile a single cost/gradient evaluation (the objective used by
    # scipy.optimize.minimize) before starting the training runs.
    import cProfile
    cProfile.run(
        "utils.autoencoder_cost_and_grad(theta, visible_size, hidden_size, lambda_, patches_train)"
    )
    # J = lambda x: utils.autoencoder_cost_and_grad(x, visible_size, hidden_size, lambda_, patches_train)
    # Grid search over the sparsity penalty weight (beta_) and the sparsity
    # target (rho_) for the sparse-autoencoder objective.
    for beta_ in [0.01, 0.1, 0.2]:
        for rho_ in [0.05, 0.01, 0.005]:

            # Objective returning (cost, grad); consumed by minimize below.
            # beta_/rho_ are read when J is called, i.e. within this same
            # iteration, so the late-binding closure is safe here.
            J = lambda x: utils.autoencoder_cost_and_grad_sparse(
                x, visible_size, hidden_size, lambda_, beta_, rho_,
                patches_train)
            options_ = {'maxiter': 4000, 'disp': False}
            # NOTE(review): the scraped snippet is truncated here — the
            # minimize() call below is missing its remaining arguments
            # (compare the complete call in the following example).
            result = scipy.optimize.minimize(J,
                                             theta,
# ---------------------------------------------------------------------
# Example 2
# ---------------------------------------------------------------------
# ======================================================================
# STEP 5: After verifying that your implementation of
#  utils.autoencoder_cost_and_grad is correct, You can start training your
#  autoencoder, using scipy.optimize.minimize L-BFGS-B.

if RUN_STEP_5_TRAIN_AUTOENCODER:
    # Initialize the flat parameter vector theta for the autoencoder.
    theta = utils.initialize(hidden_size, visible_size)

    print(
        "\nRunning scipy.optimize.minimize on {0} parameters, over {1} training patches_train"
        .format(theta.shape[0], patches_train.shape[1]))
    start_time = datetime.datetime.now()
    print("    START TIME {0}".format(
        utils.get_pretty_time_string(start_time)))

    # Objective for scipy.optimize.minimize.  Because jac=True is passed
    # below, minimize expects a single callable returning (cost, grad).
    # BUG FIX: this definition was commented out, so `J` was undefined at
    # the minimize() call and the block raised NameError at runtime.
    def J(x):
        """Return (cost, gradient) of the autoencoder at parameters x."""
        return utils.autoencoder_cost_and_grad(x, visible_size, hidden_size,
                                               lambda_, patches_train)

    options_ = {'maxiter': 4000, 'disp': False}
    result = scipy.optimize.minimize(J,
                                     theta,
                                     method='L-BFGS-B',
                                     jac=True,
                                     options=options_)
    opt_theta = result.x  # theta found after optimization

    end_time = datetime.datetime.now()
    print("    END TIME {0}".format(utils.get_pretty_time_string(end_time)))
    total_time = end_time - start_time
    time_elapsed_string = utils.get_pretty_time_string(total_time, delta=True)
    print("    Total run time: {0}".format(time_elapsed_string))