# Exemplo n.º 1 ("Example No. 1") — scraped example-site header, kept as a comment
# 0
            type='exponential_decay'
        )
    )


    tf.set_random_seed(1)
    _, biggest_idx, _ = get_num_exps_and_res_files(save_path)
    if biggest_idx is None:
        initial_experiment_counter_value = 0
    else:
        initial_experiment_counter_value = biggest_idx + 1
    env.grid_search_for_meta(
        evaluation,
        kwargs_for_pupil_building,
        kwargs_for_optimizer_building,
        build_pupil_hyperparameters=build_pupil_hyperparameters,
        build_optimizer_hyperparameters=build_optimizer_hyperparameters,
        other_hyperparameters=other_hyperparameters,
        initial_experiment_counter_value=initial_experiment_counter_value,
        **launch_kwargs
    )


hp_names = get_hp_names_from_conf_file(parameter_set_file_name)
for_plotting = get_optimizer_evaluation_results(save_path, hp_names,  AVERAGING_NUMBER)

best = get_best(for_plotting, 'optimizer')

metric_res = best['adam_prep']['loss']

best_on_valid = metric_res['validation']
print(' ' * 2 + 'loss' + ':', best_on_valid[1])
    # opt_inf_pupil_restore_paths={
    #     'prelearn2000': 'lstm/test_res_net_1000_emb150_nl1_nn100_bs32_nu10/checkpoints/2000'
    # },
    # opt_inf_additions_to_feed_dict=opt_inf_add_feed,
    # opt_inf_validation_dataset_texts=[valid_text],
    # opt_inf_train_dataset_texts=[train_text],
    # validation_additions_to_feed_dict=valid_add_feed,
    vocabulary=vocabulary,
    batch_size=32,
    num_unrollings=4,
    learning_rate={
        'type': 'exponential_decay',
        'init': .002,
        'decay': .5,
        'period': 400
    },
    results_collect_interval=10,
    opt_inf_results_collect_interval=1,
    permute=False,
    summary=True,
    add_graph_to_summary=True)

env.grid_search_for_meta(
    evaluation,
    kwargs_for_pupil_building,
    kwargs_for_optimizer_building,
    build_pupil_hyperparameters=build_pupil_hyperparameters,
    build_optimizer_hyperparameters=build_optimizer_hyperparameters,
    other_hyperparameters=other_hyperparameters,
    **launch_kwargs)