# Fragment of an evaluation script (Python 2: `print` statements, text-mode
# pickle reads). The enclosing function/scope starts before this excerpt —
# names such as x, y, l_infos, ts_batch_size, w, h, path_mean_shap, ds and
# faceset are defined outside the visible region.
# Apply synthetic occlusion to the inputs; tags_oc presumably marks which
# samples were occluded — TODO confirm against artificiel_occlusion's contract.
x, y, l_infos, tags_oc = artificiel_occlusion(x, y, l_infos)

    # Chunk the (x, y) evaluation data into minibatches of ts_batch_size.
    list_minibatchs_vl = split_data_to_minibatchs_eval(
        {"x": x, "y":  y}, ts_batch_size)
    # Experiment folder is selected by the first CLI argument.
    fold_exp = "../../exps/" + sys.argv[1]
    # NOTE(review): pickle is opened in text mode ('r') — Python 2 only;
    # Python 3 would require 'rb'. Also, unpickling is unsafe on untrusted files.
    with open(fold_exp+"/model.pkl", 'r') as f:
        stuff = pkl.load(f)
        layers_infos, params_vl = stuff["layers_infos"], stuff["params_vl"]
        print layers_infos
        tag = stuff["tag"]
        dropout = stuff["dropout"]
        # Fixed seed so layer re-initialization is reproducible; the real
        # weights are restored below via set_params_vals.
        rng = np.random.RandomState(23455)
        input = T.fmatrix("x_input")  # NOTE(review): shadows the builtin `input`
        # Drop pickled W/b so ModelMLP builds fresh (shared) parameters,
        # then attach the RNG each layer expects.
        for l in layers_infos:
            l["W"], l["b"], l["rng"] = None, None, rng
        model = ModelMLP(layers_infos, input, dropout=dropout)
    # Load the trained parameter values into the freshly built model.
    model.set_params_vals(fold_exp+"/model.pkl")

    # Compiled Theano function for forward evaluation.
    eval_fn = get_eval_fn(model)
    # Unit test
    unit_test(fold_exp+"/unit_imgs/", w, h, path_mean_shap, eval_fn, ds)
    # Perf mean shape.
    # TRAIN
    tr_path = "../../inout/data/face/" + faceset + "_data/ch_tr_1800_0_0_0.pkl"
    print "TRAIN EVAL:"
    with open(tr_path, 'r') as f:
        tr_data = pkl.load(f)
    list_minibatchs_train = split_data_to_minibatchs_eval(
        {"x": tr_data["x"], "y":  tr_data['y']}, ts_batch_size)
    # Baseline: score the constant mean shape against the training targets.
    # NOTE(review): this call is truncated in the visible excerpt — the
    # remaining arguments continue beyond this fragment.
    cdf_ms, cdf0_1_ms, auc_ms = evaluate_mean_shape(path_mean_shap, l_infos,
                                                    w, h, y=tr_data['y'],
# --- Example #2 (separate, unrelated snippet begins below) ---
        # Closes a layer-definition dict whose opening lies before this excerpt.
        }
    # Output layer: decodes nhid_l2 hidden units to 68*2 values — presumably
    # 68 (x, y) facial-landmark coordinates, consistent with the face-data
    # paths used elsewhere in this file; TODO confirm.
    # Weights/biases are tied to the decoder ("prime") side of autoencoder dae_l3.
    layer3 = {
        "rng": rng,
        "n_in": nhid_l2,
        "n_out": 68*2,
        "W": dae_l3.hidden.W_prime,
        "b": dae_l3.hidden.b_prime,
        "activation": NonLinearity.TANH
        }
    layers = [layer0, layer1, layer2, layer3]
#    dropout = [float(sys.argv[1]), float(sys.argv[2]), float(sys.argv[3]),
#               float(sys.argv[4])]
    # Dropout disabled on all four layers (CLI-driven alternative kept above).
    dropout = [0.0, 0.0, 0.0, 0.0]
    # number of the hidden layer just before the output ae. Default: None
    id_code = None
    # Build the MLP without L1/L2 regularization or bias penalties.
    model = ModelMLP(layers, input, l1_reg=0., l2_reg=0., reg_bias=False,
                     dropout=dropout, id_code=id_code)
    # Per-layer autoencoders for input/output coding; must be non-empty when
    # an output-code layer (id_code) is requested.
    aes_in = []
    aes_out = []
    if id_code is not None:
        assert aes_out != []
    # Train
    # Data
    tr_batch_size = 10
    vl_batch_size = 8000

    # NOTE(review): text-mode pickle read — Python 2 only ('rb' under Python 3).
    with open(path_valid, 'r') as f:
        l_samples_vl = pkl.load(f)
    # Chunk the validation samples into (large) evaluation minibatches.
    list_minibatchs_vl = split_data_to_minibatchs_eval(
        l_samples_vl, vl_batch_size)
    max_epochs = int(1000)
    lr_vl = 1e-3  # validation/initial learning rate — confirm usage downstream