Code Example #1
def create_all_amplitude_perturbation_corrs(folder_name,
                                            params,
                                            start,
                                            stop,
                                            with_square,
                                            with_square_cov,
                                            after_softmax,
                                            n_samples,
                                            perturbations='default'):
    if perturbations == 'default':
        perturbations = (
            (
                'no_dev',
                FuncAndArgs(
                    rand_diff,
                    with_blocks=False,  # deviation_func below just returns 1 (no scaling)
                    deviation_func=lambda arr, axis, keepdims: 1)),
            ('rand_mad',
             FuncAndArgs(rand_diff,
                         with_blocks=False,
                         deviation_func=median_absolute_deviation)),
            ('rand_std',
             FuncAndArgs(rand_diff, with_blocks=False, deviation_func=np.std)),
        )
    elif perturbations == 'simple_no_scale':
        perturbations = (
            (
                'no_dev',
                FuncAndArgs(
                    rand_diff,
                    with_blocks=False,  # deviation_func below just returns 1 (no scaling)
                    deviation_func=lambda arr, axis, keepdims: 1)), )
    assert not (with_square and with_square_cov)
    res_pool = ResultPool()
    res_pool.load_results(folder_name, params=params)
    res_file_names = res_pool.result_file_names()
    all_base_names = [
        name.replace('.result.pkl', '') for name in res_file_names
    ]
    start = start or 0
    stop = stop or len(all_base_names)
    for i_file, base_name in enumerate(all_base_names[start:stop]):
        log.info("Running {:s} ({:d} of {:d})".format(base_name,
                                                      i_file + start + 1,
                                                      stop))
        create_amplitude_perturbation_corrs(base_name,
                                            with_square=with_square,
                                            with_square_cov=with_square_cov,
                                            after_softmax=after_softmax,
                                            n_samples=n_samples,
                                            perturbations=perturbations)
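
FuncAndArgs itself is not defined in these excerpts. Judging from the call
pattern (keyword arguments preset here, extra positional arguments supplied
later in Example #3), it behaves like functools.partial. The following is a
minimal sketch under that assumption, not the repository's actual
implementation:

# Sketch only: assumes FuncAndArgs works like functools.partial,
# storing a function plus preset arguments for a later call.
class FuncAndArgs(object):
    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs

    def __call__(self, *args, **kwargs):
        # Preset positional arguments come first (as in functools.partial);
        # keyword arguments given at call time override the preset ones.
        merged_kwargs = dict(self.kwargs)
        merged_kwargs.update(kwargs)
        return self.func(*(self.args + args), **merged_kwargs)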
Code Example #2
def get_templates():
    return {
        'categorical_crossentropy': lambda: categorical_crossentropy,
        'tied_loss': lambda: FuncAndArgs(
            sum_of_losses,
            loss_expressions=[
                categorical_crossentropy,
                tied_neighbours_cnt_model,
            ]),
    }
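
Each template value is a zero-argument lambda, so a caller first looks a
template up by name and then calls it to obtain the actual loss expression.
A hypothetical lookup:

# Hypothetical usage of get_templates(): resolve a template by name,
# then call the lambda to get the loss expression itself.
templates = get_templates()
loss_expression = templates['tied_loss']()  # FuncAndArgs wrapping sum_of_losses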
Code Example #3
    def _create_train_function(self):
        # Maybe replace self.exp.final_layer by self.cnt_model?
        # not clear to me why I am using self.exp.final_layer here
        targets = T.ivector()
        input_var = get_input_var(self.exp.final_layer)
        updates_expression = FuncAndArgs(adam,
                                         learning_rate=self.learning_rate)
        prediction = lasagne.layers.get_output(
            self.exp.final_layer,
            deterministic=self.deterministic_training,
            input_var=input_var,
            inputs=input_var)
        # Loss function might need layers or not...
        loss_fn = masked_loss_func(categorical_crossentropy)
        loss = loss_fn(prediction, targets).mean()
        # create parameter update expressions
        params = lasagne.layers.get_all_params(self.exp.final_layer,
                                               trainable=True)
        updates = updates_expression(loss, params)
        if self.exp.updates_modifier is not None:
            # Put norm constraints on all layers; for now fixed to
            # max kernel norm 2 and max col norm 0.5.
            updates = self.exp.updates_modifier.modify(updates,
                                                       self.exp.final_layer)

        # store only the parameters for training,
        # assumes parameters for layers already set
        self.train_params = []
        all_update_params = updates.keys()
        for update_param in all_update_params:
            if update_param not in params:
                self.train_params.append(update_param)

        self.train_func = theano.function([input_var, targets],
                                          updates=updates)

        # Set optimizer/train parameter values if not done
        if self.train_param_values is not None:
            log.info("Setting train parameter values")
            for param, val in zip(self.train_params, self.train_param_values):
                param.set_value(val)
            log.info("...Done setting parameter train values")
        else:
            log.info(
                "Not setting train parameter values, optimization values "
                "start from scratch (model params may be loaded anyway).")
Code Example #4
    if len(sys.argv) > 2:
        stop = int(sys.argv[2])
    folder = 'data/models/paper/ours/cnt/deep4/car/'
    params = dict(cnt_preprocessors="$cz_zero_resample_car_demean",
                  trial_start=1500,
                  trial_stop=4000)  # other values tried: 3500/4000, 1000/1500
    with_square = False
    with_square_cov = False
    with_blocks = False
    after_softmax = False
    n_samples = 300
    perturbations = (
        (
            'no_dev',
            FuncAndArgs(
                rand_diff,
                with_blocks=False,  # deviation_func below just returns 1 (no scaling)
                deviation_func=lambda arr, axis, keepdims: 1)), )
    # Alternative perturbations, currently disabled:
    # ('rand_mad',
    #  FuncAndArgs(rand_diff,
    #              with_blocks=False,
    #              deviation_func=median_absolute_deviation)),
    # ('rand_std',
    #  FuncAndArgs(rand_diff,
    #              with_blocks=False,
    #              deviation_func=np.std)),

    create_all_amplitude_perturbation_corrs(folder,
                                            params=params,
                                            start=start,
                                            stop=stop,
                                            with_square=with_square,
                                            with_square_cov=with_square_cov,
                                            after_softmax=after_softmax,
                                            n_samples=n_samples,
                                            perturbations=perturbations)