Example #1
    def exception_thrower():

        argdict = rec_eval(expr, memo=memo, print_node_on_error=False)

        dataset_info = argdict['dataset_info']
        learning_algo = argdict['learning_algo']
        hp_space = argdict['hp_space']
        pipeline = argdict['pipeline']
        n_startup_trials = argdict['n_startup_trials']
        n_ok_trials = argdict['n_ok_trials']
        batchsize = argdict['batchsize']
        min_features = argdict['min_features']
        max_features = argdict['max_features']
        checkpoint_fname = argdict['checkpoint_fname']
        batched_lmap_speed_thresh = argdict['batched_lmap_speed_thresh']
        ctrl = argdict['ctrl']

        tid = ctrl.current_trial['tid']

        # -- checkpoint
        if isinstance(ctrl.trials, Trials):
            if tid > 0 and tid % checkpoint_every == 0:
                save_hp(hp_space, ctrl.trials, n_startup_trials,
                        checkpoint_fname)

        # -- retrieve trials from database
        if isinstance(ctrl.trials, MongoTrials):
            ctrl.trials.refresh()

        # -- check and signal stopping to optimizer
        current_ok_trials = count_ok_trials(ctrl.trials)
        if current_ok_trials >= n_ok_trials:
            raise SimpleHpStop(
                'number of ok trials reached - '
                'stopping process with %d ok trials out of '
                '%d trials.' % (current_ok_trials, tid), ctrl.trials)

        # -- feature extraction
        slm_t0 = time()

        fn_imgs = getattr(dataset_info['data_obj'], dataset_info['fn_imgs'])
        imgs = fn_imgs()

        limgs = lmap_ndarray(imgs)

        X = pyll_theano_batched_lmap(
            partial(callpipe1, pipeline),
            limgs,
            batchsize=batchsize,
            print_progress_every=10,
            speed_thresh=batched_lmap_speed_thresh,
            abort_on_rows_larger_than=max_features,
            x_dtype='uint8',
        )[:]

        feat_set = rec_eval(X, print_node_on_error=False)
        slm_time = time() - slm_t0

        # -- classification
        eval_t0 = time()

        # -- feat_set in 2-D
        feat_shape = feat_set.shape
        feat_set.shape = feat_set.shape[0], -1

        assert feat_set.shape[1] >= min_features, 'min_features not satisfied'

        fn_eval = getattr(dataset_info['data_obj'], dataset_info['fn_eval'])
        r_dict = fn_eval(learning_algo, feat_set)
        eval_time = time() - eval_t0

        r_dict['status'] = hyperopt.STATUS_OK
        r_dict['feat_shape'] = feat_shape
        r_dict['slm_time'] = slm_time
        r_dict['eval_time'] = eval_time

        return r_dict
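
The dict returned above follows hyperopt's result protocol: a 'status' field plus arbitrary extra keys that are stored with the trial. As a point of reference only, here is a minimal, self-contained sketch of that protocol using a plain callable objective and a made-up one-dimensional search space; the function above is instead wired into the optimizer through expr, memo, and ctrl, so this illustrates the shape of the result dict rather than the integration used in this codebase.

import time

import hyperopt
from hyperopt import Trials, fmin, hp, tpe


def objective(params):
    # Stand-in evaluation; any float works as the loss.
    t0 = time.time()
    loss = (params['x'] - 0.5) ** 2
    # 'loss' and 'status' are required; extra keys (like the timing and
    # feat_shape entries above) are simply stored alongside the trial.
    return {'loss': loss,
            'status': hyperopt.STATUS_OK,
            'eval_time': time.time() - t0}


trials = Trials()
best = fmin(fn=objective,
            space={'x': hp.uniform('x', 0.0, 1.0)},
            algo=tpe.suggest,
            max_evals=10,
            trials=trials)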
Example #2
    try:
        dataset_info = {'data_obj': data_obj,
                        'fn_imgs': 'hp_imgs',
                        'fn_eval': 'hp_eval'}

        trials = simple_hp(dataset_info, hp_algo, learning_algo, hp_space,
                           n_startup_trials, n_ok_trials, checkpoint_fname,
                           host, port)

    except SimpleHpStop as e:
        msg, trials = e.args
        print(msg)

    if opt_mode == 'serial':
        # -- save complete hyperparameter optimization trials in pickle format
        hp_fname = os.path.join(output_path, 'hp.pkl')
        save_hp(hp_space, trials, n_startup_trials, hp_fname)

    print('done!')


def get_optparser():

    dataset_options = ''
    for k in sorted(datasets.keys()):
        dataset_options += "     %s - %s \n" % (k, datasets[k].__name__)

    usage = ("usage: %prog <DATASET> <DATASET_PATH>\n\n"
             "DATASET is an integer corresponding to the following supported "
             "datasets:\n" + dataset_options
            )
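
The usage string above relies on optparse's %prog placeholder; the rest of get_optparser is not shown in this excerpt. As a sketch only (the argument check and error message are illustrative, not from this codebase), such a string is normally handed to an OptionParser like this:

from optparse import OptionParser

usage = "usage: %prog <DATASET> <DATASET_PATH>"  # same shape as the string built above
parser = OptionParser(usage=usage)
options, args = parser.parse_args()
if len(args) != 2:
    parser.error("expected <DATASET> and <DATASET_PATH> arguments")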