def test_exponential_preprocessings():
    data = np.array([ 1, 3, 5, 9, 7, -2])
    exp_run_mean = np.array([ 0.200000, 0.760000, 1.608000, 3.086400, 3.869120, 2.695296])
    exp_run_var = np.array([ 0.928000, 1.745920, 3.697869, 9.952428, 9.922424, 12.347100])
    exp_standardized = np.array([ 0.830455, 1.695258, 1.763925, 1.874509, 0.993934, -1.336228])
    
    run_mean = exponential_running_mean(np.array(data), factor_new=0.2,start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned, factor_new=0.2, start_var=1)
    standardized = exponential_running_standardize(data, factor_new=0.2, start_mean=0, start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)

    data = np.array([ 2, 0, -5, -3, 0, 4])
    exp_run_mean = np.array([ 0.400000, 0.320000, -0.744000, -1.195200, -0.956160, 0.035072])
    exp_run_var = np.array([ 1.312000, 1.070080, 4.478771, 4.234478, 3.570431, 6.000475])
    exp_standardized = np.array([ 1.396861, -0.309344, -2.011047, -0.877060, 0.506023, 1.618611])
    
    run_mean = exponential_running_mean(np.array(data), factor_new=0.2,start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned, factor_new=0.2, start_var=1)
    standardized = exponential_running_standardize(data, factor_new=0.2, start_mean=0, start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)
    
    data = np.array([ -3, 5, 8, 7, 4, -2])
    exp_run_mean = np.array([ -0.600000, 0.520000, 2.016000, 3.012800, 3.210240, 2.168192])
    exp_run_var = np.array([ 1.952000, 5.575680, 11.622195, 12.477309, 10.106591, 11.560038])
    exp_standardized = np.array([ -1.717795, 1.897270, 1.755284, 1.128775, 0.248424, -1.225937])
    
    run_mean = exponential_running_mean(np.array(data), factor_new=0.2,start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned, factor_new=0.2, start_var=1)
    standardized = exponential_running_standardize(data, factor_new=0.2, start_mean=0, start_var=1)
    
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)
    
    data = np.array([ 1, 0, 1, 0, 1, 0])
    exp_run_mean = np.array([ 0.200000, 0.160000, 0.328000, 0.262400, 0.409920, 0.327936])
    exp_run_var = np.array([ 0.928000, 0.747520, 0.688333, 0.564437, 0.521188, 0.438459])
    exp_standardized = np.array([ 0.830455, -0.185058, 0.809972, -0.349266, 0.817360, -0.495250])
    
    run_mean = exponential_running_mean(np.array(data), factor_new=0.2,start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned, factor_new=0.2, start_var=1)
    standardized = exponential_running_standardize(data, factor_new=0.2, start_mean=0, start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)
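# The expected arrays above follow plain exponential-moving-average recurrences,
# e.g. for data = [1, 3, 5, 9, 7, -2]: mean_1 = 0.2*3 + 0.8*0.2 = 0.76 and
# var_1 = 0.2*(3 - 0.76)**2 + 0.8*0.928 = 1.74592. The sketch below is a minimal
# illustration of those recurrences, not the library implementation; it assumes
# the newest sample is weighted by factor_new and that the freshly updated mean
# is used for demeaning, and the helper name `running_standardize_sketch` is
# made up for illustration only.
import numpy as np

def running_standardize_sketch(data, factor_new=0.2, start_mean=0.0, start_var=1.0):
    means, variances, standardized = [], [], []
    mean, var = start_mean, start_var
    for x in data:
        # exponentially weighted running mean
        mean = factor_new * x + (1 - factor_new) * mean
        demeaned = x - mean
        # exponentially weighted running variance of the demeaned signal
        var = factor_new * demeaned ** 2 + (1 - factor_new) * var
        means.append(mean)
        variances.append(var)
        standardized.append(demeaned / np.sqrt(var))
    return np.array(means), np.array(variances), np.array(standardized)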
def exponential_standardize_cnt(cnt, init_block_size=1000, factor_new=1e-3,
        eps=1e-4):
    cnt_data = cnt.data
    standardized_data = exponential_running_standardize(cnt_data, 
        factor_new=factor_new, init_block_size=init_block_size, axis=None, 
        eps=eps)
    return cnt.copy(data=standardized_data)
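# Hypothetical usage sketch for the helper above: `cnt` is assumed to be a
# continuous-signal container (e.g. a wyrm-style Data object) exposing a `.data`
# array and a `.copy(data=...)` constructor, as the function body suggests.
# standardized_cnt = exponential_standardize_cnt(cnt, init_block_size=1000,
#                                                factor_new=1e-3, eps=1e-4)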
def test_sum_prediction():
    """ Test with a model that predicts sum over four samples """
    rng = RandomState(3904890384)
    n_samples_in_buffer = 1000
    dataset = rng.rand(n_samples_in_buffer*2,5).astype(np.float32)
    markers = np.ones((n_samples_in_buffer*2,1)).astype(np.float32)
    set_and_markers = np.concatenate((dataset, markers), axis=1)
    
    factor_new=0.001
    n_stride = 10
    pred_freq = 11
    standardized = exponential_running_standardize(dataset,
        factor_new=factor_new, init_block_size=n_stride)
    model = InputLayer([1,1,4,1])
    model = GlobalPoolLayer(model,pool_function=T.sum)
    
    expected = [np.sum(standardized[stop-4:stop], axis=0) for stop in xrange(11, dataset.shape[0], 11)]
    expected = np.array(expected)
    
    processor = StandardizeProcessor(factor_new=factor_new,
        n_samples_in_buffer=n_samples_in_buffer)
    
    online_model = OnlineModel(model)
    online_pred = OnlineCoordinator(processor, online_model, pred_freq=pred_freq,
        trainer=NoTrainer())
    
    online_pred.initialize(n_chans=dataset.shape[1])
    all_preds = []
    for i_start_sample in xrange(0,dataset.shape[0]-n_stride+1,n_stride):
        online_pred.receive_samples(set_and_markers[i_start_sample:i_start_sample+n_stride])
        if online_pred.has_new_prediction():
            pred, _ = online_pred.pop_last_prediction_and_sample_ind()
            all_preds.append(pred)
        
    assert np.allclose(np.array(all_preds).squeeze(), expected, rtol=1e-3)
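# Note on test_sum_prediction above: with pred_freq=11, a prediction is expected
# after samples 11, 22, ...; the model's input window covers 4 samples
# (InputLayer([1, 1, 4, 1])) and GlobalPoolLayer sums over it, so each expected
# prediction is the sum of the 4 most recently standardized samples at that point.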
def test_online_predictor():
    """ Test whether predictions are done at correct timepoints.
    Model actually just returns input """
    
    rng = RandomState(3904890384)
    n_samples_in_buffer = 1000
    dataset = rng.rand(n_samples_in_buffer*2,5).astype(np.float32)
    markers = np.ones((n_samples_in_buffer*2,1)).astype(np.float32)
    set_and_markers = np.concatenate((dataset, markers), axis=1)
    
    factor_new=0.001
    n_stride = 10
    pred_freq = 11
    standardized = exponential_running_standardize(dataset,
        factor_new=factor_new, init_block_size=n_stride)
    model = InputLayer([1,1,1,1])
    
    processor = StandardizeProcessor(factor_new=factor_new,
        n_samples_in_buffer=n_samples_in_buffer)
    
    online_model = OnlineModel(model)
    online_pred = OnlineCoordinator(processor, online_model, pred_freq=pred_freq,
        trainer=NoTrainer())
    
    online_pred.initialize(n_chans=dataset.shape[1])
    all_preds = []
    for i_start_sample in xrange(0,dataset.shape[0]-n_stride+1,n_stride):
        online_pred.receive_samples(set_and_markers[i_start_sample:i_start_sample+n_stride])
        if online_pred.has_new_prediction():
            pred, _ = online_pred.pop_last_prediction_and_sample_ind()
            all_preds.append(pred)
        
    assert np.array_equal(np.array(all_preds).squeeze(), standardized[10::pred_freq])
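# Note on test_online_predictor above: the first prediction is due once
# pred_freq=11 samples have arrived, i.e. at standardized sample index 10, and
# since the single-sample identity model just returns its input, the collected
# predictions should equal standardized[10::pred_freq].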
def test_data_processor():
    """Compare standardized data to data standardized online
    always giving blocks of 10 samples"""
    rng = RandomState(3904890384)
    n_samples_in_buffer = 1000
    dataset = rng.rand(n_samples_in_buffer * 2, 5)

    factor_new = 0.001
    n_stride = 10
    standardized = exponential_running_standardize(dataset,
                                                   factor_new=factor_new,
                                                   init_block_size=n_stride)

    processor = StandardizeProcessor(factor_new=factor_new,
                                     n_samples_in_buffer=n_samples_in_buffer)

    processor.initialize(n_chans=dataset.shape[1])

    for i_start_sample in xrange(0, dataset.shape[0] - n_stride + 1, n_stride):
        processor.process_samples(dataset[i_start_sample:i_start_sample +
                                          n_stride])
        # compare all so far processed samples
        assert np.allclose(
            standardized[:i_start_sample + n_stride][-n_samples_in_buffer:],
            processor.get_samples(-i_start_sample - n_stride, None),
            rtol=1e-3,
            atol=1e-5)
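# Note on test_data_processor above: the offline-standardized reference is cut to
# its last n_samples_in_buffer entries because the processor presumably keeps only
# that many samples in its ring buffer; get_samples(-k, None) is taken here to
# return the last k processed samples.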
def test_exponential_multidimensional():
    data = np.array([[[ 1, 3, 5, 9, 7, -2],
        [ 2, 0, -5, -3, 0, 4]],
        [[ -3, 5, 8, 7, 4, -2],
        [ 1, 0, 1, 0, 1, 0]]])
    exp_standardized = np.array([[[ 0.830455, 1.695258, 1.763925, 1.874509, 0.993934, -1.336228],
        [ 1.396861, -0.309344, -2.011047, -0.877060, 0.506023, 1.618611]],
        [[ -1.717795, 1.897270, 1.755284, 1.128775, 0.248424, -1.225937],
        [ 0.830455, -0.185058, 0.809972, -0.349266, 0.817360, -0.495250]]])
    standardized = exponential_running_standardize(data.swapaxes(0,2), factor_new=0.2, start_mean=0, start_var=1)
    assert np.allclose(standardized.swapaxes(0,2), exp_standardized)
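# Note on test_exponential_multidimensional above: data has shape (2, 2, 6) with
# time on the last axis; swapaxes(0, 2) moves time to the first axis before
# standardizing, and each channel's result matches the corresponding 1-D case
# from test_exponential_preprocessings.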
def test_exponential_multidimensional():
    data = np.array([[[1, 3, 5, 9, 7, -2], [2, 0, -5, -3, 0, 4]],
                     [[-3, 5, 8, 7, 4, -2], [1, 0, 1, 0, 1, 0]]])
    exp_standardized = np.array(
        [[[0.830455, 1.695258, 1.763925, 1.874509, 0.993934, -1.336228],
          [1.396861, -0.309344, -2.011047, -0.877060, 0.506023, 1.618611]],
         [[-1.717795, 1.897270, 1.755284, 1.128775, 0.248424, -1.225937],
          [0.830455, -0.185058, 0.809972, -0.349266, 0.817360, -0.495250]]])
    standardized = exponential_running_standardize(data.swapaxes(0, 2),
                                                   factor_new=0.2,
                                                   start_mean=0,
                                                   start_var=1)
    assert np.allclose(standardized.swapaxes(0, 2), exp_standardized)
def add_training_blocks_from_old_data(self, old_samples,
        old_markers, data_processor):
    # first standardize data
    old_samples = exponential_running_standardize(old_samples,
        factor_new=data_processor.factor_new, init_block_size=1000,
        eps=data_processor.eps)
    trial_starts, trial_stops = self.get_trial_start_stop_indices(
            old_markers)
    log.info("Adding {:d} trials".format(len(trial_starts)))
    for trial_start, trial_stop in zip(trial_starts, trial_stops):
        self.add_blocks(trial_start + self.trial_start_offset,
            trial_stop, old_samples, old_markers)
    # now let's add breaks
    log.info("Adding {:d} breaks".format(len(trial_starts) - 1))
    for break_start, break_stop in zip(trial_stops[:-1], trial_starts[1:]):
        self.add_break(break_start, break_stop, old_samples, old_markers)
def add_training_blocks_from_old_data(self, old_samples, old_markers,
                                      data_processor):
    # first standardize data
    old_samples = exponential_running_standardize(
        old_samples,
        factor_new=data_processor.factor_new,
        init_block_size=1000,
        eps=data_processor.eps)
    trial_starts, trial_stops = self.get_trial_start_stop_indices(
        old_markers)
    log.info("Adding {:d} trials".format(len(trial_starts)))
    for trial_start, trial_stop in zip(trial_starts, trial_stops):
        self.add_trial(trial_start, trial_stop, old_samples, old_markers)
    # now let's add breaks
    log.info("Adding {:d} breaks".format(len(trial_starts) - 1))
    for break_start, break_stop in zip(trial_stops[:-1], trial_starts[1:]):
        self.add_break(break_start, break_stop, old_samples, old_markers)
def test_data_processor():
    """Compare standardized data to data standardized online
    always giving blocks of 10 samples"""
    rng = RandomState(3904890384)
    n_samples_in_buffer = 1000
    dataset = rng.rand(n_samples_in_buffer*2,5)
    
    factor_new=0.001
    n_stride = 10
    standardized = exponential_running_standardize(dataset,
        factor_new=factor_new, init_block_size=n_stride)
    
    processor = StandardizeProcessor(factor_new=factor_new,
        n_samples_in_buffer=n_samples_in_buffer)
    
    processor.initialize(n_chans=dataset.shape[1])
    
    for i_start_sample in xrange(0,dataset.shape[0]-n_stride+1,n_stride):
        processor.process_samples(dataset[i_start_sample:i_start_sample+n_stride])
        # compare all so far processed samples
        assert np.allclose(standardized[:i_start_sample+n_stride][-n_samples_in_buffer:],
             processor.get_samples(-i_start_sample-n_stride,None), rtol=1e-3, atol=1e-5)  
def test_exponential_preprocessings():
    data = np.array([1, 3, 5, 9, 7, -2])
    exp_run_mean = np.array(
        [0.200000, 0.760000, 1.608000, 3.086400, 3.869120, 2.695296])
    exp_run_var = np.array(
        [0.928000, 1.745920, 3.697869, 9.952428, 9.922424, 12.347100])
    exp_standardized = np.array(
        [0.830455, 1.695258, 1.763925, 1.874509, 0.993934, -1.336228])

    run_mean = exponential_running_mean(np.array(data),
                                        factor_new=0.2,
                                        start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned,
                                                    factor_new=0.2,
                                                    start_var=1)
    standardized = exponential_running_standardize(data,
                                                   factor_new=0.2,
                                                   start_mean=0,
                                                   start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)

    data = np.array([2, 0, -5, -3, 0, 4])
    exp_run_mean = np.array(
        [0.400000, 0.320000, -0.744000, -1.195200, -0.956160, 0.035072])
    exp_run_var = np.array(
        [1.312000, 1.070080, 4.478771, 4.234478, 3.570431, 6.000475])
    exp_standardized = np.array(
        [1.396861, -0.309344, -2.011047, -0.877060, 0.506023, 1.618611])

    run_mean = exponential_running_mean(np.array(data),
                                        factor_new=0.2,
                                        start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned,
                                                    factor_new=0.2,
                                                    start_var=1)
    standardized = exponential_running_standardize(data,
                                                   factor_new=0.2,
                                                   start_mean=0,
                                                   start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)

    data = np.array([-3, 5, 8, 7, 4, -2])
    exp_run_mean = np.array(
        [-0.600000, 0.520000, 2.016000, 3.012800, 3.210240, 2.168192])
    exp_run_var = np.array(
        [1.952000, 5.575680, 11.622195, 12.477309, 10.106591, 11.560038])
    exp_standardized = np.array(
        [-1.717795, 1.897270, 1.755284, 1.128775, 0.248424, -1.225937])

    run_mean = exponential_running_mean(np.array(data),
                                        factor_new=0.2,
                                        start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned,
                                                    factor_new=0.2,
                                                    start_var=1)
    standardized = exponential_running_standardize(data,
                                                   factor_new=0.2,
                                                   start_mean=0,
                                                   start_var=1)

    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)

    data = np.array([1, 0, 1, 0, 1, 0])
    exp_run_mean = np.array(
        [0.200000, 0.160000, 0.328000, 0.262400, 0.409920, 0.327936])
    exp_run_var = np.array(
        [0.928000, 0.747520, 0.688333, 0.564437, 0.521188, 0.438459])
    exp_standardized = np.array(
        [0.830455, -0.185058, 0.809972, -0.349266, 0.817360, -0.495250])

    run_mean = exponential_running_mean(np.array(data),
                                        factor_new=0.2,
                                        start_mean=0)
    demeaned = data - run_mean
    run_var = exponential_running_var_from_demeaned(demeaned,
                                                    factor_new=0.2,
                                                    start_var=1)
    standardized = exponential_running_standardize(data,
                                                   factor_new=0.2,
                                                   start_mean=0,
                                                   start_var=1)
    assert np.allclose(exp_run_mean, run_mean)
    assert np.allclose(exp_run_var, run_var)
    assert np.allclose(exp_standardized, standardized)