def test_sum_prediction():
    """Check online predictions against a model that sums four input samples."""
    random_gen = RandomState(3904890384)
    n_samples_in_buffer = 1000
    data = random_gen.rand(n_samples_in_buffer * 2, 5).astype(np.float32)
    marker_col = np.ones((n_samples_in_buffer * 2, 1)).astype(np.float32)
    # Coordinator expects samples with a trailing marker channel
    data_with_markers = np.hstack((data, marker_col))
    factor_new = 0.001
    n_stride = 10
    pred_freq = 11
    standardized = exponential_running_standardize(
        data, factor_new=factor_new, init_block_size=n_stride)
    # Model: global sum-pool over a window of four samples
    model = InputLayer([1, 1, 4, 1])
    model = GlobalPoolLayer(model, pool_function=T.sum)
    # Expected output at each prediction point: sum of the last four
    # standardized samples
    expected_sums = []
    for stop in xrange(11, data.shape[0], 11):
        expected_sums.append(np.sum(standardized[stop - 4:stop], axis=0))
    expected_sums = np.array(expected_sums)
    processor = StandardizeProcessor(factor_new=factor_new,
        n_samples_in_buffer=n_samples_in_buffer)
    online_model = OnlineModel(model)
    coordinator = OnlineCoordinator(processor, online_model,
        pred_freq=pred_freq, trainer=NoTrainer())
    coordinator.initialize(n_chans=data.shape[1])
    collected = []
    # Feed the stream in chunks of n_stride samples, harvesting each
    # prediction as soon as it becomes available
    for start in xrange(0, data.shape[0] - n_stride + 1, n_stride):
        coordinator.receive_samples(data_with_markers[start:start + n_stride])
        if coordinator.has_new_prediction():
            prediction, _ = coordinator.pop_last_prediction_and_sample_ind()
            collected.append(prediction)
    assert np.allclose(np.array(collected).squeeze(), expected_sums, rtol=1e-3)
def test_online_predictor():
    """Check predictions arrive at the correct timepoints.

    The model is a pure input layer, so each prediction should simply be
    the standardized sample at the prediction timepoint.
    """
    random_gen = RandomState(3904890384)
    n_samples_in_buffer = 1000
    data = random_gen.rand(n_samples_in_buffer * 2, 5).astype(np.float32)
    marker_col = np.ones((n_samples_in_buffer * 2, 1)).astype(np.float32)
    # Coordinator expects samples with a trailing marker channel
    data_with_markers = np.hstack((data, marker_col))
    factor_new = 0.001
    n_stride = 10
    pred_freq = 11
    standardized = exponential_running_standardize(
        data, factor_new=factor_new, init_block_size=n_stride)
    # Identity model: output equals input
    model = InputLayer([1, 1, 1, 1])
    processor = StandardizeProcessor(factor_new=factor_new,
        n_samples_in_buffer=n_samples_in_buffer)
    online_model = OnlineModel(model)
    coordinator = OnlineCoordinator(processor, online_model,
        pred_freq=pred_freq, trainer=NoTrainer())
    coordinator.initialize(n_chans=data.shape[1])
    collected = []
    # Feed the stream in chunks of n_stride samples, harvesting each
    # prediction as soon as it becomes available
    for start in xrange(0, data.shape[0] - n_stride + 1, n_stride):
        coordinator.receive_samples(data_with_markers[start:start + n_stride])
        if coordinator.has_new_prediction():
            prediction, _ = coordinator.pop_last_prediction_and_sample_ind()
            collected.append(prediction)
    # Predictions should be exactly the standardized samples at every
    # pred_freq-th timepoint (first one after 11 samples -> index 10)
    assert np.array_equal(np.array(collected).squeeze(),
        standardized[10::pred_freq])
def main(ui_hostname, ui_port, base_name, params_filename, plot_sensors,
        use_ui_server, adapt_model, save_data, n_updates_per_break, batch_size,
        learning_rate, n_min_trials, trial_start_offset, break_start_offset,
        break_stop_offset, pred_gap, incoming_port, load_old_data,
        use_new_adam_params, input_time_length, train_on_breaks,
        min_break_samples, min_trial_samples):
    """Start the online prediction server.

    Loads model (and optionally trainer/adam) parameters, builds the online
    prediction pipeline (standardizer -> model -> optional batch-wise
    adaptation trainer), and serves predictions on ``incoming_port``.

    Parameters (as far as visible from this function):
        ui_hostname, ui_port: forwarded to PredictionServer for the UI link.
        base_name: base path for .yaml experiment file and .npy param files.
        params_filename: explicit params file, 'newest' to pick the latest
            matching file, or None to load ``base_name + '.npy'``.
        adapt_model: if True, adapt the model online with BatchWiseCntTrainer.
        load_old_data: if True (and adapting), preload today's recorded data.
        input_time_length: if not None, override the model's input window.
        Remaining arguments are forwarded to BatchWiseCntTrainer /
        PredictionServer — see those classes for their semantics.
    """
    setup_logging()
    assert np.little_endian, "Should be in little endian"
    train_params = None  # for trainer, e.g. adam params
    if params_filename is not None:
        if params_filename == 'newest':
            # sort will already sort temporally with our time string format
            all_params_files = sorted(glob(base_name + ".*.model_params.npy"))
            assert len(all_params_files) > 0, ("Expect atleast one params file "
                "if 'newest' given as argument")
            params_filename = all_params_files[-1]
        log.info("Loading model params from {:s}".format(params_filename))
        params = np.load(params_filename)
        # Trainer (adam) state is stored alongside the model params with a
        # parallel filename
        train_params_filename = params_filename.replace('model_params.npy',
            'trainer_params.npy')
        if os.path.isfile(train_params_filename):
            if use_new_adam_params:
                log.info("Loading trainer params from {:s}".format(
                    train_params_filename))
                train_params = np.load(train_params_filename)
        else:
            log.warn("No train/adam params found, starting optimization params "
                "from scratch (model params will be loaded anyways).")
    else:
        params = np.load(base_name + '.npy')
    exp = create_experiment(base_name + '.yaml')
    # Possibly change input time length, for example
    # if input time length very long during training and should be
    # shorter for online
    if input_time_length is not None:
        log.info("Change input time length to {:d}".format(input_time_length))
        set_input_window_length(exp.final_layer, input_time_length)
        # probably unnecessary, just for safety
        exp.iterator.input_time_length = input_time_length
    # Have to set for both exp final layer and actually used model
    # as exp final layer might be used for adaptation
    # maybe check this all for correctness?
    cnt_model = exp.final_layer
    set_param_values_backwards_compatible(cnt_model, params)
    # transform_to_normal_net presumably converts the cnt (continuous) net
    # into the net used for prediction -- TODO confirm
    prediction_model = transform_to_normal_net(cnt_model)
    set_param_values_backwards_compatible(prediction_model, params)
    data_processor = StandardizeProcessor(factor_new=1e-3)
    online_model = OnlineModel(prediction_model)
    if adapt_model:
        # Online adaptation: retrain in batches during breaks between trials
        online_trainer = BatchWiseCntTrainer(exp, n_updates_per_break,
            batch_size, learning_rate, n_min_trials, trial_start_offset,
            break_start_offset=break_start_offset,
            break_stop_offset=break_stop_offset,
            train_param_values=train_params,
            add_breaks=train_on_breaks,
            min_break_samples=min_break_samples,
            min_trial_samples=min_trial_samples)
    else:
        log.info("Not adapting model...")
        online_trainer = NoTrainer()
    coordinator = OnlineCoordinator(data_processor, online_model,
        online_trainer, pred_gap=pred_gap)
    hostname = ''  # bind on all interfaces
    server = PredictionServer((hostname, incoming_port),
        coordinator=coordinator,
        ui_hostname=ui_hostname, ui_port=ui_port,
        plot_sensors=plot_sensors, use_ui_server=use_ui_server,
        save_data=save_data, model_base_name=base_name,
        adapt_model=adapt_model)
    # Compilation takes some time so initialize trainer already
    # before waiting in connection in server
    online_trainer.initialize()
    if adapt_model and load_old_data:
        online_trainer.add_data_from_today(data_processor)
    log.info("Starting server on port {:d}".format(incoming_port))
    server.start()
    log.info("Started server")
    server.serve_forever()