import numpy as np

# RingBuffer, exponential_running_mean and
# exponential_running_var_from_demeaned are used below and assumed to be
# provided elsewhere in this repository; their import paths are not shown
# in this excerpt.
class StandardizeProcessor(object):
    """Exponentially standardize incoming samples channelwise and keep the
    standardized samples in a ring buffer for later retrieval."""
    def __init__(self, factor_new=1e-3, eps=1e-4,
                 n_samples_in_buffer=10000):
        self.factor_new = factor_new
        self.eps = eps
        self.n_samples_in_buffer = n_samples_in_buffer

    def initialize(self, n_chans):
        self.running_mean = None
        self.running_var = None
        self.sample_buffer = RingBuffer(
            np.zeros((self.n_samples_in_buffer, n_chans), dtype=np.float32))
        self.y_buffer = RingBuffer(
            np.zeros((self.n_samples_in_buffer,), dtype=np.float32))

    def process_samples(self, samples):
        standardized_samples = self.update_and_standardize(samples)
        self.sample_buffer.extend(standardized_samples)

    def update_and_standardize(self, samples):
        if self.running_mean is not None:
            assert self.running_var is not None
            next_means = exponential_running_mean(
                samples, factor_new=self.factor_new,
                start_mean=self.running_mean)
            demeaned = samples - next_means
            next_vars = exponential_running_var_from_demeaned(
                demeaned, factor_new=self.factor_new,
                start_var=self.running_var)
            standardized = demeaned / np.maximum(self.eps,
                                                 np.sqrt(next_vars))
            # Carry the statistics of the last sample over to the next block.
            self.running_mean = next_means[-1]
            self.running_var = next_vars[-1]
            return standardized
        else:
            # First block: seed the running statistics from the block itself.
            self.running_mean = np.mean(samples, axis=0)
            self.running_var = np.var(samples, axis=0)
            return (samples - self.running_mean) / np.maximum(
                self.eps, np.sqrt(self.running_var))

    def get_samples(self, start, stop):
        return self.sample_buffer[start:stop]
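# The following is a minimal, self-contained numpy sketch of the exponential
# standardization performed per block above. It is for illustration only:
# the name `exponential_standardize_sketch` is hypothetical, and the real
# processor delegates to this repository's exponential_running_mean /
# exponential_running_var_from_demeaned helpers rather than a Python loop.
def exponential_standardize_sketch(samples, mean, var,
                                   factor_new=1e-3, eps=1e-4):
    """Standardize one (time x chans) block; return the standardized block
    plus the updated running mean/var (statistics of the last sample)."""
    standardized = np.empty_like(samples, dtype=np.float32)
    for i, x in enumerate(samples):
        # Exponential moving estimates, updated sample by sample.
        mean = factor_new * x + (1 - factor_new) * mean
        demeaned = x - mean
        var = factor_new * demeaned ** 2 + (1 - factor_new) * var
        standardized[i] = demeaned / np.maximum(eps, np.sqrt(var))
    return standardized, mean, var

# Usage (kept as comments to avoid executing at import time):
#   first_block = np.random.randn(100, 4).astype(np.float32)
#   mean, var = np.mean(first_block, axis=0), np.var(first_block, axis=0)
#   next_block = np.random.randn(50, 4).astype(np.float32)
#   standardized, mean, var = exponential_standardize_sketch(
#       next_block, mean, var)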
class OnlineCoordinator(object):
    """ Online coordinator accepts samples, preprocesses them with the
    data processor and calls the model to supply predictions when necessary.
    The online coordinator is mainly responsible for cutting out the correct
    time windows for the model to predict on. """
    def __init__(self, data_processor, model, trainer, pred_gap):
        self.data_processor = data_processor
        self.model = model
        self.pred_gap = pred_gap
        self.marker_buffer = RingBuffer(
            np.zeros(data_processor.n_samples_in_buffer, dtype=np.int32))
        self.trainer = trainer

    def initialize(self, n_chans):
        assert self.data_processor.__class__.__name__ == 'StandardizeProcessor', (
            "Else change at least trainer.py add_training_blocks_from_old_data "
            "function")
        self.data_processor.initialize(n_chans)
        self.n_samples = 0
        self.i_last_pred = -1
        self.last_pred = None
        self.model.initialize()
        self.n_samples_pred_window = self.model.get_n_samples_pred_window()
        self.trainer.set_predicting_model(self.model.model)  # lasagne model...
        self.trainer.set_data_processor(self.data_processor)
        self.trainer.set_marker_buffer(self.marker_buffer)

    def receive_samples(self, samples):
        """Expect samples in time x chan format, with the marker as the
        last column."""
        sensor_samples = samples[:, :-1]
        markers = samples[:, -1]
        assert np.all([m in [0, 1, 2, 3, 4, 5] for m in markers]), (
            "Expect all markers to be from 0-5, instead got "
            "{!s}".format(markers))
        self.marker_buffer.extend(markers)
        self.data_processor.process_samples(sensor_samples)
        self.n_samples += len(samples)
        if self.should_do_next_prediction():
            self.predict()
        # important to do after marker buffer and data processor
        # have processed samples...
        self.trainer.process_markers(markers)

    def should_do_next_prediction(self):
        return (self.n_samples >= self.n_samples_pred_window and
                self.n_samples > (self.i_last_pred + self.pred_gap))

    def predict(self):
        # Compute how many samples we already have past the sample we
        # wanted to predict on, i.e. how far we are past
        # last prediction + prediction gap.
        # Keep in mind: self.n_samples is a count of samples received,
        # while self.i_last_pred is a 0-based sample index, hence the
        # -1 below.
        n_samples_after_pred = min(
            self.n_samples - self.n_samples_pred_window,
            self.n_samples - self.i_last_pred - self.pred_gap - 1)
        assert n_samples_after_pred < self.pred_gap, (
            "Other case (multiple predictions should have happened in one "
            "block that was sent) not implemented yet")
        start = -self.n_samples_pred_window - n_samples_after_pred
        end = -n_samples_after_pred
        if end == 0:
            end = None
        topo = self.data_processor.get_samples(start, end)
        self.last_pred = self.model.predict(topo)
        # -1 since we have 0-based indexing in python
        self.i_last_pred = self.n_samples - n_samples_after_pred - 1

    def pop_last_prediction_and_sample_ind(self):
        last_pred = self.last_pred
        self.last_pred = None
        return last_pred, self.i_last_pred

    def has_new_prediction(self):
        return self.last_pred is not None
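# A minimal, hypothetical sketch of driving the coordinator. DummyModel and
# DummyTrainer are stand-ins that only implement the methods the coordinator
# itself calls (model.initialize/predict/get_n_samples_pred_window and the
# trainer's set_*/process_markers hooks); they are not part of this
# repository, and all shapes and parameter values below are made up.
class DummyModel(object):
    def __init__(self):
        self.model = None  # stands in for the underlying lasagne model

    def initialize(self):
        pass

    def get_n_samples_pred_window(self):
        return 200  # predict on windows of 200 samples

    def predict(self, topo):
        return np.mean(topo)  # placeholder "prediction"


class DummyTrainer(object):
    def set_predicting_model(self, model):
        pass

    def set_data_processor(self, data_processor):
        pass

    def set_marker_buffer(self, marker_buffer):
        pass

    def process_markers(self, markers):
        pass


def _demo_coordinator():
    # Feed small blocks of 4 channels plus an all-zero marker column; with
    # pred_gap=100, at most one prediction happens per 100 new samples.
    coordinator = OnlineCoordinator(StandardizeProcessor(), DummyModel(),
                                    DummyTrainer(), pred_gap=100)
    coordinator.initialize(n_chans=4)
    for _ in range(10):
        sensors = np.random.randn(50, 4).astype(np.float32)
        markers = np.zeros((50, 1), dtype=np.float32)
        coordinator.receive_samples(
            np.concatenate([sensors, markers], axis=1))
        if coordinator.has_new_prediction():
            pred, i_sample = coordinator.pop_last_prediction_and_sample_ind()
            print(pred, i_sample)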
def _init_data(self):
    # Fill one ring buffer per sensor with a dummy sine-wave signal,
    # presumably for testing/replay without real hardware.
    self._data = dict()
    for sensor_name in self._sensor_names:
        self._data[sensor_name] = RingBuffer(
            np.sin(np.linspace(0, 10, 1000)))
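# Hypothetical illustration of how per-sensor ring buffers like the ones in
# _init_data could be assembled into a (time x (chans + 1)) block for
# OnlineCoordinator.receive_samples. `assemble_block_sketch` and the
# all-zero marker column are assumptions for this sketch, not repository API.
def assemble_block_sketch(data, sensor_names, n_new_samples):
    # Take the most recent samples of every sensor and stack them into a
    # time x chans array, then append a marker column of zeros.
    columns = [np.asarray(data[name][-n_new_samples:])
               for name in sensor_names]
    sensors = np.stack(columns, axis=1)
    markers = np.zeros((n_new_samples, 1), dtype=np.float32)
    return np.concatenate([sensors, markers], axis=1)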