from typing import Any, List, Optional

# Observer, Counter, Lock, BooleanLock, Logger, and thread_function_runner are
# provided by the surrounding package.


class SizedBufferBackPressureStrategy(Observer):
    """Back-pressure strategy that buffers events up to a fixed size while the
    wrapped observer is busy and drops events once the buffer is full."""

    counter: Counter = Counter()

    def __init__(self, wrapped_observer: Observer, cache_size: int):
        self.wrapped_observer: Observer = wrapped_observer
        self.__function_runner = thread_function_runner
        self.__lock: Lock = BooleanLock()
        self.__cache_size: Optional[int] = cache_size
        self.__message_cache: List = []
        self.__error_cache: List = []
        self.__logger = Logger()

    @counter.processed_event
    @counter.time
    def on_next(self, message):
        # While an event is in flight, buffer new messages (or drop them once the
        # buffer is full); otherwise take the lock and process asynchronously.
        if self.__lock.is_locked():
            if not self.__update_cache(self.__message_cache, message):
                self.__logger.warning("value not added, buffer full")
        else:
            self.__lock.lock()
            self.__function_runner(self, self.__on_next, message)

    @staticmethod
    def __on_next(self, message: Any):
        self.wrapped_observer.on_next(message)
        # Drain the buffer one message at a time; release the lock once it is empty.
        if len(self.__message_cache) > 0:
            self.__function_runner(self, self.__on_next, self.__message_cache.pop(0))
        else:
            self.__lock.unlock()

    def on_error(self, error: Any):
        if self.__lock.is_locked():
            if not self.__update_cache(self.__error_cache, error):
                self.__logger.warning("value not added, buffer full")
        else:
            self.__lock.lock()
            self.__function_runner(self, self.__on_error, error)

    @staticmethod
    def __on_error(self, error: Any):
        self.wrapped_observer.on_error(error)
        if len(self.__error_cache) > 0:
            self.__function_runner(self, self.__on_error, self.__error_cache.pop(0))
        else:
            self.__lock.unlock()

    @counter.dropped_event
    def __update_cache(self, cache: List, item: Any) -> bool:
        # Append only while the buffer has room; report whether the item was kept.
        if self.__cache_size is None or len(cache) < self.__cache_size:
            cache.append(item)
            return True
        return False

    def on_completed(self):
        self.wrapped_observer.on_completed()

    def is_locked(self):
        return self.__lock.is_locked()
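
# Minimal usage sketch (not part of the original source): `PrintObserver` is a
# hypothetical Observer used only to show how the strategy wraps another observer;
# the supporting classes are assumed to come from the surrounding package.
#
#     class PrintObserver(Observer):
#         def on_next(self, message):
#             print("next:", message)
#
#         def on_error(self, error):
#             print("error:", error)
#
#         def on_completed(self):
#             print("completed")
#
#     observer = SizedBufferBackPressureStrategy(PrintObserver(), cache_size=100)
#     observer.on_next("event-1")   # processed asynchronously via thread_function_runner
#     observer.on_next("event-2")   # buffered (or dropped) while the first event is in flight
#     observer.on_completed()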
ARGS = parser.parse_args()

log = Logger("PREDICT", ARGS.debug, ARGS.log_dir)

"""
Main script to compute predictions using a trained model together with the given input.
"""

if __name__ == "__main__":
    if ARGS.checkpoint_path is not None:
        log.info(
            "Restoring checkpoint from {} instead of using a model file.".format(
                ARGS.checkpoint_path
            )
        )
        checkpoint = torch.load(ARGS.checkpoint_path)
        model = UNet(1, 1, bilinear=False)
        model.load_state_dict(checkpoint["modelState"])
        log.warning(
            "Using default preprocessing options. Provide a model file if they were changed."
        )
        dataOpts = DefaultSpecDatasetOps
    else:
        if ARGS.jit_load:
            # Load the TorchScript model and recover the data options stored alongside it.
            extra_files = {"dataOpts": ""}
            model = torch.jit.load(ARGS.model_path, _extra_files=extra_files)
            unetState = model.state_dict()
            dataOpts = eval(extra_files["dataOpts"])
            log.debug("Model successfully loaded via torch jit: " + str(ARGS.model_path))
        else:
            model_dict = torch.load(ARGS.model_path)
            model = UNet(1, 1, bilinear=False)
            model.load_state_dict(model_dict["unetState"])
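
# Example invocations (assumptions: the script name "predict.py" and the exact flag
# spellings are not shown in this excerpt; the options mirror the ARGS fields used above):
#
#     python predict.py --model_path trained_model.pk --debug --log_dir logs/
#     python predict.py --checkpoint_path checkpoints/ckpt.tar --debug --log_dir logs/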