def test_inference_initialization(tiny_model_2d):
    """Smoke test: an InferenceProcess can be constructed and shut down.

    Renamed from ``test_initialization``: a later ``test_initialization``
    (for DryRunProcess) in this module shadowed this definition, so pytest
    never collected or ran this test.
    """
    config = tiny_model_2d["config"]
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    ip = InferenceProcess(config=config, model=model)
    # Release the process's resources; no work was submitted.
    ip.shutdown()
def test_training(tiny_model_2d):
    """Run a short CPU training session on a tiny 2d conv net.

    Submits a two-sample dataset, resumes training, waits, then shuts down.
    """
    import time

    config = tiny_model_2d["config"]
    config["num_iterations_per_update"] = 10
    in_channels = config["input_channels"]

    training = TrainingProcess(config=config, model=TinyConvNet2d(in_channels=in_channels))
    try:
        training.set_devices([torch.device("cpu")])
        # Two samples of different spatial sizes, with matching uint8 labels.
        sample_small = TikTensor(torch.zeros(in_channels, 15, 15), ((1,), (1,)))
        sample_tiny = TikTensor(torch.ones(in_channels, 9, 9), ((2,), (2,)))
        label_small = TikTensor(torch.ones(in_channels, 15, 15, dtype=torch.uint8), ((1,), (1,)))
        label_tiny = TikTensor(torch.full((in_channels, 9, 9), 2, dtype=torch.uint8), ((2,), (2,)))
        training.update_dataset(
            "training",
            TikTensorBatch([sample_small, sample_tiny]),
            TikTensorBatch([label_small, label_tiny]),
        )
        training.resume_training()
        # Give the background training loop time to consume the dataset.
        time.sleep(10)
    finally:
        training.shutdown()
def test_training_in_proc(tiny_model_2d, log_queue):
    """Drive training in a separate OS process through an RPC client.

    Fix: the spawned worker process was never joined, leaking a child
    process (and potentially hanging interpreter teardown) whenever it
    outlived the test. The worker is now reaped in ``finally``.
    """
    config = tiny_model_2d["config"]
    config["num_iterations_per_update"] = 10
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    handler_conn, training_conn = mp.Pipe()
    p = mp.Process(
        target=run,
        kwargs={"conn": training_conn, "model": model, "config": config, "log_queue": log_queue},
    )
    p.start()
    client = create_client(ITraining, handler_conn)
    try:
        client.set_devices([torch.device("cpu")])
        data = TikTensorBatch([
            TikTensor(torch.zeros(in_channels, 15, 15), ((1,), (1,))),
            TikTensor(torch.ones(in_channels, 9, 9), ((2,), (2,))),
        ])
        labels = TikTensorBatch([
            TikTensor(torch.ones(in_channels, 15, 15, dtype=torch.uint8), ((1,), (1,))),
            TikTensor(torch.full((in_channels, 9, 9), 2, dtype=torch.uint8), ((2,), (2,))),
        ])
        client.update_dataset("training", data, labels)
        client.resume_training()
    finally:
        client.shutdown()
        # Reap the worker; force-terminate if shutdown did not stop it in time.
        p.join(timeout=20)
        if p.is_alive():
            p.terminate()
            p.join()
def test_dry_run_initialization(tiny_model_2d):
    """Smoke test: a DryRunProcess can be constructed and shut down.

    Renamed from ``test_initialization``: this module defined two functions
    with that name (the other for InferenceProcess), so one silently
    shadowed the other and only one of the two tests ever ran.
    """
    config = tiny_model_2d["config"]
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    dr = DryRunProcess(config=config, model=model)
    # Release the process's resources; no dry run was requested.
    dr.shutdown()
def test_set_devices(tiny_model_2d):
    """TrainingProcess accepts a CPU device assignment.

    Fix: ``set_devices`` was called outside any cleanup guard, so a failure
    there leaked the TrainingProcess. Now consistent with the other tests
    in this module that use try/finally around shutdown.
    """
    config = tiny_model_2d["config"]
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    tp = TrainingProcess(config=config, model=model)
    try:
        tp.set_devices([torch.device("cpu")])
    finally:
        tp.shutdown()
def test_inference2d(tiny_model_2d):
    """Forward a zero tensor through InferenceProcess and get a TikTensor back.

    Fix: shutdown was only reached on full success — a failing forward or
    assertion leaked the inference process. Cleanup now runs in ``finally``,
    matching the other tests in this module.
    """
    config = tiny_model_2d["config"]
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    inference = InferenceProcess(config=config, model=model)
    try:
        inference.set_devices([torch.device("cpu")])
        data = TikTensor(torch.zeros(in_channels, 15, 15), (0,))
        pred = inference.forward(data)
        # forward() returns a future; block briefly for the prediction.
        assert isinstance(pred.result(timeout=10), TikTensor)
    finally:
        inference.shutdown()
def test_minimal_device_test(tiny_model_2d):
    """DryRunProcess.minimal_device_test reports success for the CPU device."""
    cfg = tiny_model_2d["config"]
    net = TinyConvNet2d(in_channels=cfg["input_channels"])
    dry_run = DryRunProcess(config=cfg, model=net)
    try:
        assert dry_run.minimal_device_test([torch.device("cpu")])
    finally:
        dry_run.shutdown()
def test_given_training_shape(tiny_model_2d):
    """Dry run completes when the training shape is pinned to a fixed size."""
    cfg = tiny_model_2d["config"]
    # Pin lower and upper bound to the same shape so no search is needed.
    fixed_shape = (1, 15, 15)
    cfg[TRAINING][TRAINING_SHAPE] = fixed_shape
    cfg[TRAINING][TRAINING_SHAPE_UPPER_BOUND] = fixed_shape
    proc = DryRunProcess(config=cfg, model=TinyConvNet2d(in_channels=cfg["input_channels"]))
    try:
        # Raises (via the future) if the dry run fails within the timeout.
        proc.dry_run(devices=[torch.device("cpu")]).result(timeout=30)
    finally:
        proc.shutdown()
def test_malicious_training_shape(tiny_model_2d):
    """An invalid TRAINING_SHAPE (zero-sized dim, above the upper bound) must
    surface as a ValueError on the dry-run future."""
    cfg = tiny_model_2d["config"]
    cfg[TRAINING][TRAINING_SHAPE] = (1, 0, 20)
    cfg[TRAINING][TRAINING_SHAPE_UPPER_BOUND] = (1, 2, 2)
    proc = DryRunProcess(config=cfg, model=TinyConvNet2d(in_channels=cfg["input_channels"]))
    try:
        future = proc.dry_run(devices=[torch.device("cpu")])
        assert isinstance(future.exception(timeout=20), ValueError)
    finally:
        proc.shutdown()
def test_inference2d_in_proc(tiny_model_2d, log_queue):
    """Forward a tensor through an InferenceProcess running in a child process.

    Fix: the spawned worker process was never joined, leaking a child
    process if ``client.shutdown()`` did not stop it. The worker is now
    reaped in ``finally``, mirroring the training-in-proc test.
    """
    config = tiny_model_2d["config"]
    in_channels = config["input_channels"]
    model = TinyConvNet2d(in_channels=in_channels)
    handler_conn, inference_conn = mp.Pipe()
    p = mp.Process(
        target=run,
        kwargs={"conn": inference_conn, "model": model, "config": config, "log_queue": log_queue},
    )
    p.start()
    client = create_client(IInference, handler_conn)
    try:
        client.set_devices([torch.device("cpu")])
        data = TikTensor(torch.zeros(in_channels, 15, 15), (0,))
        f = client.forward(data)
        # Block briefly so a failed prediction fails the test.
        f.result(timeout=10)
    finally:
        client.shutdown()
        # Reap the worker; force-terminate if shutdown did not stop it in time.
        p.join(timeout=20)
        if p.is_alive():
            p.terminate()
            p.join()