def jail_func(crypten=crypten):  # pragma: no cover
    """Combine the two parties' shares of "crypten_data" and reveal the sum.

    Loads the share stored by source 0 and the share stored by source 1,
    adds them under encryption, and returns the decrypted plaintext result.
    """
    share_zero = crypten.load("crypten_data", 0)
    share_one = crypten.load("crypten_data", 1)
    combined = share_zero + share_one
    return combined.get_plain_text()
def test_save_load_module(self):
    """Test that crypten.save and crypten.load properly save and load modules"""
    import tempfile

    comm = crypten.communicator
    for model_type in [TestModule, NestedTestModule]:
        # Create models with different parameter values on each rank
        rank = comm.get().get_rank()
        test_model = model_type(200, 10)
        test_model.set_all_parameters(rank)
        # NOTE(review): NamedTemporaryFile(delete=True) may remove the file as
        # soon as the handle is garbage-collected; only the generated name is
        # used here, and crypten.save recreates the file afterwards.
        filename = tempfile.NamedTemporaryFile(delete=True).name
        for src in range(comm.get().get_world_size()):
            # Save from `src`, then reload into a freshly constructed model of
            # the same architecture (the "dummy" receives the broadcast weights).
            crypten.save(test_model, filename, src=src)
            dummy_model = model_type(200, 10)
            result = crypten.load(filename, dummy_model=dummy_model, src=src)
            if src == rank:
                # The saving rank set every parameter to its own rank value,
                # so the reloaded parameters must all equal `rank` here.
                for param in result.parameters(recurse=True):
                    self.assertTrue(
                        param.eq(rank).all().item(), "Model load failed")
            self.assertEqual(result.src, src)
            # A dummy with a mismatched architecture (output 11 vs 10) must
            # make crypten.load raise.
            failure_dummy_model = model_type(200, 11)
            with self.assertRaises(AssertionError, msg="Expected load failure not raised"):
                result = crypten.load(filename, dummy_model=failure_dummy_model, src=src)
def jointly_train():
    """Run the two-party training pipeline: encrypt the digit data, then
    train an encrypted logistic-regression model on ALICE's images and
    BOB's labels."""
    encrypt_digits()
    # Each party contributes its own half of the dataset as a source.
    images_enc = crypten.load("/tmp/data/alice_images.pth", src=ALICE)
    labels_enc = crypten.load("/tmp/data/bob_labels.pth", src=BOB)
    encrypted_model = LogisticRegression().encrypt()
    model = train_model(encrypted_model, images_enc, labels_enc)
def plan_func(crypten=crypten):
    """Add the shares stored at sources 1 and 2 of "crypten_data" and
    return the decrypted result."""
    first_share = crypten.load("crypten_data", 1)
    second_share = crypten.load("crypten_data", 2)
    total = first_share + second_share
    return total.get_plain_text()
def test(self):
    """Two-party encrypted inference demo on a COVID ResNet-32 model.

    Rank 0 (ALICE) supplies the trained checkpoint; rank 1 (BOB) supplies the
    input image. Both ranks jointly evaluate the encrypted model; BOB prints
    the reconstructed output and the elapsed wall-clock time.
    """
    resnet_model = resnet32(1)
    # Preprocessing: path -> RGB image -> 32x32 normalized tensor.
    transform = transforms.Compose([
        lambda x: Image.open(x).convert('RGB'),
        transforms.Resize((32, 32)),
        transforms.ToTensor(),
        transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
    ])
    if self.rank == 0:  # ALICE
        # ALICE is the model owner: loads the real checkpoint (src=0).
        model = crypten.load(
            './SCML_Project/training/models/covid_resnet32/checkpoint_cpu_cpu.pt',
            dummy_model=resnet_model, src=0)
        dummy_input = torch.empty((1, 3, 32, 32))
        private_model = crypten.nn.from_pytorch(model.double(), dummy_input.double())
        private_model.encrypt(src=0)
        # ALICE's input share (src=1 means BOB provides the actual data).
        data_enc = crypten.cryptensor(dummy_input, src=1)
        private_model.eval()
        start = time.time()
        output_enc = private_model(data_enc)
        # Send ALICE's output share to BOB so BOB can reconstruct the result.
        comm.get().send_obj(output_enc.share, 1)
        end = time.time()
    else:  # BOB
        # BOB loads a placeholder model; real weights are broadcast from src=0.
        model = crypten.load(
            './SCML_Project/training/models/covid_resnet32/resnet_random.pt',
            dummy_model=resnet_model, src=0)
        dummy_input = torch.empty((1, 3, 32, 32))
        private_model = crypten.nn.from_pytorch(model.double(), dummy_input.double())
        private_model.encrypt(src=0)
        # BOB is the data owner: encrypts the actual image (src=1).
        data_enc = crypten.cryptensor(
            transform('./SCML_Project/training/dataset/COVID/COVID-1.png'
                      ).unsqueeze(0), src=1)
        private_model.eval()
        start = time.time()
        output_enc = private_model(data_enc)
        # Receive ALICE's share; adding the two shares reveals the output.
        done = comm.get().recv_obj(0)
        print("Bob received: ", output_enc.share + done)
        end = time.time()
    print('Time: ', end - start)
def plan_func_model(model=None, crypten=crypten):  # noqa: F821
    """Run the supplied `model` on the shared tensor "crypten_data".

    Encrypts the model, evaluates it on the loaded share, decrypts the model
    again, and returns the (model, plaintext output) pair.
    """
    data = crypten.load("crypten_data", 0)
    model.encrypt()
    prediction = model(data)
    model.decrypt()
    plain = prediction.get_plain_text()
    return model, plain
def run_encrypted_eval():  # pragma: no cover
    """Encrypt the externally injected `model`, evaluate it on the shared
    tensor "crypten_data", and return the model with the plaintext output.

    `model` is resolved at run time from the executing scope (hence the
    noqa markers); this function is meant to run inside a crypten worker.
    """
    rank = crypten.communicator.get().get_rank()  # queried but unused here
    data = crypten.load("crypten_data", 0)
    model.encrypt()  # noqa: F821
    result = model(data)  # noqa: F821
    model.decrypt()  # noqa: F821
    plain = result.get_plain_text()
    return model, plain  # noqa: F821
def test_load():
    """Load one saved tensor per rank and print the first one.

    Every party iterates over all ranks, loading ``test_<src>.pth`` with
    that rank as the broadcast source, then prints the first loaded
    cryptensor and its decrypted value.
    """
    ws = mpc_comm.get().world_size
    rank = mpc_comm.get().get_rank()
    print(rank)
    # FIX: the original loop reused `rank` as its index, silently clobbering
    # this process's own rank; use a distinct name for the source rank and
    # build the list with a comprehension instead of append-in-a-loop.
    data = [crypten.load(f"test_{src}.pth", src=src) for src in range(ws)]
    print(data[0])
    print(data[0].get_plain_text())
def plan_func_model(crypten=crypten):  # noqa: F821
    """Load shared input data and a crypten model available at all parties,
    run encrypted inference, and return the decrypted output."""
    inputs = crypten.load("crypten_data", 0)
    # This should load the crypten model that is found at all parties
    net = crypten.load_model("crypten_model")
    net.encrypt()
    prediction = net(inputs)
    net.decrypt()
    return prediction.get_plain_text()
def test_save_load(self):
    """Test that crypten.save and crypten.load properly save and load shares of cryptensors"""
    import io
    import pickle

    # Custom closures exercise the pluggable (de)serialization hooks of
    # crypten.save / crypten.load alongside the default torch ones.
    def custom_load_function(f):
        obj = pickle.load(f)
        return obj

    def custom_save_function(obj, f):
        pickle.dump(obj, f)

    all_save_fns = [torch.save, custom_save_function]
    all_load_fns = [torch.load, custom_load_function]
    tensor = get_random_test_tensor()
    cryptensor1 = crypten.cryptensor(tensor)
    for i, save_closure in enumerate(all_save_fns):
        load_closure = all_load_fns[i]
        # One in-memory buffer per rank; each rank round-trips its own share.
        f = [
            io.BytesIO() for i in range(crypten.communicator.get().get_world_size())
        ]
        crypten.save(cryptensor1, f[self.rank], save_closure=save_closure)
        f[self.rank].seek(0)
        cryptensor2 = crypten.load(f[self.rank], load_closure=load_closure)
        # test whether share matches
        self.assertTrue(cryptensor1.share.allclose(cryptensor2.share))
        # test whether tensor matches
        self.assertTrue(
            cryptensor1.get_plain_text().allclose(cryptensor2.get_plain_text())
        )
        # Compare every non-callable public attribute of the two cryptensors,
        # excluding the raw share/tensor/ctx fields already checked above.
        attributes = [
            a for a in dir(cryptensor1)
            if not a.startswith("__")
            and not callable(getattr(cryptensor1, a))
            and a not in ["share", "_tensor", "ctx"]
        ]
        for a in attributes:
            attr1, attr2 = getattr(cryptensor1, a), getattr(cryptensor2, a)
            if a == "encoder":
                # Encoders compare by their scale/precision configuration.
                self.assertTrue(attr1._scale == attr2._scale)
                self.assertTrue(attr1._precision_bits == attr2._precision_bits)
            elif torch.is_tensor(attr1):
                self.assertTrue(attr1.eq(attr2).all())
            else:
                self.assertTrue(attr1 == attr2)
def test_save_load(self):
    """Test that crypten.save and crypten.load properly save and load tensors"""
    import tempfile

    # Each rank builds tensors with a rank-dependent shape; after a
    # save/load round-trip from `src`, every party must end up holding
    # src's tensor.
    path = tempfile.NamedTemporaryFile(delete=True).name
    for ndims in range(1, 5):
        shape = tuple([self.rank + 1] * ndims)
        local_tensor = torch.randn(size=shape)
        for src in range(crypten.communicator.get().get_world_size()):
            crypten.save(local_tensor, path, src=src)
            loaded = crypten.load(path, src=src)
            expected_shape = tuple([src + 1] * ndims)
            self.assertEqual(loaded.size(), expected_shape)
            if self.rank == src:
                reference = local_tensor
            else:
                reference = torch.empty(size=[src + 1] * ndims)
            # Broadcast src's plaintext so every rank can verify the load.
            dist.broadcast(reference, src=src)
            self._check(loaded, reference, "crypten.load() failed")
def run_prediction(port0: int = 8080, model_name: str = "LeNet",
                   model_file: Optional[str] = None):
    """Serve an encrypted cancer-prediction loop over a small web app.

    The PREDICTOR rank owns the trained model; the PATIENT rank supplies
    images via the web queue. Each image is classified under encryption and
    the PREDICTOR prints one CSV row per doctor decision to stdout.
    Runs forever (``while True``) until the process is killed.
    """
    rank = comm.get().get_rank()
    # create empty model
    if hasattr(models, model_name):
        # torchvision-style constructor; assumes it accepts these kwargs —
        # TODO confirm for every model name passed in.
        dummy_model = getattr(models, model_name)(pretrained=False, num_classes=2)
        img_size = 224
    elif model_name == "LeNet":
        dummy_model = LeNet(num_classes=2)
        img_size = 32
    elif model_name == "BigLeNet":
        dummy_model = BigLeNet(num_classes=2)
        img_size = 64
    else:
        raise NotImplementedError(f"No model named {model_name} available")
    print_stderr(f"{rank} LOADED empty")
    if model_file is None:
        model_file = f"models/{model_name}.pth"
    # start web interface
    image_queue = start_web_app(port0, img_size)
    input_queue = image_queue.queue
    # Load pre-trained model to PREDICTOR
    # For demo purposes, we don't pass model_name to PATIENT, although it would
    # be ignored in crypten.load
    if rank == PREDICTOR:
        model = crypten.load(model_file, dummy_model=dummy_model, src=PREDICTOR)
        print_stderr(f"{rank} LOADED model")
    else:
        model = crypten.load(None, dummy_model=dummy_model, src=PREDICTOR)
        print_stderr(f"{rank} BROADCAST model")
    # Encrypt model from PREDICTOR or dummy
    dummy_input = torch.zeros((1, 3, img_size, img_size))
    private_model = crypten.nn.from_pytorch(model, dummy_input)
    private_model.encrypt(src=PREDICTOR)
    print_stderr(f"{rank} ENCRYPTED {private_model.encrypted}")
    # Load image to PATIENT.. dummy_input for testing
    data_enc = crypten.cryptensor(dummy_input)
    # classify the encrypted data
    with torch.no_grad():
        private_model.eval()
        output_enc = private_model(data_enc)
    # print_stderr output
    output = output_enc.get_plain_text()
    print_stderr(f"{rank} TEST OUTPUT {output})")
    if rank == PREDICTOR:
        print_stderr(f"providing .csv header")
        print("input_time,decision_time,doctor_is_cancer,predictor_is_cancer")
    with torch.no_grad():
        while True:
            # Block until the web app delivers the next (image, timestamp).
            tensor_image_or_empty, input_time = input_queue.get()
            # NOTE(review): variable name is a pre-existing typo ("encrpyted");
            # kept as-is to avoid touching behavior-adjacent code.
            encrpyted_image = crypten.cryptensor(tensor_image_or_empty.unsqueeze(0), src=PATIENT)
            output_enc = private_model(encrpyted_image)
            output = output_enc.get_plain_text()
            # Class index 1 is taken as the "cancer" probability.
            probabilities = torch.softmax(output, dim=1)[0]
            prediction_is_cancer = probabilities[1].cpu().item()
            print_stderr(f"{rank} PRED {prediction_is_cancer:.2f}% cancer @ {input_time.strftime('%Y-%m-%dT%H:%M:%S')} "
                         f"(image mean: {tensor_image_or_empty.mean().item()})")
            if rank == PREDICTOR:
                # Show the prediction to the doctor and wait for a verdict.
                q_predictor: ImageQueueDoctor = image_queue
                q_predictor.current_pred_cancer = prediction_is_cancer
                answer_queue = q_predictor.answer_queue
                print_stderr(f"{rank} Waiting for final decision on http://localhost:{port0 + 1}/decision")
                doctor_is_cancer, decision_time = answer_queue.get()
                print_stderr(f"{rank} Thanks, you were saying there {'IS' if doctor_is_cancer else 'is NO'} CANCER")
                # input_time,decision_time,doctor_is_cancer,predictor_is_cancer
                csv_string = f"{input_time.strftime('%Y-%m-%dT%H:%M:%S')},{decision_time.strftime('%Y-%m-%dT%H:%M:%S')},{int(doctor_is_cancer)},{float(prediction_is_cancer)}"
                print_stderr(f"{rank} appending: " + csv_string)
                print(csv_string)