def do(ct_size: int, batch_size: int) -> None:
    """Data-owner side of the duet benchmark: encrypt `ct_size` CKKS vectors
    and send them to the duet partner in batches of `batch_size`, timing the
    send phase.

    NOTE(review): relies on module-level names not visible in this chunk —
    `sys`, `time`, `PORT` (duet signaling port) and `chunks` (batching
    helper) — presumably defined at the top of this script; confirm.
    """
    # third party
    import numpy as np
    import tenseal as ts

    # syft absolute
    import syft as sy

    sy.load("tenseal")
    sy.logger.add(sys.stderr, "ERROR")
    # Launch the duet server side on the loopback interface; the DS joins
    # through the same local signaling port.
    duet = sy.launch_duet(loopback=True, network_url=f"http://127.0.0.1:{PORT}/")
    # Auto-accept every object request coming from the partner.
    duet.requests.add_handler(action="accept")
    context = ts.context(ts.SCHEME_TYPE.CKKS, 8192, coeff_mod_bit_sizes=[60, 40, 40, 60], n_threads=1)
    context.global_scale = pow(2, 40)
    # One shared plaintext; each ciphertext encrypts the same 100 values.
    data = np.random.uniform(-10, 10, 100)
    enc = []
    for i in range(ct_size):
        enc.append(ts.ckks_vector(context, data))
    start = time.time()
    # The context goes first (store slot 0) so the DS can link the vectors
    # back to it after deserialization.
    _ = context.send(duet, pointable=True)
    for chunk in chunks(enc, batch_size):
        _ = sy.lib.python.List(chunk).send(duet, pointable=True)
    sys.stderr.write(
        f"[{ct_size}][{batch_size}] DO sending took {time.time() - start} sec\n"
    )
    # Keep serving the duet event loop so the partner can fetch the objects.
    sy.core.common.event_loop.loop.run_forever()
def test_remote_gym(root_client: sy.VirtualMachineClient) -> None:
    """A remote CartPole env mirrors a local one when seeded identically."""
    sy.load("gym")
    sy.load("numpy")

    # third party
    import gym
    import numpy as np

    remote_gym = root_client.gym

    local_env = gym.make("CartPole-v0")
    proxy_env = remote_gym.make("CartPole-v0")
    local_env.seed(42)
    proxy_env.seed(42)

    assert proxy_env.__name__ == "TimeLimitPointer"

    # Same seed -> same initial observation on both sides.
    local_start = local_env.reset()
    proxy_start = proxy_env.reset().get()
    assert np.array_equal(local_start, proxy_start)

    # A single identical action must yield an identical transition.
    step_local = local_env.step(0)
    step_remote = proxy_env.step(0).get()
    assert np.array_equal(step_local[0], step_remote[0])  # observation
    assert step_local[1] == step_remote[1]  # reward
    assert step_local[2] == step_remote[2]  # done flag
    assert step_local[3] == step_remote[3]  # info dict
def ds(ct_size: int, batch_size: int) -> None:
    """Data-scientist side of the duet benchmark: fetch the CKKS context and
    `ct_size` encrypted vectors (in `batch_size` chunks) from the partner,
    timing the fetch phase.

    NOTE(review): relies on module-level names not visible in this chunk —
    `sys`, `time` and `PORT` (duet signaling port) — presumably defined at
    the top of this script; confirm.
    """
    # syft absolute
    import syft as sy

    sy.load("tenseal")
    sy.logger.add(sys.stderr, "ERROR")
    duet = sy.join_duet(loopback=True, network_url=f"http://127.0.0.1:{PORT}/")
    # Give the data owner time to publish its store before pulling.
    time.sleep(10)
    cnt = int(ct_size / batch_size)
    start = time.time()
    # store[0] is the TenSEAL context, sent first by the data owner.
    ctx = duet.store[0].get(request_block=True, delete_obj=False)
    for idx in range(1, cnt + 1):
        data = duet.store[idx].get(request_block=True, delete_obj=False)
        for tensor in data:
            # Deserialized vectors must be re-attached to their context.
            tensor.link_context(ctx)
        assert len(data) == batch_size, len(data)
    sys.stderr.write(
        f"[{ct_size}][{batch_size}] DS get took {time.time() - start} sec\n")
def test_slice_dataframe(root_client: sy.VirtualMachineClient) -> None:
    """Reversing a remote DataFrame via a [::-1] slice on the pointer."""
    sy.load("pandas")

    # third party
    import pandas as pd

    source: Dict[str, Dict] = {
        "col_1": {0: 3, 1: 2, 2: 1, 3: 0},
        "col_2": {0: "a", 1: "b", 2: "c", 3: "d"},
    }
    frame = pd.DataFrame.from_dict(source)
    frame_ptr = frame.send(root_client)

    # Slice on the pointer to reverse the rows, then fetch the result back.
    reversed_frame = frame_ptr[::-1].get()
    reversed_data = reversed_frame.to_dict()

    for col in ("col_1", "col_2"):
        # The reversed column differs from the original ...
        assert OrderedDict(reversed_data[col]) != OrderedDict(source[col])
        # ... and matches the original items in reverse order.
        assert OrderedDict(reversed_data[col]) == OrderedDict(
            reversed(list(source[col].items()))
        )
def test_pandas(root_client: sy.VirtualMachineClient) -> None:
    """A DataFrame built from a dict survives a send/get round trip."""
    sy.load("pandas")

    # third party
    import pandas as pd

    source = {
        "col_1": {0: 3, 1: 2, 2: 1, 3: 0},
        "col_2": {0: "a", 1: "b", 2: "c", 3: "d"},
    }
    frame = pd.DataFrame.from_dict(source)

    # Round-trip through the VM and compare the dict representation.
    round_tripped = frame.send(root_client).get()
    assert round_tripped.to_dict() == source
def test_logistic_model_serde(root_client: sy.VirtualMachineClient) -> None:
    """A fitted sklearn LogisticRegression survives a send/get round trip.

    Every attribute of the original estimator is compared against the
    round-tripped copy.
    """
    sy.load("sklearn")
    sy.load("numpy")

    # third party
    import numpy as np

    X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
    y = np.array([0, 0, 1, 1])

    # third party
    from sklearn.linear_model import LogisticRegression

    clf = LogisticRegression(random_state=0).fit(X, y)
    clf_remote = clf.send(root_client)
    clf_2 = clf_remote.get()

    dict_1 = vars(clf)
    dict_2 = vars(clf_2)
    for key, value in dict_1.items():
        if isinstance(value, float):
            # Scalar floats may lose a little precision in serde.
            assert abs(value - dict_2[key]) < 0.0001
        elif isinstance(value, np.ndarray):
            # BUGFIX: the old check compared `a.all() == b.all()`, which
            # reduces each array to a single bool and passes for completely
            # different arrays. Compare element-wise instead (with a small
            # tolerance for float arrays such as coef_/intercept_).
            assert np.allclose(value, dict_2[key])
        else:
            assert value == dict_2[key]
def test_glm(root_client: sy.VirtualMachineClient) -> None:
    """Fit a GLM locally and remotely for every family/link combination and
    check that both produce the same summary CSV.

    NOTE(review): relies on module-level names not visible in this chunk —
    `statsmodels`, `pd` and `re` — presumably imported at the top of the file.
    """
    # All GLM families exposed by statsmodels; each is paired below with
    # every link function it supports.
    FAMILY = [
        statsmodels.genmod.families.Binomial,
        statsmodels.genmod.families.Gamma,
        statsmodels.genmod.families.Gaussian,
        statsmodels.genmod.families.InverseGaussian,
        statsmodels.genmod.families.NegativeBinomial,
        statsmodels.genmod.families.Poisson,
        statsmodels.genmod.families.Tweedie,
    ]
    # Strips the run-dependent Time/Date rows from the summary CSV so the
    # local and remote outputs can be compared verbatim.
    UNNECESSARY_STR = r"Time(.*)(?=Pearson)|Date(.*)(?=Deviance)"
    sy.load("pandas")
    sy.load("statsmodels")
    # load data (wine-quality dataset, tab-separated)
    url = "https://raw.githubusercontent.com/chemo-wakate/tutorial-6th/master/beginner/data/winequality-red.txt"
    df = pd.read_csv(url, sep="\t")
    # Binarize quality: 1 for "good" wine (>= 6), else 0.
    df["quality"] = df["quality"].apply(lambda x: 1 if x >= 6 else 0)
    # Subsample for speed; fixed seed keeps the test deterministic.
    df = df.sample(100, random_state=42)
    df_ptr = df.send(root_client)
    # explanatory variable
    x = df["fixed acidity"]
    x_ptr = df_ptr["fixed acidity"]
    # add constant (intercept term), locally and on the remote side
    _x = statsmodels.api.add_constant(x)
    _x_ptr = root_client.statsmodels.api.add_constant(x_ptr)
    # dependent variable
    _y = df["quality"]
    _y_ptr = df_ptr["quality"]
    # test all possible combinations of families and links
    for family in FAMILY:
        for link in family.links:
            print(family, link)
            model = statsmodels.genmod.generalized_linear_model.GLM(
                _y, _x, family=family(link=link()))
            result = model.fit()
            summary = result.summary().as_csv()
            remote_model = root_client.statsmodels.genmod.generalized_linear_model.GLM(
                _y_ptr, _x_ptr, family=family(link=link()))
            remote_result = remote_model.fit()
            # `get` corresponds to `summary().as_csv()`
            remote_summary = remote_result.get()
            # remove unnecessary strings such as processing time and date
            summary = re.sub(UNNECESSARY_STR, "", summary)
            remote_summary = re.sub(UNNECESSARY_STR, "", remote_summary)
            assert summary == remote_summary
def test_pydp(root_client: sy.VirtualMachineClient) -> None:
    """BoundedMean.quick_result computed remotely returns a plausible DP mean."""
    sy.load("pydp")

    bounded_mean_ptr = root_client.pydp.algorithms.laplacian.BoundedMean(1, 1, 50)
    raw_values = [1, 88, 100, 5, 40, 30, 29, 56, 88, 23, 5, 1] * 100
    values_ptr = root_client.python.List(raw_values)

    result = bounded_mean_ptr.quick_result(values_ptr).get()
    # The clipped mean sits near 28; the Laplace noise keeps it within
    # this window with overwhelming probability.
    assert 22 < result < 35
def test_pydp_functions(root_client: sy.VirtualMachineClient) -> None:
    """add_entries/result consume privacy budget that can be queried remotely."""
    sy.load("pydp")

    mean_ptr = root_client.pydp.algorithms.laplacian.BoundedMean(1, 1, 50)
    values_ptr = root_client.python.List(
        [1, 88, 100, 5, 40, 30, 29, 56, 88, 23, 5, 1] * 100
    )

    mean_ptr.add_entries(values_ptr)
    assert isinstance(mean_ptr.result(0.7).get(), float)

    # Spending 0.7 of the budget of 1.0 leaves 0.3 (up to float rounding).
    assert round(mean_ptr.privacy_budget_left().get(), 2) == 0.3
def test_send_and_get(root_client: sy.VirtualMachineClient) -> None:
    """A PIL image survives a send/get round trip unchanged."""
    # third party
    import PIL

    sy.load("PIL")

    original = PIL.Image.open(LOGO_URL)
    round_tripped = original.send(root_client).get()

    # Identical images produce an empty (None) difference bounding box.
    assert PIL.ImageChops.difference(original, round_tripped).getbbox() is None
def test_allowlist(
    root_client: sy.VirtualMachineClient, tens: torch.Tensor, pil_img: PIL.Image.Image
) -> None:
    """Exercise every torchvision allowlist entry that has test parameters.

    Each allowlist item is executed via `exec` against the remote
    torchvision/torch handles, with the argument string looked up in the
    sibling `*_params.json` file. Dataset entries are allowed to fail with
    download/filesystem errors because test machines may not have the data.
    """
    # Required for testing on torchvision==1.6.0
    sy.load("PIL")
    torchvision = root_client.torchvision
    torch = root_client.torch
    # Smoke-test the remote torch handle; failures here are only printed —
    # the allowlist loop below is the real assertion target.
    try:
        tx = torch.rand(4)
        tx = tx * 2
    except Exception as e:
        print(e)
    # Load the per-item argument strings from the JSON file sitting next to
    # this test module (".py" -> "_params.json"). Failure to load is fatal.
    try:
        with open(__file__.replace(".py", "_params.json"), "r") as f:
            TEST_PARAMS = json.loads(f.read())
    except Exception as e:
        print(f"Exception {e} triggered")
        raise e
    transforms = torchvision.transforms
    transforms.RandomAffine(2)
    for item in allowlist:
        arr = item.split(".")
        # print(item)
        # Dataset constructors (e.g. torchvision.datasets.*) may need files
        # that are not present locally; tolerate the well-known error texts.
        if (
            arr[1] == "datasets"
            and len(arr) <= 3
            and item in TEST_PARAMS.keys()
            and version_supported(support_dict=allowlist[item])
        ):
            try:
                exec(item + TEST_PARAMS[item])
            except RuntimeError as e:
                assert (
                    "not found" in str(e)
                    or "not present in the root directory" in str(e)
                    or "does not exist" in str(e)
                )
            except FileNotFoundError as e:
                assert "No such file or directory" in str(
                    e
                ) or "cannot find the path" in str(e)
            except ModuleNotFoundError as e:
                assert "No module named" in str(e)
            except KeyError:
                pass
        elif item in TEST_PARAMS.keys() and version_supported(
            support_dict=allowlist[item]
        ):
            # Non-dataset API entries must execute cleanly.
            print(item + TEST_PARAMS[item])
            exec(item + TEST_PARAMS[item])
def test_statement_zk_proof() -> None:
    """End-to-end zero-knowledge proof of a Pedersen-committed bit, with the
    commitment and proof serialized through a syft VirtualMachine before
    verification.
    """
    vm = sy.VirtualMachine()
    client = vm.get_root_client()

    sy.load("zksk")

    # third party
    from zksk import DLRep
    from zksk import Secret
    from zksk import utils

    num = 2
    seed = 42
    num_sy = sy.lib.python.Int(num)
    seed_sy = sy.lib.python.Int(seed)
    # Setup: Peggy and Victor agree on two group generators.
    G, H = utils.make_generators(num=num, seed=seed)
    # Setup: generate a secret randomizer.
    r = Secret(utils.get_random_num(bits=128))
    # This is Peggy's secret bit.
    top_secret_bit = 1
    # A Pedersen commitment to the secret bit.
    C = top_secret_bit * G + r.value * H
    # Peggy's definition of the proof statement, and proof generation.
    # (The first or-clause corresponds to the secret value 0, and the second to the value 1. Because
    # the real value of the bit is 1, the clause that corresponds to zero is marked as simulated.)
    stmt = DLRep(C, r * H, simulated=True) | DLRep(C - G, r * H)
    zk_proof = stmt.prove()
    # send over the network and get back
    num_ptr = num_sy.send(client)
    seed_prt = seed_sy.send(client)
    c_ptr = C.send(client)
    zk_proof_ptr = zk_proof.send(client)
    num2 = num_ptr.get().upcast()
    seed2 = seed_prt.get().upcast()
    C2 = c_ptr.get()
    zk_proof2 = zk_proof_ptr.get()
    # Setup: get the agreed group generators.
    G, H = utils.make_generators(num=num2, seed=seed2)
    # Setup: define a randomizer with an unknown value.
    r = Secret()
    # Victor rebuilds the statement from the round-tripped values and
    # verifies the round-tripped proof against it.
    stmt = DLRep(C2, r * H) | DLRep(C2 - G, r * H)
    assert stmt.verify(zk_proof2)
def test_secret_serde() -> None:
    """A zksk Secret survives a send/get round trip."""
    vm = sy.VirtualMachine()
    client = vm.get_root_client()

    # third party
    import zksk as zk

    sy.load("zksk")

    # Round-trip a 128-bit random secret through the VM and compare.
    secret = zk.Secret(zk.utils.get_random_num(bits=128))
    assert secret == secret.send(client).get()
def test_pd_categoriesdtype(root_client: sy.VirtualMachineClient) -> None:
    """A pandas CategoricalDtype survives a send/get round trip.

    Both the category list (including its order) and the `ordered` flag
    must be preserved. (Removed a leftover debug `print` — tests should
    assert, not log.)
    """
    sy.load("pandas")

    # third party
    import pandas as pd

    categories = ["b", "a"]
    ordered = False
    t = pd.CategoricalDtype(categories=categories, ordered=ordered)
    t_ptr = t.send(root_client)
    t2 = t_ptr.get()
    assert t2.categories.to_list() == categories
    assert t2.ordered == ordered
def test_categorical_func(
    test_object: Any,
    inputs: dict,
    node: sy.VirtualMachine,
    client: sy.VirtualMachineClient,
) -> None:
    """Apply `inputs["func"]` to an object locally and via a pointer; the two
    results must agree."""
    sy.load("pandas")
    sy.load("numpy")

    # third party
    import numpy as np
    import pandas as pd

    local_obj = test_object
    remote_obj = test_object.send(client)

    attr_name = inputs["func"]
    call_args = inputs["args"]
    call_kwargs = inputs["kwargs"]

    local_attr = getattr(local_obj, attr_name)
    remote_attr = getattr(remote_obj, attr_name)

    if callable(local_attr):
        # Method: invoke on both sides. NOTE(review): assumes the fixtures
        # always provide `args` for callable attributes — confirm.
        if call_args is not None:
            expected = local_attr(*call_args, **call_kwargs)
            result_ptr = remote_attr(*call_args, **call_kwargs)
    else:
        # Property: the attribute access itself is the result.
        expected = local_attr
        result_ptr = remote_attr

    actual = result_ptr.get()

    if isinstance(expected, (pd.Categorical, pd.DataFrame, pd.Series)):
        assert expected.equals(actual)
    elif isinstance(expected, np.ndarray):
        assert (expected == actual).all()
    elif isinstance(expected, tuple):
        assert (expected[0] == actual[0]).all()
        assert (expected[1] == actual[1]).all()
    else:
        assert expected == actual
def test_remote_create(root_client: sy.VirtualMachineClient) -> None:
    """Rebuild a PIL image remotely from a tensor and compare to the local one."""
    # third party
    import PIL
    import numpy as np
    import torch

    sy.load("PIL")

    remote_torchvision = root_client.torchvision

    local_img = PIL.Image.open(LOGO_URL)
    # HWC pixel array -> CHW float tensor, the layout to_pil_image expects.
    img_tensor = torch.Tensor(np.array(local_img)).permute(2, 0, 1)
    tensor_ptr = img_tensor.send(root_client)

    rebuilt = remote_torchvision.transforms.functional.to_pil_image(tensor_ptr).get()

    # Identical images produce an empty (None) difference bounding box.
    assert PIL.ImageChops.difference(local_img, rebuilt).getbbox() is None
def test_remote_numpy_array(root_client: sy.VirtualMachineClient) -> None:
    """Arrays holding each supported dtype's extreme values survive send/get."""
    # third party
    import numpy as np

    # syft absolute
    from syft.lib.numpy.array import SUPPORTED_BOOL_TYPES
    from syft.lib.numpy.array import SUPPORTED_DTYPES
    from syft.lib.numpy.array import SUPPORTED_FLOAT_TYPES
    from syft.lib.numpy.array import SUPPORTED_INT_TYPES

    sy.load("numpy")

    samples: List[np.ndarray] = []
    for dtype in SUPPORTED_DTYPES:
        # Probe each dtype at its lower bound, a midpoint, and its upper bound.
        if dtype in SUPPORTED_BOOL_TYPES:
            lower, mid, upper = False, False, True
        elif dtype in SUPPORTED_INT_TYPES:
            info = np.iinfo(dtype)
            lower, upper = info.min, info.max
            mid = upper + lower  # type: ignore
            if lower == 0:
                # Unsigned: min + max is just max, so halve it for a midpoint.
                mid = round(mid / 2)  # type: ignore
        elif dtype in SUPPORTED_FLOAT_TYPES:
            info = np.finfo(dtype)
            lower, upper = info.min, info.max
            mid = upper + lower  # type: ignore
        samples.append(np.array([lower, mid, upper], dtype=dtype))

    for sample in samples:
        echoed = sample.send(root_client).get()
        assert all(sample == echoed)
        assert sample.dtype == echoed.dtype
def test_load_sympc() -> None:
    """sy.load("sympc") enables MPC tensor arithmetic across two VMs."""
    party_a = sy.VirtualMachine()
    party_a_client = party_a.get_root_client()
    party_b = sy.VirtualMachine()
    party_b_client = party_b.get_root_client()

    # third party
    from sympc.session import Session
    from sympc.session import SessionManager
    from sympc.tensor import MPCTensor

    sy.load("sympc")

    session = Session(parties=[party_a_client, party_b_client])
    SessionManager.setup_mpc(session)

    public = th.Tensor([-5, 0, 1, 2, 3])
    secret = th.Tensor([30])
    shared = MPCTensor(secret=secret, shape=(1,), session=session)

    # secret(30) + public, reconstructed in the clear.
    expected = th.Tensor([25.0, 30.0, 31.0, 32.0, 33.0])
    assert ((shared + public).reconstruct() == expected).all()
def test_remote_engine_simple(root_client: sy.VirtualMachineClient) -> None:
    """An opacus PrivacyEngine can be constructed remotely around a model."""
    sy.load("opacus")

    remote_opacus = root_client.opacus
    remote_torch = root_client.torch

    model_ptr = remote_torch.nn.Linear(1, 1)

    # Construct the engine remotely; only its pointer type is checked here.
    privacy_engine_ptr = remote_opacus.privacy_engine.PrivacyEngine(
        model_ptr,
        batch_size=16,
        sample_size=16,
        noise_multiplier=1.0,
        max_grad_norm=1.0,
    )
    assert privacy_engine_ptr.__name__ == "PrivacyEnginePointer"
def test_pandas_json_normalize(root_client: sy.VirtualMachineClient) -> None:
    """Remote pandas.json_normalize matches the local result.

    The input dict is sent as a syft Dict pointer and normalized remotely.
    """
    sy.load("pandas")

    # third party
    import pandas as pd

    data = {"A": [1, 2]}
    df = pd.json_normalize(data)

    # create dict pointer
    sy_data = sy.lib.python.Dict(data)
    data_ptr = sy_data.send(root_client)

    remote_pandas = root_client.pandas
    df_ptr = remote_pandas.json_normalize(data_ptr)
    res_df = df_ptr.get()

    # Serde converts the list to an np.array. To allow comparison and prevent
    # this test being coupled with numpy as a dependency we just convert back
    # to a list.
    # BUGFIX: the old chained assignment `res_df.iloc[0][0] = ...` writes to
    # an intermediate (possibly copied) Series, so the conversion could be
    # silently dropped; `iat` writes into the frame itself.
    res_df.iat[0, 0] = list(res_df.iat[0, 0])
    assert df.equals(res_df)
def test_nizk_serde() -> None:
    """A zksk non-interactive ZK proof survives a send/get round trip."""
    vm = sy.VirtualMachine()
    client = vm.get_root_client()

    # third party
    from zksk import DLRep
    from zksk import Secret
    from zksk import utils

    sy.load("zksk")

    # Two agreed-upon group generators and a secret randomizer.
    G, H = utils.make_generators(num=2, seed=42)
    randomizer = Secret(utils.get_random_num(bits=128))

    # Pedersen commitment to Peggy's secret bit (which is 1).
    secret_bit = 1
    commitment = secret_bit * G + randomizer.value * H

    # Or-proof over the two possible bit values; the 0-clause is simulated
    # because the real bit is 1.
    statement = DLRep(commitment, randomizer * H, simulated=True) | DLRep(
        commitment - G, randomizer * H
    )
    proof = statement.prove()  # zksk.base.NIZK

    # Round-trip the proof through the VM and compare for equality.
    assert proof == proof.send(client).get()
def test_glm(root_client: sy.VirtualMachineClient) -> None:
    """Fit GLMs on CDC mortality data locally and remotely for every
    family/link combination and check that both produce the same summary CSV.

    Downloads and caches the CDC mortality/NHIS dataset under
    `get_root_data_path()/CDC` on first run.
    """
    # stdlib
    import os
    import re
    import shutil
    import urllib.request

    # third party
    import pandas as pd
    import statsmodels

    # All GLM families exposed by statsmodels; each is paired below with
    # every link function it supports.
    FAMILY = [
        statsmodels.genmod.families.Binomial,
        statsmodels.genmod.families.Gamma,
        statsmodels.genmod.families.Gaussian,
        statsmodels.genmod.families.InverseGaussian,
        statsmodels.genmod.families.NegativeBinomial,
        statsmodels.genmod.families.Poisson,
        statsmodels.genmod.families.Tweedie,
    ]
    # Strips the run-dependent Time/Date rows from the summary CSV so the
    # local and remote outputs can be compared verbatim.
    UNNECESSARY_STR = r"Time(.*)(?=Pearson)|Date(.*)(?=Deviance)"
    sy.load("pandas")
    sy.load("statsmodels")
    # download data (cached across runs; unzip only if needed)
    csv_file = "mort_match_nhis_all_years.csv"
    zip_file = f"{csv_file}.zip"
    url = f"https://datahub.io/madhava/mort_match_nhis_all_years/r/{zip_file}"
    data_path = f"{get_root_data_path()}/CDC"
    zip_path = f"{data_path}/{zip_file}"
    csv_path = f"{data_path}/{csv_file.upper()}"
    if not os.path.exists(zip_path):
        os.makedirs(data_path, exist_ok=True)
        urllib.request.urlretrieve(url, zip_path)
    if not os.path.exists(csv_path):
        shutil.unpack_archive(zip_path, data_path)
    assert os.path.exists(csv_path)
    # load data (head(100) keeps the fits fast)
    df = pd.read_csv(csv_path)
    df = df.head(100)
    df_ptr = df.send(root_client)
    # Each filter below is applied to the local frame AND the pointer so the
    # two sides keep processing identical data.
    # Drop any missing values in the dataset (those under 18)
    df = df.dropna(subset=["MORTSTAT"])
    df_ptr = df_ptr.dropna(subset=["MORTSTAT"])
    # Keep only the eligible portion
    df = df[df["ELIGSTAT"] == 1]
    df_ptr = df_ptr[df_ptr["ELIGSTAT"] == 1]
    # Ignore people > 80
    df = df[df["AGE_P"] <= 80]
    df_ptr = df_ptr[df_ptr["AGE_P"] <= 80]
    # A person is alive if MORTSTAT==0
    df["is_alive"] = df["MORTSTAT"] == 0
    df_ptr["is_alive"] = df_ptr["MORTSTAT"] == 0
    # Assign a helpful column for sex (0==male, 1==female)
    df["sex"] = "male"
    df_ptr["sex"] = "male"
    # df.loc[df["SEX"] == 2, "sex"] = "female"
    # explanatory variable
    x = df["AGE_P"]
    x_ptr = df_ptr["AGE_P"]
    # add constant (intercept term), locally and on the remote side
    _x = statsmodels.api.add_constant(x)
    _x_ptr = root_client.statsmodels.api.add_constant(x_ptr)
    # dependent variable
    _y = df["is_alive"]
    _y_ptr = df_ptr["is_alive"]
    # test all possible combinations of families and links
    for family in FAMILY:
        for link in family.links:
            model = statsmodels.genmod.generalized_linear_model.GLM(
                _y, _x, family=family(link=link()))
            result = model.fit()
            summary = result.summary().as_csv()
            remote_model = root_client.statsmodels.genmod.generalized_linear_model.GLM(
                _y_ptr, _x_ptr, family=family(link=link()))
            remote_result = remote_model.fit()
            # `get` corresponds to `summary().as_csv()`
            remote_summary = remote_result.get()
            # remove unnecessary strings such as processing time and date
            summary = re.sub(UNNECESSARY_STR, "", summary)
            remote_summary = re.sub(UNNECESSARY_STR, "", remote_summary)
            assert summary == remote_summary
# third party import pytest import torch as th # syft absolute import syft as sy sympc = pytest.importorskip("sympc") Session = sympc.session.Session SessionManager = sympc.session.SessionManager MPCTensor = sympc.tensor.MPCTensor sy.load("sympc") @pytest.mark.asyncio @pytest.mark.vendor(lib="sympc") def test_load_sympc() -> None: alice = sy.VirtualMachine() alice_client = alice.get_root_client() bob = sy.VirtualMachine() bob_client = bob.get_root_client() session = Session(parties=[alice_client, bob_client]) SessionManager.setup_mpc(session) y = th.Tensor([-5, 0, 1, 2, 3]) x_secret = th.Tensor([30]) x = MPCTensor(secret=x_secret, shape=(1,), session=session) assert ((x + y).reconstruct() == th.Tensor([25.0, 30.0, 31.0, 32.0, 33.0])).all()
# stdlib import re # third party import pytest # syft absolute import syft as sy pd = pytest.importorskip("pandas") statsmodels = pytest.importorskip("statsmodels") sy.load("pandas") sy.load("sklearn") @pytest.mark.vendor(lib="statsmodels") def test_glm(root_client: sy.VirtualMachineClient) -> None: FAMILY = [ statsmodels.genmod.families.Binomial, statsmodels.genmod.families.Gamma, statsmodels.genmod.families.Gaussian, statsmodels.genmod.families.InverseGaussian, statsmodels.genmod.families.NegativeBinomial, statsmodels.genmod.families.Poisson, statsmodels.genmod.families.Tweedie, ] UNNECESSARY_STR = r"Time(.*)(?=Pearson)|Date(.*)(?=Deviance)" sy.load("pandas")
def test_psi(loadlib_before_client: bool, reveal_intersection: bool,
             node: sy.VirtualMachine) -> None:
    """Run the full openmined_psi protocol between a "server" client and a
    "client" client on the same VM: exchange reveal flag, false-positive
    rate, item count, setup message, request and response through the VM
    store, then check the computed intersection.

    The server holds even-numbered items, the client holds all of 0..999,
    so exactly the even indices intersect.
    """
    # third party
    import openmined_psi as psi

    # it should work when call load before or after create clients
    if loadlib_before_client:
        sy.load("openmined_psi")
        server_vm = node.get_root_client()
        client_vm = node.get_root_client()
    else:
        server_vm = node.get_root_client()
        client_vm = node.get_root_client()
        sy.load("openmined_psi")
    # server send reveal_intersection
    s_reveal_intersection = reveal_intersection
    s_sy_reveal_intersection = sy.lib.python.Bool(s_reveal_intersection)
    s_sy_reveal_intersection.send(
        server_vm,
        pointable=True,
        tags=["reveal_intersection"],
        description="reveal intersection value",
    )
    assert (server_vm.store["reveal_intersection"].description ==
            "reveal intersection value")
    # client get reval_intersection
    c_reveal_intersection = server_vm.store["reveal_intersection"].get()
    assert c_reveal_intersection == s_reveal_intersection
    # server send fpr (false-positive rate for the bloom-filter setup)
    s_fpr = 1e-6
    s_sy_fpr = sy.lib.python.Float(s_fpr)
    s_sy_fpr.send(server_vm, pointable=True, tags=["fpr"], description="false positive rate")
    # client get fpr
    c_fpr = server_vm.store["fpr"].get()
    assert c_fpr == approx(s_fpr)
    # client send client_items_len
    psi_client = psi.client.CreateWithNewKey(c_reveal_intersection)
    c_items = ["Element " + str(i) for i in range(1000)]
    c_sy_client_items_len = sy.lib.python.Int(len(c_items))
    c_sy_client_items_len.send(
        client_vm,
        pointable=True,
        tags=["client_items_len"],
        description="client items length",
    )
    # server get client_items_len
    s_sy_client_items_len = client_vm.store["client_items_len"].get(
        delete_obj=False)
    assert s_sy_client_items_len == c_sy_client_items_len
    # server send setup message (server items are the even numbers only)
    s_items = ["Element " + str(2 * i) for i in range(1000)]
    psi_server = psi.server.CreateWithNewKey(s_reveal_intersection)
    s_setup = psi_server.CreateSetupMessage(s_fpr, s_sy_client_items_len, s_items)
    s_setup.send(
        server_vm,
        pointable=True,
        tags=["setup"],
        description="psi.server Setup Message",
    )
    assert server_vm.store["setup"].description == "psi.server Setup Message"
    # client get setup message
    c_setup = server_vm.store["setup"].get()
    assert c_setup == s_setup
    # client send request
    c_request = psi_client.CreateRequest(c_items)
    c_request.send(client_vm, tags=["request"], pointable=True, description="client request")
    # server get request
    s_request = client_vm.store["request"].get()
    assert s_request == c_request
    # server send response
    s_response = psi_server.ProcessRequest(s_request)
    s_response.send(server_vm, pointable=True, tags=["response"], description="psi.server response")
    # client get response
    c_response = server_vm.store["response"].get()
    assert c_response == s_response
    # client get result: either the exact intersection or just its size,
    # depending on the reveal_intersection flag.
    if c_reveal_intersection:
        intersection = psi_client.GetIntersection(c_setup, c_response)
        iset = set(intersection)
        for idx in range(len(c_items)):
            if idx % 2 == 0:
                assert idx in iset
            else:
                assert idx not in iset
    else:
        # Size-only mode may overestimate slightly (bloom-filter FPs).
        intersection = psi_client.GetIntersectionSize(c_setup, c_response)
        assert intersection >= (len(c_items) / 2.0)
        assert intersection <= (1.1 * len(c_items) / 2.0)
def test_xgb_base_module(root_client: sy.VirtualMachineClient) -> None:
    """Train/predict with xgboost locally and remotely; predictions must match.

    Covers the native Booster API plus the sklearn-style XGBClassifier,
    XGBRFClassifier, XGBRegressor and XGBRFRegressor wrappers.
    """
    sy.load("xgboost")
    sy.load("numpy")

    # third party
    import numpy as np
    import xgboost as xgb

    xgb_remote = root_client.xgboost

    # import xgboost as xgb

    # Tiny two-class toy problem.
    X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
    y = np.array([0, 0, 1, 1])

    param = {"eta": 0.3, "max_depth": 3, "num_class": 3}
    steps = 20

    # Native Booster API: local fit/predict ...
    D_train = xgb.DMatrix(X, label=y)
    model = xgb.train(param, D_train, steps)
    preds = model.predict(D_train)

    # ... and the same remotely.
    D_train = xgb_remote.DMatrix(X, label=y)
    model = xgb_remote.train(param, D_train, steps)
    preds_remote = model.predict(D_train).get()

    # XGBClassifier: remote then local with identical hyperparameters.
    classifier = xgb_remote.XGBClassifier(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3, use_label_encoder=False
    )
    classifier.fit(X, y)
    y_pred_classifier_remote = classifier.predict(X).get()

    classifier = xgb.XGBClassifier(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3, use_label_encoder=False
    )
    classifier.fit(X, y)
    y_pred_classifier = classifier.predict(X)

    # XGBRFClassifier: remote then local.
    classifier = xgb_remote.XGBRFClassifier(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3, use_label_encoder=False
    )
    classifier.fit(X, y)
    y_pred_classifier_rf_remote = classifier.predict(X).get()

    classifier = xgb.XGBRFClassifier(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3, use_label_encoder=False
    )
    classifier.fit(X, y)
    y_pred_classifier_rf = classifier.predict(X)

    # XGBRegressor: local then remote.
    regressor = xgb.XGBRegressor(n_estimators=100, reg_lambda=1, gamma=0, max_depth=3)
    regressor.fit(X, y)
    y_pred_regressor = regressor.predict(X)

    regressor = xgb_remote.XGBRegressor(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3
    )
    regressor.fit(X, y)
    y_pred_regressor_remote = regressor.predict(X).get()

    # XGBRFRegressor: local then remote.
    regressor = xgb.XGBRFRegressor(n_estimators=100, reg_lambda=1, gamma=0, max_depth=3)
    regressor.fit(X, y)
    y_pred_regressor_rf = regressor.predict(X)

    regressor = xgb_remote.XGBRFRegressor(
        n_estimators=100, reg_lambda=1, gamma=0, max_depth=3
    )
    regressor.fit(X, y)
    y_pred_regressor_rf_remote = regressor.predict(X).get()

    # Every local/remote pair must produce identical predictions.
    assert np.array_equal(y_pred_classifier_rf, y_pred_classifier_rf_remote)
    assert np.array_equal(y_pred_regressor_rf, y_pred_regressor_rf_remote)
    assert np.array_equal(y_pred_regressor, y_pred_regressor_remote)
    assert np.array_equal(y_pred_classifier, y_pred_classifier_remote)
    assert np.array_equal(preds_remote, preds)
from typing import Union # third party from packaging import version import pytest import torch import torchvision as tv # syft absolute import syft as sy from syft.lib.torchvision.allowlist import allowlist TORCHVISION_VERSION = version.parse(tv.__version__) PIL = pytest.importorskip("PIL") Image = PIL.Image.Image sy.load("PIL") @pytest.fixture(scope="function") def pil_img() -> Any: img_file = "../../../../docs/img/logo.png" if path.isfile(img_file): return PIL.Image.open(img_file).convert("RGB") else: cwd = os.getcwd() img_file = cwd + "/docs/img/logo.png" return PIL.Image.open(img_file).convert("RGB") @pytest.fixture(scope="function") def tens(pil_img: Any) -> torch.Tensor:
# third party import pytest import torch # syft absolute import syft as sy from syft.grid.duet.ui import LOGO_URL PIL = pytest.importorskip("PIL") np = pytest.importorskip("numpy") sy.load("numpy", "PIL") @pytest.mark.vendor(lib="PIL") def test_send_and_get(root_client: sy.VirtualMachineClient) -> None: im = PIL.Image.open(LOGO_URL) remote_im = im.send(root_client) received_im = remote_im.get() assert PIL.ImageChops.difference(im, received_im).getbbox() is None @pytest.mark.vendor(lib="PIL") def test_remote_create(root_client: sy.VirtualMachineClient) -> None: remote_torchvision = root_client.torchvision im = PIL.Image.open(LOGO_URL) im_array = np.array(im) im_tensor = torch.Tensor(im_array).permute(2, 0, 1) remote_tensor = im_tensor.send(root_client)
# stdlib from collections import OrderedDict from typing import Any from typing import Dict from typing import List # third party import pytest # syft absolute import syft as sy pd = pytest.importorskip("pandas") np = pytest.importorskip("numpy") sy.load("pandas", "numpy") @pytest.mark.vendor(lib="pandas") def test_pandas(root_client: sy.VirtualMachineClient) -> None: data = { "col_1": { 0: 3, 1: 2, 2: 1, 3: 0 }, "col_2": { 0: "a", 1: "b", 2: "c", 3: "d"
# Grid-side managers and crypto/key imports for the Domain worker.
from ..manager.environment_manager import EnvironmentManager
from ..manager.setup_manager import SetupManager
from ..manager.association_request_manager import AssociationRequestManager
from nacl.signing import SigningKey
from nacl.signing import VerifyKey
from time import sleep
import jwt
from flask import current_app as app
from threading import Thread
import syft as sy
import tenseal as ts

# Enable TenSEAL serde support for this worker process.
sy.load("tenseal")


class GridWorker(Domain):
    """A Grid worker node; extends the syft Domain.

    NOTE(review): truncated in this chunk — only the `__init__` signature is
    visible; `Domain`, `Optional`, `Location` and `SpecificLocation` are
    presumably imported at the top of the full file; confirm.
    """

    def __init__(
        self,
        name: Optional[str],
        network: Optional[Location] = None,
        domain: SpecificLocation = SpecificLocation(),
        device: Optional[Location] = None,
        vm: Optional[Location] = None,
        signing_key: Optional[SigningKey] = None,
        verify_key: Optional[VerifyKey] = None,
        root_key: Optional[VerifyKey] = None,
        db_path: Optional[str] = None,
    ):
        # NOTE(review): truncated in this chunk — constructor body not visible.