def setUp(self):
    """Build serialized model fixtures for the serde tests.

    Prepares three (param, meta) protobuf pairs on ``self``:
      * ``keras_model_param`` / ``keras_model_meta`` — a dummy keras
        Sequential model saved to disk and zipped into bytes.
      * ``pytorch_model_param`` / ``pytorch_model_meta`` — a dummy
        pytorch "version 0" model exported via ``build_pytorch``.
      * ``pl_model_param`` / ``pl_model_meta`` — a dummy pytorch-lightning
        module checkpointed through ``pl.Trainer.save_checkpoint``.
    """
    # compose a dummy keras Sequential model
    model = tf.keras.Sequential()
    model.add(tf.keras.Input(shape=(16, )))
    model.add(tf.keras.layers.Dense(8))
    model.compile(loss="categorical_crossentropy")
    # the SavedModel directory must still exist when it is zipped,
    # so both calls happen inside the TemporaryDirectory context
    with tempfile.TemporaryDirectory() as tmp_path:
        model.save(tmp_path)
        keras_model_bytes = _zip_dir_as_bytes(tmp_path)
    self.keras_model_param = NNModelParam()
    self.keras_model_param.saved_model_bytes = keras_model_bytes
    self.keras_model_meta = NNModelMeta()
    nn_param = HomoNNParam(config_type="keras",
                           early_stop="diff",
                           metrics="Accuracy",
                           optimizer="SGD",
                           loss="categorical_crossentropy")
    # check() validates the parameter object before serialization
    nn_param.check()
    self.keras_model_meta.params.CopyFrom(nn_param.generate_pb())

    # a dummy pytorch version 0 model
    nn_param = HomoNNParam(config_type="pytorch",
                           early_stop="diff",
                           metrics="Accuracy",
                           optimizer={
                               "optimizer": "Adam",
                               "lr": 0.05
                           },
                           loss="CrossEntropyLoss")
    nn_param.check()
    # layer config consumed by build_pytorch; "config" is presumably
    # [in_features, out_features] for Linear — TODO confirm against builder
    nn_define = [{
        "layer": "Linear",
        "name": "line1",
        "type": "normal",
        "config": [18, 5]
    }, {
        "layer": "Relu",
        "type": "activate",
        "name": "relu"
    }, {
        "layer": "Linear",
        "name": "line2",
        "type": "normal",
        "config": [5, 4]
    }]
    self.pytorch_model_param = NNModelParam()
    pytorch_nn_model = build_pytorch(nn_define, nn_param.optimizer,
                                     nn_param.loss, nn_param.metrics)
    self.pytorch_model_param.saved_model_bytes = pytorch_nn_model.export_model(
    )
    self.pytorch_model_meta = NNModelMeta()
    self.pytorch_model_meta.params.CopyFrom(nn_param.generate_pb())

    # a dummy pytorch lightning model
    nn_param = HomoNNParam(config_type="pytorch",
                           early_stop="diff",
                           metrics="Accuracy",
                           optimizer={
                               "optimizer": "Adam",
                               "lr": 0.05
                           },
                           loss="NLLLoss")
    nn_param.check()
    # small LeNet-style conv net config for FedLightModule
    nn_define = [{
        "layer": "Conv2d",
        "in_channels": 1,
        "out_channels": 10,
        "kernel_size": [5, 5]
    }, {
        "layer": "MaxPool2d",
        "kernel_size": 2
    }, {
        "layer": "ReLU"
    }, {
        "layer": "Conv2d",
        "in_channels": 10,
        "out_channels": 20,
        "kernel_size": [5, 5]
    }, {
        "layer": "Dropout2d"
    }, {
        "layer": "MaxPool2d",
        "kernel_size": 2
    }, {
        "layer": "ReLU"
    }, {
        "layer": "Flatten"
    }, {
        "layer": "Linear",
        "in_features": 320,
        "out_features": 50
    }, {
        "layer": "ReLU"
    }, {
        "layer": "Linear",
        "in_features": 50,
        "out_features": 10
    }, {
        "layer": "LogSoftmax"
    }]
    pl_module = FedLightModule(
        None,
        layers_config=nn_define,
        optimizer_config=nn_param.optimizer,
        loss_config={"loss": nn_param.loss},
    )
    pl_trainer = pl.Trainer()
    # attach the module directly; save_checkpoint serializes trainer.model
    pl_trainer.model = pl_module
    # api_version=2 marks the lightning checkpoint format — TODO confirm
    self.pl_model_param = NNModelParam(api_version=2)
    with tempfile.TemporaryDirectory() as d:
        filepath = os.path.join(d, "model.ckpt")
        pl_trainer.save_checkpoint(filepath)
        with open(filepath, "rb") as f:
            self.pl_model_param.saved_model_bytes = f.read()
    self.pl_model_meta = NNModelMeta()
    self.pl_model_meta.params.CopyFrom(nn_param.generate_pb())
def __init__(self, trans_var):
    """Set up default parameters, a zeroed aggregation counter, and
    the transfer variables used to communicate with remote parties.

    :param trans_var: transfer-variable set for federation messaging
    """
    super().__init__()
    # no aggregation round has run yet
    self.aggregate_iteration_num = 0
    self.transfer_variable = trans_var
    self.model_param = HomoNNParam()
def __init__(self):
    """Initialize with default HomoNN parameters; the party role is
    left unset (``None``) until assigned later — presumably during
    component initialization, TODO confirm against caller."""
    super().__init__()
    self.role = None
    self.model_param = HomoNNParam()
def __init__(self, trans_var):
    """Set up default parameters, the federation transfer variables,
    and the serialization API version (0 by default).

    :param trans_var: transfer-variable set for federation messaging
    """
    super().__init__()
    # version 0 is the legacy model-serialization format
    self._api_version = 0
    self.transfer_variable = trans_var
    self.model_param = HomoNNParam()