def multi_input_model_generator():
  """Build a two-input functional model whose shared trunk is cut by SplitLayers."""
  util.set_tf_random_seed()
  image_in = keras.Input(shape=(28, 28, 1,), name='input0')
  feature_in = keras.Input(shape=(26, 26, 32,), name='input1')

  conv_out = keras.layers.Conv2D(32, 3, strides=(1, 1), padding='valid',
                                 name='conv')(image_in)
  # Two cut points on the same tensor: one feeds the 1x1 conv branch,
  # the other bypasses it straight into the concat.
  main_branch = tnt_layers.SplitLayer(name="split_layer0")(conv_out)
  skip_branch = tnt_layers.SplitLayer(name="split_layer1")(conv_out)

  h = keras.layers.Conv2D(32, 1, strides=(1, 1), padding='valid',
                          name='conv2')(main_branch)
  h = tnt_layers.SplitLayer(name="split_layer2")(h)
  h = keras.layers.Concatenate(name='concat')([feature_in, h, skip_branch])
  h = keras.layers.Flatten(name='flatten')(h)
  probs = keras.layers.Dense(10, activation='softmax',
                             name='dense_softmax')(h)
  return keras.Model(inputs=[image_in, feature_in], outputs=probs)
def fc_partitioned_core_model(rank):
  """Return the FC core model belonging to partition `rank`.

  The reference FC model is cut at `split_layer1` into:
    * partition 0 ("p_0"): flatten -> dense_relu
    * partition 1 ("p_1"): the dense_softmax classifier head

  Args:
    rank: partition rank; must be `p_0_rank` or `p_1_rank`.

  Returns:
    The `keras.Model` for the requested partition.

  Raises:
    ValueError: if `rank` identifies no known partition (previously this
      returned `None` silently, surfacing later as a confusing error).
  """
  # --- core model on partition 0
  util.set_tf_random_seed()
  reference_input = keras.Input(shape=(28, 28, 1,), name='reference_input')
  reference_x = keras.layers.Flatten(name='flatten')(reference_input)
  reference_x = keras.layers.Dense(10, activation='relu',
                                   name='dense_relu')(reference_x)
  reference_x = keras.layers.Layer(name="split_layer1_input")(reference_x)
  p0_model = keras.Model(inputs=reference_input, outputs=reference_x,
                         name="p_0")

  # --- core model on partition 1
  input0 = keras.Input(shape=(10,), name='split_layer1_output')
  output = keras.layers.Dense(10, activation='softmax',
                              name='dense_softmax')(input0)
  p1_model = keras.Model(inputs=input0, outputs=output, name="p_1")

  if rank == p_0_rank:
    return p0_model
  if rank == p_1_rank:
    return p1_model
  raise ValueError(f"[fc_partitioned_core_model] Unknown rank {rank}")
def multi_output_partitioned_core_model(rank):
  """Return the multi-output core model belonging to partition `rank`.

  The reference multi-output model is cut at the `ten_classes` and
  `two_classes` split points into:
    * partition 0: flatten -> dense_relu, exposed on both cut edges
    * partition 1: add -> two classifier heads (10-way and 2-way)

  Args:
    rank: partition rank; must be `p_0_rank` or `p_1_rank`.

  Returns:
    The `keras.Model` for the requested partition.

  Raises:
    ValueError: if `rank` identifies no known partition (previously this
      returned `None` silently).
  """
  util.set_tf_random_seed()
  # --- core model on partition 0
  input0 = keras.Input(shape=(28, 28, 1,), name='input')
  x = keras.layers.Flatten(name='flatten')(input0)
  x = keras.layers.Dense(10, activation='relu', name='dense_relu')(x)
  y = keras.layers.Layer(name="ten_classes_input")(x)
  z = keras.layers.Layer(name="two_classes_input")(x)
  core_model0 = keras.Model(inputs=input0, outputs=[y, z])

  # --- core model on partition 1
  input1 = keras.Input(shape=(10,), name='ten_classes_output')
  input2 = keras.Input(shape=(10,), name='two_classes_output')
  x = keras.layers.Add(name='add')([input1, input2])
  output0 = keras.layers.Dense(10, activation='relu',
                               name='dense_softmax10')(x)
  output1 = keras.layers.Dense(2, activation='softmax',
                               name='dense_softmax2')(x)
  core_model1 = keras.Model(inputs=[input1, input2],
                            outputs=[output0, output1])

  if rank == p_0_rank:
    return core_model0
  if rank == p_1_rank:
    return core_model1
  raise ValueError(f"[multi_output_partitioned_core_model] Unknown rank {rank}")
def alexnet_model_generator():
  """Build the AlexNet-style reference model with two SplitLayer cut points."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  h = keras.layers.Conv2D(32, 3, strides=(1, 1), name='conv')(net_in)
  h = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                                name='maxpool')(h)
  h = tnt_layers.SplitLayer(name="split_layer0")(h)
  h = keras.layers.Conv2D(32, 3, strides=(1, 1), name='conv_two')(h)
  h = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                                name='maxpool_two')(h)
  h = tnt_layers.SplitLayer(name="split_layer1")(h)
  h = keras.layers.Conv2D(64, 3, strides=(1, 1), name='conv_three')(h)
  h = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                                name='maxpool_three')(h)
  h = keras.layers.Flatten(name='flatten')(h)
  h = keras.layers.Dense(512, activation='relu', name='dense_relu')(h)
  net_out = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(h)
  return keras.Model(inputs=net_in, outputs=net_out)
def get_partitioned_core_model():
  """Return this rank's core model for the two-partition FC reference network.

  NOTE(review): dispatches on the module-level `rank` and uses the globals
  `fc_units`, `num_mnist_classes`, `p_0_rank`, `p_1_rank` — confirm these are
  defined at module scope.  For any other rank the function falls through and
  returns `None` (preserved behavior).
  """
  # Reset the seed, so initial weights are the same as for the reference model.
  util.set_tf_random_seed()

  # --- core model on partition 0: the dense output is exposed twice,
  # once directly and once through an identity layer.
  p0_in = keras.Input(shape=(28, 28, 1,))  # may be more than one
  p0_flat = layers.Flatten()(p0_in)
  p0_out_0 = layers.Dense(fc_units, activation='relu',
                          name='dense_relu' + '_0')(p0_flat)
  p0_out_1 = tnt_layers.IdentityLayer(name='dense_relu' + '_1')(p0_out_0)
  p0_core = keras.Model(inputs=p0_in,
                        outputs=[p0_out_0, p0_out_1],
                        name="core_layers_p_0")

  # --- core model on partition 1: add both incoming edges, then classify.
  in0_shape = p0_core.outputs[0].shape[1:]
  in1_shape = p0_core.outputs[1].shape[1:]
  p1_in_0 = keras.Input(shape=in0_shape)  # TODO: Maybe add dtypes?
  p1_in_1 = keras.Input(shape=in1_shape)
  p1_sum = p1_in_0 + p1_in_1
  p1_out = layers.Dense(num_mnist_classes, activation='softmax',
                        name='dense_softmax')(p1_sum)
  p1_core = keras.Model(inputs=[p1_in_0, p1_in_1],
                        outputs=[p1_out],
                        name="core_layers_p_1")

  if rank == p_0_rank:
    return p0_core
  if rank == p_1_rank:
    return p1_core
def skip_connection_model_generator():
  """Build a CNN with a skip connection, every cross-partition edge cut by a SplitLayer."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  conv_out = keras.layers.Conv2D(32, 3, strides=(1, 1), padding='valid',
                                 name='conv')(net_in)
  # One cut feeds the conv branch, the other carries the skip connection.
  main_branch = tnt_layers.SplitLayer(name="split_layer0")(conv_out)
  skip_branch = tnt_layers.SplitLayer(name="split_layer1")(conv_out)

  h = keras.layers.Conv2D(32, 1, strides=(1, 1), padding='valid',
                          activation='relu', name='conv_relu')(main_branch)
  h = tnt_layers.SplitLayer(name="split_layer2")(h)
  h = keras.layers.Concatenate(name='concat')([h, skip_branch])
  h = keras.layers.Flatten(name='flatten')(h)
  net_out = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(h)
  return keras.Model(inputs=net_in, outputs=net_out)
def multi_input_partitioned_core_model(rank):
  """Return the multi-input core model belonging to partition `rank`.

  The two-input reference model is cut at `split_layer0..2` into:
    * partition 0: conv on `input0`, exposed on two cut edges
    * partition 1: the 1x1 conv branch
    * partition 2: concat of `input1` and both cut edges -> classifier

  Args:
    rank: partition rank; must be `p_0_rank`, `p_1_rank` or `p_2_rank`.

  Returns:
    The `keras.Model` for the requested partition.

  Raises:
    ValueError: if `rank` identifies no known partition (previously this
      returned `None` silently).
  """
  util.set_tf_random_seed()
  # --- core model on partition 0
  input0 = keras.Input(shape=(28, 28, 1,), name='input0')
  x = keras.layers.Conv2D(32, 3, strides=(1, 1), padding='valid',
                          name='conv')(input0)
  y = keras.layers.Layer(name="split_layer0_input")(x)
  z = keras.layers.Layer(name="split_layer1_input")(x)
  core_model0 = keras.Model(inputs=input0, outputs=[y, z])

  # --- core model on partition 1
  input2 = keras.Input(shape=(26, 26, 32,), name='split_layer0_output')
  x = keras.layers.Conv2D(32, 1, strides=(1, 1), padding='valid',
                          name='conv2')(input2)
  x = keras.layers.Layer(name="split_layer2_input")(x)
  core_model1 = keras.Model(inputs=input2, outputs=x)

  # --- core model on partition 2
  input1 = keras.Input(shape=(26, 26, 32,), name='input1')
  input3 = keras.Input(shape=(26, 26, 32,), name='split_layer1_output')
  input4 = keras.Input(shape=(26, 26, 32,), name='split_layer2_output')
  x = keras.layers.Concatenate(name='concat')([input1, input4, input3])
  x = keras.layers.Flatten(name='flatten')(x)
  outputs = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(x)
  core_model2 = keras.Model(inputs=[input1, input3, input4], outputs=outputs)

  if rank == p_0_rank:
    return core_model0
  if rank == p_1_rank:
    return core_model1
  if rank == p_2_rank:
    return core_model2
  raise ValueError(f"[multi_input_partitioned_core_model] Unknown rank {rank}")
def simple_model_generator():
  """Build a minimal FC model split into two partitions by a single SplitLayer."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  hidden = keras.layers.Flatten()(net_in)
  hidden = keras.layers.Dense(2, activation='relu')(hidden)
  hidden = tnt_layers.SplitLayer(name="split_layer1")(hidden)
  net_out = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(hidden)
  return keras.Model(inputs=net_in, outputs=net_out)
def get_reference_model():
  """Build the unpartitioned reference model (flatten -> dense_relu -> softmax)."""
  util.set_tf_random_seed()
  ref_in = keras.Input(shape=(28, 28, 1,), name='reference_input')
  hidden = layers.Flatten()(ref_in)
  hidden = layers.Dense(fc_units, activation='relu', name='dense_relu')(hidden)
  # The doubled tensor (`hidden + hidden`) mirrors the partitioned variant,
  # which adds the dense output to an identity copy of itself.
  ref_out = layers.Dense(num_mnist_classes, activation='softmax',
                         name='dense_softmax')(hidden + hidden)
  return keras.Model(inputs=ref_in, outputs=ref_out, name="reference_model")
def compile_model(self, optimizer):
  """Compile `self.model` with the given optimizer, `self.loss` and `self.metric`.

  Args:
    optimizer: optimizer instance (or identifier) forwarded to `model.compile`.
  """
  util.set_tf_random_seed()
  kwargs = {}
  # `experimental_run_tf_function=False` is required for `keras` models, but
  # only on TF 2.0.x / 2.1.x.  Match the full "major.minor." prefix: the old
  # check `startswith('2.1')` also (wrongly) matched TF 2.10, 2.11, ...
  if tf.__version__.startswith(('2.0.', '2.1.')):
    kwargs['experimental_run_tf_function'] = False
  self.model.compile(optimizer=optimizer,
                     loss=self.loss,
                     metrics=[self.metric],
                     **kwargs)
def simple_model_generator():
  """Build a single-dense-layer softmax classifier on flattened 28x28x1 inputs."""
  util.set_tf_random_seed()
  pixels = keras.Input(shape=(28, 28, 1,), name='input')
  flat = keras.layers.Flatten(name='flatten')(pixels)
  probs = keras.layers.Dense(10, activation='softmax',
                             name='dense_softmax')(flat)
  return keras.Model(inputs=pixels, outputs=probs)
def alexnet_partitioned_core_model(rank):
  """Return the AlexNet core model belonging to partition `rank`.

  The AlexNet-style reference model is cut at `split_layer0` and
  `split_layer1` into:
    * partition 0: conv -> maxpool
    * partition 1: conv_two -> maxpool_two
    * partition 2: conv_three -> maxpool_three -> flatten -> dense heads

  Args:
    rank: partition rank; must be `p_0_rank`, `p_1_rank` or `p_2_rank`.

  Returns:
    The `keras.Model` for the requested partition.

  Raises:
    ValueError: if `rank` identifies no known partition (previously this
      returned `None` silently).
  """
  # --- core model on partition 0
  util.set_tf_random_seed()
  input0 = keras.Input(shape=(28, 28, 1,), name='input')
  x = keras.layers.Conv2D(32, 3, strides=(1, 1), name='conv')(input0)
  x = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                                name='maxpool')(x)
  x = keras.layers.Layer(name="split_layer0_input")(x)
  core_model0 = keras.Model(inputs=input0, outputs=x)

  # --- core model on partition 1 (input shape = output of `maxpool`)
  input1 = keras.Input(shape=(24, 24, 32,), name='split_layer0_output')
  x = keras.layers.Conv2D(32, 3, strides=(1, 1), name='conv_two')(input1)
  x = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                                name='maxpool_two')(x)
  x = keras.layers.Layer(name="split_layer1_input")(x)
  core_model1 = keras.Model(inputs=input1, outputs=x)

  # --- core model on partition 2 (input shape = output of `maxpool_two`)
  input2 = keras.Input(shape=(20, 20, 32,), name='split_layer1_output')
  x = keras.layers.Conv2D(64, 3, strides=(1, 1), name='conv_three')(input2)
  x = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                                name='maxpool_three')(x)
  x = keras.layers.Flatten(name='flatten')(x)
  x = keras.layers.Dense(512, activation='relu', name='dense_relu')(x)
  outputs = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(x)
  core_model2 = keras.Model(inputs=input2, outputs=outputs)

  if rank == p_0_rank:
    return core_model0
  if rank == p_1_rank:
    return core_model1
  if rank == p_2_rank:
    return core_model2
  raise ValueError(f"[alexnet_partitioned_core_model] Unknown rank {rank}")
def fc_model_generator():
  """Build a two-hidden-layer fully-connected MNIST classifier and log it."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  h = layers.Flatten()(net_in)
  h = layers.Dense(80, activation='relu', name='FC1')(h)
  h = layers.Dense(80, activation='relu', name='FC2')(h)
  net_out = layers.Dense(10, activation='softmax', name='softmax')(h)
  model = keras.Model(inputs=net_in, outputs=net_out)
  logging.getLogger().info("Initialized FC model")
  return model
def load_microbatched_datasets(micro_batch_size, num_micro_batches,
                               num_batches, num_test_batches, partition_info):
  """Load the train/val/test splits as micro-batched datasets for one partition.

  Returns a dict with keys "train", "val" and "test"; only the training
  split is shuffled.
  """
  util.set_tf_random_seed()
  batch_size = micro_batch_size * num_micro_batches
  (x_train, y_train), (x_val, y_val), (x_test, y_test) = \
      load_dataset_as_arrays(batch_size, num_batches, num_test_batches)

  def _to_dataset(samples, labels, **kwargs):
    # Wrap the raw arrays into a micro-batched dataset for this partition.
    return get_microbatched_dataset(samples, labels, micro_batch_size,
                                    num_micro_batches, partition_info,
                                    **kwargs)

  return {"train": _to_dataset(x_train, y_train, shuffle=True),
          "val": _to_dataset(x_val, y_val),
          "test": _to_dataset(x_test, y_test)}
def sequential_model_generator():
  """Build a small Sequential FC classifier and log its creation."""
  util.set_tf_random_seed()
  # Pass the layer list to the constructor instead of repeated `add` calls.
  model = keras.Sequential([
      keras.layers.Flatten(input_shape=(28, 28, 1,)),
      layers.Dense(20, activation='relu', name='FC1'),
      layers.Dense(20, activation='relu', name='FC2'),
      layers.Dense(10, activation='softmax', name='softmax'),
  ])
  logging.getLogger().info("Initialized Sequential model")
  return model
def load_reference_datasets(batch_size, num_batches, num_test_batches):
  """Load MNIST as plainly batched reference train/val/test datasets."""
  util.set_tf_random_seed()
  n_train = num_batches * batch_size
  n_test = num_test_batches * batch_size
  # val and test reuse the same size and batch size.
  splits = util.load_dataset(mnist.load_mnist_dataset,
                             train_size=n_train,
                             train_batch_size=batch_size,
                             val_size=n_test,
                             val_batch_size=batch_size,
                             test_size=n_test,
                             test_batch_size=batch_size,
                             shuffle=True)
  return dict(zip(("train", "val", "test"), splits))
def fc_model_generator_two_partitions():
  """Build the FC classifier with one SplitLayer, yielding two partitions."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  h = layers.Flatten()(net_in)
  h = layers.Dense(80, activation='relu', name='FC1')(h)
  # Cut point between the two hidden layers.
  h = tnt.keras.layers.SplitLayer(name="split1")(h)
  h = layers.Dense(80, activation='relu', name='FC2')(h)
  net_out = layers.Dense(10, activation='softmax', name='softmax')(h)
  model = keras.Model(inputs=net_in, outputs=net_out)
  logging.getLogger().info(
      "Initialized FC model with one `SplitLayer` (two partitions)")
  return model
def fc_model_generator():
  """Build the reference FC model cut by `split_layer1` into two partitions."""
  util.set_tf_random_seed()
  ref_in = keras.Input(shape=(28, 28, 1,), name='reference_input')
  h = keras.layers.Flatten(name='flatten')(ref_in)
  h = keras.layers.Dense(10, activation='relu', name='dense_relu')(h)
  h = tnt_layers.SplitLayer(name="split_layer1")(h)
  ref_out = keras.layers.Dense(10, activation='softmax',
                               name='dense_softmax')(h)
  return keras.Model(inputs=ref_in, outputs=ref_out, name="reference_model")
def lenet5_model_generator():
  """Build a LeNet5-style CNN for 28x28x1 inputs and log its creation."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  h = layers.Conv2D(32, 3, padding="same", activation='relu',
                    name="conv1")(net_in)
  h = layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2))(h)
  h = layers.Conv2D(32, 3, padding="same", activation='relu',
                    name="conv2")(h)
  h = layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2))(h)
  h = layers.Flatten()(h)
  h = layers.Dense(20, activation='relu')(h)
  net_out = layers.Dense(10, activation='softmax')(h)
  model = keras.Model(inputs=net_in, outputs=net_out)
  logging.getLogger().info("Initialized LeNet5 model")
  return model
def multi_output_model_generator():
  """Build a two-output model whose shared trunk is cut by two SplitLayers."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  trunk = keras.layers.Flatten(name='flatten')(net_in)
  trunk = keras.layers.Dense(10, activation='relu', name='dense_relu')(trunk)
  # Both cut edges originate from the same dense output.
  ten_branch = tnt_layers.SplitLayer(name="ten_classes")(trunk)
  two_branch = tnt_layers.SplitLayer(name="two_classes")(trunk)
  merged = keras.layers.Add(name='add')([ten_branch, two_branch])
  head10 = keras.layers.Dense(10, activation='relu',
                              name='dense_softmax10')(merged)
  head2 = keras.layers.Dense(2, activation='softmax',
                             name='dense_softmax2')(merged)
  return keras.Model(inputs=net_in, outputs=[head10, head2], name="model")
def alexnet_model_generator():
  """Build an AlexNet-like CNN for 28x28x1 inputs and log its creation."""
  util.set_tf_random_seed()
  net_in = keras.Input(shape=(28, 28, 1,), name='input')
  h = layers.Conv2D(32, 3, strides=(1, 1), padding='valid',
                    activation='relu')(net_in)
  h = layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                          padding='valid')(h)
  h = layers.Conv2D(32, 3, strides=(1, 1), padding='valid',
                    activation='relu')(h)
  h = layers.MaxPooling2D(pool_size=(3, 3), strides=(1, 1),
                          padding='valid')(h)
  h = layers.Conv2D(64, 3, strides=(1, 1), padding='valid',
                    activation='relu')(h)
  h = layers.Conv2D(64, 3, strides=(1, 1), padding='valid',
                    activation='relu')(h)
  h = layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                          padding='valid')(h)
  h = layers.Flatten()(h)
  h = layers.Dense(512, activation='relu')(h)
  net_out = layers.Dense(10, activation='softmax')(h)
  model = keras.Model(inputs=net_in, outputs=net_out)
  logging.getLogger().info("Initialized AlexNet model")
  return model
def subclassed_model_generator():
  """Instantiate the subclassed model and build it for MNIST-shaped input."""
  util.set_tf_random_seed()
  net = SubclassedModel()
  # Build with a known input shape (batch dimension left unspecified).
  net.build((None, 28, 28, 1))
  return net