def initialize_generator(self, discriminator, *inputs):
    if len(inputs) != 2:
        raise ValueError('this generator is supposed to get two inputs:'
                         ' the message and the key.')
    message_input, key_input = inputs

    if self.tie_alice_and_bob:
        if self.alice_share_bitwise_weights != self.bob_share_bitwise_weights:
            raise ValueError('bob and alice are tied, however they do not '
                             'have the same weight-sharing settings.')
        # Alice and Bob reuse one ElementWise stack, so encryption and
        # decryption are performed with identical (tied) weights.
        enc_dec = ElementWise(
            self.alice_bitwise_latent_dims,
            activation='tanh',
            share_element_weights=self.alice_share_bitwise_weights)
        alice_encryption = Flatten()(enc_dec([message_input, key_input]))
        bob_decryption = Flatten(name='decryption')(
            enc_dec([alice_encryption, key_input]))
    else:
        # Alice and Bob each get their own, independently parameterized stack.
        alice_encryption = Flatten()(ElementWise(
            self.alice_bitwise_latent_dims,
            activation='tanh',
            share_element_weights=self.alice_share_bitwise_weights)(
                [message_input, key_input]))
        bob_decryption = Flatten(name='decryption')(ElementWise(
            self.bob_bitwise_latent_dims,
            activation='tanh',
            share_element_weights=self.bob_share_bitwise_weights)(
                [alice_encryption, key_input]))

    eves_opinion = discriminator([message_input, alice_encryption])

    alice = Model(inputs=inputs, outputs=alice_encryption)
    alice_bob = Model(inputs=inputs, outputs=[bob_decryption, eves_opinion])

    def discriminator_loss(y_true, y_pred):
        return K.abs(0.5 - K.mean(y_pred))

    # Note: this variant compiles with binary_crossentropy on Eve's opinion;
    # the custom discriminator_loss above is defined but left unused here.
    alice_bob.compile(optimizer=Adam(),
                      loss=[mean_absolute_error, binary_crossentropy])
    return alice_bob, alice
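# A minimal, self-contained sketch of the layer-reuse pattern behind
# `tie_alice_and_bob`: calling one layer object on two different inputs makes
# both paths use identical weights. A stock Dense layer stands in for the
# project's ElementWise stack here, purely for illustration; the function name
# `tied_weights_demo` is hypothetical.
def tied_weights_demo(bits=16):
    from keras.layers import Dense, Input
    from keras.models import Model

    x1 = Input(shape=(bits,))
    x2 = Input(shape=(bits,))
    shared = Dense(bits, activation='tanh')          # one layer object ...
    model = Model(inputs=[x1, x2],
                  outputs=[shared(x1), shared(x2)])  # ... applied twice
    # Despite the two output paths there is only one kernel and one bias, so
    # an update through either path moves both.
    assert len(model.trainable_weights) == 2
    return model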
def initialize_model(self):
    message_input = Input(shape=(self.message_length,),
                          name='message_input')
    possible_ciphertext_input = Input(shape=(self.message_length,),
                                      name='possible_ciphertext_input')

    bitwise_function = Flatten()(
        ElementWise([8, 1], activation='tanh')([
            message_input,
            possible_ciphertext_input
        ])
    )
    dense = Dense(self.message_length, activation='relu')(bitwise_function)
    pred = Dense(1, activation='sigmoid')(dense)

    model = Model(inputs=[message_input, possible_ciphertext_input],
                  outputs=pred)
    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=['acc'])

    self.model = model
    return [model]
def initialize_discriminator(self, *inputs):
    if len(inputs) != 2:
        raise ValueError('this discriminator is supposed to get two inputs:'
                         ' the message and a possible ciphertext.')
    message_input = inputs[0]
    possible_ciphertext_input = inputs[1]

    # Bitwise mixing of the message with the candidate ciphertext, followed by
    # a dense stack that predicts whether the pair belongs together.
    bitwise_function = Flatten()(ElementWise(
        self.eve_bitwise_latent_dims,
        activation='tanh',
        share_element_weights=self.eve_share_bitwise_weights)(
            [message_input, possible_ciphertext_input]))

    dense = Dense(self.eve_latent_dim[0], activation='relu')(bitwise_function)
    for units in self.eve_latent_dim[1:]:
        dense = Dense(units, activation='relu')(dense)
    pred = Dense(1, activation='sigmoid')(dense)

    model = Model(inputs=inputs, outputs=pred)
    model.compile(optimizer=Adam(),
                  loss='binary_crossentropy',
                  metrics=['acc'])
    return model
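# How training pairs for this discriminator are assembled is not shown here.
# One plausible scheme, sketched below, labels genuine (message, ciphertext)
# pairs with 1 and pairs with a mismatched ciphertext with 0; the helper name
# `make_eve_batch` and the +/-1 bit encoding are assumptions for illustration,
# not the repository's own data pipeline.
import numpy as np

def make_eve_batch(messages, ciphertexts):
    """Build a balanced batch of matching and mismatched pairs.

    `messages` and `ciphertexts` are aligned arrays, e.g. random +/-1 bit
    vectors and the corresponding outputs of the Alice model.
    """
    # Pair each message with another message's ciphertext to get negatives.
    mismatched = np.roll(ciphertexts, shift=1, axis=0)
    pair_messages = np.concatenate([messages, messages])
    pair_ciphertexts = np.concatenate([ciphertexts, mismatched])
    labels = np.concatenate([np.ones(len(messages)), np.zeros(len(messages))])
    return [pair_messages, pair_ciphertexts], labels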
def initialize_generator(self, discriminator, *inputs):
    if len(inputs) != 2:
        raise ValueError('this generator is supposed to get two inputs:'
                         ' the message and the key.')
    message_input, key_input = inputs

    # Alice encrypts the message with the key ...
    alice_encryption = Flatten()(
        ElementWise(self.alice_bitwise_latent_dims,
                    activation='tanh',
                    share_element_weights=self.alice_share_bitwise_weights)([
                        message_input,
                        key_input
                    ])
    )
    # ... and Bob decrypts Alice's ciphertext with the same key.
    bob_decryption = Flatten(name='decryption')(
        ElementWise(self.bob_bitwise_latent_dims,
                    activation='tanh',
                    share_element_weights=self.bob_share_bitwise_weights)([
                        alice_encryption,
                        key_input
                    ])
    )
    eves_opinion = discriminator([message_input, alice_encryption])

    alice = Model(inputs=inputs, outputs=alice_encryption)
    alice_bob = Model(inputs=inputs, outputs=[bob_decryption, eves_opinion])

    def discriminator_loss(y_true, y_pred):
        # Penalize any deviation of Eve's accuracy from 50%, i.e. push Eve
        # towards random guessing.
        return K.abs(0.5 - binary_accuracy(y_true, y_pred)) ** 2

    alice_bob.compile(optimizer=Adam(lr=0.0008),
                      loss=[mean_absolute_error, discriminator_loss])
    return alice_bob, alice
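# The repository's training loop is defined elsewhere; the sketch below shows
# one plausible alternating schedule for the models returned above. The names
# `random_bits` and `adversarial_step`, the +/-1 bit encoding, and the reuse of
# the hypothetical `make_eve_batch` helper sketched after
# initialize_discriminator are all assumptions, not the project's actual code.
import numpy as np

def random_bits(batch_size, length):
    # Random {-1, +1} vectors, matching the tanh outputs used above.
    return 2 * np.random.randint(0, 2, size=(batch_size, length)) - 1

def adversarial_step(alice_bob, alice, eve, batch_size, message_length, key_length):
    messages = random_bits(batch_size, message_length)
    keys = random_bits(batch_size, key_length)

    # 1) Update Alice and Bob: reconstruct the message while driving Eve's
    #    accuracy on genuine (message, ciphertext) pairs towards chance level.
    #    (Whether Eve's own weights move here depends on how the
    #    discriminator's `trainable` flag was set before alice_bob was
    #    compiled.)
    alice_bob.train_on_batch([messages, keys],
                             [messages, np.ones((batch_size, 1))])

    # 2) Update Eve on matching vs. mismatched pairs.
    ciphertexts = alice.predict([messages, keys])
    pairs, labels = make_eve_batch(messages, ciphertexts)
    eve.train_on_batch(pairs, labels)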
def initialize_model(self):
    input_1 = Input(shape=(self.input_length,), name='input_1')
    input_2 = Input(shape=(self.input_length,), name='input_2')

    # reshape = Reshape((-1, 1))

    # ORIGINAL BITWISE FUNCTION IMPLEMENTATION
    #
    # bitwise_function = Flatten()(
    #     TimeDistributed(
    #         Dense(1, activation='tanh')
    #     )(LocallyConnected1D(
    #         self.latent_dim,
    #         kernel_size=2,
    #         strides=2,
    #         activation='relu'
    #     )(reshape(
    #         Intertwine()([
    #             input_1,
    #             input_2
    #         ]))
    #     )
    #     )
    # )

    bitwise_function = Flatten()(ElementWise(
        [self.latent_dim, 1],
        activation=['relu', 'tanh'],
        share_element_weights=True)([input_1, input_2]))

    model = Model(inputs=[input_1, input_2], outputs=bitwise_function)
    model.compile(optimizer=Adam(), loss=mean_absolute_error)

    self.model = model
    if self.verbose:
        model.summary()
    return [model]
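# The commented-out implementation above relies on a custom Intertwine layer.
# Assuming Intertwine interleaves the two bit vectors element by element, the
# same per-bit mixing can be reproduced with stock Keras layers by stacking
# the i-th bits of both inputs along a channel axis instead of interleaving
# them (a kernel of size 1 over shape (length, 2) is equivalent to a kernel of
# size 2 with stride 2 over the interleaved (2 * length, 1) sequence). The
# sketch below is illustrative only and is not the ElementWise implementation.
from keras.layers import (Concatenate, Dense, Flatten, Input,
                          LocallyConnected1D, Reshape, TimeDistributed)
from keras.models import Model

def bitwise_mixer(length, latent_dim):
    """Per-bit mixing of two equal-length inputs using stock Keras layers."""
    input_1 = Input(shape=(length,))
    input_2 = Input(shape=(length,))

    # Stack the i-th bits of both inputs together: shape (length, 2).
    stacked = Concatenate(axis=-1)([Reshape((length, 1))(input_1),
                                    Reshape((length, 1))(input_2)])

    # A small hidden representation per bit position, each position with its
    # own weights (the locally connected analogue of the original block).
    hidden = LocallyConnected1D(latent_dim, kernel_size=1,
                                activation='relu')(stacked)

    # Collapse every position back to a single value in [-1, 1], sharing the
    # output weights across positions.
    out = Flatten()(TimeDistributed(Dense(1, activation='tanh'))(hidden))
    return Model(inputs=[input_1, input_2], outputs=out)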