def get_hidden_activations(self):
    '''
    Build the captioning model from the best checkpoint and extract its
    hidden-state (HSN) activations for the 'train' and 'val' splits.

    In the model, we will merge the VGG image representation with
    the word embeddings. We need to feed the data as a list, in which
    the order of the elements in the list is _crucial_.

    Side effects: sets self.data_generator, self.args.checkpoint,
    self.vocab_len, self.fhs and (optionally) self.full_model.
    '''
    self.data_generator = VisualWordDataGenerator(self.args,
                                                  self.args.dataset)
    # Restore the vocabulary that was serialised with the best checkpoint
    # so the embedding indices line up with the saved weights.
    self.args.checkpoint = self.find_best_checkpoint()
    self.data_generator.set_vocabulary(self.args.checkpoint)
    self.vocab_len = len(self.data_generator.index2word)

    # Unroll the RNN for generation_timesteps when we will sample tokens,
    # otherwise for the longest gold sequence in the data.
    if self.args.use_predicted_tokens:
        t = self.args.generation_timesteps
    else:
        t = self.data_generator.max_seq_len

    m = models.NIC(self.args.embed_size, self.args.hidden_size,
                   self.vocab_len,
                   self.args.dropin,
                   self.args.optimiser, self.args.l2reg,
                   weights=self.args.checkpoint,
                   gru=self.args.gru,
                   t=t)

    self.fhs = m.buildHSNActivations(use_image=self.use_image)

    # If we will feed the model its own predictions, we also need the full
    # generation model. (Was `self.args.no_image == False`; `not` is the
    # idiomatic form — assumes no_image is a boolean flag, TODO confirm.)
    if self.args.use_predicted_tokens and not self.args.no_image:
        gen_m = models.NIC(self.args.embed_size, self.args.hidden_size,
                           self.vocab_len,
                           self.args.dropin,
                           self.args.optimiser, self.args.l2reg,
                           weights=self.args.checkpoint,
                           gru=self.args.gru,
                           t=self.args.generation_timesteps)
        self.full_model = gen_m.buildKerasModel(use_image=self.use_image)

    self.new_generate_activations('train')
    self.new_generate_activations('val')
def __create_nic_component(self):
    '''
    Create a models.NIC record for each NIC entry reported in
    self.clean_data, attached to self.asset_obj.

    Each item must carry a 'macaddress' string (checked via
    __verify_field); items are only persisted while no error has been
    recorded in self.response. Any exception during creation is reported
    through self.response_msg rather than propagated.
    '''
    nic_info = self.clean_data.get('nic')
    if not nic_info:  # nothing reported for this asset
        return
    for nic_item in nic_info:
        try:
            self.__verify_field(nic_item, 'macaddress', str)
            # No processing when an error has already happened.
            if not self.response['error']:
                data_set = {
                    'asset_id': self.asset_obj.id,
                    'name': nic_item.get('name'),
                    'sn': nic_item.get('sn'),
                    'macaddress': nic_item.get('macaddress'),
                    'ipaddress': nic_item.get('ipaddress'),
                    'bonding': nic_item.get('bonding'),
                    'model': nic_item.get('model'),
                    'netmask': nic_item.get('netmask'),
                }
                obj = models.NIC(**data_set)
                obj.save()
        # `except Exception, e` is Python-2-only syntax; `as` works on
        # Python 2.6+ and 3.x alike.
        except Exception as e:
            self.response_msg('error', 'ObjectCreationException',
                              'Object [nic] %s' % str(e))
def build_model(self, generate=False):
    '''
    Build a Keras model if one does not yet exist.

    Helper function for generate(). When `generate` is True the network
    is unrolled for self.args.generation_timesteps; otherwise for the
    longest sequence seen by the data generator. The result is stored in
    self.model.
    '''
    timesteps = (self.args.generation_timesteps if generate
                 else self.data_gen.max_seq_len)

    # Both architectures share an identical constructor signature, so we
    # only need to pick the class.
    architecture = models.MRNN if self.args.mrnn else models.NIC
    builder = architecture(self.args.embed_size,
                           self.args.hidden_size,
                           self.vocab_len,
                           self.args.dropin,
                           self.args.optimiser,
                           self.args.l2reg,
                           hsn_size=self.hsn_size,
                           weights=self.args.checkpoint,
                           gru=self.args.gru,
                           clipnorm=self.args.clipnorm,
                           t=timesteps)

    self.model = builder.buildKerasModel(use_sourcelang=self.use_sourcelang,
                                         use_image=self.use_image)
def train_model(self):
    '''
    Initialise the data generator to process the data in a memory-friendly
    manner. Then build the Keras model, given the user-specified arguments
    (or the initial defaults). Train the model for self.args.max_epochs
    and return the training and validation losses.

    The losses object contains a history variable. The history variable is
    a dictionary with a list of training and validation losses:
    losses.history.['loss']
    losses.history.['val_loss']
    '''
    # Source-language hidden state is only meaningful with a source
    # language; otherwise force the size to zero.  # ick
    hsn_size = self.data_generator.hsn_size if self.use_sourcelang else 0

    # MRNN and NIC share a constructor signature; pick the architecture
    # once and build with a single call.
    architecture = models.MRNN if self.args.mrnn else models.NIC
    builder = architecture(self.args.embed_size,
                           self.args.hidden_size,
                           self.V,
                           self.args.dropin,
                           self.args.optimiser,
                           self.args.l2reg,
                           hsn_size=hsn_size,
                           weights=self.args.init_from_checkpoint,
                           gru=self.args.gru,
                           clipnorm=self.args.clipnorm,
                           t=self.data_generator.max_seq_len,
                           lr=self.args.lr)
    model = builder.buildKerasModel(use_sourcelang=self.use_sourcelang,
                                    use_image=self.use_image)

    callbacks = CompilationOfCallbacks(self.data_generator.word2index,
                                       self.data_generator.index2word,
                                       self.args,
                                       self.args.dataset,
                                       self.data_generator,
                                       use_sourcelang=self.use_sourcelang,
                                       use_image=self.use_image)

    # Randomised batches for training, a fixed order for validation.
    train_generator = self.data_generator.random_generator('train')
    train_size = self.data_generator.split_sizes['train']
    val_generator = self.data_generator.fixed_generator('val')
    val_size = self.data_generator.split_sizes['val']

    losses = model.fit_generator(generator=train_generator,
                                 samples_per_epoch=train_size,
                                 nb_epoch=self.args.max_epochs,
                                 verbose=1,
                                 callbacks=[callbacks],
                                 nb_worker=1,
                                 validation_data=val_generator,
                                 nb_val_samples=val_size)
    return losses