Example #1
    def normlize_data(self):
        ### normalize train data ###
        if os.path.isfile(self.inp_stats_file) and os.path.isfile(self.out_stats_file):
            # norm stats already exist on disk -- just load the scalers
            self.inp_scaler = data_utils.load_norm_stats(self.inp_stats_file, self.inp_dim, method=self.inp_norm)
            self.out_scaler = data_utils.load_norm_stats(self.out_stats_file, self.out_dim, method=self.out_norm)
        else:
            print('preparing train_x, train_y from input and output feature files...')
            train_x, train_y, train_flen = data_utils.read_data_from_file_list(
                self.inp_train_file_list, self.out_train_file_list, self.inp_dim, self.out_dim,
                sequential_training=bool(self.sequential_training or self.encoder_decoder))

            # compute the stats and keep the scalers on the instance so
            # train_tensorflow_model() can reuse them
            print('computing norm stats for train_x...')
            self.inp_scaler = data_utils.compute_norm_stats(train_x, self.inp_stats_file, method=self.inp_norm)

            print('computing norm stats for train_y...')
            self.out_scaler = data_utils.compute_norm_stats(train_y, self.out_stats_file, method=self.out_norm)
    def train_tensorflow_model(self):
        #### load the data ####
        print('preparing train_x, train_y from input and output feature files...')
        train_x, train_y, train_flen = data_utils.read_data_from_file_list(
            self.inp_train_file_list, self.out_train_file_list, self.inp_dim, self.out_dim,
            sequential_training=bool(self.sequential_training or self.encoder_decoder))

        #### normalize the data ####
        data_utils.norm_data(train_x, self.inp_scaler,
                             sequential_training=bool(self.sequential_training or self.encoder_decoder))
        data_utils.norm_data(train_y, self.out_scaler,
                             sequential_training=bool(self.sequential_training or self.encoder_decoder))

        #### define the model ####
        if self.sequential_training:
            # dict views are not indexable in Python 3, so materialize the frame counts
            utt_length = list(train_flen["utt2framenum"].values())
            self.tensorflow_models.get_max_step(max(utt_length))
            self.tensorflow_models.define_sequence_model()
        elif self.encoder_decoder:
            utt_length = list(train_flen["utt2framenum"].values())
            # set max_step on the encoder-decoder model, bypassing any
            # __setattr__ override defined on Train_Encoder_Decoder_Models
            super(Train_Encoder_Decoder_Models, self.encoder_decoder_models).__setattr__("max_step", max(utt_length))
            self.encoder_decoder_models.define_encoder_decoder()
        else:
            self.tensorflow_models.define_feedforward_model()

        #### train the model ####
        print('training...')
        if self.sequential_training:
            ### train sequence model ###
            self.tensorflow_models.train_sequence_model(train_x, train_y, batch_size=self.batch_size,
                                                        num_of_epochs=self.num_of_epochs,
                                                        shuffle_data=self.shuffle_data, utt_length=utt_length)
        elif self.encoder_decoder:
            ### train encoder-decoder model ###
            self.encoder_decoder_models.train_encoder_decoder_model(train_x, train_y, batch_size=self.batch_size,
                                                                    num_of_epochs=self.num_of_epochs,
                                                                    shuffle_data=True, utt_length=utt_length)
        else:
            ### train feedforward model ###
            self.tensorflow_models.train_feedforward_model(train_x, train_y, batch_size=self.batch_size,
                                                           num_of_epochs=self.num_of_epochs,
                                                           shuffle_data=self.shuffle_data)
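
Both methods assume a trainer object whose attributes (file lists, feature dimensions, normalization settings, scalers, and model wrappers) are populated elsewhere, typically from a configuration. As a minimal sketch only, a driver could call them in this order; the class name TensorflowClass and the cfg argument are assumptions inferred from the attribute names, not part of the listing:

    # Hypothetical driver -- the class name and constructor argument are assumptions.
    trainer = TensorflowClass(cfg)    # assumed to set inp_train_file_list, inp_dim, inp_norm, ...
    trainer.normlize_data()           # load or compute self.inp_scaler / self.out_scaler
    trainer.train_tensorflow_model()  # read features, normalize them, define and train the model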
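
The data_utils helpers themselves are not part of this listing. For orientation only, the kind of mean-variance normalization that compute_norm_stats / norm_data suggest can be sketched as below; these stand-in functions are assumptions, not the data_utils implementation:

    import numpy as np

    def mean_variance_stats(frames):
        # per-dimension mean and standard deviation over a (num_frames, dim) matrix
        mean = frames.mean(axis=0)
        std = frames.std(axis=0)
        std[std < 1e-8] = 1.0  # guard against constant dimensions
        return mean, std

    def apply_mvn(frames, mean, std):
        # zero-mean, unit-variance normalization
        return (frames - mean) / std

    # usage: stats from the stacked training frames, then applied per feature matrix
    # mean, std = mean_variance_stats(train_x)
    # train_x = apply_mvn(train_x, mean, std)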