    def _save_training_parameters(self, optimizer_encoder, optimizer_decoder, epoch):
        """
        Saves the state of both optimizers for the given epoch
        """
        state = {'optimizer_encoder': optimizer_encoder.state_dict(),
                 'optimizer_decoder': optimizer_decoder.state_dict()
                 }
        file_name = os.path.join(self.save_path, f'checkpoint/optimizer_{epoch}.pt')
        uf.make_directory(file_name, is_dir=False)
        torch.save(state, file_name)
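For context, uf.make_directory is a project-specific helper rather than a standard-library call. Below is a minimal sketch of what it presumably does, assuming it wraps os.makedirs and, with is_dir=False, creates only the parent directory of the given file path; this reimplementation is an assumption, not the project's actual code.

import os

def make_directory(path, is_dir=True):
    # Hypothetical reimplementation for illustration only; the real helper
    # lives in the project's uf (utils) module. With is_dir=False the
    # argument is treated as a file path, so only its parent directory is
    # created before the file is written there.
    dir_path = path if is_dir else os.path.dirname(path)
    if dir_path:
        os.makedirs(dir_path, exist_ok=True)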
    def save(self, model, optim, epoch, vocab_size, opt):
        """
        Saves the model, optimizer and model hyperparameters
        """
        save_dict = {
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optim.state_dict(),
            'model_parameters': self._get_model_parameters(vocab_size, opt)
        }

        file_name = os.path.join(self.save_path, f'checkpoint/model_{epoch}.pt')
        uf.make_directory(file_name, is_dir=False)

        torch.save(save_dict, file_name)
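The listing does not include a loading counterpart for this checkpoint. The sketch below assumes only the dictionary layout written by save() above; the load_checkpoint name and the device argument are illustrative, not part of the original code.

import torch

def load_checkpoint(file_name, model, optim, device="cpu"):
    # Restores the weights and optimizer state written by save() and returns
    # the stored model hyperparameters. Assumes model and optim were already
    # constructed with a compatible architecture.
    checkpoint = torch.load(file_name, map_location=device)
    model.load_state_dict(checkpoint['model_state_dict'])
    optim.load_state_dict(checkpoint['optimizer_state_dict'])
    return checkpoint['model_parameters']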
    def save(self, file):
        """
        Saves the model into a file
        :param file: file path
        """
        save_dict = {
            'vocabulary': self.vocabulary,
            'tokenizer': self.tokenizer,
            'max_sequence_length': self.max_sequence_length,
            'encoder': self.network.encoder.state_dict(),
            'decoder': self.network.decoder.state_dict(),
            'encoder_params': self.network.encoder.get_params(),
            'decoder_params': self.network.decoder.get_params()
        }
        uf.make_directory(file, is_dir=False)
        torch.save(save_dict, file)
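As a rough counterpart to this save(), the sketch below restores the stored encoder/decoder weights into an already constructed network. It assumes only the dictionary keys used above; the restore_weights name and the expectation that the network's modules match the saved architectures are assumptions.

import torch

def restore_weights(file, network):
    # Assumes `network` already has encoder/decoder modules built with the
    # architectures described by encoder_params/decoder_params in the file.
    save_dict = torch.load(file, map_location="cpu")
    network.encoder.load_state_dict(save_dict['encoder'])
    network.decoder.load_state_dict(save_dict['decoder'])
    return (save_dict['vocabulary'],
            save_dict['tokenizer'],
            save_dict['max_sequence_length'])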
Example 4
    def __init__(self, data_path, num_samples, range_evaluation):

        self.save_path = uf.get_parent_dir(data_path)
        global LOG
        LOG = ul.get_logger(name="evaluation",
                            log_path=os.path.join(self.save_path,
                                                  'evaluation.log'))
        self.data_path = data_path
        self.data = pd.read_csv(self.data_path, sep=",")
        self.num_samples = num_samples

        self.output_path = self.save_path
        self.range_evaluation = range_evaluation
        if self.range_evaluation != "":
            self.output_path = os.path.join(self.output_path,
                                            '{}'.format(self.range_evaluation))
        uf.make_directory(self.output_path)
def get_logger(name, log_path, level=logging.INFO):
    formatter = logging.Formatter(
        fmt=
        "%(asctime)s: %(module)s.%(funcName)s +%(lineno)s: %(levelname)-8s %(message)s",
        datefmt="%H:%M:%S")

    logger = logging.getLogger(name)
    logger.setLevel(level)

    # Logging to console
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)

    # Logging to a file
    uf.make_directory(log_path, is_dir=False)
    file_handler = logging.FileHandler(log_path)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    return logger
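A minimal usage example of get_logger (the name and log path here are made up for illustration):

log = get_logger(name="training", log_path="output/training.log")
log.info("messages go to both the console and output/training.log")

Note that because logging.getLogger returns the same logger object for a given name, calling get_logger twice with the same name attaches duplicate handlers and each message is then emitted more than once.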
Example 6
def run_main():
    """Main function."""
    parser = argparse.ArgumentParser(
        description='mmp_analysis.py',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    opts.evaluation_opts(parser)
    opt = parser.parse_args()

    save_path = uf.get_parent_dir(opt.data_path)
    global LOG
    LOG = ul.get_logger(name="mmp_analysis",
                        log_path=os.path.join(save_path, 'mmp_analysis.log'))
    LOG.info(opt)
    if opt.only_desirable:
        temp_files_path = os.path.join(save_path, 'temp_files',
                                       'MMP_desirable')
    else:
        temp_files_path = os.path.join(save_path, 'temp_files', 'MMP')
    uf.make_directory(temp_files_path, is_dir=True)

    perform_mmp_analysis(opt.data_path, opt.train_path, temp_files_path,
                         save_path, opt.mmpdb_path, opt.num_samples,
                         opt.only_desirable)
    def save(self, model, optimizer_encoder, optimizer_decoder, epoch):
        """
        Saves the model and both optimizer states for the given epoch
        """
        file_name = os.path.join(self.save_path, f'checkpoint/model_{epoch}.pt')
        uf.make_directory(file_name, is_dir=False)
        model.save(file_name)
        self._save_training_parameters(optimizer_encoder, optimizer_decoder, epoch)