Example #1
0
    def __init__(self, model_config, compound_encoder):
        """Build the pretraining heads requested in ``model_config``.

        Parameters:
            model_config (dict): hyper-parameters; must contain
                'hidden_size', 'dropout_rate', 'act', 'pretrain_tasks',
                'Fg_size' and, per enabled task, its specific keys
                ('Cm_vocab', 'Adc_vocab').
            compound_encoder: encoder module exposing ``embed_dim``;
                stored as a sub-module so it is trained jointly.
        """
        super(GeoPredModel, self).__init__()
        self.compound_encoder = compound_encoder

        self.hidden_size = model_config['hidden_size']
        self.dropout_rate = model_config['dropout_rate']
        self.act = model_config['act']
        self.pretrain_tasks = model_config['pretrain_tasks']

        # hoist the embedding width; previously the Adc branch alone went
        # through ``self.compound_encoder`` for the same value
        embed_dim = compound_encoder.embed_dim

        # context mask: per-node classification over Cm_vocab contexts
        # (+3 extra slots, presumably pad/unk/mask — TODO confirm)
        if 'Cm' in self.pretrain_tasks:
            self.Cm_vocab = model_config['Cm_vocab']
            self.Cm_linear = nn.Linear(embed_dim, self.Cm_vocab + 3)
            self.Cm_loss = nn.CrossEntropyLoss()
        # functional group: multi-label prediction, hence BCE-with-logits
        self.Fg_linear = nn.Linear(embed_dim, model_config['Fg_size'])  # 494
        self.Fg_loss = nn.BCEWithLogitsLoss()
        # bond angle with regression (input: 3 concatenated embeddings)
        if 'Bar' in self.pretrain_tasks:
            self.Bar_mlp = MLP(2,
                               hidden_size=self.hidden_size,
                               act=self.act,
                               in_size=embed_dim * 3,
                               out_size=1,
                               dropout_rate=self.dropout_rate)
            self.Bar_loss = nn.SmoothL1Loss()
        # bond length with regression (input: 2 concatenated embeddings)
        if 'Blr' in self.pretrain_tasks:
            self.Blr_mlp = MLP(2,
                               hidden_size=self.hidden_size,
                               act=self.act,
                               in_size=embed_dim * 2,
                               out_size=1,
                               dropout_rate=self.dropout_rate)
            self.Blr_loss = nn.SmoothL1Loss()
        # atom distance with classification into Adc_vocab bins (+3 extra)
        if 'Adc' in self.pretrain_tasks:
            self.Adc_vocab = model_config['Adc_vocab']
            self.Adc_mlp = MLP(2,
                               hidden_size=self.hidden_size,
                               act=self.act,
                               in_size=embed_dim * 2,
                               out_size=self.Adc_vocab + 3,
                               dropout_rate=self.dropout_rate)
            self.Adc_loss = nn.CrossEntropyLoss()

        print('[GeoPredModel] pretrain_tasks:%s' % str(self.pretrain_tasks))
    def __init__(self, **cfg):
        """Initialize the AnimeGANV2 model from keyword configuration.

        Parameters:
            **cfg: experiment flags, wrapped into an EasyDict. Expected
                keys include 'generator' and 'gan_mode'; in train mode
                also 'discriminator' and optionally 'pretrain_ckpt'.
        """
        super(AnimeGANV2Model, self).__init__()
        # define networks (both generator and discriminator)
        self.cfg = EasyDict(**cfg)
        self.nets['netG'] = build_generator(self.cfg.generator)
        init_weights(self.nets['netG'])
        # define a discriminator; conditional GANs need to take both input and output images; Therefore, #channels for D is input_nc + output_nc
        if self.is_train:
            self.nets['netD'] = build_discriminator(self.cfg.discriminator)
            init_weights(self.nets['netD'])

            # fixed VGG19 used as a perceptual feature extractor
            self.pretrained = CaffeVGG19()

            self.losses = {}
            # define loss functions
            self.criterionGAN = GANLoss(self.cfg.gan_mode)
            self.criterionL1 = nn.L1Loss()
            self.criterionHub = nn.SmoothL1Loss()

            # optionally warm-start the generator from a checkpoint
            if self.cfg.pretrain_ckpt:
                state_dicts = load(self.cfg.pretrain_ckpt)
                self.nets['netG'].set_state_dict(state_dicts['netG'])
                print('Load pretrained generator from', self.cfg.pretrain_ckpt)
Example #3
0
 def __init__(self, mode="l2", **kargs):
     """Regression-loss wrapper selecting L1, MSE, or SmoothL1.

     Parameters:
         mode (str): one of "l1", "l2", "smooth_l1".
         **kargs: forwarded to the underlying loss constructor.

     Raises:
         ValueError: if ``mode`` is not a supported value.
     """
     super().__init__()
     loss_classes = {
         "l1": nn.L1Loss,
         "l2": nn.MSELoss,
         "smooth_l1": nn.SmoothL1Loss,
     }
     # an ``assert`` here would be silently stripped under ``python -O``;
     # validate explicitly instead
     if mode not in loss_classes:
         raise ValueError(
             'mode must be one of ["l1", "l2", "smooth_l1"], got %r' % mode)
     self.loss_func = loss_classes[mode](**kargs)
Example #4
0
 def __init__(self, config):
     """Head that regresses bond length from concatenated pair embeddings."""
     super(PretrainBondLength, self).__init__()
     log.info("Using pretrain bond length")
     dim = config.emb_dim
     # funnel MLP: 2*dim -> dim/2 -> dim/4 -> 1, with batch-norm + Swish
     # between the linear layers
     layers = [
         L.Linear(dim * 2, dim // 2),
         L.batch_norm_1d(dim // 2),
         nn.Swish(),
         L.Linear(dim // 2, dim // 4),
         L.batch_norm_1d(dim // 4),
         nn.Swish(),
         L.Linear(dim // 4, 1),
     ]
     self.bond_length_pred_linear = nn.Sequential(*layers)
     # per-element loss values; reduction is left to the caller
     self.loss = nn.SmoothL1Loss(reduction='none')
Example #5
0
    def __init__(self,
                 generator,
                 discriminator=None,
                 gan_criterion=None,
                 pretrain_ckpt=None,
                 g_adv_weight=300.,
                 d_adv_weight=300.,
                 con_weight=1.5,
                 sty_weight=2.5,
                 color_weight=10.,
                 tv_weight=1.):
        """Initialize the AnimeGANV2 model.

        Parameters:
            generator: config used to build the generator network.
            discriminator: config for the discriminator (train mode only).
            gan_criterion: config for the GAN loss criterion.
            pretrain_ckpt: optional checkpoint path to warm-start netG.
            g_adv_weight, d_adv_weight, con_weight, sty_weight,
            color_weight, tv_weight (float): loss-term weights.
        """
        super(AnimeGANV2Model, self).__init__()
        # stash every loss-term weight on the instance
        for attr, weight in (('g_adv_weight', g_adv_weight),
                             ('d_adv_weight', d_adv_weight),
                             ('con_weight', con_weight),
                             ('sty_weight', sty_weight),
                             ('color_weight', color_weight),
                             ('tv_weight', tv_weight)):
            setattr(self, attr, weight)

        # the generator is always built, with freshly initialized weights
        self.nets['netG'] = build_generator(generator)
        init_weights(self.nets['netG'])

        # training-only components: discriminator, perceptual net, losses
        if self.is_train:
            self.nets['netD'] = build_discriminator(discriminator)
            init_weights(self.nets['netD'])

            # fixed VGG19 used as a perceptual feature extractor
            self.pretrained = CaffeVGG19()

            self.losses = {}
            self.criterionGAN = build_criterion(gan_criterion)
            self.criterionL1 = nn.L1Loss()
            self.criterionHub = nn.SmoothL1Loss()

            # optionally warm-start the generator from a checkpoint
            if pretrain_ckpt:
                state_dicts = load(pretrain_ckpt)
                self.nets['netG'].set_state_dict(state_dicts['netG'])
                print('Load pretrained generator from', pretrain_ckpt)