def __init__(self, params, lr, momentum=0, dampening=0, weight_decay=0, nesterov=True, min_lr=1e-10):
    """SGD optimizer with logged hyper-parameters, Saver support and a lower LR bound.

    NOTE(review): with the declared defaults (momentum=0, nesterov=True) torch's
    SGD raises ValueError — presumably callers always pass momentum > 0; confirm.
    """
    log.info(f" >> lr= {lr}")
    log.info(f" >> momentum= {momentum}")
    log.info(f" >> dampening= {dampening}")
    log.info(f" >> weight_decay= {weight_decay}")
    log.info(f" >> nesterov= {nesterov}")
    log.info(f" >> min_lr= {min_lr}")
    # Explicit base-class calls (not super()) to keep the multiple-inheritance
    # initialization order exactly as written.
    sgd_kwargs = dict(lr=lr, momentum=momentum, dampening=dampening,
                      weight_decay=weight_decay, nesterov=nesterov)
    optim.SGD.__init__(self, params, **sgd_kwargs)
    Saver.__init__(self)
    # Floor used by external LR-decay logic.
    self.min_lr = min_lr
def __init__(self, params, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False, min_lr=1e-10):
    """Adam optimizer with logged hyper-parameters, Saver support and a lower LR bound."""
    log.info(f" >> lr= {lr}")
    log.info(f" >> betas= {betas}")
    log.info(f" >> eps= {eps}")
    log.info(f" >> weight_decay= {weight_decay}")
    log.info(f" >> amsgrad= {amsgrad}")
    log.info(f" >> min_lr= {min_lr}")
    # Explicit base-class calls preserve the original initialization order.
    adam_kwargs = dict(lr=lr, betas=betas, eps=eps,
                      weight_decay=weight_decay, amsgrad=amsgrad)
    optim.Adam.__init__(self, params, **adam_kwargs)
    Saver.__init__(self)
    # Floor used by external LR-decay logic.
    self.min_lr = min_lr
def __init__(self, cell, input_size, hidden_size, bidirectional, num_layers=1, dropout=0., proj=None, num_embeddings=None):
    """Recurrent encoder: optional token embedding, an RNN/GRU/LSTM stack, optional projection.

    `cell` names the torch.nn recurrent class to instantiate (e.g. "LSTM").
    When `num_embeddings` is given, inputs are discrete tokens embedded to
    `input_size` (index 0 reserved for padding); otherwise raw features are fed
    straight to the recurrent stack.
    """
    nn.Module.__init__(self)
    Saver.__init__(self)
    log.info(f" >> cell= {cell}")
    log.info(f" >> input_size= {input_size}")
    log.info(f" >> hidden_size= {hidden_size}")
    log.info(f" >> bidirectional= {bidirectional}")
    log.info(f" >> num_layers= {num_layers}")
    log.info(f" >> dropout between layers= {dropout}")
    if num_embeddings is not None:
        log.info(f" >> num input embeddings= {num_embeddings}")
        # +1 makes room for the padding index 0.
        self.emb = nn.Embedding(num_embeddings=num_embeddings + 1, embedding_dim=input_size, padding_idx=0)
    rnn_cls = getattr(nn, cell)
    # torch warns if dropout is set on a single-layer RNN, so zero it out.
    inter_layer_dropout = dropout if num_layers > 1 else 0.
    self.rnn = rnn_cls(input_size=input_size,
                       hidden_size=hidden_size,
                       bidirectional=bidirectional,
                       num_layers=num_layers,
                       dropout=inter_layer_dropout,
                       batch_first=True)
    if proj is not None:
        log.info(f" >> proj after rnn= {proj}")
        self.proj = Linear(self.output_size, proj)
def __init__(self, view1_num_layers, view1_input_size, view1_d_model, view1_nhead, view1_dim_feedforward, view1_dropout, view1_activation, view1_proj, view1_norm, view2_num_layers, view2_input_size, view2_d_model, view2_nhead, view2_dim_feedforward, view2_dropout, view2_activation, view2_num_embeddings, view2_proj, view2_norm, proj=None, norm=None, loss_fn=None):
    """Two-view model: one transformer encoder per view, optional shared projection and loss.

    view2 additionally embeds discrete tokens (`view2_num_embeddings`);
    the shared projection is sized from view1's output width.
    """
    nn.Module.__init__(self)
    Saver.__init__(self)
    self.net = nn.ModuleDict()
    # Per-view encoder configurations; view2 gains the num_embeddings entry.
    view_cfgs = {
        "view1": dict(num_layers=view1_num_layers, input_size=view1_input_size,
                      d_model=view1_d_model, nhead=view1_nhead,
                      dim_feedforward=view1_dim_feedforward, dropout=view1_dropout,
                      activation=view1_activation, norm=view1_norm, proj=view1_proj),
        "view2": dict(num_layers=view2_num_layers, input_size=view2_input_size,
                      d_model=view2_d_model, nhead=view2_nhead,
                      dim_feedforward=view2_dim_feedforward, dropout=view2_dropout,
                      activation=view2_activation, num_embeddings=view2_num_embeddings,
                      norm=view2_norm, proj=view2_proj),
    }
    for view_name, cfg in view_cfgs.items():
        log.info(f"{view_name}:")
        self.net[view_name] = TransformerEncoder_default(**cfg)
    if proj is not None:
        log.info(f"proj:")
        self.net["proj"] = Linear(self.net["view1"].output_size, proj)
    if loss_fn is not None:
        self.loss_fn = loss_fn
def __init__(self, in_features, out_features):
    """Thin saveable wrapper around a single nn.Linear layer."""
    nn.Module.__init__(self)
    Saver.__init__(self)
    log.info(f" >> in_features= {in_features}")
    log.info(f" >> out_features= {out_features}")
    # The wrapped layer; forward presumably delegates to it.
    self.lin = nn.Linear(in_features, out_features)
class OffersFeeder(object):
    """Pulls offers from the downloader controller and hands them to the saver."""

    def __init__(self):
        self.downloader_ctrl = DownloaderController()
        self.saver = Saver()

    def feed(self):
        """Fetch the current offers and persist them."""
        fetched = self.downloader_ctrl.get_offers()
        self.saver.save_offers(fetched)
def __init__(self, optim, eval_fn, gamma, patience, net=None):
    """Patience-based LR scheduler with score tracking and optional weight revert.

    Passing `net` enables reverting the model when the score degrades
    (revert is active iff net is not None).
    """
    LRScheduler.__init__(self, optim, gamma)
    ScoreTracker.__init__(self, eval_fn)
    Saver.__init__(self)
    log.info(f" >> patience={patience}")
    log.info(f" >> revert={net is not None}")
    self.net = net
    self.patience = patience
    # Counts consecutive evaluations without improvement.
    self.bad_evals = 0
def __init__(self, num_layers, input_size, d_model, nhead, dim_feedforward, dropout, activation, num_embeddings=None, norm=None, proj=None):
    """Transformer encoder with an input stem, positional encoding, and optional projection.

    When `num_embeddings` is given, inputs are discrete tokens embedded directly
    to `d_model` (index 0 reserved for padding); otherwise continuous features of
    width `input_size` are linearly projected to `d_model`.
    `norm` is the final-layer norm for nn.TransformerEncoder; `proj` adds a
    Linear head after the encoder.
    """
    nn.Module.__init__(self)
    Saver.__init__(self)
    log.info(f" >> num_layers= {num_layers}")
    log.info(f" >> input_size= {input_size}")
    log.info(f" >> d_model= {d_model}")
    log.info(f" >> nhead= {nhead}")
    log.info(f" >> dim_feedforward= {dim_feedforward}")
    log.info(f" >> dropout= {dropout}")
    log.info(f" >> activation= {activation}")
    # (Removed a stale commented-out variant that embedded to input_size instead
    # of d_model — it contradicted the live code below and served no purpose.)
    if num_embeddings is not None:
        log.info(f" >> num input embeddings= {num_embeddings}")
        # +1 makes room for the padding index 0; tokens land directly in d_model space.
        self.emb = nn.Embedding(num_embeddings=num_embeddings + 1, embedding_dim=d_model, padding_idx=0)
    else:
        # Continuous features: linear stem projecting input_size -> d_model.
        self.emb = nn.Linear(in_features=input_size, out_features=d_model)
    self.encoder_layer = nn.TransformerEncoderLayer(d_model=d_model, nhead=nhead, dim_feedforward=dim_feedforward, dropout=dropout, activation=activation)
    self.transformer_encoder = nn.TransformerEncoder(encoder_layer=self.encoder_layer, num_layers=num_layers, norm=norm)
    self.pos_encoder = PositionalEncoding(d_model=d_model)
    if proj is not None:
        log.info(f" >> proj after transformer= {proj}")
        self.proj = Linear(self.output_size, proj)
def __init__(self, view1_cell, view1_input_size, view1_hidden_size, view1_num_layers, view1_bidirectional, view1_dropout, view1_proj, view2_cell, view2_num_embeddings, view2_input_size, view2_hidden_size, view2_num_layers, view2_bidirectional, view2_dropout, view2_proj, proj=None, loss_fn=None):
    """Two-view model: one recurrent encoder per view, optional shared projection and loss.

    view2 additionally embeds discrete tokens (`view2_num_embeddings`);
    the shared projection is sized from view1's output width.
    """
    nn.Module.__init__(self)
    Saver.__init__(self)
    self.net = nn.ModuleDict()
    # Per-view encoder configurations; view2 gains the num_embeddings entry.
    view_cfgs = {
        "view1": dict(cell=view1_cell, input_size=view1_input_size,
                      hidden_size=view1_hidden_size, num_layers=view1_num_layers,
                      bidirectional=view1_bidirectional, dropout=view1_dropout,
                      proj=view1_proj),
        "view2": dict(cell=view2_cell, num_embeddings=view2_num_embeddings,
                      input_size=view2_input_size, hidden_size=view2_hidden_size,
                      num_layers=view2_num_layers, bidirectional=view2_bidirectional,
                      dropout=view2_dropout, proj=view2_proj),
    }
    for view_name, cfg in view_cfgs.items():
        log.info(f"{view_name}:")
        self.net[view_name] = RNN_default(**cfg)
    if proj is not None:
        log.info(f"proj:")
        self.net["proj"] = Linear(self.net["view1"].output_size, proj)
    if loss_fn is not None:
        self.loss_fn = loss_fn
def download_offers():
    """Fetch the current offers and persist them (one-shot helper)."""
    controller = DownloaderController()
    persister = Saver()
    persister.save_offers(controller.get_offers())
def __init__(self):
    """Set up the downloader controller and the saver used for persistence."""
    # Construction order preserved: controller first, then saver.
    self.downloader_ctrl = DownloaderController()
    self.saver = Saver()