def forward(self, input):
    """Encode a batch of sequences.

    Parameters
    ----------
    input : tensor, shape (b, input_size, l)

    Returns
    -------
    tensor, shape (b, hidden, l) — LSTM output transposed back, or the
    wrapped net's output directly.
    """
    check_size(input, (-1, self.input_size, -1))  # input b x v x l
    if self.which != 'lstm':
        return self.net(input)
    # The LSTM consumes (b, l, v); transpose in, then transpose back out.
    out, _ = self.net(input.transpose(1, 2))
    return out.transpose(1, 2)
def forward(self, feats):
    """Pool features and project them into per-layer LSTM init states.

    Parameters
    ----------
    feats : tensor, shape (b, f, l)

    Returns
    -------
    list of two tuples (hidden-half, cell-half), each split into
    ``self.lstm_layers`` chunks along dim 1.
    """
    check_size(feats, (-1, -1, -1))  # feats b x f x l
    pooled = self.pool(feats).squeeze(1)
    halves = self.net(pooled).chunk(2, dim=1)
    return [half.chunk(self.lstm_layers, dim=1) for half in halves]
def forward(self, hidden, feats):
    """Attend over feature columns conditioned on the decoder hidden state.

    Parameters
    ----------
    hidden : tensor, shape (b, hidden_size)
    feats : tensor, shape (b, feats_size, l)

    Returns
    -------
    (pooled context, attention weights of shape (b, l))
    """
    check_size(hidden, (-1, self.hidden_size))            # hidden b x h
    check_size(feats, (hidden.size()[0], self.feats_size, -1))  # feats b x f x l
    query = self.net(hidden.unsqueeze(1))                 # b x 1 x f
    query = query.squeeze(1).unsqueeze(2)                 # b x f x 1
    scores = (query * feats).sum(dim=1)                   # b x l
    weights = self.softmax(scores)                        # b x l
    return self.pool(feats * weights.unsqueeze(1)), weights
def experiment(self, data):
    """Run an experiment on the provided data.

    Parameters
    ----------
    data : `List(np.array, np.array)`
        Two-element observation pair; validated by ``check_size``.

    Returns
    -------
    The decision computed from the posterior.
    """
    check_size(data, dim=2)
    posterior = self.find_posterior(data)
    decision = self.decision(posterior)
    # NOTE(review): `plt.plot` is a function object, so this condition is
    # ALWAYS truthy and `plt.show()` runs unconditionally. Was a flag such
    # as `self.plot` intended here? -- confirm with the author.
    if plt.plot:
        plt.show()
    return decision
def forward(self, input, ctx, hidden, cell):
    """Advance every stacked layer one step, feeding each layer's hidden
    output to the next.

    Parameters
    ----------
    input : tensor, shape (b, input_size)
    ctx : tensor, shape (b, feats_size, ctx_size) — flattened before use
    hidden, cell : per-layer states, each (b, hidden_size)

    Returns
    -------
    zip of (new hiddens, new cells) across layers.
    """
    check_size(input, (-1, self.input_size))  # input b x v
    batch = input.size()[0]
    check_size(ctx, (batch, self.feats_size, self.ctx_size))  # ctx b x f x c
    check_size(hidden, (batch, self.hidden_size))  # hidden tuple of b x h
    check_size(cell, (batch, self.hidden_size))    # cell tuple of b x h
    ctx = ctx.reshape((batch, -1))
    states = []
    layer_input = input
    for depth, layer in enumerate(self.layers):
        state = layer(layer_input, (hidden[depth], cell[depth]), ctx)
        states.append(state)
        layer_input = state[0]  # next layer consumes this layer's hidden
    return zip(*states)
def forward(self, input, feats, hidden, cell):
    """One decoder step: attend, run the LSTM stack, project the output.

    Parameters
    ----------
    input : tensor, shape (b, input_size)
    feats : tensor, shape (b, feats_size, l)
    hidden, cell : per-layer LSTM states, each (b, hidden_size)

    Returns
    -------
    (output logits, attention weights, new hidden, new cell)
    """
    check_size(input, (-1, self.input_size))  # input b x v
    batch = input.size()[0]
    check_size(feats, (batch, self.feats_size, -1))        # feats b x f x l
    check_size((hidden, cell), (batch, self.hidden_size))  # hidden b x h
    # Attention is conditioned on the topmost layer's hidden state.
    ctx, attn = self.attention(hidden[-1], feats)
    hidden, cell = self.lstm(input, ctx, hidden, cell)
    return self.output(hidden[-1]), attn, hidden, cell
def write_size(message):
    """Adm or shop writing a size of a unique product.

    Handles one chat message: "0" aborts the request; otherwise the text is
    validated by ``utils.check_size`` and, when valid, the dialog state
    advances to ``P_M``.
    """
    logging.info(message.text)
    if message.text == "0":
        bot.send_message(message.chat.id, text="Вы прекратили заполнять заявку на произвольный размер.")
        worker_db.set_state(message.chat.id, config.States.START.value)
        return
    try:
        index = message.text.index('*')
    except ValueError:
        # '*' not present in the text. Was a bare `except:`, which also
        # swallowed unrelated errors (KeyboardInterrupt, AttributeError, ...).
        index = None
    logging.info(index)
    check, txt = utils.check_size(message.text, index)
    if check == 0:
        # Validation failed: report and stay in the current state.
        bot.send_message(message.chat.id, text=txt)
        return
    bot.send_message(message.chat.id, text=txt)
    worker_db.set_state(message.chat.id, config.States.P_M.value)
def forward(self, input, hidden, ctx):
    """Single step of an LSTM cell conditioned on an extra context vector.

    Parameters
    ----------
    input : tensor, shape (b, input_size)
    hidden : tuple (hx, cx), each (b, hidden_size)
    ctx : tensor, shape (b, ctx_size)

    Returns
    -------
    (hy, cy) — the new hidden and cell states.
    """
    check_size(input, (-1, self.input_size))                      # input b x v
    check_size(hidden, (input.size()[0], self.hidden_size))       # hidden b x h
    check_size(ctx, (input.size()[0], self.ctx_size))             # ctx b x (f * c)
    hx, cx = hidden  # n_b x hidden_dim
    # All four gate pre-activations come from one fused linear sum.
    preact = (self.input_weights(input)
              + self.hidden_weights(hx)
              + self.ctx_weights(ctx))
    i_gate, f_gate, g_gate, o_gate = preact.chunk(4, 1)
    i_gate = self.sigmoid(i_gate)
    f_gate = self.sigmoid(f_gate)
    g_gate = self.tanh(g_gate)
    o_gate = self.sigmoid(o_gate)
    cy = f_gate * cx + i_gate * g_gate
    hy = o_gate * self.tanh(cy)
    return hy, cy
def height(self, value):
    """Set the height after validating it with ``utils.check_size``.

    Raises
    ------
    SizeException
        If the value is out of bounds or not correct.
    """
    if utils.check_size((value,)):
        self._height = value
        return
    raise SizeException("The height is out of bounds or not correct")
def width(self, value):
    """Set the width after validating it with ``utils.check_size``.

    Raises
    ------
    SizeException
        If the value is out of bounds or not correct.
    """
    if utils.check_size((value,)):
        self._width = value
        return
    raise SizeException("The width is out of bounds or not correct")
def forward(self, input):
    """Apply the wrapped net over an added channel dimension.

    Parameters
    ----------
    input : tensor, shape (b, input_size, l)
    """
    check_size(input, (-1, self.input_size, -1))  # input b x v x l
    expanded = input.unsqueeze(dim=1)  # b x 1 x v x l
    return self.net(expanded).squeeze(dim=2)
def forward(self, input):
    """Apply the wrapped net to a (b, hidden_size) batch."""
    check_size(input, (-1, self.hidden_size))  # input b x h
    out = self.net(input)
    return out
def forward(self, input):
    """Run both branches on the input and concatenate along the channel dim.

    Parameters
    ----------
    input : tensor, shape (b, input_size, l)
    """
    check_size(input, (-1, self.input_size, -1))  # input b x v x l
    branches = (self.left(input), self.right(input))
    return torch.cat(branches, dim=1)