def forward(self, inputs):
    """Run the conv generator + pooled classifier head.

    ``inputs`` is flattened to (-1, 1, 10, 8) before the transposed-conv
    stack; the refined map is optionally binarized, downsampled with
    maxpool, globally averaged and scored by two linear layers ending in
    a sigmoid.
    """
    if self.demo_mode:
        # Demo inputs arrive subsampled; expand back to the 80 features
        # the network was trained on.
        inputs = reshape_to_indeces(inputs, ((1, 4), (1, 4)), 80).contiguous()
    h = inputs.reshape((-1, 1, 10, 8))

    # Upsampling (transposed-conv) stage.
    for stage in (self.ct1, self.ct3, self.ct5, self.ct6, self.ctr):
        h = F.relu(stage(h))

    # Refinement convolutions.
    for stage in (self.c1, self.cu, self.ck, self.cj):
        h = F.relu(stage(h))

    if self.round_at is not None:
        # Hard-threshold the map: at/above the threshold -> 1, below -> 0.
        h = h.masked_fill(h >= self.round_at, 1.)
        h = h.masked_fill(h < self.round_at, 0.)

    # Downsampling classifier trunk.
    for stage in (self.cc2, self.cc3, self.cc4, self.cc5):
        h = F.relu(self.maxpool(stage(h)))
    h = F.relu(self.cc6(h))

    # Global average over spatial positions, then the dense head.
    h = h.view((h.shape[0], 2 * 1024, -1)).contiguous()
    h = h.mean(-1).contiguous()
    h = self.dropout(h)
    h = F.relu(self.lin1(h))
    h = self.dropout(h)
    return torch.sigmoid(self.lin3(h))
# Example #2 (score: 0)
 def forward(self, _input):
     """Score 20-feature vectors with a 3-layer MLP (sigmoid output)."""
     if self.demo_mode:
         # Demo data is a subsampled view; pick indices, flatten to 20.
         _input = reshape_to_indeces(_input, ((1, 8), (1, 8)), 20)
         _input = _input.reshape(-1, 20)
     hidden = F.leaky_relu(self.fc(_input))
     hidden = F.relu(self.fc2(hidden))
     return torch.sigmoid(self.fc3(hidden))
# Example #3 (score: 0)
 def forward(self, _input):
     """Score 80-feature vectors with a 3-layer MLP (sigmoid output).

     In demo mode the full data can be used here too (saving online
     storage) by subsampling down to the 80 expected features first.
     """
     if self.demo_mode:
         _input = reshape_to_indeces(_input, ((1, 4), (1, 4)), 80)
         _input = _input.reshape(-1, 80)
     hidden = F.leaky_relu(self.fc(_input))
     hidden = F.relu(self.fc2(hidden))
     return torch.sigmoid(self.fc3(hidden))
    def forward(self, inputs):
        """Decode a (5, 4) feature grid through the transposed-conv stack.

        Returns the sigmoid detail map with the singleton channel axis
        squeezed out.
        """
        if self.demo_mode:
            inputs = reshape_to_indeces(inputs, ((1, 8), (1, 8)),
                                        20).contiguous()
        out = inputs.reshape((-1, 1, 5, 4))
        # Progressive upsampling.
        for stage in (self.ct1, self.ct2, self.ct3, self.ct4):
            out = F.relu(stage(out))
        out = F.relu(self.details(out))
        out = torch.sigmoid(self.details2(out))
        return torch.squeeze(out, dim=1)
    def forward(self, inputs):
        """Generate a (10, 8) sigmoid map, optionally binarized.

        When ``self.round_at`` is set, values at/above the threshold
        become 1 and the rest become 0.
        """
        if self.demo_mode:
            inputs = reshape_to_indeces(inputs, ((1, 4), (1, 4)),
                                        80).contiguous()
        feat = inputs.reshape((-1, 1, 10, 8)).contiguous()
        # Transposed-conv upsampling stage (kept contiguous, as the
        # original implementation did after every step).
        for stage in (self.ct1, self.ct3, self.ct5, self.ct6, self.ctr):
            feat = F.relu(stage(feat)).contiguous()
        # Plain-conv refinement; sigmoid on the final layer.
        for stage in (self.c1, self.cu, self.ck):
            feat = F.relu(stage(feat))
        feat = torch.sigmoid(self.cj(feat))
        if self.round_at is not None:
            # Hard-threshold the sigmoid output into {0, 1}.
            feat = feat.masked_fill(feat >= self.round_at, 1.)
            feat = feat.masked_fill(feat < self.round_at, 0.)

        return torch.squeeze(feat, dim=1)
    def forward(self, inputs):
        """Generate a map, fake 3 channels, and score it with the transfer model.

        ``inputs`` is flattened to (-1, 1, 10, 8), run through the
        transposed-conv and refinement stacks, repeated to 3 channels,
        upsampled and passed to ``self.transfer_model``; the model output
        is squashed through a sigmoid.
        """
        if self.demo_mode:
            inputs = reshape_to_indeces(inputs, ((1, 4), (1, 4)),
                                        80).contiguous()
        inputs = inputs.reshape((-1, 1, 10, 8))
        x = F.relu(self.ct1(inputs))
        x = F.relu(self.ct3(x))
        x = F.relu(self.ct5(x))
        x = F.relu(self.ct6(x))
        x = F.relu(self.ctr(x))

        x = F.relu(self.c1(x))
        x = F.relu(self.cu(x))
        x = F.relu(self.ck(x))
        x = F.relu(self.cj(x))
        # Repeat the single channel 3x so the (presumably pretrained)
        # transfer model sees an RGB-shaped input.
        x = x.repeat(1, 3, 1, 1)
        x = self.upsample(x)

        x = self.transfer_model(x)
        # torch.sigmoid: F.sigmoid is a deprecated alias, and every other
        # forward() in this file already uses torch.sigmoid.
        x = torch.sigmoid(x)
        return x
    def forward(self, inputs):
        """Decode a (5, 4) grid, optionally binarize it, then classify it.

        The decoded map is run through a conv + maxpool pyramid, globally
        averaged over spatial positions, and scored by two linear layers
        ending in a sigmoid.
        """
        if self.demo_mode:
            inputs = reshape_to_indeces(inputs, ((1, 8), (1, 8)),
                                        20).contiguous()
        grid = inputs.reshape((-1, 1, 5, 4))

        # Decoder: transposed convolutions plus the detail layer.
        for stage in (self.ct1, self.ct2, self.ct3, self.ct4, self.details):
            grid = F.relu(stage(grid))

        if self.round_at is not None:
            # Hard-threshold the decoded map into {0, 1}.
            grid = grid.masked_fill(grid >= self.round_at, 1.)
            grid = grid.masked_fill(grid < self.round_at, 0.)

        # Classifier pyramid (decoded map was noted as [1, 8, 127, 111]
        # in the original — shape not verifiable from here).
        for stage in (self.c2, self.c3, self.c4, self.c5):
            grid = self.maxpool(F.relu(stage(grid)))
        grid = F.relu(self.c6(grid))
        grid = F.relu(self.c7(grid))

        # Global average over spatial positions, then the dense head.
        grid = grid.view((grid.shape[0], 1024, -1)).contiguous()
        grid = grid.mean(-1).contiguous()
        grid = F.relu(self.lin1(grid))
        grid = self.dropout(grid)
        return torch.sigmoid(self.lin2(grid))