Example 1
 def forward(x, t):
     y = conv1(x)
     y = autograd.relu(y)
     y = conv2(y)
     y = autograd.relu(y)
     y = pooling(y)
     y = autograd.flatten(y)
     y = linear(y)
     loss = autograd.softmax_cross_entropy(y, t)
     return loss, y
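
These snippets exercise SINGA's autograd module; the free names (conv1, conv2, pooling, linear) are layer objects built once outside forward. Below is a minimal setup-and-training sketch for Example 1, assuming a SINGA 2.x-era API (autograd.Conv2d, autograd.MaxPool2d, autograd.Linear, opt.SGD) and illustrative MNIST-like shapes; none of the hyperparameters come from the original sources.

 # Hypothetical setup for Example 1's forward; all shapes are assumptions.
 from singa import autograd, device, opt, tensor

 dev = device.get_default_device()
 conv1 = autograd.Conv2d(1, 32, 3, padding=1)    # 1 input channel -> 32 maps
 conv2 = autograd.Conv2d(32, 32, 3, padding=1)
 pooling = autograd.MaxPool2d(2, 2)              # 28x28 -> 14x14
 linear = autograd.Linear(32 * 14 * 14, 10)      # flattened maps -> 10 logits

 autograd.training = True                        # record ops for backward
 x = tensor.Tensor((8, 1, 28, 28), dev)          # dummy input batch
 x.gaussian(0.0, 1.0)
 t = tensor.Tensor((8, 10), dev)                 # dummy one-hot targets
 t.set_value(0.0)

 sgd = opt.SGD(lr=0.05)
 loss, y = forward(x, t)                         # the forward defined above
 for p, gp in autograd.backward(loss):           # (param, gradient) pairs
     sgd.update(p, gp)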
Example 2
 def forward(x, t):
     y = conv1(x)
     y = autograd.relu(y)
     y1 = conv21(y)
     y2 = conv22(y)
     y = autograd.cat((y1, y2), 1)
     y = autograd.relu(y)
     y = autograd.flatten(y)
     y = linear(y)
     loss = autograd.softmax_cross_entropy(y, t)
     return loss, y
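
Example 2 forks the graph into two convolution branches and rejoins them with autograd.cat along axis 1, the channel dimension, so both branches must agree on the batch and spatial sizes. A shape sketch with assumed channel counts:

 # Channel counts here are illustrative assumptions, not from the source.
 conv21 = autograd.Conv2d(32, 16, 3, padding=1)  # (N, 32, H, W) -> (N, 16, H, W)
 conv22 = autograd.Conv2d(32, 16, 3, padding=1)  # (N, 32, H, W) -> (N, 16, H, W)
 # autograd.cat((y1, y2), 1) stacks along axis 1: the result is (N, 32, H, W).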
Example 3
 def forward(x, t):
     y = conv1(x)
     y = autograd.relu(y)
     y = conv2(y)
     y = autograd.relu(y)
     y = autograd.max_pool_2d(y)
     y = autograd.flatten(y)
     y = linear(y)
     y = autograd.soft_max(y)
     loss = autograd.cross_entropy(y, t)
     return loss, y
Example 4
 def forward(self, x):
     y = self.conv1(x)
     y = autograd.relu(y)
     y = self.pooling1(y)
     y = self.conv2(y)
     y = autograd.relu(y)
     y = self.pooling2(y)
     y = autograd.flatten(y)
     y = self.linear1(y)
     y = autograd.relu(y)
     y = self.linear2(y)
     return y
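
Example 4 above (like Example 5 below) wraps the same pattern in a class, so the layers are created once in the constructor and the forward pass only wires them together. A skeleton of the constructor that Example 4's method implies; every size below is an assumption:

 class CNN:
     # Hypothetical constructor behind Example 4's forward; sizes assumed
     # (28x28 input, two 5x5 convs without padding, two 2x2 pools -> 4x4x50).
     def __init__(self):
         self.conv1 = autograd.Conv2d(1, 20, 5)
         self.pooling1 = autograd.MaxPool2d(2, 2)
         self.conv2 = autograd.Conv2d(20, 50, 5)
         self.pooling2 = autograd.MaxPool2d(2, 2)
         self.linear1 = autograd.Linear(4 * 4 * 50, 500)
         self.linear2 = autograd.Linear(500, 10)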
Example 5
    def __call__(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = autograd.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.avgpool(x)
        x = autograd.flatten(x)
        x = self.fc(x)

        return x
Example 6
 def forward(x, t):
     y = conv1(x)
     y = autograd.relu(y)
     y = bn1(y)
     y = pooling1(y)
     y1 = conv21(y)
     y2 = conv22(y)
     y = autograd.cat((y1, y2), 1)
     y = bn2(y)
     y = autograd.relu(y)
     y = pooling2(y)
     y = autograd.flatten(y)
     y = linear(y)
     loss = autograd.softmax_cross_entropy(y, t)
     return loss, y
Example 7
    def forward(x, t):
        y = conv1(x)
        y = autograd.tanh(y)
        y1 = conv21(y)
        y2 = conv22(y)
        y = autograd.cat((y1, y2), 1)
        y = autograd.sigmoid(y)
        y = bn(y)
        y = autograd.relu(y)
        y = autograd.mul(y, y)
        y = pooling1(y)
        y = autograd.sigmoid(y)

        y = pooling2(y)

        print(tensor.to_numpy(y).shape)
        y = autograd.flatten(y)
        y = linear(y)
        print(tensor.to_numpy(y).shape)
        loss = autograd.softmax_cross_entropy(y, t)
        return loss, y
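
The print calls in Example 7 use tensor.to_numpy, which copies a singa tensor back to host memory as a numpy array (handy for shape checks); tensor.from_numpy goes the other way. A short round trip:

 import numpy as np
 from singa import tensor

 t = tensor.from_numpy(np.zeros((8, 3, 32, 32), dtype=np.float32))  # numpy -> singa
 print(tensor.to_numpy(t).shape)                                    # singa -> numpy: (8, 3, 32, 32)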
Example 8
    def run(model, modeldic, layer, inputs):
        '''Replay the nodes of an ONNX graph with singa autograd operations.

        model: a loaded ONNX model
        modeldic: dict mapping tensor names to singa tensors
        layer: dict mapping output names to pre-built singa layers
        inputs: list of input tensors for the singa model
        '''
        supportLayer = ['Linear', 'Conv', 'MaxPool', 'AveragePool', 'BatchNormalization']
        oper = modeldic

        # bind the graph inputs to the provided tensors
        for counter, i in enumerate(model.graph.input):
            oper[i.name] = inputs[counter]
        # translate each node into the corresponding autograd call
        for i in model.graph.node:
            if i.op_type == 'Relu':
                oper[str(i.output[0])] = autograd.relu(oper[str(i.input[0])])
            elif i.op_type == 'Softmax':
                oper[str(i.output[0])] = autograd.softmax(oper[str(i.input[0])])
            elif i.op_type == 'Add':
                oper[str(i.output[0])] = autograd.add(oper[str(i.input[0])], oper[str(i.input[1])])
            elif i.op_type == 'MatMul':
                oper[str(i.output[0])] = autograd.matmul(oper[str(i.input[0])], oper[str(i.input[1])])
            elif i.op_type == 'Flatten':
                oper[str(i.output[0])] = autograd.flatten(oper[str(i.input[0])])
            elif i.op_type == 'Concat':
                # the first node attribute carries the concatenation axis
                oper[str(i.output[0])] = autograd.cat((oper[str(i.input[0])], oper[str(i.input[1])]), int(i.attribute[0].i))
            elif i.op_type == 'Tanh':
                oper[str(i.output[0])] = autograd.tanh(oper[str(i.input[0])])
            elif i.op_type == 'Sigmoid':
                oper[str(i.output[0])] = autograd.sigmoid(oper[str(i.input[0])])
            elif i.op_type == 'Mul':
                oper[str(i.output[0])] = autograd.mul(oper[str(i.input[0])], oper[str(i.input[1])])
            elif i.op_type in supportLayer:
                # stateful ops are dispatched to pre-built layers keyed by output name
                oper[str(i.output[0])] = layer[str(i.output[0])](oper[str(i.input[0])])
        # collect the graph outputs
        out = []
        for counter, i in enumerate(model.graph.output):
            out.append(oper[i.name])
        return out
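
Example 8's run replays an ONNX graph node by node: stateless ops map directly to autograd calls, while the stateful op types listed in supportLayer are dispatched to pre-built singa layers keyed by output name. A hedged usage sketch; building modeldic and layer is assumed to happen elsewhere in the original module and is stubbed here:

 import onnx

 model = onnx.load('model.onnx')   # standard ONNX loader
 modeldic = {}   # tensor name -> singa tensor (weights first, activations added during run)
 layer = {}      # output name -> pre-built singa layer, for ops in supportLayer
 outputs = run(model, modeldic, layer, [x])   # x: a singa input tensor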
Example 9
 def logits(self, features):
     x = autograd.relu(features)
     x = self.globalpooling(x)
     x = autograd.flatten(x)
     x = self.fc(x)
     return x