def forward(self, x):
    """Run the 3D backbone, apply the linear head, and BFP-quantize the logits.

    Online mode: the shared block exponent is derived from the data itself
    (the -1 sentinel) rather than from a pre-recorded exponent list.
    """
    feat = self.res3d(x)
    out = self.linear(feat)
    return BFPFullyConnet.transform_fc_online(out, self.exp_bit, self.mantisa_bit, -1)
def forward(self, x):
    """Offline-quantized forward pass.

    Block exponents come from ``self.opt_exp_act_list`` (pre-recorded during
    calibration): entry 0 quantizes the input activation, the last entry
    quantizes the classifier output.
    """
    out = BFPActivation.transform_activation_offline(
        x, self.exp_bit, self.mantisa_bit, self.opt_exp_act_list[0])
    out = self.features(out)
    # Flatten to (N, last_channel) for the classifier head.
    out = out.view(-1, self.last_channel)
    out = self.classifier(out)
    return BFPFullyConnet.transform_fc_offline(
        out, self.exp_bit, self.mantisa_bit, self.opt_exp_act_list[-1])
def forward(self, x):
    """Online-quantized forward pass.

    Block exponents are computed on the fly from the tensors themselves
    (the -1 sentinel), so no calibrated exponent list is consulted.
    """
    out = BFPActivation.transform_activation_online(
        x, self.exp_bit, self.mantisa_bit, -1)
    out = self.features(out)
    out = self.logits(out)
    return BFPFullyConnet.transform_fc_online(
        out, self.exp_bit, self.mantisa_bit, -1)
def forward(self, x):
    """3D backbone + linear head, with offline BFP quantization of the logits.

    The final (last) entry of the calibrated exponent list is used for the
    fully-connected output.
    """
    out = self.linear(self.res3d(x))
    return BFPFullyConnet.transform_fc_offline(
        out, self.exp_bit, self.mantisa_bit, self.opt_exp_act_list[-1])
def forward(self, x):
    """ResNet-style forward pass with online BFP quantization.

    The input and the first conv output are quantized with data-derived
    exponents (-1 sentinel); batch-norm is fused into conv1 (bn1 removed
    upstream), and the FC output is quantized online before returning.
    """
    out = BFPActivation.transform_activation_online(
        x, self.exp_bit, self.mantisa_bit, -1)
    out = self.conv1(out)  # bn1 is fused into the conv weights
    out = BFPActivation.transform_activation_online(
        out, self.exp_bit, self.mantisa_bit, -1)
    out = self.relu(out)
    out = self.maxpool(out)
    # Residual stages, in order; no quantization between stages here.
    for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
        out = stage(out)
    out = self.avgpool(out)
    out = out.view(out.size(0), -1)
    out = self.fc(out)
    return BFPFullyConnet.transform_fc_online(
        out, self.exp_bit, self.mantisa_bit, -1)
def forward(self, x):
    """Affine transform followed by offline BFP quantization of the output.

    The exponent set is selected by ``self.start_exp_ind`` from the
    calibrated list, so each layer instance can index its own entry.
    """
    out = F.linear(x, self.weight, self.bias)
    layer_exps = self.opt_exp_act_list[self.start_exp_ind]
    return BFPFullyConnet.transform_fc_offline(
        out, self.exp_bit, self.mantisa_bit, layer_exps)
def forward(self, x):
    """C3D forward pass with offline BFP quantization.

    Activations are quantized with the calibrated exponent sets in
    ``self.opt_exp_act_list`` (indices 0..8 for the conv stack, 9..11 for
    the FC stack); BN layers are fused/disabled (commented out upstream).

    Bug fix: the quantized fc8 output used to be assigned to ``x`` and
    discarded while the *raw* logits were returned; the quantized logits
    are now returned, consistent with the other offline forward passes.
    """
    def quant_act(t, idx):
        # Quantize a 3D activation with the idx-th recorded exponent set.
        return BFPActivation.transform_activation_offline(
            t, self.exp_bit, self.mantisa_bit,
            self.opt_exp_act_list[idx], is_3d=True)

    def quant_fc(t, idx):
        # Quantize an FC output with the idx-th recorded exponent set.
        return BFPFullyConnet.transform_fc_offline(
            t, self.exp_bit, self.mantisa_bit, self.opt_exp_act_list[idx])

    # (conv, optional pool) stages; exponent indices 1..8 follow in order.
    stages = (
        (self.conv1, self.pool1),
        (self.conv2, self.pool2),
        (self.conv3a, None),
        (self.conv3b, self.pool3),
        (self.conv4a, None),
        (self.conv4b, self.pool4),
        (self.conv5a, None),
        (self.conv5b, self.pool5),
    )

    x = quant_act(x, 0)
    for idx, (conv, pool) in enumerate(stages, start=1):
        x = conv(x)  # BN folded out: original bnN(convN(x)) calls are disabled
        x = self.relu(quant_act(x, idx))
        if pool is not None:
            x = pool(x)

    x = x.view(-1, 8192)
    x = self.fc6(x)
    x = self.dropout(self.relu(quant_fc(x, 9)))
    x = self.fc7(x)
    x = self.dropout(self.relu(quant_fc(x, 10)))
    logits = self.fc8(x)
    # Return the quantized logits (previously discarded).
    return quant_fc(logits, 11)
def forward(self, x):
    """C3D forward pass with online BFP quantization.

    Block exponents are derived from the data itself (-1 sentinel); BN
    layers are fused/disabled (commented out upstream).

    Bug fix: the final ``transform_fc_online`` call quantized the fc8
    *input* (and discarded the result) while returning the raw logits;
    the fc8 output is now quantized and returned, matching both the
    offline C3D variant and the other online forward passes.
    """
    def quant_act(t):
        # Online quantization of a 3D activation (exponent from data).
        return BFPActivation.transform_activation_online(
            t, self.exp_bit, self.mantisa_bit, -1, is_3d=True)

    def quant_fc(t):
        # Online quantization of an FC output (exponent from data).
        return BFPFullyConnet.transform_fc_online(
            t, self.exp_bit, self.mantisa_bit, -1)

    # (conv, optional pool) stages, applied in order.
    stages = (
        (self.conv1, self.pool1),
        (self.conv2, self.pool2),
        (self.conv3a, None),
        (self.conv3b, self.pool3),
        (self.conv4a, None),
        (self.conv4b, self.pool4),
        (self.conv5a, None),
        (self.conv5b, self.pool5),
    )

    x = quant_act(x)
    for conv, pool in stages:
        x = conv(x)  # BN folded out: original bnN(convN(x)) calls are disabled
        x = self.relu(quant_act(x))
        if pool is not None:
            x = pool(x)

    x = x.view(-1, 8192)
    x = self.dropout(self.relu(quant_fc(self.fc6(x))))
    x = self.dropout(self.relu(quant_fc(self.fc7(x))))
    logits = self.fc8(x)
    # Quantize and return the logits (previously quantized the fc8 input
    # and returned unquantized logits).
    return quant_fc(logits)