def get_logits(self, image):
    """Build the ResNet logits for *image* under the configured data format."""
    # Pre-activation ('preact') models use a different grouping function.
    group_fn = resnet_group
    if self.mode == 'preact':
        group_fn = preresnet_group
    layers_with_format = [Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm]
    with argscope(layers_with_format, data_format=self.data_format):
        return resnet_backbone(image, self.num_blocks, group_fn, self.block_func)
def get_logits(self, image):
    """Build logits with bottleneck blocks parameterized by this instance's
    group width, res2 bottleneck size, and activation."""
    act_name = self.activation_name

    def bottleneck(x, ch_out, stride):
        # Every block shares the same instance-level configuration.
        return resnet_bottleneck(x, ch_out, stride,
                                 group=self.group,
                                 res2_bottleneck=self.res2_bottleneck,
                                 activation_name=act_name)

    return resnet_backbone(image, self.num_blocks, resnet_group,
                           bottleneck, activation_name=act_name)
def get_logits(image):
    """Build NHWC ResNet logits from module-level configuration globals
    (``num_blocks``, ``mode``, ``block_func``, ``CLASS_NUM``)."""
    if mode == 'preact':
        grouping = preresnet_group
    else:
        grouping = resnet_group
    with argscope([Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm],
                  data_format="NHWC"):
        # CLASS_NUM and ASPP=False are forwarded to the backbone head.
        return resnet_backbone(image, num_blocks, grouping, block_func,
                               CLASS_NUM, ASPP=False)
def build_graph(self, image, label):
    """Build the training graph: backbone forward pass, softmax output,
    cross-entropy loss, top-1/top-5 error tensors, and L2-regularized cost.

    Returns the total cost tensor (named 'cost').
    """
    # Layer defaults (data format) apply to all listed layer types while
    # the backbone is constructed.
    with argscope([
        mpusim_conv2d, MaxPooling, AvgPooling, GlobalAvgPooling, BatchNorm
    ], data_format=self.data_format):
        if self.mode == 'googlenet':
            l = googlenet_backbone(
                image, self.activations_datatype_size_byte,
                self.weights_datatype_size_byte,
                self.results_datatype_size_byte,
                self.systolic_array_height,
                self.systolic_array_width,
                self.accumulator_array_height,
                self.mpusim_logdir)
        elif self.mode == 'densenet':
            l = densenet_backbone(
                image, self.activations_datatype_size_byte,
                self.weights_datatype_size_byte,
                self.results_datatype_size_byte,
                self.systolic_array_height,
                self.systolic_array_width,
                self.accumulator_array_height,
                self.mpusim_logdir)
        else:
            # Any other mode is treated as a ResNet variant.
            group_func = resnet_group
            l = resnet_backbone(
                image, self.resnet_depth, self.num_blocks, group_func,
                self.block_func,
                self.activations_datatype_size_byte,
                self.weights_datatype_size_byte,
                self.results_datatype_size_byte,
                self.systolic_array_height,
                self.systolic_array_width,
                self.accumulator_array_height,
                self.mpusim_logdir)
    # Inference output tensor; created only for its graph name.
    tf.nn.softmax(l, name='output')
    loss3 = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=l, labels=label)
    loss3 = tf.reduce_mean(loss3, name='loss3')
    # NOTE(review): adding the scalar constants 0.3 + 0.3 shifts the cost by
    # a fixed 0.6 and looks like a leftover from GoogLeNet's weighted
    # auxiliary losses (0.3*loss1 + 0.3*loss2) — confirm intent.
    cost = tf.add_n([loss3, 0.3, 0.3], name='weighted_cost')

    def prediction_incorrect(logits, label, topk, name):
        # 1.0 where the true label is NOT in the top-k predictions.
        return tf.cast(tf.logical_not(
            tf.nn.in_top_k(logits, label, topk)), tf.float32, name=name)

    # NOTE(review): the first binding of `wrong` is immediately overwritten;
    # both error tensors still exist in the graph under their names
    # ('wrong-top1' / 'wrong-top5'), so only the Python binding is unused.
    wrong = prediction_incorrect(l, label, 1, name='wrong-top1')
    wrong = prediction_incorrect(l, label, 5, name='wrong-top5')
    # L2 weight decay over all variables matching '.*/W', scaled by 0.5.
    wd_cost = tf.multiply(0.5, regularize_cost('.*/W', tf.nn.l2_loss),
                          name='l2_regularize_loss')
    total_cost = tf.add_n([cost, wd_cost], name='cost')
    return total_cost
def get_logits(self, image):
    """ResNet logits with a feature-denoising block appended to each group.

    Feature Denoising, Sec 6: four denoising blocks are added to a ResNet,
    one after the last residual block of res2, res3, res4, and res5.
    """

    def denoised_group(name, *args):
        # Run the standard group, then denoise its output.
        out = resnet_group(name, *args)
        return denoising(name + '_denoise', out, embed=True, softmax=True)

    return resnet_backbone(image, self.num_blocks, denoised_group,
                           resnet_bottleneck)
def get_logits(self, image):
    """ResNeXt101-32x8 logits with a non-local denoising block per block.

    Feature Denoising, Sec 6.2: the winning entry used a ResNeXt101-32x8
    backbone with non-local denoising blocks added to all residual blocks.
    """

    def denoised_bottleneck(x, ch_out, stride):
        # Grouped bottleneck followed by a non-embedded, non-softmax
        # denoising block.
        x = resnet_bottleneck(x, ch_out, stride, group=32, res2_bottleneck=8)
        return denoising('non_local', x, embed=False, softmax=False)

    return resnet_backbone(image, self.num_blocks, resnet_group,
                           denoised_bottleneck)
def get_logits(self, image):
    """Build ResNet-50/101 logits, optionally standardizing every 4-D conv
    kernel (variables named '*/W:0') when ``self.use_WS`` is set."""

    def standardize(w):
        # Only rewrite conv kernels, and only when WS is enabled.
        is_conv_kernel = w.name.endswith('/W:0') and w.shape.ndims == 4
        if not (self.use_WS and is_conv_kernel):
            return w
        mean, var = tf.nn.moments(w, [0, 1, 2], keep_dims=True)
        return (w - mean) / (tf.sqrt(var) + 1e-5)

    depth_to_blocks = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3]}
    with argscope([Conv2D, MaxPooling, GlobalAvgPooling],
                  data_format=self.data_format), \
            varreplace.remap_variables(standardize):
        return resnet_backbone(image, depth_to_blocks[self.depth],
                               resnet_group, resnet_bottleneck)
def get_logits(self, image):
    """Dispatch to the quantized backbone selected by ``self.mode``."""
    simple_backbones = {
        'vgg': vgg_backbone,
        'alexnet': alexnet_backbone,
        'googlenet': googlenet_backbone,
        'densenet': densenet_backbone,
    }
    with argscope([Conv2D, MaxPooling, AvgPooling, GlobalAvgPooling,
                   BatchNorm], data_format=self.data_format), \
            argscope([QuantizedActiv], nbit=self.qa):
        backbone = simple_backbones.get(self.mode)
        if backbone is not None:
            return backbone(image, self.qw)
        # Remaining modes are ResNet variants distinguished by group function.
        if self.mode == 'preact':
            group_func = preresnet_group
        elif self.mode == 'preact_typeA':
            group_func = preresnet_group_typeA
        else:
            group_func = resnet_group
        return resnet_backbone(image, self.num_blocks, group_func,
                               self.block_func, self.qw)
def get_logits(self, image):
    """Build logits for the quantized network chosen by ``self.mode``."""
    resnet_groups = {
        'preact': preresnet_group,
        'preact_typeA': preresnet_group_typeA,
    }
    with argscope([Conv2D, MaxPooling, AvgPooling, GlobalAvgPooling,
                   BatchNorm], data_format=self.data_format), \
            argscope([QuantizedActiv], nbit=self.qa):
        if self.mode == 'vgg':
            return vgg_backbone(image, self.qw)
        if self.mode == 'alexnet':
            return alexnet_backbone(image, self.qw)
        if self.mode == 'googlenet':
            return googlenet_backbone(image, self.qw)
        if self.mode == 'densenet':
            return densenet_backbone(image, self.qw)
        # Any other mode falls through to a ResNet variant; unknown modes
        # default to the plain (post-activation) group function.
        group_func = resnet_groups.get(self.mode, resnet_group)
        return resnet_backbone(image, self.num_blocks, group_func,
                               self.block_func, self.qw)
def get_logits(self, image):
    """NCHW ResNet logits under the configured normalization type, optionally
    inside a weight-standardization context."""
    with argscope([Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm],
                  data_format='NCHW'), \
            argscope(Norm, type=self.norm), \
            weight_standardization_context(enable=self.use_ws):
        backbone_args = (image, self.num_blocks, resnet_group,
                         resnet_bottleneck)
        return resnet_backbone(*backbone_args)
def get_logits(self, image):
    """ResNet-50/101 logits; ``self.depth`` selects the per-stage block
    counts (raises KeyError for unsupported depths)."""
    blocks_by_depth = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3]}
    stage_blocks = blocks_by_depth[self.depth]
    with argscope([Conv2D, BatchNorm, MaxPooling, GlobalAvgPooling],
                  data_format=self.data_format):
        return resnet_backbone(image, stage_blocks, resnet_group,
                               resnet_bottleneck)
def get_logits(self, image):
    """Construct backbone logits, choosing the pre-activation grouping
    function when ``self.mode == 'preact'``."""
    use_preact = self.mode == 'preact'
    with argscope([Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm],
                  data_format=self.data_format):
        return resnet_backbone(
            image,
            self.num_blocks,
            preresnet_group if use_preact else resnet_group,
            self.block_func,
        )
def get_logits(self, image):
    """Plain ResNet logits using the default group and bottleneck blocks."""
    backbone_parts = (resnet_group, resnet_bottleneck)
    return resnet_backbone(image, self.num_blocks, *backbone_parts)
def get_logits(self, image):
    """ResNet logits built in NCHW layout."""
    layers = [Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm]
    with argscope(layers, data_format='NCHW'):
        return resnet_backbone(image, self.num_blocks,
                               resnet_group, resnet_bottleneck)