def __call__(self, input_var=None, use_from=None, use_up_to='classifier',
             training=False, force_global_pooling=False,
             check_global_pooling=True, returns_net=False, verbose=0):
    """Construct the NIN computation graph up to ``use_up_to``.

    Args:
        input_var: Input variable; if ``None`` a default one is created by
            ``self.get_input_var``.
        use_from: Reserved for forward compatibility; must be ``None``.
        use_up_to (str): Name of the endpoint layer of the returned graph.
        training (bool): If ``True``, batch-norm uses batch statistics and
            dropout is kept; otherwise dropout is removed and parameters
            are fixed.
        force_global_pooling (bool): Passed to
            ``configure_global_average_pooling``.
        check_global_pooling (bool): Passed to
            ``configure_global_average_pooling``.
        returns_net (bool): If ``True``, return the whole network object
            instead of the single output variable.
        verbose (int): Verbosity of the network-pass callback.

    Returns:
        The network object when ``returns_net`` is ``True``; otherwise the
        first output variable of the constructed network.
    """
    # Consistency fix: reject `use_from` like the other model wrappers do,
    # instead of silently ignoring it.
    assert use_from is None, \
        'This should not be set because it is for forward compatibility.'
    input_var = self.get_input_var(input_var)
    callback = NnpNetworkPass(verbose)
    # Strip the training-time augmentation node and rewire its input.
    callback.remove_and_rewire('ImageAugmentationX')
    callback.set_variable('TrainingInput', input_var)
    self.configure_global_average_pooling(
        callback, force_global_pooling, check_global_pooling,
        'NIN/AveragePooling', by_type=False)
    callback.set_batch_normalization_batch_stat_all(training)
    self.use_up_to(use_up_to, callback)
    if not training:
        # Inference graph: drop dropout and freeze parameters.
        callback.remove_and_rewire('NIN/Dropout')
        callback.fix_parameters()
    batch_size = input_var.shape[0]
    net = self.nnp.get_network(
        'Training', batch_size=batch_size, callback=callback)
    if returns_net:
        return net
    return list(net.outputs.values())[0]
def __call__(self, input_var=None, use_from=None, use_up_to='classifier',
             training=False, force_global_pooling=False,
             check_global_pooling=True, returns_net=False, verbose=0):
    """Build the ResNet graph up to ``use_up_to`` and return its output.

    ``use_from`` is reserved for forward compatibility and must stay
    ``None``. When ``training`` is ``False``, parameters are fixed so the
    graph is inference-only. Returns the network object itself when
    ``returns_net`` is ``True``, otherwise the single output variable.
    """
    assert use_from is None, 'This should not be set because it is for forward compatibility.'
    input_var = self.get_input_var(input_var)
    cb = NnpNetworkPass(verbose)
    cb.remove_and_rewire('ImageAugmentationX')
    cb.set_variable('InputX', input_var)
    self.configure_global_average_pooling(
        cb, force_global_pooling, check_global_pooling, 'AveragePooling')
    cb.set_batch_normalization_batch_stat_all(training)
    # The 18-layer variant addresses its endpoint by a different index
    # than the deeper variants.
    self.use_up_to(use_up_to, cb,
                   index=(0 if self.num_layers == 18 else 1))
    if not training:
        cb.fix_parameters()
    net = self.nnp.get_network(
        'Training', batch_size=input_var.shape[0], callback=cb)
    return net if returns_net else list(net.outputs.values())[0]
def __call__(self, input_var=None, use_from=None, use_up_to='classifier',
             training=False, force_global_pooling=False,
             check_global_pooling=True, returns_net=False, verbose=0,
             with_aux_tower=False):
    """Construct the GoogLeNet graph up to ``use_up_to``.

    Args:
        input_var: Input variable; created by ``self.get_input_var`` when
            ``None``.
        use_from: Reserved for forward compatibility; must be ``None``.
        use_up_to (str): Name of the endpoint layer of the returned graph.
        training (bool): If ``True``, batch-norm uses batch statistics and
            dropout is kept; otherwise dropout is removed and parameters
            are fixed.
        force_global_pooling (bool): Passed to
            ``configure_global_average_pooling``.
        check_global_pooling (bool): Passed to
            ``configure_global_average_pooling``.
        returns_net (bool): If ``True``, return the network object.
        verbose (int): Verbosity of the network-pass callback.
        with_aux_tower (bool): If ``True``, keep the two auxiliary
            classifier towers (training only) and return their outputs
            alongside the main output.

    Returns:
        The network object if ``returns_net``; a list of output variables
        if ``with_aux_tower``; otherwise the single main output variable.
    """
    # Consistency fix: reject `use_from` like the other model wrappers do,
    # instead of silently ignoring it.
    assert use_from is None, \
        'This should not be set because it is for forward compatibility.'
    if not training:
        assert not with_aux_tower, "Aux Tower should be disabled when inference process."
    input_var = self.get_input_var(input_var)
    callback = NnpNetworkPass(verbose)
    callback.remove_and_rewire('ImageAugmentationX')
    callback.set_variable('InputX', input_var)
    self.configure_global_average_pooling(
        callback, force_global_pooling, check_global_pooling,
        'AveragePooling_3')
    callback.set_batch_normalization_batch_stat_all(training)
    if with_aux_tower:
        # Keep the two aux classifier heads; drop only their loss tails.
        self.use_up_to('_aux_classifier_1', callback)
        funcs_to_drop1 = ("Affine_2",
                          "SoftmaxCrossEntropy",
                          "MulScalarLoss1")
        self.use_up_to('_aux_classifier_2', callback)
        funcs_to_drop2 = ("Affine_4",
                          "SoftmaxCrossEntropy_2",
                          "MulScalarLoss2")
    else:
        # Remove the aux towers entirely, from their branching points down.
        self.use_up_to('_branching_point_1', callback)
        funcs_to_drop1 = ("AveragePooling",
                          "Convolution_22",
                          "ReLU_22",
                          "Affine",
                          "ReLU_23",
                          "Dropout",
                          "Affine_2",
                          "SoftmaxCrossEntropy",
                          "MulScalarLoss1")
        self.use_up_to('_branching_point_2', callback)
        funcs_to_drop2 = ("AveragePooling_2",
                          "Convolution_41",
                          "ReLU_42",
                          "Affine_3",
                          "ReLU_43",
                          "Dropout_2",
                          "Affine_4",
                          "SoftmaxCrossEntropy_2",
                          "MulScalarLoss2")
    callback.drop_function(*funcs_to_drop1)
    callback.drop_function(*funcs_to_drop2)
    if not training:
        callback.remove_and_rewire('Dropout_3')
        callback.fix_parameters()
    self.use_up_to(use_up_to, callback)
    batch_size = input_var.shape[0]
    net = self.nnp.get_network(
        'Train', batch_size=batch_size, callback=callback)
    if returns_net:
        return net
    elif with_aux_tower:
        return list(net.outputs.values())
    else:
        return list(net.outputs.values())[0]
def __call__(self, input_var=None, use_from=None, use_up_to='classifier',
             training=False, force_global_pooling=False,
             check_global_pooling=True, returns_net=False, verbose=0,
             with_aux_tower=False):
    """Construct the network graph up to ``use_up_to``.

    Args:
        input_var: Input variable; created by ``self.get_input_var`` when
            ``None``.
        use_from: Reserved for forward compatibility; must be ``None``.
        use_up_to (str): Name of the endpoint layer of the returned graph.
        training (bool): If ``False``, dropout layers are removed and
            parameters are fixed.
        force_global_pooling (bool): Accepted for signature compatibility
            with the other models; not used by this network.
        check_global_pooling (bool): Accepted for signature compatibility;
            not used by this network.
        returns_net (bool): If ``True``, return the network object instead
            of the output variable.
        verbose (int): Verbosity of the network-pass callback.
        with_aux_tower (bool): Accepted for signature compatibility; this
            network has no auxiliary tower, so the value is ignored.

    Returns:
        The network object when ``returns_net`` is ``True``; otherwise the
        first output variable of the constructed network.
    """
    # Consistency fix: reject `use_from` like the other model wrappers do,
    # instead of silently ignoring it.
    assert use_from is None, \
        'This should not be set because it is for forward compatibility.'
    input_var = self.get_input_var(input_var)
    callback = NnpNetworkPass(verbose)
    callback.remove_and_rewire('ImageAugmentationX')
    callback.set_variable('TrainingInput', input_var)
    self.use_up_to(use_up_to, callback)
    if not training:
        # Inference graph: drop both dropout layers and freeze parameters.
        callback.remove_and_rewire('TrainNet/Dropout')
        callback.remove_and_rewire('TrainNet/Dropout_2')
        callback.fix_parameters()
    batch_size = input_var.shape[0]
    net = self.nnp.get_network(
        'Training', batch_size=batch_size, callback=callback)
    if returns_net:
        return net
    return list(net.outputs.values())[0]
def __call__(self, input_var=None, use_from=None, use_up_to='classifier',
             training=False, returns_net=False, verbose=0):
    """Build the VGG graph up to ``use_up_to`` and return its output.

    ``use_from`` is reserved for forward compatibility and must stay
    ``None``. When ``training`` is ``False``, both dropout layers are
    removed and parameters are fixed so the graph is inference-only.
    Returns the network object itself when ``returns_net`` is ``True``,
    otherwise the single output variable.
    """
    assert use_from is None, 'This should not be set because it is for forward compatibility.'
    input_var = self.get_input_var(input_var)
    cb = NnpNetworkPass(verbose)
    cb.remove_and_rewire('ImageAugmentationX')
    cb.set_variable('TrainingInput', input_var)
    cb.set_batch_normalization_batch_stat_all(training)
    self.use_up_to(use_up_to, cb)
    if not training:
        # Layer names are scoped by depth, e.g. "VGG16/Dropout_1".
        for dropout in ('Dropout_1', 'Dropout_2'):
            cb.remove_and_rewire(
                'VGG{}/{}'.format(self.num_layers, dropout))
        cb.fix_parameters()
    net = self.nnp.get_network(
        'Training', batch_size=input_var.shape[0], callback=cb)
    return net if returns_net else list(net.outputs.values())[0]
def DLAUp(x, test, residual_root=False, channel_last=False):
    """Build the DLA-34 upsampling decoder on top of the ImageNet backbone.

    Runs ``dla_imagenet`` on ``x`` to collect the intermediate feature maps
    (``hidden``), then progressively upsamples and merges them via
    ``upsample``/``root`` aggregation nodes down to stride 4.

    Args:
        x: Input variable fed to the DLA-34 backbone.
        test (bool): Inference mode flag, forwarded to the backbone and
            every upsample/root node.
        residual_root (bool): Accepted for interface compatibility; not
            used by the current implementation.
        channel_last (bool): Use NHWC layout throughout.

    Returns:
        The stride-4 aggregated feature map.
    """
    # Only the hidden feature maps are needed; the classifier output of the
    # backbone is discarded.
    _, hidden = dla_imagenet(
        x, num_classes=1000, num_layers=34, test=test,
        channel_last=channel_last)
    # Bug fix: the original built an NnpNetworkPass callback here and
    # called remove_and_rewire('fc') on it, but never passed it to any
    # network builder — pure dead code, removed.
    ochannels = [256, 128, 64, 32]
    with nn.parameter_scope("up16"):
        x = upsample(hidden['level5'], ochannels[0], test,
                     kernel_size=4, channel_last=channel_last)
        hidden['up16'] = x
    with nn.parameter_scope("up8"):
        x = root(x, [hidden['level4']], ochannels[0], test,
                 kernel_size=3, channel_last=channel_last)
        x = upsample(x, ochannels[1], test,
                     kernel_size=4, channel_last=channel_last)
        hidden['up8'] = x
    with nn.parameter_scope("up4"):
        with nn.parameter_scope("residual_level3"):
            level4up = upsample(hidden['level4'], ochannels[1], test,
                                kernel_size=4, channel_last=channel_last)
            with nn.parameter_scope("level3up_root"):
                level3up = root(level4up, [hidden['level3']], ochannels[1],
                                test, kernel_size=3,
                                channel_last=channel_last)
        with nn.parameter_scope("x_root"):
            x = root(x, [level3up], ochannels[1], test,
                     kernel_size=1, channel_last=channel_last)
        x = upsample(x, ochannels[2], test,
                     kernel_size=4, channel_last=channel_last)
        hidden['up4'] = x
    with nn.parameter_scope("up2_b"):
        level3up_b = upsample(level3up, ochannels[2], test,
                              kernel_size=4, channel_last=channel_last)
    with nn.parameter_scope("up2_c"):
        level3up_c = upsample(hidden['level3'], ochannels[2], test,
                              kernel_size=4, channel_last=channel_last)
        with nn.parameter_scope("level3up_c_root"):
            level3up_c = root(hidden['level2'], [level3up_c], ochannels[2],
                              test, kernel_size=3,
                              channel_last=channel_last)
        with nn.parameter_scope("level2up_root"):
            level2up = root(level3up_b, [level3up_c], ochannels[2], test,
                            kernel_size=3, channel_last=channel_last)
        with nn.parameter_scope("x_root"):
            x = root(x, [level2up], ochannels[2], test,
                     kernel_size=3, channel_last=channel_last)
    return x