Example #1
def arch_summary(arch):
    model = arch(False)
    tot = 0
    for i, l in enumerate(model.children()):
        n_layers = len(flatten_model(l))
        tot += n_layers
        print(f'({i}) {l.__class__.__name__:<12}: {n_layers:<4}layers (total: {tot})')
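
A minimal usage sketch, assuming a fastai v1 environment where models.resnet34 and flatten_model are in scope (arch_summary calls the architecture with pretrained=False and prints one line per top-level child):

from fastai.vision import *  # assumption: exposes `models` and `flatten_model`

arch_summary(models.resnet34)
# prints something like:
# (0) Conv2d      : 1   layers (total: 1)
# (1) BatchNorm2d : 1   layers (total: 2)
# ...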
Example #2
    def __init__(
            self,
            learn: Learner,
            do_remove: bool = True,
            hMin=-1,
            hMax=1,
            nBins=100,
            useClasses=False,  # if True, compute the histogram of classes in the last layer
            liveChart=True,  # show a live chart of the last layer
            modulesId=-1,  # indices of the modules to keep (-1 keeps only the last one)
    ):
        self.hMin = hMin or (-hMax)
        self.hMax = hMax
        self.nBins = nBins
        self.zero_bin = math.floor(-self.nBins * self.hMin /
                                   (self.hMax - self.hMin))
        self.liveChart = liveChart
        self.allModules = [m for m in flatten_model(learn.model)]
        self.useClasses = useClasses
        modules = self.allModules
        if modulesId:
            modules = [self.allModules[i] for i in listify(modulesId)]
        self.allModules = modules if modules else self.allModules
        self.c = learn.data.c  # number of classes
        super().__init__(learn, modules, do_remove)
Example #3

    def _freeze(self):
        "Freezes the pretrained backbone."
        for idx, i in enumerate(flatten_model(self.learn.model.backbone)):
            if isinstance(i, torch.nn.BatchNorm2d):
                continue  # keep BatchNorm layers trainable
            for p in i.parameters():
                p.requires_grad = False
        return idx
Example #4
def get_groups(model, layer_groups):
    group_indices = [len(g) for g in layer_groups]
    curr_i = 0
    group = []
    for layer in model:
        group_indices[curr_i] -= len(flatten_model(layer))
        group.append(layer.__class__.__name__)
        if group_indices[curr_i] == 0:
            curr_i += 1
            print(f'Group {curr_i}:', group)
            group = []
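
A minimal usage sketch, assuming a fastai v1 Learner whose layer_groups are already set (for example by cnn_learner); passing the flattened model makes each iterated layer a single module, so the running count lines up with the group sizes:

import torch.nn as nn

# Sketch (assumption: fastai v1; `learn` is a Learner with layer_groups set).
get_groups(nn.Sequential(*flatten_model(learn.model)), learn.layer_groups)
# prints one "Group i: [...]" line per layer group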
Example #5

    def __init__(self, model, chip_size, num_classes):
        super(AuxPSUnet, self).__init__()
        self.model = model

        for idx, i in enumerate(flatten_model(self.model)):
            if hasattr(i, 'dilation'):
                dilation = i.dilation
                dilation = dilation[0] if isinstance(dilation, tuple) else dilation
                if dilation > 1:
                    break   

        self.hook = hook_output(flatten_model(model)[idx - 1])

        ## pass a dummy batch through the model so the hook stores an activation (model_sizes returns the activation sizes)
        model_sizes(self.model, size=(chip_size, chip_size))

        ## Getting the stored parameters inside the hook
        aux_in_channels = self.hook.stored.shape[1]
        del self.hook.stored                     
        self.aux_logits = nn.Conv2d(aux_in_channels, num_classes, kernel_size=1)       
Example #6
    def freeze(self):
        "Freezes the pretrained backbone."
        for idx, i in enumerate(flatten_model(self.learn.model)):
            if hasattr(i, 'dilation'):
                dilation = i.dilation
                dilation = dilation[0] if isinstance(dilation, tuple) else dilation
                if dilation > 1:
                    break        
            for p in i.parameters():
                p.requires_grad = False

        self.learn.layer_groups = split_model_idx(self.learn.model, [idx])  ## Could also call self.learn.freeze after this line because layer groups are now present.      
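
As the trailing comment notes, once the layer groups are split at idx, the same intent can also go through fastai's own API. A minimal sketch of that method tail, assuming fastai v1 (Learner.freeze freezes every layer group except the last, but keeps BatchNorm layers trainable when train_bn=True, so it is not identical to the explicit loop above):

        # Variant sketch (assumption: fastai v1 Learner API; split_model_idx
        # returns two groups here).
        self.learn.layer_groups = split_model_idx(self.learn.model, [idx])
        self.learn.freeze()  # freezes all groups except the last one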
Example #7
def test_cs_learner_freeze(learn):
    model = learn.model
    for layer in fv.flatten_model(model.bu_body):
        should_require = False
        if isinstance(layer, fv.bn_types):
            should_require = True
        for p in layer.parameters():
            assert p.requires_grad == should_require

    branches = [model.td, model.laterals, model.emb]
    if model.bu_head:
        branches.append(model.bu_head)
    for p in itertools.chain.from_iterable(b.parameters() for b in branches):
        assert p.requires_grad
Example #8
def _get_layers(model, cond=noop):
    return [m for m in flatten_model(model) if cond(m)]
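
A minimal usage sketch (the nn.Conv2d filter and the some_model variable are illustrative, not from the listing; noop is assumed to be fastai's no-op helper, so the default cond effectively keeps every module):

import torch.nn as nn

# keep only the convolutional modules of a (hypothetical) model
convs = _get_layers(some_model, cond=lambda m: isinstance(m, nn.Conv2d))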