Example #1
    def __init__(self, n_hidden_list, activation_list, n_input=None, skip=2):
    # def __init__(self, n_input, n_hidden_list, activation_list, skip=2):
        super().__init__()
        if type(activation_list) is not list:
            activation_list = [dcopy(activation_list)]*len(n_hidden_list)  # dcopy: copy.deepcopy
        assert len(activation_list)==len(n_hidden_list), 'length of layers and activations must match. If you want no activation, use nn.Identity'
        assert len(n_hidden_list) >= skip, 'number of layers must be equal or greater than skip. len(n_hidden_list): %s, skip: %s'%(len(n_hidden_list), skip)
        assert skip >= 2, 'skip needs to be: skip >= 2, given: %s'%(skip)

        # 1st layer
        if n_input is None:
            layers = [nn.LazyLinear(n_hidden_list[0])]
        else:
            layers = [nn.Linear(n_input, n_hidden_list[0])]
        # Hidden layers ~ Output layer
        layers.extend([nn.Linear(n_hidden_list[i], n_hidden_list[i+1]) for i in range(len(n_hidden_list)-1)])
        self.fc = nn.ModuleList(layers)
        self.activation_list = nn.ModuleList(activation_list)

        # Skip layers
        # Bias already present at original layers
        self.skip = skip
        if n_input is None:
            n_skip_hidden_list = [n_hidden_list[i] for i in range(skip-1, len(n_hidden_list), skip)]
            skip_layers = [nn.LazyLinear(n_skip_hidden_list[0], bias=False)]
            skip_layers += [nn.Linear(n_skip_hidden_list[i], n_skip_hidden_list[i+1], bias=False)
                            for i in range(len(n_skip_hidden_list)-1)]
        else:
            n_skip_hidden_list = [n_input] + [n_hidden_list[i] for i in range(skip-1, len(n_hidden_list), skip)]
            skip_layers = [nn.Linear(n_skip_hidden_list[i], n_skip_hidden_list[i+1], bias=False) for i in range(len(n_skip_hidden_list)-1)]
        self.skip_layers = nn.ModuleList(skip_layers) # Needs to be a ModuleList, not a plain list, so that the model registers these layers
        self.n_skip = len(n_hidden_list) // skip # number of skip layers
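In this variant the skip branch taps the main path every skip layers. With n_input=None the first skip projection is also an nn.LazyLinear, so both the main path and the skip path infer the input width from the first batch; the projections use bias=False because, as the comment notes, the main layers already carry a bias.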
Example #2
 def __init__(self, channels):
     super(Net, self).__init__()
     five = (5, 5)
     three = (3, 3)
     self.norm1 = torch.nn.BatchNorm2d(channels)
     self.conv1 = nn.Conv2d(in_channels=channels,
                            out_channels=24,
                            kernel_size=five,
                            stride=(2, 2))
     self.conv2 = nn.Conv2d(in_channels=24,
                            out_channels=36,
                            kernel_size=five,
                            stride=(2, 2))
     self.conv3 = nn.Conv2d(in_channels=36,
                            out_channels=48,
                            kernel_size=five,
                            stride=(2, 2))
     self.conv4 = nn.Conv2d(in_channels=48,
                            out_channels=64,
                            kernel_size=three)
     self.conv5 = nn.Conv2d(in_channels=64,
                            out_channels=64,
                            kernel_size=three)
     self.fc1 = nn.LazyLinear(100)
     self.fc2 = nn.LazyLinear(50)
     self.fc3 = nn.LazyLinear(10)
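The fully connected head above never states how many features come out of conv5. A minimal standalone sketch (hypothetical shapes, not taken from this example) shows the same idea: nn.LazyLinear resolves its in_features on the first forward pass.

import torch
import torch.nn as nn

# The lazy layer's in_features is unknown until data flows through it.
model = nn.Sequential(
    nn.Conv2d(3, 8, kernel_size=3),
    nn.Flatten(),
    nn.LazyLinear(10),
)
out = model(torch.rand(2, 3, 32, 32))  # dry run materializes the weights
print(model[2].weight.shape)           # torch.Size([10, 7200]) == [10, 8*30*30]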
Example #3
    def __init__(self, n_hidden_list, activation_list, n_input=None, skip=2):
    # def __init__(self, n_input, n_hidden_list, activation_list, skip=2):
        super().__init__()
        if type(activation_list) is not list:
            activation_list = [dcopy(activation_list)]*len(n_hidden_list)
        assert len(activation_list)==len(n_hidden_list), 'length of layers and activations must match. If you want no activation, use nn.Identity'
        assert len(n_hidden_list) >= skip, 'number of layers must be equal or greater than skip. len(n_hidden_list): %s, skip: %s'%(len(n_hidden_list), skip)
        assert skip >= 2, 'skip needs to be: skip >= 2, given: %s'%(skip)

        # 1st layer
        if n_input is None:
            layers = [nn.LazyLinear(n_hidden_list[0])]
        else:
            layers = [nn.Linear(n_input, n_hidden_list[0])]
        # Hidden layers ~ Output layer
        layers.extend([nn.Linear(n_hidden_list[i], n_hidden_list[i+1]) for i in range(len(n_hidden_list)-1)])
        self.fc = nn.ModuleList(layers)
        self.activation_list = nn.ModuleList(activation_list)

        # Skip layers
        self.skip = skip
        if n_input is None:
            n_skip_hidden_list = [None] + [n_hidden_list[i] for i in range(skip-1, len(n_hidden_list), skip)]
            skip_layers = nn.ModuleDict()
            for i in range(1, len(n_skip_hidden_list)):
                for j in range(i):
                    if j==0:
                        skip_layers[str(j)+'_'+str(i)]=nn.LazyLinear(n_skip_hidden_list[i], bias=False)
                    else:
                        skip_layers[str(j)+'_'+str(i)]=nn.Linear(n_skip_hidden_list[j], n_skip_hidden_list[i], bias=False)
        else:
            n_skip_hidden_list = [n_input] + [n_hidden_list[i] for i in range(skip-1, len(n_hidden_list), skip)]
            skip_layers = nn.ModuleDict()
            for i in range(1, len(n_skip_hidden_list)):
                for j in range(i):
                    skip_layers[str(j)+'_'+str(i)]=nn.Linear(n_skip_hidden_list[j], n_skip_hidden_list[i], bias=False)
        self.skip_layers = skip_layers

        n = len(n_skip_hidden_list)-1
        self.n_skip = int(n*(n+1)/2)
        assert self.n_skip == len(skip_layers), f'something wrong: n_skip({self.n_skip}), skip_layers({len(skip_layers)})'
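Unlike the variant in Example #1, this one wires every earlier tap point to every later one: with n tap points after the input, each target receives a projection from the input and from every earlier tap, giving n*(n+1)/2 bias-free projections in the ModuleDict, which is exactly the count the final assert verifies.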
Example #4
    def __init__(
        self,
        config: Config,
        forward_model: Optional[ForwardModel] = None,
    ) -> None:
        super().__init__()
        # self.save_hyperparameters()
        self.config = config
        # self.config["num_wavelens"] = len(
        #     torch.load(Path("/data-new/alok/laser/data.pt"))["interpolated_wavelength"][0]
        # )
        if forward_model is None:
            self.forward_model = None
        else:
            self.forward_model = forward_model
            self.forward_model.freeze()

        self.trunk = nn.Sequential(
            Rearrange("b c -> b c 1 1"),
            MLPMixer(
                in_channels=self.config["num_wavelens"],
                image_size=1,
                patch_size=1,
                num_classes=1_000,
                dim=512,
                depth=8,
                token_dim=256,
                channel_dim=2048,
                dropout=0.5,
            ),
            nn.Flatten(),
        )

        self.continuous_head = nn.LazyLinear(2)
        self.discrete_head = nn.LazyLinear(12)
        # XXX this call *must* happen to initialize the lazy layers
        _dummy_input = torch.rand(2, self.config["num_wavelens"])
        self.forward(_dummy_input)
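The dummy forward at the end is the usual way to materialize lazy layers right after construction: until a batch (real or dry-run) has passed through, continuous_head and discrete_head hold uninitialized parameters, and the lazy-module documentation recommends running such a dry run before handing the parameters to an optimizer or printing a summary.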
Example #5
    def __init__(self, n_hidden_list, activation_list, n_input=None):
    # def __init__(self, n_input, n_hidden_list, activation_list):
        super().__init__()
        if type(activation_list) is not list:
            activation_list = [dcopy(activation_list)]*len(n_hidden_list)
        assert len(activation_list)==len(n_hidden_list), 'length of layers and activations must match. If you want no activation, use nn.Identity'

        # 1st layer - Select Lazy if n_input is not specified
        if n_input is None:
            layers = [nn.Flatten(1), nn.LazyLinear(n_hidden_list[0]), activation_list[0]]
        else:
            layers = [nn.Flatten(1), nn.Linear(n_input, n_hidden_list[0]), activation_list[0]]
        # Hidden layers ~ Output layer
        for i in range(len(n_hidden_list) - 1):
            layers.extend([nn.Linear(n_hidden_list[i], n_hidden_list[i+1]), activation_list[i+1]])

        self.fc = nn.Sequential(*layers)
Example #6
 def _get_model() -> nn.Sequential:  # pyre-fixme[11]
     return nn.Sequential(nn.LazyLinear(10))
Example #7
 def __init__(self):
     super().__init__()
     self.fc = nn.LazyLinear(1).cuda()            
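Calling .cuda() on the still-uninitialized layer works in current PyTorch releases: the placeholder parameter is moved to the GPU and the real weight and bias are later materialized on that device during the first forward pass, so the inputs fed to this module must live on the GPU as well.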
Example #8
 def __init__(self):
     super().__init__()
     self.layer1 = nn.LazyLinear(5)
     self.layer2 = nn.LazyLinear(2)
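A small usage sketch (the class name Model and the forward method are assumptions, since only __init__ is shown): both layers report uninitialized parameters until the first call, then each infers its own input width from the data.

import torch
import torch.nn as nn

class Model(nn.Module):  # hypothetical wrapper around the __init__ above
    def __init__(self):
        super().__init__()
        self.layer1 = nn.LazyLinear(5)
        self.layer2 = nn.LazyLinear(2)

    def forward(self, x):  # assumed forward pass, not part of the original example
        return self.layer2(torch.relu(self.layer1(x)))

m = Model()
print(isinstance(m.layer1.weight, nn.parameter.UninitializedParameter))  # True
m(torch.rand(4, 7))  # layer1 infers in_features=7, layer2 infers in_features=5
print(m.layer1.weight.shape, m.layer2.weight.shape)  # torch.Size([5, 7]) torch.Size([2, 5])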
Example #9
 def _get_model() -> nn.Sequential:
     return nn.Sequential(nn.LazyLinear(10))
Example #10
    def __init__(self, config: Optional[ResMLPConfig] = None) -> None:
        num_classes = config_pop_argument(config, "num_classes")
        super().__init__(**config.__dict__)

        self.proj_head = nn.LazyLinear(num_classes)