Example #1
import torch
import torchlayers


def test_text_cnn():
    model = torch.nn.Sequential(
        torchlayers.Conv(64),  # specify ONLY out_channels
        torch.nn.ReLU(),  # use torch.nn wherever you wish
        torchlayers.BatchNorm(),  # BatchNormNd inferred from input
        torchlayers.Conv(128),  # Default kernel_size equal to 3
        torchlayers.ReLU(),
        torchlayers.Conv(256, kernel_size=11),  # "same" padding as default
        torchlayers.GlobalMaxPool(),  # Known from Keras
        torchlayers.Linear(10),  # Output for 10 classes
    )

    torchlayers.build(model, torch.randn(2, 300, 1))
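The build call is expected to return the instantiated network, so, as a hedged continuation of the test body above, it could be assigned and exercised directly; the (2, 10) output shape is an assumption based on the 10-class head:

    built = torchlayers.build(model, torch.randn(2, 300, 1))
    assert built(torch.randn(2, 300, 1)).shape == (2, 10)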
Example #2
import torchlayers


def model():
    return torchlayers.Sequential(
        torchlayers.Conv(64),
        torchlayers.BatchNorm(),
        torchlayers.ReLU(),
        torchlayers.Conv(128),
        torchlayers.BatchNorm(),
        torchlayers.ReLU(),
        torchlayers.Conv(256),
        torchlayers.GlobalMaxPool(),
        torchlayers.Linear(64),
        torchlayers.BatchNorm(),
        torchlayers.Linear(10),
    )
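A minimal usage sketch for the model above; the (2, 3, 28, 28) image-like input shape is only an assumption, from which torchlayers should infer the 2D variants of Conv, BatchNorm and GlobalMaxPool:

import torch
import torchlayers

# Assumed image-like input; build instantiates all shape-inferred layers.
network = torchlayers.build(model(), torch.randn(2, 3, 28, 28))
print(network(torch.randn(2, 3, 28, 28)).shape)  # expected: torch.Size([2, 10])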
Example #3
import torchlayers as tl


def model():
    return tl.Sequential(
        tl.Conv(64),
        tl.BatchNorm(),
        tl.ReLU(),
        tl.Conv(128),
        tl.BatchNorm(),
        tl.ReLU(),
        tl.Conv(256),
        tl.GlobalMaxPool(),
        tl.Linear(64),
        tl.BatchNorm(),
        tl.Linear(10),
    )
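Because every layer is shape-inferred, the same definition should also build against a 1D, channels-first, text-like input, with Conv, BatchNorm and GlobalMaxPool resolving to their 1D variants; the shape below is an illustrative assumption:

import torch
import torchlayers as tl

# Assumed (batch, embedding, timesteps) input for the 1D case.
text_network = tl.build(model(), torch.randn(2, 300, 64))
print(text_network(torch.randn(2, 300, 64)).shape)  # expected: torch.Size([2, 10])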
Example #4
def create_bottleneck(self, labels, tasks, linear_cls):
    # Global pooling followed by a single labels * tasks output head.
    return torch.nn.Sequential(torchlayers.GlobalMaxPool(),
                               linear_cls(labels * tasks))
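A hypothetical sketch of how such a method could sit inside a model class; the MultiTaskHead name, its attributes, and the build shape are assumptions for illustration only:

import torch
import torchlayers

class MultiTaskHead(torch.nn.Module):  # hypothetical wrapper class
    def __init__(self, labels, tasks):
        super().__init__()
        self.bottleneck = self.create_bottleneck(labels, tasks, torchlayers.Linear)

    def create_bottleneck(self, labels, tasks, linear_cls):
        return torch.nn.Sequential(torchlayers.GlobalMaxPool(),
                                   linear_cls(labels * tasks))

    def forward(self, features):
        return self.bottleneck(features)

# Shape-inferred layers still need a build pass before use (assumed feature shape).
head = torchlayers.build(MultiTaskHead(labels=5, tasks=3), torch.randn(2, 128, 7, 7))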