def __init__(self, lin_arc=lin_arc, res_arc=res_arc,
             depths=[1] * (len(res_arc) + 1),
             block=resnet.basic_block, *args, **kwargs):
    super().__init__()
    # Linear layers: walk the encoder's fully connected architecture in reverse.
    self.lin_arc = lin_arc[::-1]
    self.linear = nn.ModuleList([
        nn.Linear(self.lin_arc[0], self.lin_arc[1]),
        *[nn.Linear(in_f, out_f)
          for in_f, out_f in zip(self.lin_arc[1:], self.lin_arc[2:])],
    ])
    # Convolutional layers: reversed channel widths, prefixed with half of the
    # deepest width so the first stage widens back toward the encoder's input.
    self.res_arc = [int(res_arc[-1] / 2)] + res_arc[::-1]
    self.in_out_block_sizes = list(zip(self.res_arc, self.res_arc[1:]))
    self.conv = nn.ModuleList([
        resnet.layer(self.res_arc[0], self.res_arc[0], n=depths[0],
                     block=block, conv=conv, sampling=1, *args, **kwargs),
        *[resnet.layer(in_channels * block.expansion, out_channels, n=n,
                       conv=conv, block=block, sampling=1, *args, **kwargs)
          for (in_channels, out_channels), n
          in zip(self.in_out_block_sizes, depths[1:])],
    ])
    # Output head: 1x1 convolution mapping the last width down to 20 channels.
    self.gate = nn.Sequential(
        nn.Conv1d(self.res_arc[-1], 20, kernel_size=1, stride=1,
                  padding=0, bias=False),
        nn.ReLU(),
    )
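# The constructors in this section read several names from module scope
# rather than taking them all as arguments: `lin_arc`, `res_arc`, `conv`,
# `in_channels`, `standard_length`, and `bottleneck`. The block below is a
# hypothetical sketch of that module-level configuration; the actual values
# are not part of the code shown here.
import torch.nn as nn

import resnet  # the project's own residual-layer module

lin_arc = [512, 128, 32]   # assumed widths of the fully connected stack
res_arc = [64, 128, 256]   # assumed channel widths of the residual stages
in_channels = 20           # assumed input channels, e.g. a 20-letter alphabet
standard_length = 512      # assumed flattened sequence length
bottleneck = 32            # assumed latent width
conv = nn.Conv1d           # convolution type passed through to resnet.layer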
def __init__(self, conv, in_channels=20, block_arch=[1024, 256, 64, 3],
             depths=[2, 2, 2, 2], block=resnet.basic_block, *args, **kwargs):
    super().__init__()
    self.block_arch = block_arch
    # Consecutive (in, out) channel pairs for the residual stages.
    self.in_out_block_sizes = list(zip(block_arch, block_arch[1:]))
    self.blocks = nn.ModuleList([
        resnet.layer(in_channels, block_arch[0], n=depths[0],
                     block=block, conv=conv, *args, **kwargs),
        *[resnet.layer(in_channels * block.expansion, out_channels, n=n,
                       conv=conv, block=block, *args, **kwargs)
          for (in_channels, out_channels), n
          in zip(self.in_out_block_sizes, depths[1:])],
    ])
    # Compress the flattened sequence down to the bottleneck width.
    self.linit = nn.Linear(standard_length, bottleneck)
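# `resnet` itself is not shown. The stand-in below is only an assumption,
# consistent with how it is called above: `layer` takes in/out channels, a
# block count `n`, a `block` class exposing an `expansion` attribute, a
# `conv` constructor, and an optional `sampling` stride. It is a minimal
# sketch, not the project's actual implementation.
import torch.nn as nn

class basic_block(nn.Module):
    """Hypothetical residual block; `expansion` scales the output width."""
    expansion = 1

    def __init__(self, in_channels, out_channels, conv=nn.Conv1d, sampling=1):
        super().__init__()
        self.net = nn.Sequential(
            conv(in_channels, out_channels, kernel_size=3,
                 stride=sampling, padding=1, bias=False),
            nn.BatchNorm1d(out_channels),
            nn.ReLU(),
        )
        # 1x1 projection so the skip connection matches the output shape.
        self.shortcut = conv(in_channels, out_channels * self.expansion,
                             kernel_size=1, stride=sampling, bias=False)

    def forward(self, x):
        return self.net(x) + self.shortcut(x)

def layer(in_channels, out_channels, n=1, block=basic_block,
          conv=nn.Conv1d, sampling=2, *args, **kwargs):
    """Stack `n` blocks; only the first one changes width and stride."""
    blocks = [block(in_channels, out_channels, conv=conv, sampling=sampling)]
    blocks += [block(out_channels * block.expansion, out_channels,
                     conv=conv, sampling=1) for _ in range(n - 1)]
    return nn.Sequential(*blocks)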
def __init__(self, lin_arc=lin_arc, res_arc=res_arc,
             depths=[1] * len(res_arc),
             block=resnet.basic_block, *args, **kwargs):
    super().__init__()
    # Fully connected stack over the given linear architecture.
    self.linear = nn.ModuleList([
        nn.Linear(lin_arc[0], lin_arc[1]),
        *[nn.Linear(in_f, out_f)
          for in_f, out_f in zip(lin_arc[1:], lin_arc[2:])],
    ])
    self.in_out_block_sizes = list(zip(res_arc, res_arc[1:]))
    # `in_channels` and `conv` are read from module scope here.
    self.conv = nn.ModuleList([
        resnet.layer(in_channels, res_arc[0], n=depths[0],
                     block=block, conv=conv, sampling=1, *args, **kwargs),
        *[resnet.layer(in_channels * block.expansion, out_channels, n=n,
                       conv=conv, sampling=1, block=block, *args, **kwargs)
          for (in_channels, out_channels), n
          in zip(self.in_out_block_sizes, depths[1:])],
    ])
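# The matching `forward` methods are not shown in this section. Assuming the
# fully connected stack runs first and its output is reshaped to
# (batch, channels, length) before the residual stages, a forward pass for
# the decoder above might look like this sketch (the ReLU between linear
# layers is an assumption):
import torch

def forward(self, x):
    for fc in self.linear:
        x = torch.relu(fc(x))                # fully connected stack
    x = x.view(x.size(0), in_channels, -1)   # flat vector -> (B, C, L)
    for stage in self.conv:
        x = stage(x)                         # residual stages
    return x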
def __init__(self, conv, block_arch=[3, 64, 256, 1024],
             depths=[2, 2, 2, 2], block=resnet.basic_block, *args, **kwargs):
    super().__init__()
    self.block_arch = block_arch
    # Expand the bottleneck back to the standard sequence length.
    self.linit = nn.Linear(bottleneck, standard_length)
    self.in_out_block_sizes = list(zip(block_arch, block_arch[1:]))
    self.blocks = nn.ModuleList([
        resnet.layer(block_arch[0], block_arch[0], n=depths[0],
                     block=block, conv=conv, *args, **kwargs),
        *[resnet.layer(in_channels * block.expansion, out_channels, n=n,
                       conv=conv, block=block, *args, **kwargs)
          for (in_channels, out_channels), n
          in zip(self.in_out_block_sizes, depths[1:])],
    ])
    # Output head: 1x1 convolution projecting the widest feature map back
    # to 20 channels, i.e. per-position scores over a 20-symbol alphabet.
    self.gate = nn.Sequential(
        nn.Conv1d(block_arch[-1], 20, kernel_size=1, stride=1,
                  padding=0, bias=False),
        nn.ReLU(),
    )
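# The `gate` head is plain PyTorch, so its effect can be checked in
# isolation: a 1x1 Conv1d maps (batch, block_arch[-1], length) to
# (batch, 20, length) without touching the length dimension.
import torch
import torch.nn as nn

gate = nn.Sequential(
    nn.Conv1d(1024, 20, kernel_size=1, stride=1, padding=0, bias=False),
    nn.ReLU(),
)
x = torch.randn(8, 1024, 128)  # (batch, channels, length)
print(gate(x).shape)           # torch.Size([8, 20, 128])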
def __init__(self, lin_arc=lin_arc, res_arc=res_arc,
             block=resnet.basic_block, *args, **kwargs):
    super().__init__()
    # Linear layers, walking the encoder architecture in reverse.
    self.lin_arc = lin_arc[::-1]
    self.linear = nn.ModuleList([
        nn.Linear(self.lin_arc[0], self.lin_arc[1]),
        *[nn.Linear(in_f, out_f)
          for in_f, out_f in zip(self.lin_arc[1:], self.lin_arc[2:])],
    ])
    # Convolutional layers: reversed channel widths, terminated at the
    # module-level `in_channels` width, plus one extra stage at that width.
    self.res_arc = res_arc[::-1] + [in_channels]
    self.conv = nn.ModuleList([
        resnet.layer(in_channels * block.expansion, out_channels,
                     conv=conv, sampling=1, block=block, *args, **kwargs)
        for in_channels, out_channels in zip(self.res_arc, self.res_arc[1:])
    ] + [
        resnet.layer(self.res_arc[-1], in_channels, block=block,
                     conv=conv, sampling=1, *args, **kwargs),
    ])
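# Taken together, these constructors suggest an encoder/decoder pair around
# a shared bottleneck. The wiring below is a hypothetical sketch: the class
# names `Encoder` and `Decoder` (standing for the encoder and decoder
# constructors above) are assumptions, not names from the original code.
import torch.nn as nn

class AutoEncoder(nn.Module):
    """Hypothetical composition of the encoder/decoder modules above."""

    def __init__(self, conv=nn.Conv1d):
        super().__init__()
        self.encoder = Encoder(conv)  # compresses input to `bottleneck`
        self.decoder = Decoder(conv)  # expands back toward `standard_length`

    def forward(self, x):
        z = self.encoder(x)     # latent code
        return self.decoder(z)  # reconstruction with 20 output channels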