def __init__(
    self,
    n_mel_channels: int,
    n_flows: int,
    n_group: int,
    n_early_every: int,
    n_early_size: int,
    n_wn_channels: int,
    n_wn_layers: int,
    wn_kernel_size: int,
):
    """
    WaveGlow module

    Args:
        n_mel_channels (int): Number of mel channels to output.
        n_flows (int): Number of flow layers
        n_group (int): Number of groups to respace the inputs
        n_early_every (int): Every n_early_every layers, n_early_size gets skip connected to the output
        n_early_size (int): The size of the chunk to be skip connected
        n_wn_channels (int): Number of channels for the non-invertible wavenet transformation
        n_wn_layers (int): Number of layers for the non-invertible wavenet transformation
        wn_kernel_size (int): Kernel size for the non-invertible wavenet transformation

    Raises:
        AssertionError: if ``n_group`` is odd (the coupling layers split each
            group into two equal halves, so it must divide evenly by 2).
    """
    super().__init__()
    # Transposed conv upsamples the mel conditioning along time; stride=256 and
    # kernel=1024 are hard-coded here — presumably matched to the feature hop
    # length used elsewhere in the project (confirm against the preprocessor).
    self.upsample = torch.nn.ConvTranspose1d(n_mel_channels, n_mel_channels, 1024, stride=256)
    self.n_mel_channels = n_mel_channels
    assert n_group % 2 == 0
    self.n_flows = n_flows
    self.n_group = n_group
    self.n_early_every = n_early_every
    self.n_early_size = n_early_size
    self.wavenet = torch.nn.ModuleList()
    self.convinv = torch.nn.ModuleList()
    self.mode = OperationMode.infer

    n_half = n_group // 2

    # Set up layers with the right sizes based on how many dimensions
    # have been output already
    n_remaining_channels = n_group
    for k in range(n_flows):
        if k % self.n_early_every == 0 and k > 0:
            # Every n_early_every flows, n_early_size channels are emitted
            # early, so later flows (and each coupling half) see fewer channels.
            # Floor division replaces the original int(x / 2) — identical for
            # the non-negative sizes used here, and the idiomatic form.
            n_half -= self.n_early_size // 2
            n_remaining_channels -= self.n_early_size
        self.convinv.append(Invertible1x1Conv(n_remaining_channels))
        self.wavenet.append(
            WaveNet(
                n_half,
                n_mel_channels * n_group,
                n_layers=n_wn_layers,
                n_channels=n_wn_channels,
                kernel_size=wn_kernel_size,
            )
        )
    # Channels left over after all early outputs; recorded for use elsewhere.
    self.n_remaining_channels = n_remaining_channels
    self.removed_weightnorm = False
def __init__(
    self,
    n_mel_channels: int,
    n_flows: int,
    n_group: int,
    n_wn_channels: int,
    n_wn_layers: int,
    wn_kernel_size: int,
    upsample_factor: int,
):
    """
    UniGlow module

    Args:
        n_mel_channels (int): Number of mel channels to output.
        n_flows (int): Number of flow layers
        n_group (int): Number of groups to respace the inputs
        n_wn_channels (int): Number of channels for the non-invertible wavenet transformation
        n_wn_layers (int): Number of layers for the non-invertible wavenet transformation
        wn_kernel_size (int): Kernel size for the non-invertible wavenet transformation
        upsample_factor (int): Upsampling factor applied to the conditioning
            features (stored on the instance; used outside this constructor)

    Raises:
        AssertionError: if ``n_group`` is odd (the coupling split requires an
            even group size).
    """
    super().__init__()
    assert n_group % 2 == 0
    self.n_flows = n_flows
    self.n_group = n_group
    # Floor division replaces int(n_group / 2) — equivalent for the positive
    # even values allowed by the assertion above, and the idiomatic form.
    n_half = n_group // 2
    # Unlike WaveGlow, a single conv and a single WaveNet are constructed —
    # presumably shared (weight-tied) across all n_flows steps; confirm in
    # the forward pass.
    self.conv = Invertible1x1Conv(n_group)
    self.wn = WaveNet(n_half, n_mel_channels, n_wn_layers, n_wn_channels, wn_kernel_size)
    self.upsample_factor = upsample_factor