Path: blob/main/modules/parallel_wavegan/layers/residual_block.py
# -*- coding: utf-8 -*-

"""Residual block module in WaveNet.

This code is modified from https://github.com/r9y9/wavenet_vocoder.

"""

import math

import torch
import torch.nn.functional as F


class Conv1d(torch.nn.Conv1d):
    """Conv1d module with customized initialization."""

    def __init__(self, *args, **kwargs):
        """Initialize Conv1d module."""
        super(Conv1d, self).__init__(*args, **kwargs)

    def reset_parameters(self):
        """Reset parameters."""
        torch.nn.init.kaiming_normal_(self.weight, nonlinearity="relu")
        if self.bias is not None:
            torch.nn.init.constant_(self.bias, 0.0)


class Conv1d1x1(Conv1d):
    """1x1 Conv1d with customized initialization."""

    def __init__(self, in_channels, out_channels, bias):
        """Initialize 1x1 Conv1d module."""
        super(Conv1d1x1, self).__init__(in_channels, out_channels,
                                        kernel_size=1, padding=0,
                                        dilation=1, bias=bias)


class ResidualBlock(torch.nn.Module):
    """Residual block module in WaveNet."""

    def __init__(self,
                 kernel_size=3,
                 residual_channels=64,
                 gate_channels=128,
                 skip_channels=64,
                 aux_channels=80,
                 dropout=0.0,
                 dilation=1,
                 bias=True,
                 use_causal_conv=False,
                 ):
        """Initialize ResidualBlock module.

        Args:
            kernel_size (int): Kernel size of dilation convolution layer.
            residual_channels (int): Number of channels for residual connection.
            gate_channels (int): Number of channels for gated activation units.
            skip_channels (int): Number of channels for skip connection.
            aux_channels (int): Local conditioning channels i.e. auxiliary input dimension.
            dropout (float): Dropout probability.
            dilation (int): Dilation factor.
            bias (bool): Whether to add bias parameter in convolution layers.
            use_causal_conv (bool): Whether to use causal or non-causal convolution.

        """
        super(ResidualBlock, self).__init__()
        self.dropout = dropout
        # no future time stamps available
        if use_causal_conv:
            padding = (kernel_size - 1) * dilation
        else:
            assert (kernel_size - 1) % 2 == 0, "Even kernel size is not supported."
            padding = (kernel_size - 1) // 2 * dilation
        self.use_causal_conv = use_causal_conv

        # dilation conv
        self.conv = Conv1d(residual_channels, gate_channels, kernel_size,
                           padding=padding, dilation=dilation, bias=bias)

        # local conditioning
        if aux_channels > 0:
            self.conv1x1_aux = Conv1d1x1(aux_channels, gate_channels, bias=False)
        else:
            self.conv1x1_aux = None

        # conv output is split into two groups
        gate_out_channels = gate_channels // 2
        self.conv1x1_out = Conv1d1x1(gate_out_channels, residual_channels, bias=bias)
        self.conv1x1_skip = Conv1d1x1(gate_out_channels, skip_channels, bias=bias)

    def forward(self, x, c):
        """Calculate forward propagation.

        Args:
            x (Tensor): Input tensor (B, residual_channels, T).
            c (Tensor): Local conditioning auxiliary tensor (B, aux_channels, T).

        Returns:
            Tensor: Output tensor for residual connection (B, residual_channels, T).
            Tensor: Output tensor for skip connection (B, skip_channels, T).

        """
        residual = x
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = self.conv(x)

        # remove future time steps if causal convolution is used
        x = x[:, :, :residual.size(-1)] if self.use_causal_conv else x

        # split into two parts for gated activation
        splitdim = 1
        xa, xb = x.split(x.size(splitdim) // 2, dim=splitdim)

        # local conditioning
        if c is not None:
            assert self.conv1x1_aux is not None
            c = self.conv1x1_aux(c)
            ca, cb = c.split(c.size(splitdim) // 2, dim=splitdim)
            xa, xb = xa + ca, xb + cb

        x = torch.tanh(xa) * torch.sigmoid(xb)

        # for skip connection
        s = self.conv1x1_skip(x)

        # for residual connection
        x = (self.conv1x1_out(x) + residual) * math.sqrt(0.5)

        return x, s
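For reference, a minimal usage sketch of this block follows. The batch size, sequence length, and conditioning input are illustrative assumptions (an 80-dimensional auxiliary feature such as a mel-spectrogram), not values taken from the repository's training configs; the channel arguments simply mirror the constructor defaults above.

# Hypothetical usage sketch of ResidualBlock (shapes are assumptions, not repo config values).
import torch

from parallel_wavegan.layers.residual_block import ResidualBlock

block = ResidualBlock(
    kernel_size=3,
    residual_channels=64,
    gate_channels=128,
    skip_channels=64,
    aux_channels=80,
    dilation=2,
)

x = torch.randn(2, 64, 100)   # (B, residual_channels, T) input from the previous layer
c = torch.randn(2, 80, 100)   # (B, aux_channels, T) local conditioning, e.g. mel-spectrogram
res, skip = block(x, c)

print(res.shape)   # torch.Size([2, 64, 100])  -> fed to the next residual block
print(skip.shape)  # torch.Size([2, 64, 100])  -> accumulated over blocks for the output stack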