Path: blob/main/modules/parallel_wavegan/layers/residual_stack.py
# -*- coding: utf-8 -*-

# Copyright 2020 Tomoki Hayashi
#  MIT License (https://opensource.org/licenses/MIT)

"""Residual stack module in MelGAN."""

import torch

from . import CausalConv1d


class ResidualStack(torch.nn.Module):
    """Residual stack module introduced in MelGAN."""

    def __init__(self,
                 kernel_size=3,
                 channels=32,
                 dilation=1,
                 bias=True,
                 nonlinear_activation="LeakyReLU",
                 nonlinear_activation_params={"negative_slope": 0.2},
                 pad="ReflectionPad1d",
                 pad_params={},
                 use_causal_conv=False,
                 ):
        """Initialize ResidualStack module.

        Args:
            kernel_size (int): Kernel size of dilated convolution layer.
            channels (int): Number of channels of convolution layers.
            dilation (int): Dilation factor.
            bias (bool): Whether to add bias parameter in convolution layers.
            nonlinear_activation (str): Activation function module name.
            nonlinear_activation_params (dict): Hyperparameters for activation function.
            pad (str): Padding function module name before dilated convolution layer.
            pad_params (dict): Hyperparameters for padding function.
            use_causal_conv (bool): Whether to use causal convolution.

        """
        super(ResidualStack, self).__init__()

        # define residual stack part
        if not use_causal_conv:
            assert (kernel_size - 1) % 2 == 0, "Even kernel sizes are not supported."
            # pad (kernel_size - 1) // 2 * dilation frames on both sides so the
            # dilated convolution preserves the input length
            self.stack = torch.nn.Sequential(
                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
                getattr(torch.nn, pad)((kernel_size - 1) // 2 * dilation, **pad_params),
                torch.nn.Conv1d(channels, channels, kernel_size, dilation=dilation, bias=bias),
                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
                torch.nn.Conv1d(channels, channels, 1, bias=bias),
            )
        else:
            # CausalConv1d handles its own padding and keeps the output length
            # equal to the input, with frame t independent of future frames
            self.stack = torch.nn.Sequential(
                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
                CausalConv1d(channels, channels, kernel_size, dilation=dilation,
                             bias=bias, pad=pad, pad_params=pad_params),
                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
                torch.nn.Conv1d(channels, channels, 1, bias=bias),
            )

        # define extra layer for skip connection
        self.skip_layer = torch.nn.Conv1d(channels, channels, 1, bias=bias)

    def forward(self, c):
        """Calculate forward propagation.

        Args:
            c (Tensor): Input tensor (B, channels, T).

        Returns:
            Tensor: Output tensor (B, channels, T).

        """
        return self.stack(c) + self.skip_layer(c)
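
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the upstream module): a quick shape
# check showing that both variants of the stack are length-preserving. Because
# of the relative import above, this file cannot be run as a script directly;
# it assumes invocation as a module within the package layout of this repo,
# e.g. `python -m parallel_wavegan.layers.residual_stack`.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    B, C, T = 2, 32, 100
    c = torch.randn(B, C, T)

    # non-causal variant: symmetric reflection padding keeps (B, C, T) intact
    stack = ResidualStack(kernel_size=3, channels=C, dilation=2)
    assert stack(c).shape == (B, C, T)

    # causal variant: same output shape, but frame t depends only on frames <= t
    causal = ResidualStack(kernel_size=3, channels=C, dilation=2,
                           use_causal_conv=True)
    assert causal(c).shape == (B, C, T)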