GitHub Repository: prophesier/diff-svc
Path: blob/main/modules/parallel_wavegan/layers/residual_block.py
# -*- coding: utf-8 -*-

"""Residual block module in WaveNet.

This code is modified from https://github.com/r9y9/wavenet_vocoder.

"""

import math

import torch
import torch.nn.functional as F


class Conv1d(torch.nn.Conv1d):
    """Conv1d module with customized initialization."""

    def __init__(self, *args, **kwargs):
        """Initialize Conv1d module."""
        super(Conv1d, self).__init__(*args, **kwargs)

    def reset_parameters(self):
        """Reset parameters."""
        torch.nn.init.kaiming_normal_(self.weight, nonlinearity="relu")
        if self.bias is not None:
            torch.nn.init.constant_(self.bias, 0.0)


class Conv1d1x1(Conv1d):
    """1x1 Conv1d with customized initialization."""

    def __init__(self, in_channels, out_channels, bias):
        """Initialize 1x1 Conv1d module."""
        super(Conv1d1x1, self).__init__(in_channels, out_channels,
                                        kernel_size=1, padding=0,
                                        dilation=1, bias=bias)


class ResidualBlock(torch.nn.Module):
    """Residual block module in WaveNet."""

    def __init__(self,
                 kernel_size=3,
                 residual_channels=64,
                 gate_channels=128,
                 skip_channels=64,
                 aux_channels=80,
                 dropout=0.0,
                 dilation=1,
                 bias=True,
                 use_causal_conv=False
                 ):
        """Initialize ResidualBlock module.

        Args:
            kernel_size (int): Kernel size of dilation convolution layer.
            residual_channels (int): Number of channels for residual connection.
            gate_channels (int): Number of channels for gated activation (split into two halves).
            skip_channels (int): Number of channels for skip connection.
            aux_channels (int): Number of local conditioning channels, i.e. the auxiliary input dimension.
            dropout (float): Dropout probability.
            dilation (int): Dilation factor.
            bias (bool): Whether to add bias parameter in convolution layers.
            use_causal_conv (bool): Whether to use causal or non-causal convolution.

        """
        super(ResidualBlock, self).__init__()
        self.dropout = dropout
        # no future time stamps available
        if use_causal_conv:
            padding = (kernel_size - 1) * dilation
        else:
            assert (kernel_size - 1) % 2 == 0, "Even kernel sizes are not supported."
            padding = (kernel_size - 1) // 2 * dilation
        self.use_causal_conv = use_causal_conv

        # dilation conv
        self.conv = Conv1d(residual_channels, gate_channels, kernel_size,
                           padding=padding, dilation=dilation, bias=bias)

        # local conditioning
        if aux_channels > 0:
            self.conv1x1_aux = Conv1d1x1(aux_channels, gate_channels, bias=False)
        else:
            self.conv1x1_aux = None

        # conv output is split into two groups
        gate_out_channels = gate_channels // 2
        self.conv1x1_out = Conv1d1x1(gate_out_channels, residual_channels, bias=bias)
        self.conv1x1_skip = Conv1d1x1(gate_out_channels, skip_channels, bias=bias)

    def forward(self, x, c):
        """Calculate forward propagation.

        Args:
            x (Tensor): Input tensor (B, residual_channels, T).
            c (Tensor): Local conditioning auxiliary tensor (B, aux_channels, T).

        Returns:
            Tensor: Output tensor for residual connection (B, residual_channels, T).
            Tensor: Output tensor for skip connection (B, skip_channels, T).

        """
        residual = x
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = self.conv(x)

        # remove future time steps if causal convolution is used
        x = x[:, :, :residual.size(-1)] if self.use_causal_conv else x

        # split into two parts for gated activation
        splitdim = 1
        xa, xb = x.split(x.size(splitdim) // 2, dim=splitdim)

        # local conditioning
        if c is not None:
            assert self.conv1x1_aux is not None
            c = self.conv1x1_aux(c)
            ca, cb = c.split(c.size(splitdim) // 2, dim=splitdim)
            xa, xb = xa + ca, xb + cb

        x = torch.tanh(xa) * torch.sigmoid(xb)

        # for skip connection
        s = self.conv1x1_skip(x)

        # for residual connection
        x = (self.conv1x1_out(x) + residual) * math.sqrt(0.5)

        return x, s
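

# Minimal usage sketch (illustrative only; not part of the upstream module).
# It assumes the default hyperparameters above with dilation=2 and dummy input
# tensors, and checks the shapes of the residual and skip outputs.
if __name__ == "__main__":
    block = ResidualBlock(kernel_size=3, residual_channels=64,
                          gate_channels=128, skip_channels=64,
                          aux_channels=80, dilation=2)
    x = torch.randn(1, 64, 100)   # (B, residual_channels, T)
    c = torch.randn(1, 80, 100)   # (B, aux_channels, T), e.g. mel-spectrogram frames
    out, skip = block(x, c)
    print(out.shape, skip.shape)  # torch.Size([1, 64, 100]) torch.Size([1, 64, 100])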