jeduardogruiz committed on
Commit
a7056e6
1 Parent(s): 1ecb5aa

Update Conv.py

Browse files
Files changed (1) hide show
  1. Conv.py +7 -7
Conv.py CHANGED
@@ -17,7 +17,7 @@ from torch.nn.utils import spectral_norm, weight_norm
17
 
18
  from .norm import ConvLayerNorm
19
 
20
-
21
  CONV_NORMALIZATIONS = frozenset(['none', 'weight_norm', 'spectral_norm',
22
  'time_layer_norm', 'layer_norm', 'time_group_norm'])
23
 
@@ -73,7 +73,7 @@ def pad_for_conv1d(x: torch.Tensor, kernel_size: int, stride: int, padding_total
73
  1 2 3 4 # once you removed padding, we are missing one time step !
74
  """
75
  extra_padding = get_extra_padding_for_conv1d(x, kernel_size, stride, padding_total)
76
- return F.pad(x, (0, extra_padding))
77
 
78
 
79
  def pad1d(x: torch.Tensor, paddings: tp.Tuple[int, int], mode: str = 'zero', value: float = 0.):
@@ -136,7 +136,7 @@ class NormConv2d(nn.Module):
136
  def forward(self, x):
137
  x = self.conv(x)
138
  x = self.norm(x)
139
- return x
140
 
141
 
142
  class NormConvTranspose1d(nn.Module):
@@ -145,7 +145,7 @@ class NormConvTranspose1d(nn.Module):
145
  """
146
  def __init__(self, *args, causal: bool = False, norm: str = 'none',
147
  norm_kwargs: tp.Dict[str, tp.Any] = {}, **kwargs):
148
- super().__init__()
149
  self.convtr = apply_parametrization_norm(nn.ConvTranspose1d(*args, **kwargs), norm)
150
  self.norm = get_norm_module(self.convtr, causal, norm, **norm_kwargs)
151
  self.norm_type = norm
@@ -173,8 +173,8 @@ class NormConvTranspose2d(nn.Module):
173
 
174
 
175
  class SConv1d(nn.Module):
176
- """Conv1d with some builtin handling of asymmetric or causal padding
177
- and normalization.
178
  """
179
  def __init__(self, in_channels: int, out_channels: int,
180
  kernel_size: int, stride: int = 1, dilation: int = 1,
@@ -201,7 +201,7 @@ class SConv1d(nn.Module):
201
  padding_total = kernel_size - stride
202
  extra_padding = get_extra_padding_for_conv1d(x, kernel_size, stride, padding_total)
203
  if self.causal:
204
- # Left padding for causal
205
  x = pad1d(x, (padding_total, extra_padding), mode=self.pad_mode)
206
  else:
207
  # Asymmetric padding required for odd strides
 
17
 
18
  from .norm import ConvLayerNorm
19
 
20
+
21
  CONV_NORMALIZATIONS = frozenset(['none', 'weight_norm', 'spectral_norm',
22
  'time_layer_norm', 'layer_norm', 'time_group_norm'])
23
 
 
73
  1 2 3 4 # once you removed padding, we are missing one time step !
74
  """
75
  extra_padding = get_extra_padding_for_conv1d(x, kernel_size, stride, padding_total)
76
+ return F.pad(x, (0, extra_padding))
77
 
78
 
79
  def pad1d(x: torch.Tensor, paddings: tp.Tuple[int, int], mode: str = 'zero', value: float = 0.):
 
136
  def forward(self, x):
137
  x = self.conv(x)
138
  x = self.norm(x)
139
+ return x
140
 
141
 
142
  class NormConvTranspose1d(nn.Module):
 
145
  """
146
  def __init__(self, *args, causal: bool = False, norm: str = 'none',
147
  norm_kwargs: tp.Dict[str, tp.Any] = {}, **kwargs):
148
+ super().__init__()
149
  self.convtr = apply_parametrization_norm(nn.ConvTranspose1d(*args, **kwargs), norm)
150
  self.norm = get_norm_module(self.convtr, causal, norm, **norm_kwargs)
151
  self.norm_type = norm
 
173
 
174
 
175
  class SConv1d(nn.Module):
176
+ """Conv1d with some builtin handling of asymmetric or causal padding
177
+ and normalization.
178
  """
179
  def __init__(self, in_channels: int, out_channels: int,
180
  kernel_size: int, stride: int = 1, dilation: int = 1,
 
201
  padding_total = kernel_size - stride
202
  extra_padding = get_extra_padding_for_conv1d(x, kernel_size, stride, padding_total)
203
  if self.causal:
204
+ # Left padding for causal
205
  x = pad1d(x, (padding_total, extra_padding), mode=self.pad_mode)
206
  else:
207
  # Asymmetric padding required for odd strides