How to use the asteroid.masknn.norms.get function in asteroid

To help you get started, we’ve selected a few asteroid examples based on popular ways it is used in public projects. Every snippet below calls asteroid.masknn.norms.get to look up a normalization class from its string identifier (the norm_type argument, typically "gLN").
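Before the examples, here is a minimal sketch of the function itself: norms.get takes a string identifier and returns the corresponding normalization class (not an instance), which you then call with the number of channels. The "gLN" identifier (global layer norm) and the 128-channel, 300-frame shapes below are illustrative choices, not requirements.

import torch
from asteroid.masknn import norms

# Look up the normalization class by its string identifier.
norm_cls = norms.get("gLN")

# The result is a class: instantiate it with the channel count,
# then apply it to a (batch, channels, frames) tensor.
layer_norm = norm_cls(128)
x = torch.randn(4, 128, 300)
y = layer_norm(x)  # same shape as x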


github mpariente / asteroid / egs / fuss / baseline / model.py
def __init__(self, in_chan, n_src, out_chan, n_blocks, n_repeats, bn_chan,
                 hid_chan, skip_chan, kernel_size, norm_type, mask_act,
                 learnable_scaling):
        # Signature reconstructed from the assignments below; the default
        # values were truncated out of this excerpt.
        super().__init__()
        self.in_chan = in_chan
        self.n_src = n_src
        out_chan = out_chan if out_chan else in_chan
        self.out_chan = out_chan
        self.n_blocks = n_blocks
        self.n_repeats = n_repeats
        self.bn_chan = bn_chan
        self.hid_chan = hid_chan
        self.skip_chan = skip_chan
        self.kernel_size = kernel_size
        self.norm_type = norm_type
        self.mask_act = mask_act
        self.learnable_scaling = learnable_scaling

        layer_norm = norms.get(norm_type)(in_chan)
        bottleneck_conv = nn.Conv1d(in_chan, bn_chan, 1)
        self.bottleneck = nn.Sequential(layer_norm, bottleneck_conv)
        # Succession of Conv1DBlock with exponentially increasing dilation.
        self.TCN = nn.ModuleList()
        for r in range(n_repeats):
            for x in range(n_blocks):
                padding = (kernel_size - 1) * 2**x // 2
                self.TCN.append(Conv1DBlock(bn_chan, hid_chan, skip_chan,
                                            kernel_size, padding=padding,
                                            dilation=2**x, norm_type=norm_type))
        # Dense connection in TDCNpp
        self.dense_skip = nn.ModuleList()
        for r in range(n_repeats-1):
            self.dense_skip.append(nn.Conv1d(bn_chan, bn_chan, 1))

        scaling_param = torch.tensor([0.9**l for l in range(1, n_blocks)])
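The line layer_norm = norms.get(norm_type)(in_chan) is the pattern to notice here: lookup and instantiation happen in a single expression, and the result is dropped straight into an nn.Sequential bottleneck. A minimal sketch of that pattern, assuming 128 input and 64 bottleneck channels:

import torch.nn as nn
from asteroid.masknn import norms

in_chan, bn_chan = 128, 64
layer_norm = norms.get("gLN")(in_chan)  # lookup + instantiate in one go
bottleneck = nn.Sequential(layer_norm, nn.Conv1d(in_chan, bn_chan, 1))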
github mpariente / asteroid / asteroid / masknn / convolutional.py
norm_type="gLN", mask_act='relu'):
        super(TDConvNet, self).__init__()
        self.in_chan = in_chan
        self.n_src = n_src
        out_chan = out_chan if out_chan else in_chan
        self.out_chan = out_chan
        self.n_blocks = n_blocks
        self.n_repeats = n_repeats
        self.bn_chan = bn_chan
        self.hid_chan = hid_chan
        self.skip_chan = skip_chan
        self.kernel_size = kernel_size
        self.norm_type = norm_type
        self.mask_act = mask_act

        layer_norm = norms.get(norm_type)(in_chan)
        bottleneck_conv = nn.Conv1d(in_chan, bn_chan, 1)
        self.bottleneck = nn.Sequential(layer_norm, bottleneck_conv)
        # Succession of Conv1DBlock with exponentially increasing dilation.
        self.TCN = nn.ModuleList()
        for r in range(n_repeats):
            for x in range(n_blocks):
                padding = (kernel_size - 1) * 2**x // 2
                self.TCN.append(Conv1DBlock(bn_chan, hid_chan, skip_chan,
                                            kernel_size, padding=padding,
                                            dilation=2**x, norm_type=norm_type))
        mask_conv_inp = skip_chan if skip_chan else bn_chan
        mask_conv = nn.Conv1d(mask_conv_inp, n_src*out_chan, 1)
        self.mask_net = nn.Sequential(nn.PReLU(), mask_conv)
        # Get activation function.
        mask_nl_class = activations.get(mask_act)
        # For softmax, feed the source dimension (dim=1 is the source axis).
        if has_arg(mask_nl_class, 'dim'):
            self.output_act = mask_nl_class(dim=1)
        else:
            self.output_act = mask_nl_class()
github mpariente / asteroid / asteroid / masknn / recurrent.py
        # From DPRNN.__init__; the signature and the first attribute
        # assignments were truncated out of this excerpt.
        self.out_chan = out_chan
        self.bn_chan = bn_chan
        self.hid_size = hid_size
        self.chunk_size = chunk_size
        hop_size = hop_size if hop_size is not None else chunk_size // 2
        self.hop_size = hop_size
        self.n_repeats = n_repeats
        self.n_src = n_src
        self.norm_type = norm_type
        self.mask_act = mask_act
        self.bidirectional = bidirectional
        self.rnn_type = rnn_type
        self.num_layers = num_layers
        self.dropout = dropout

        layer_norm = norms.get(norm_type)(in_chan)
        bottleneck_conv = nn.Conv1d(in_chan, bn_chan, 1)
        self.bottleneck = nn.Sequential(layer_norm, bottleneck_conv)

        # Succession of DPRNNBlocks.
        net = []
        for x in range(self.n_repeats):
            net += [DPRNNBlock(bn_chan, hid_size, norm_type=norm_type,
                               bidirectional=bidirectional, rnn_type=rnn_type,
                               num_layers=num_layers, dropout=dropout)]
        self.net = nn.Sequential(*net)
        # Masking in 3D space
        net_out_conv = nn.Conv2d(bn_chan, n_src*bn_chan, 1)
        self.first_out = nn.Sequential(nn.PReLU(), net_out_conv)
        # Gating and masking in 2D space (after fold)
        self.net_out = nn.Sequential(nn.Conv1d(bn_chan, bn_chan, 1), nn.Tanh())
        self.net_gate = nn.Sequential(nn.Conv1d(bn_chan, bn_chan, 1),
                                      nn.Sigmoid())
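The two heads built at the end implement the usual DPRNN output gating: the mask features are the elementwise product of a Tanh branch and a Sigmoid branch. A minimal sketch of the gating step, with an assumed (batch, bn_chan, frames) input:

import torch
import torch.nn as nn

bn_chan = 64
net_out = nn.Sequential(nn.Conv1d(bn_chan, bn_chan, 1), nn.Tanh())
net_gate = nn.Sequential(nn.Conv1d(bn_chan, bn_chan, 1), nn.Sigmoid())

x = torch.randn(4, bn_chan, 300)
gated = net_out(x) * net_gate(x)  # elementwise gating into (-1, 1)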
github mpariente / asteroid / asteroid / masknn / convolutional.py
def __init__(self, in_chan, hid_chan, skip_out_chan, kernel_size, padding,
                 dilation, norm_type="gLN"):
        super(Conv1DBlock, self).__init__()
        self.skip_out_chan = skip_out_chan
        conv_norm = norms.get(norm_type)
        in_conv1d = nn.Conv1d(in_chan, hid_chan, 1)
        depth_conv1d = nn.Conv1d(hid_chan, hid_chan, kernel_size,
                                 padding=padding, dilation=dilation,
                                 groups=hid_chan)
        self.shared_block = nn.Sequential(in_conv1d, nn.PReLU(),
                                          conv_norm(hid_chan), depth_conv1d,
                                          nn.PReLU(), conv_norm(hid_chan))
        self.res_conv = nn.Conv1d(hid_chan, in_chan, 1)
        if skip_out_chan:
            self.skip_conv = nn.Conv1d(hid_chan, skip_out_chan, 1)
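Note that Conv1DBlock calls norms.get once and then instantiates the returned class twice, so the two normalization layers have independent parameters. A short sketch of the same reuse, with illustrative channel sizes:

import torch.nn as nn
from asteroid.masknn import norms

conv_norm = norms.get("gLN")  # class lookup happens once
hid_chan = 512
block = nn.Sequential(
    nn.Conv1d(128, hid_chan, 1), nn.PReLU(), conv_norm(hid_chan),
    nn.Conv1d(hid_chan, hid_chan, 3, padding=1, groups=hid_chan),
    nn.PReLU(), conv_norm(hid_chan),  # a second, independent instance
)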
github mpariente / asteroid / asteroid / masknn / recurrent.py
def __init__(self, in_chan, hid_size, norm_type="gLN", bidirectional=True,
                 rnn_type="LSTM", num_layers=1, dropout=0):
        super(DPRNNBlock, self).__init__()
        # IntraRNN and linear projection layer (always bi-directional)
        self.intra_RNN = SingleRNN(rnn_type, in_chan, hid_size, num_layers,
                                   dropout=dropout, bidirectional=True)
        self.intra_linear = nn.Linear(hid_size * 2, in_chan)
        self.intra_norm = norms.get(norm_type)(in_chan)
        # InterRNN block and linear projection layer (uni or bi-directional)
        self.inter_RNN = SingleRNN(rnn_type, in_chan, hid_size, num_layers,
                                   dropout=dropout, bidirectional=bidirectional)
        num_direction = int(bidirectional) + 1
        self.inter_linear = nn.Linear(hid_size * num_direction, in_chan)
        self.inter_norm = norms.get(norm_type)(in_chan)
github mpariente / asteroid / egs / wham / WaveSplit / wavesplit.py
def __init__(self, in_chan, hid_chan, spk_vec_chan, kernel_size, padding,
                 dilation, norm_type="gLN", use_FiLM=True):
        super(SepConv1DBlock, self).__init__()

        self.use_FiLM = use_FiLM
        conv_norm = norms.get(norm_type)
        self.depth_conv1d = nn.Conv1d(in_chan, hid_chan, kernel_size,
                                      padding=padding, dilation=dilation)
        self.out = nn.Sequential(nn.PReLU(), conv_norm(hid_chan))

        # FiLM conditioning
        if self.use_FiLM:
            self.mul_lin = nn.Linear(spk_vec_chan, hid_chan)
        self.add_lin = nn.Linear(spk_vec_chan, hid_chan)