How to use the asteroid.engine.optimizers.make_optimizer function in asteroid

To help you get started, we’ve selected a few asteroid examples based on popular ways it is used in public projects.
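
make_optimizer builds a torch optimizer from a model's parameters plus a few keyword arguments. Here is a minimal sketch of the call itself; the stand-in torch.nn.Linear is ours, not from the examples, while the optimizer and lr keywords mirror the conf['optim'] entries used in the snippets below.

import torch
from asteroid.engine.optimizers import make_optimizer

# Stand-in model; any torch.nn.Module works the same way.
model = torch.nn.Linear(128, 128)
# 'optimizer' picks the torch optimizer by name; the remaining keyword
# arguments (lr, weight_decay, ...) are forwarded to its constructor.
optimizer = make_optimizer(model.parameters(), optimizer="adam", lr=1e-3)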

github mpariente/asteroid/egs/fuss/baseline/model.py (View on GitHub)
    improved = mask_conf.pop('improved')
    # We will take magnitude and concat with ReIm
    if improved:
        masker = TDCNpp(in_chan=3 * enc.filterbank.n_feats_out // 2,
                        out_chan=enc.filterbank.n_feats_out,
                        n_src=3,  # Hardcoded here because of FUSS
                        **mask_conf)
    else:
        masker = TDConvNet(in_chan=3 * enc.filterbank.n_feats_out // 2,
                           out_chan=enc.filterbank.n_feats_out,
                           n_src=3,  # Hardcoded here because of FUSS
                           **mask_conf)

    model = Model(enc, masker, dec, learnable_scaling=mask_conf["learnable_scaling"])
    # Define optimizer of this model
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    return model, optimizer
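
In these recipes, conf['optim'] is the optim section of the recipe's conf.yml parsed into a plain dictionary. A hypothetical example of what the **conf['optim'] unpacking expands to (the exact keys vary by recipe):

conf = {'optim': {'optimizer': 'adam', 'lr': 0.001, 'weight_decay': 0.0}}
# Equivalent to:
# make_optimizer(model.parameters(), optimizer='adam', lr=0.001, weight_decay=0.0)
optimizer = make_optimizer(model.parameters(), **conf['optim'])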

github mpariente/asteroid/egs/wsj0-mix/DeepClustering/model.py (View on GitHub)
def make_model_and_optimizer(conf):
    """ Function to define the model and optimizer for a config dictionary.
    Args:
        conf: Dictionary containing the output of hierarchical argparse.
    Returns:
        model, optimizer.
    The main goal of this function is to make reloading for resuming
    and evaluation very simple.
    """
    enc, dec = fb.make_enc_dec('stft', **conf['filterbank'])
    masker = Chimera(enc.n_feats_out // 2,
                     **conf['masknet'])
    model = Model(enc, masker, dec)
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    return model, optimizer

github mpariente/asteroid/egs/MiniLibriMix/lhotse/train.py (View on GitHub)
    class Model(torch.nn.Module):
        def __init__(self, net):
            super(Model, self).__init__()
            # self.transf = torch.nn.Conv1d(23, 32, 1, bias=True)
            self.net = net
            # self.back = torch.nn.Conv1d(32, 23, 1, bias=True)

        def forward(self, x):
            # x = self.transf(x)
            mask = self.net(x)
            masked = x.unsqueeze(1) * mask
            # b, s, ch, frames = masked.size()
            return masked  # self.back(masked.reshape(b*s, ch, frames)).reshape(b, s, -1, frames)

    model = Model(DPRNN(**conf['masknet']))  # No filterbank; we just mask the features
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    # Define scheduler
    scheduler = None
    if conf['training']['half_lr']:
        scheduler = ReduceLROnPlateau(optimizer=optimizer, factor=0.5,
                                      patience=5)
    # Just after instantiating, save the args. Easy loading in the future.
    exp_dir = conf['main_args']['exp_dir']
    os.makedirs(exp_dir, exist_ok=True)
    conf_path = os.path.join(exp_dir, 'conf.yml')
    with open(conf_path, 'w') as outfile:
        yaml.safe_dump(conf, outfile)

    # Define Loss function.

    loss_func = PITLossWrapper(lambda x, y: pairwise_neg_sisdr(x, y).mean(-1), pit_from='pw_mtx')
    system = System(model=model, loss_func=loss_func, optimizer=optimizer,
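
The snippet above dumps conf to conf.yml right after instantiation precisely so the run can be rebuilt later. A minimal sketch of the matching reload, assuming the make_model_and_optimizer pattern from the other examples here:

import os
import yaml

# Rebuild the same model/optimizer pair from a saved experiment directory.
with open(os.path.join(exp_dir, 'conf.yml')) as f:
    conf = yaml.safe_load(f)
model, optimizer = make_model_and_optimizer(conf)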

github mpariente/asteroid/egs/wham/TwoStep/model.py (View on GitHub)
    # Define building blocks for local model
    if model_part == 'filterbank':
        model = AdaptiveEncoderDecoder(
            freq_res=conf['filterbank']['n_filters'],
            sample_res=conf['filterbank']['kernel_size'],
            n_sources=conf['masknet']['n_src'])
    elif model_part == 'separator':
        if pretrained_filterbank is None:
            raise ValueError('A pretrained filterbank is required for the '
                             'initialization of the separator.')
        model = Model(pretrained_filterbank, conf)
    else:
        raise ValueError('Part to train: {} is not available.'.format(
            model_part))
    # Define optimizer of this model
    optimizer = make_optimizer(
        model.parameters(),
        optimizer=conf[model_part + '_training'][model_part[0] + '_optimizer'],
        lr=conf[model_part + '_training'][model_part[0] + '_lr'])
    return model, optimizer
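
The nested keys above are derived from model_part itself: for model_part == 'filterbank', the optimizer name and learning rate are read from conf['filterbank_training']['f_optimizer'] and conf['filterbank_training']['f_lr']; for model_part == 'separator', from conf['separator_training']['s_optimizer'] and conf['separator_training']['s_lr'].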

github mpariente/asteroid/egs/wham/ConvTasNet/model.py (View on GitHub)
""" Function to define the model and optimizer for a config dictionary.
    Args:
        conf: Dictionary containing the output of hierachical argparse.
    Returns:
        model, optimizer.
    The main goal of this function is to make reloading for resuming
    and evaluation very simple.
    """
    # Define building blocks for local model
    enc, dec = fb.make_enc_dec('free', **conf['filterbank'])
    masker = TDConvNet(in_chan=enc.filterbank.n_feats_out,
                       out_chan=enc.filterbank.n_feats_out,
                       **conf['masknet'])
    model = Model(enc, masker, dec)
    # Define optimizer of this model
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    return model, optimizer

github mpariente/asteroid/egs/wham/DPRNN/model.py (View on GitHub)
def make_model_and_optimizer(conf):
    """ Function to define the model and optimizer for a config dictionary.
    Args:
        conf: Dictionary containing the output of hierarchical argparse.
    Returns:
        model, optimizer.
    The main goal of this function is to make reloading for resuming
    and evaluation very simple.
    """
    # Define building blocks for local model
    enc, dec = fb.make_enc_dec('free', **conf['filterbank'])
    masker = DPRNN(**conf['masknet'])
    model = Model(enc, masker, dec)
    # Define optimizer of this model
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    return model, optimizer

github mpariente/asteroid/egs/wham/FilterbankDesign/model.py (View on GitHub)
    # The input post-processing changes the dimensions of the input features to
    # the mask network. Different types of masks impose different output
    # dimensions on the mask network's output. We correct for these here.
    nn_in = int(encoder.n_feats_out * encoder.in_chan_mul)
    nn_out = int(encoder.n_feats_out * encoder.out_chan_mul)
    masker = TDConvNet(in_chan=nn_in, out_chan=nn_out,
                       **conf['masknet'])
    # Another possibility is to correct for these effects inside Model,
    # but then the masker would also have to be instantiated inside.
    model = Model(encoder, masker, decoder)

    # The model is defined in Container, which is passed to DataParallel.

    # Define optimizer: it can be instantiated from a dictionary as well.
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    return model, optimizer

github mpariente/asteroid/egs/avspeech/looking-to-listen/model.py (View on GitHub)
    Returns:
        model, optimizer
    """
    device = torch.device(conf["training"]["device"])
    model = Audio_Visual_Fusion(conf["main_args"]["n_src"], device)
    model = model.to(device)
    device_count = torch.cuda.device_count()
    if len(gpu_ids) > 1 and device_count > 1:
        if len(gpu_ids) != device_count:
            print(f"Using {gpu_ids} GPUs")
        else:
            print(f"Using all {device_count} GPUs")
        model = torch.nn.DataParallel(model, device_ids=gpu_ids)

    optimizer = make_optimizer(model.parameters(), **conf["optim"])
    return model, optimizer
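
Note that the optimizer is created after the optional DataParallel wrap. torch.nn.DataParallel is itself a Module whose parameters() recurses into the wrapped model, so the make_optimizer call is the same whether or not the wrap happened.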