How to use the asteroid.torch_utils module in asteroid

To help you get started, we’ve selected a few asteroid examples based on popular ways it is used in public projects.

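All of the examples below revolve around the same basic pattern: build a model, load a training checkpoint with torch.load, and pass its state dict to torch_utils.load_state_dict_in. Here is a minimal sketch of that pattern; the checkpoint path is a placeholder, the nn.Linear stands in for the recipe-built separation model, and the `from asteroid import torch_utils` import is an assumption about how these recipes import the module.

import torch
from asteroid import torch_utils  # assumed import, matching the recipes below

# Stand-in model; the recipes build theirs with a recipe-local
# make_model_and_optimizer(train_conf) helper instead.
model = torch.nn.Linear(16, 16)

# Placeholder path to a checkpoint saved during training.
checkpoint = torch.load('exp/checkpoints/best.ckpt', map_location='cpu')

# Copy the saved weights into the freshly built model and switch to inference mode.
model = torch_utils.load_state_dict_in(checkpoint['state_dict'], model)
model.eval()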

github mpariente / AsSteroid / egs / dns_challenge / baseline / model.py View on GitHub
        exp_dir(str): Experiment directory. Expects to find
            `'best_k_models.json'` there.

    Returns:
        nn.Module the best pretrained model according to the val_loss.
    """
    # Create the model from recipe-local function
    model, _ = make_model_and_optimizer(train_conf)
    # Last best model summary
    with open(os.path.join(exp_dir, 'best_k_models.json'), "r") as f:
        best_k = json.load(f)
    best_model_path = min(best_k, key=best_k.get)
    # Load checkpoint
    checkpoint = torch.load(best_model_path, map_location='cpu')
    # Load state_dict into model.
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'],
                                           model)
    model.eval()
    return model
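The best_k_models.json file read above is assumed to map checkpoint paths to their validation losses, which is why min(best_k, key=best_k.get) returns the path of the best checkpoint. A hand-built toy dict (hypothetical paths and values) shows the selection:

# Hypothetical contents of best_k_models.json: checkpoint path -> val_loss.
best_k = {
    'exp/checkpoints/epoch=12.ckpt': -14.2,
    'exp/checkpoints/epoch=20.ckpt': -15.8,
    'exp/checkpoints/epoch=27.ckpt': -15.1,
}
# min over the keys, ordered by their values: the path with the lowest val_loss wins.
best_model_path = min(best_k, key=best_k.get)
print(best_model_path)  # exp/checkpoints/epoch=20.ckpt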

github mpariente / AsSteroid / egs / wsj0-mix / DeepClustering / model.py View on GitHub
        # Last best model summary
        with open(os.path.join(exp_dir, 'best_k_models.json'), "r") as f:
            best_k = json.load(f)
        best_model_path = min(best_k, key=best_k.get)
    except FileNotFoundError:
        # Get last checkpoint
        all_ckpt = os.listdir(os.path.join(exp_dir, 'checkpoints/'))
        all_ckpt = [(ckpt, int("".join(filter(str.isdigit,
                                              os.path.basename(ckpt)))))
                    for ckpt in all_ckpt if ckpt.find('ckpt') >= 0]
        all_ckpt.sort(key=lambda x: x[1])
        best_model_path = os.path.join(exp_dir, 'checkpoints', all_ckpt[-1][0])
    # Load checkpoint
    checkpoint = torch.load(best_model_path, map_location='cpu')
    # Load state_dict into model.
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'], model)
    model.eval()
    return model
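The except branch above falls back to the most recent checkpoint when best_k_models.json is missing: it keeps every file whose name contains 'ckpt', concatenates all digits in the name into an integer, and sorts on that number. The filenames below are made up, but they show what the digit extraction does (and why it only yields a chronological order when the epoch or step number is the only digit run in the name):

import os

# Hypothetical directory listing of exp_dir/checkpoints/.
all_ckpt = ['epoch=3.ckpt', 'epoch=12.ckpt', 'epoch=7.ckpt', 'metrics.json']

# Keep only checkpoint files and pair each with the integer formed by
# concatenating every digit in its name: 'epoch=12.ckpt' -> 12.
all_ckpt = [(ckpt, int("".join(filter(str.isdigit, os.path.basename(ckpt)))))
            for ckpt in all_ckpt if ckpt.find('ckpt') >= 0]
all_ckpt.sort(key=lambda x: x[1])
print(all_ckpt[-1][0])  # epoch=12.ckpt, the latest checkpoint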

github mpariente / AsSteroid / egs / libri_2_mix / ConvTasNet / model.py View on GitHub
        exp_dir(str): Experiment directory. Expects to find
            `'best_k_models.json'` there.

    Returns:
        nn.Module the best pretrained model according to the val_loss.
    """
    # Create the model from recipe-local function
    model, _ = make_model_and_optimizer(train_conf)
    # Last best model summary
    with open(os.path.join(exp_dir, 'best_k_models.json'), "r") as f:
        best_k = json.load(f)
    best_model_path = min(best_k, key=best_k.get)
    # Load checkpoint
    checkpoint = torch.load(best_model_path, map_location='cpu')
    # Load state_dict into model.
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'],
                                           model)
    model.eval()
    return model

github mpariente / AsSteroid / egs / avspeech / looking-to-listen / model.py View on GitHub
        exp_dir: Logdir created by Catalyst.

    Returns:
        model
    """
    model, optimizer = make_model_and_optimizer(train_conf)

    # Catalyst stores the best model as: logdir/checkpoints/best_full.pth
    exp_dir = Path(exp_dir) if isinstance(exp_dir, str) else exp_dir
    best_model_path = exp_dir / "checkpoints" / "best_full.pth"
    if not best_model_path.is_file():
        print(f"No best path in logdir: {exp_dir}. Initializing model...")
        return model

    checkpoint = torch.load(best_model_path)
    model = torch_utils.load_state_dict_in(checkpoint["model_state_dict"], model)
    return model
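This recipe differs from the others in two ways: the checkpoint comes from Catalyst, so the weights live under 'model_state_dict' rather than 'state_dict', and the path handling uses pathlib. A small sketch of the path check, with a hypothetical log directory:

from pathlib import Path

# Hypothetical log directory; Catalyst is assumed to write its best checkpoint
# to <logdir>/checkpoints/best_full.pth, as the recipe comment above states.
exp_dir = Path('exp/logdir')
best_model_path = exp_dir / 'checkpoints' / 'best_full.pth'

if best_model_path.is_file():
    print(f'Would load weights from {best_model_path}')
else:
    print(f'No best checkpoint in {exp_dir}, keeping the freshly initialized model')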

github mpariente / AsSteroid / egs / wham / TwoStep / model.py View on GitHub
        available_filter_banks = [p for p in os.listdir(checkpoint_dir)
                                  if '.ckpt' in p]
        if available_filter_banks:
            filterbank_available = True

    if not filterbank_available:
        return None

    filterbank_path = os.path.join(checkpoint_dir, available_filter_banks[0])
    print('Going to load from: {}'.format(filterbank_path))
    checkpoint = torch.load(filterbank_path, map_location='cpu')
    # Update the number of sources (it depends on the task)
    conf['masknet'].update(
        {'n_src': checkpoint['training_config']['masknet']['n_src']})
    filterbank, _ = make_model_and_optimizer(conf, model_part='filterbank')
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'], filterbank)
    print('Successfully loaded filterbank from: {}'.format(filterbank_path))
    return model
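Before rebuilding the filterbank, the snippet above overwrites n_src in the local configuration with the value stored in the checkpoint's training config, so the rebuilt model matches the one that was actually trained. A toy version of that nested-dict update (hypothetical values):

# Hypothetical local configuration and checkpoint metadata.
conf = {'masknet': {'n_src': 2, 'n_blocks': 8}}
checkpoint = {'training_config': {'masknet': {'n_src': 3}}}

# Overwrite only n_src so the rebuilt filterbank matches the checkpointed one.
conf['masknet'].update(
    {'n_src': checkpoint['training_config']['masknet']['n_src']})
print(conf['masknet'])  # {'n_src': 3, 'n_blocks': 8}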

github mpariente / AsSteroid / egs / wham / ConvTasNet / model.py View on GitHub
        exp_dir(str): Experiment directory. Expects to find
            `'best_k_models.json'` there.

    Returns:
        nn.Module the best pretrained model according to the val_loss.
    """
    # Create the model from recipe-local function
    model, _ = make_model_and_optimizer(train_conf)
    # Last best model summary
    with open(os.path.join(exp_dir, 'best_k_models.json'), "r") as f:
        best_k = json.load(f)
    best_model_path = min(best_k, key=best_k.get)
    # Load checkpoint
    checkpoint = torch.load(best_model_path, map_location='cpu')
    # Load state_dict into model.
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'],
                                           model)
    model.eval()
    return model

github mpariente / AsSteroid / egs / whamr / TasNet / model.py View on GitHub
        exp_dir(str): Experiment directory. Expects to find
            `'best_k_models.json'` there.

    Returns:
        nn.Module the best pretrained model according to the val_loss.
    """
    # Create the model from recipe-local function
    model, _ = make_model_and_optimizer(train_conf)
    # Last best model summary
    with open(os.path.join(exp_dir, 'best_k_models.json'), "r") as f:
        best_k = json.load(f)
    best_model_path = min(best_k, key=best_k.get)
    # Load checkpoint
    checkpoint = torch.load(best_model_path, map_location='cpu')
    # Load state_dict into model.
    model = torch_utils.load_state_dict_in(checkpoint['state_dict'],
                                           model)
    model.eval()
    return model
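Two details recur in almost every snippet above: map_location='cpu' lets a checkpoint saved on a GPU machine be loaded where no GPU is available, and the final model.eval() switches dropout and batch-norm layers to inference behaviour. A self-contained sketch using a dummy model and an in-memory buffer in place of a real checkpoint file:

import io
import torch

# Dummy model standing in for a recipe-built separation model.
model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.Dropout(0.5))

# Save a checkpoint-like dict to an in-memory buffer (a file path works the same way).
buffer = io.BytesIO()
torch.save({'state_dict': model.state_dict()}, buffer)
buffer.seek(0)

# map_location='cpu' remaps any GPU tensors in the checkpoint onto the CPU.
checkpoint = torch.load(buffer, map_location='cpu')
model.load_state_dict(checkpoint['state_dict'])

# eval() disables dropout and freezes batch-norm statistics for inference.
model.eval()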