How to use the mirdata.utils.get_default_dataset_path function in mirdata

To help you get started, we've selected a few mirdata examples based on popular ways this function is used in public projects.
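
Every example below follows the same pattern: when data_home is None, the loader falls back to utils.get_default_dataset_path(DATASET_DIR), which resolves a dataset folder name against the default ~/mir_datasets root. Here is a minimal sketch of that behavior, for orientation only; the MIR_DATASETS_DIR constant and the exact signature are assumptions inferred from the usage in these snippets, not copied from the library source.

import os

# Assumed default root; the docstrings below refer to `~/mir_datasets`.
MIR_DATASETS_DIR = os.path.join(os.path.expanduser('~'), 'mir_datasets')


def get_default_dataset_path(dataset_name):
    """Return the default local path for a dataset folder,
    e.g. 'Orchset' -> /home/<user>/mir_datasets/Orchset.
    """
    return os.path.join(MIR_DATASETS_DIR, dataset_name)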


github mir-dataset-loaders / mirdata / mirdata / ikala.py
def download(data_home=None, force_overwrite=False):
    """Download iKala Dataset. However, iKala dataset is not available for
    download anymore. This function prints a helper message to organize
    pre-downloaded iKala dataset.

    Args:
        data_home (str):
            Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`
    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    download_message = """
        Unfortunately the iKala dataset is not available for download.
        If you have the iKala dataset, place the contents into a folder called
        {ikala_dir} with the following structure:
            > {ikala_dir}/
                > Lyrics/
                > PitchLabel/
                > Wavfile/
        and copy the {ikala_dir} folder to {save_path}
    """.format(
        ikala_dir=DATASET_DIR, save_path=data_home
    )

    # No remote files to fetch; just print the instructions above
    download_utils.downloader(data_home, info_message=download_message)
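
Called with the defaults, this prints the helper message with save_path pointing at the default dataset folder under ~/mir_datasets; no files are fetched. A short usage sketch:

from mirdata import ikala

# Prints instructions for organizing a pre-downloaded copy of iKala;
# nothing is actually downloaded.
ikala.download()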
github mir-dataset-loaders / mirdata / mirdata / rwc_classical.py
def validate(data_home=None, silence=False):
    """Validate if the stored dataset is a valid version

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        missing_files (list): List of file paths that are in the dataset index
            but missing locally
        invalid_checksums (list): List of file paths that exist locally but
            whose checksum differs from the reference checksum in the dataset index

    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    missing_files, invalid_checksums = utils.validator(
        DATA.index, data_home, silence=silence
    )
    return missing_files, invalid_checksums
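
The two returned lists make it easy to report problems after a manual install. A usage sketch, assuming the module-level validate shown above:

from mirdata import rwc_classical

missing_files, invalid_checksums = rwc_classical.validate(silence=True)
print('{} missing files, {} invalid checksums'.format(
    len(missing_files), len(invalid_checksums)))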
github mir-dataset-loaders / mirdata / mirdata / medleydb_pitch.py
def validate(data_home=None, silence=False):
    """Validate if the stored dataset is a valid version

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        missing_files (list): List of file paths that are in the dataset index
            but missing locally
        invalid_checksums (list): List of file paths that exist locally but
            whose checksum differs from the reference checksum in the dataset index
    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    missing_files, invalid_checksums = utils.validator(
        DATA.index, data_home, silence=silence
    )
    return missing_files, invalid_checksums
github mir-dataset-loaders / mirdata / mirdata / tinysol.py
def load(data_home=None):
    """Load TinySOL
    Args:
        data_home (str): Local path where TinySOL is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`
    Returns:
        (dict): {`track_id`: track data}
    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    tinysol_data = {}
    for key in DATA.index.keys():
        tinysol_data[key] = Track(key, data_home=data_home)
    return tinysol_data
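
The returned dictionary maps each track ID to a Track object, so the whole dataset can be inspected in one pass. A usage sketch; only generic access is shown, since the Track attributes do not appear in this snippet:

from mirdata import tinysol

tinysol_data = tinysol.load()
print('{} tracks loaded'.format(len(tinysol_data)))
first_id = sorted(tinysol_data.keys())[0]
print(tinysol_data[first_id])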
github mir-dataset-loaders / mirdata / mirdata / orchset.py
def download(data_home=None, force_overwrite=False):
    """Download ORCHSET Dataset.

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`
        force_overwrite (bool): Whether to overwrite the existing downloaded data

    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    download_utils.downloader(
        data_home, zip_downloads=[REMOTE], force_overwrite=force_overwrite
    )

    # files get downloaded to a folder called Orchset - move everything up a level
    duplicated_orchset_dir = os.path.join(data_home, 'Orchset')
    orchset_files = glob.glob(os.path.join(duplicated_orchset_dir, '*'))

    for fpath in orchset_files:
        shutil.move(fpath, data_home)

    if os.path.exists(duplicated_orchset_dir):
        os.removedirs(duplicated_orchset_dir)
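
The cleanup step is needed because the zip archive extracts into an extra Orchset folder inside data_home, so the loop moves every file up one level and os.removedirs deletes the then-empty duplicate directory, leaving paths that match the dataset index.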
github mir-dataset-loaders / mirdata / mirdata / ikala.py
def load(data_home=None):
    """Load iKala dataset

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        (dict): {`track_id`: track data}

    """

    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    ikala_data = {}
    for key in track_ids():
        ikala_data[key] = Track(key, data_home=data_home)
    return ikala_data
github mir-dataset-loaders / mirdata / mirdata / medley_solos_db.py
def load(data_home=None):
    """Load Medley-solos-DB
    Args:
        data_home (str): Local path where Medley-solos-DB is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`
    Returns:
        (dict): {`track_id`: track data}
    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    medley_solos_db_data = {}
    for key in DATA.index.keys():
        medley_solos_db_data[key] = Track(key, data_home=data_home)
    return medley_solos_db_data
github mir-dataset-loaders / mirdata / mirdata / rwc_classical.py
def load(data_home=None):
    """Load RWC-Classical dataset

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        (dict): {`track_id`: track data}

    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    rwc_classical_data = {}
    for key in track_ids():
        rwc_classical_data[key] = Track(key, data_home=data_home)
    return rwc_classical_data
github mir-dataset-loaders / mirdata / mirdata / orchset.py
def load(data_home=None):
    """Load ORCHSET dataset

    Args:
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        (dict): {`track_id`: track data}

    """

    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    orchset_data = {}
    for key in track_ids():
        orchset_data[key] = Track(key, data_home=data_home)
    return orchset_data
github mir-dataset-loaders / mirdata / mirdata / gtzan_genre.py
def load(data_home=None):
    """Load GTZAN-Genre

    Args:
        data_home (str): Local path where GTZAN-Genre is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Returns:
        (dict): {`track_id`: track data}
    """
    if data_home is None:
        data_home = utils.get_default_dataset_path(DATASET_DIR)

    data = {}
    for key in DATA.index.keys():
        data[key] = Track(key, data_home=data_home)
    return data
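
Taken together, the snippets follow a single download / validate / load workflow rooted at the same default path. A closing sketch with gtzan_genre, assuming it exposes the same three module-level functions shown for the other loaders:

from mirdata import gtzan_genre

gtzan_genre.download()                     # fetch into the default data_home
missing, invalid = gtzan_genre.validate()  # compare local files to the index
data = gtzan_genre.load()                  # {track_id: Track}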