How to use the mirdata.utils.F0Data class in mirdata

To help you get started, we've selected a few mirdata examples showing popular ways mirdata.utils.F0Data is used in public projects.

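In mirdata 0.x, utils.F0Data is a namedtuple with the fields times, frequencies, and confidence; every snippet below either builds one or asserts against its fields. A minimal construction sketch (the values are illustrative):

import numpy as np
from mirdata import utils

# F0Data is a namedtuple: (times, frequencies, confidence)
f0 = utils.F0Data(
    times=np.array([0.0, 0.01]),         # frame times in seconds
    frequencies=np.array([0.0, 440.0]),  # f0 in Hz; 0.0 marks unvoiced frames
    confidence=np.array([0.0, 1.0]),     # voicing confidence in [0, 1]
)
print(f0.frequencies)  # -> [  0. 440.]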

github mir-dataset-loaders / mirdata / tests / test_medleydb_melody.py
def test_load_melody():
    # load a file which exists
    melody_path = (
        'tests/resources/mir_datasets/MedleyDB-Melody/'
        + 'melody1/MusicDelta_Beethoven_MELODY1.csv'
    )
    melody_data = medleydb_melody.load_melody(melody_path)

    # check types
    assert type(melody_data) == utils.F0Data
    assert type(melody_data.times) is np.ndarray
    assert type(melody_data.frequencies) is np.ndarray
    assert type(melody_data.confidence) is np.ndarray

    # check values
    assert np.array_equal(
        melody_data.times, np.array([0.0058049886621315194, 0.052244897959183675])
    )
    assert np.array_equal(melody_data.frequencies, np.array([0.0, 965.99199999999996]))
    assert np.array_equal(melody_data.confidence, np.array([0.0, 1.0]))

    # load a file which doesn't exist
    melody_data_none = medleydb_melody.load_melody('fake/file/path')
    assert melody_data_none is None
github mir-dataset-loaders / mirdata / tests / test_orchset.py
        # ... snippet truncated by the source page; the lines below close the
        # expected_attributes dict passed to run_track_tests further down
        + 'GT/Beethoven-S3-I-ex1.mel',
        'composer': 'Beethoven',
        'work': 'S3-I',
        'excerpt': '1',
        'predominant_melodic_instruments': ['strings', 'winds'],
        'alternating_melody': True,
        'contains_winds': True,
        'contains_strings': True,
        'contains_brass': False,
        'only_strings': False,
        'only_winds': False,
        'only_brass': False,
    }

    expected_property_types = {
        'melody': utils.F0Data
    }

    run_track_tests(track, expected_attributes, expected_property_types)

    y_mono, sr_mono = track.audio_mono
    assert sr_mono == 44100
    assert y_mono.shape == (44100 * 2,)

    y_stereo, sr_stereo = track.audio_stereo
    assert sr_stereo == 44100
    assert y_stereo.shape == (2, 44100 * 2)
github mir-dataset-loaders / mirdata / tests / test_guitarset.py
        # ... snippet truncated by the source page; these entries close the
        # expected_attributes dict passed to run_track_tests below
        'mode': 'solo',
        'style': 'Bossa Nova',
    }

    expected_property_types = {
        'beats': utils.BeatData,
        'leadsheet_chords': utils.ChordData,
        'inferred_chords': utils.ChordData,
        'key_mode': utils.KeyData,
        'pitch_contours': dict,
        'notes': dict,
    }

    run_track_tests(track, expected_attributes, expected_property_types)

    assert type(track.pitch_contours['E']) is utils.F0Data
    assert type(track.notes['E']) is utils.NoteData
github mir-dataset-loaders / mirdata / tests / test_jams_utils.py
    # ... snippet truncated by the source page; the opening of this list of
    # (F0Data, label) tuples is cut off
        (
            utils.F0Data(
                np.array([0.016, 0.048]), np.array([0.0, 260.9]), np.array([0.0, 1.0])
            ),
            'f0s_1',
        )
    ]
    f0_data_3 = [
        (
            utils.F0Data(
                np.array([0.016, 0.048]), np.array([0.0, 260.9]), np.array([0.0, 1.0])
            ),
            'f0s_1',
        ),
        (
            utils.F0Data(
                np.array([0.003, 0.012]), np.array([0.0, 230.5]), np.array([0.0, 1.0])
            ),
            'f0s_2',
        ),
    ]
    f0_data_4 = (
        utils.F0Data(
            np.array([0.016, 0.048]), np.array([0.0, 260.9]), np.array([0.0, 1.0])
        ),
        'f0s_1',
    )
    f0_data_5 = [
        [
            utils.F0Data(
                np.array([0.016, 0.048]), np.array([0.0, 260.9]), np.array([0.0, 1.0])
            ),
            # ... remainder of this snippet truncated by the source page
github mir-dataset-loaders / mirdata / mirdata / ikala.py
# context: ikala.py imports os, numpy as np, librosa, and mirdata's utils, and
# defines the module constant TIME_STEP (the annotation hop size in seconds)
def load_f0(f0_path):
    if not os.path.exists(f0_path):
        raise IOError("f0_path {} does not exist".format(f0_path))

    with open(f0_path) as fhandle:
        lines = fhandle.readlines()
    f0_midi = np.array([float(line) for line in lines])
    f0_hz = librosa.midi_to_hz(f0_midi) * (f0_midi > 0)
    confidence = (f0_hz > 0).astype(float)
    times = (np.arange(len(f0_midi)) * TIME_STEP) + (TIME_STEP / 2.0)
    f0_data = utils.F0Data(times, f0_hz, confidence)
    return f0_data
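Two design choices are worth noting here: each frame is time-stamped at its center, (i * TIME_STEP) + TIME_STEP / 2 rather than i * TIME_STEP, and confidence is a binary voicing flag derived from nonzero f0.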
github mir-dataset-loaders / mirdata / mirdata / guitarset.py
# context: guitarset.py imports jams and numpy as np alongside mirdata's utils
def load_pitch_contour(jams_path, string_num):
    """
    Args:
        jams_path (str): Path to the jams annotation file.
        string_num (int): Which string to load, in range(6);
            0 is the low E string, 5 is the high e string.
    """
    jam = jams.load(jams_path)
    anno_arr = jam.search(namespace='pitch_contour')
    anno = anno_arr.search(data_source=str(string_num))[0]
    times, values = anno.to_event_values()
    frequencies = [v['frequency'] for v in values]
    return utils.F0Data(times, frequencies, np.ones_like(times))
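Note that confidence is filled with ones rather than read from the annotation, and frequencies is passed as a plain Python list; F0Data accepts both because namedtuples don't validate field types, though the other loaders on this page normalize everything to numpy arrays first.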
github mir-dataset-loaders / mirdata / mirdata / medleydb_pitch.py
def load_pitch(pitch_path):
    if not os.path.exists(pitch_path):
        return None
    times = []
    freqs = []
    with open(pitch_path, 'r') as fhandle:
        reader = csv.reader(fhandle, delimiter=',')
        for line in reader:
            times.append(float(line[0]))
            freqs.append(float(line[1]))

    times = np.array(times)
    freqs = np.array(freqs)
    confidence = (freqs > 0).astype(float)
    pitch_data = utils.F0Data(times, freqs, confidence)
    return pitch_data
github mir-dataset-loaders / mirdata / mirdata / orchset.py
def load_melody(melody_path):
    if not os.path.exists(melody_path):
        return None

    times = []
    freqs = []
    confidence = []
    with open(melody_path, 'r') as fhandle:
        reader = csv.reader(fhandle, delimiter='\t')
        for line in reader:
            times.append(float(line[0]))
            freqs.append(float(line[1]))
            confidence.append(0.0 if line[1] == '0' else 1.0)

    melody_data = utils.F0Data(np.array(times), np.array(freqs), np.array(confidence))
    return melody_data
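Finally, a minimal end-to-end sketch of consuming the returned F0Data; the annotation path below is hypothetical and should point at a local copy of the Orchset ground truth:

from mirdata import orchset

# hypothetical local path to an Orchset .mel annotation
melody = orchset.load_melody('/data/Orchset/GT/Beethoven-S3-I-ex1.mel')
if melody is not None:
    times, freqs, confidence = melody  # F0Data unpacks like a tuple
    voiced = freqs > 0                 # keep only voiced frames
    print(times[voiced][:5], freqs[voiced][:5])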