import json
import logging
import os

from mirdata import track
from mirdata import utils


def _load_metadata(data_home):
    metadata_path = os.path.join(data_home, 'medleydb_pitch_metadata.json')
    if not os.path.exists(metadata_path):
        logging.info('Metadata file {} not found.'.format(metadata_path))
        return None
    with open(metadata_path, 'r') as fhandle:
        metadata = json.load(fhandle)
    metadata['data_home'] = data_home
    return metadata
DATA = utils.LargeData('medleydb_pitch_index.json', _load_metadata)
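# NOTE (added example, not mirdata source): utils.LargeData is not shown in this
# excerpt. The stand-in below sketches the assumed idea behind it: load the
# bundled index JSON and the optional dataset-level metadata lazily, and cache
# both so repeated Track construction does not reread them. All names here are
# hypothetical.
import json


class _LazyDataSketch(object):
    def __init__(self, index_path, metadata_load_fn=None):
        self._index_path = index_path
        self._metadata_load_fn = metadata_load_fn
        self._cached_index = None
        self._cached_metadata = None

    def index(self):
        # read the track index only once
        if self._cached_index is None:
            with open(self._index_path, 'r') as fhandle:
                self._cached_index = json.load(fhandle)
        return self._cached_index

    def metadata(self, data_home):
        # run the dataset-specific loader (e.g. _load_metadata above) only once
        if self._cached_metadata is None and self._metadata_load_fn is not None:
            self._cached_metadata = self._metadata_load_fn(data_home)
        return self._cached_metadata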
class Track(track.Track):
    """medleydb_pitch Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        artist (str): artist
        audio_path (str): path to the audio file
        genre (str): genre
        instrument (str): instrument of the track
        pitch_path (str): path to the pitch annotation file

    """
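# NOTE (added example, not mirdata source): a minimal sketch of how
# _load_metadata above behaves. With no metadata JSON present it returns None;
# once the file exists it returns the parsed dict tagged with 'data_home'.
# The track id and fields written below are made up for illustration.
import json
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp_home:
    assert _load_metadata(tmp_home) is None  # file missing, logged and skipped

    with open(os.path.join(tmp_home, 'medleydb_pitch_metadata.json'), 'w') as fhandle:
        json.dump({'MusicDelta_Reggae_STEM_06': {'artist': 'Music Delta'}}, fhandle)

    metadata = _load_metadata(tmp_home)
    assert metadata['data_home'] == tmp_home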
            'contains_strings': tf_dict[line[4]],
            'contains_brass': tf_dict[line[5]],
            'only_strings': tf_dict[line[6]],
            'only_winds': tf_dict[line[7]],
            'only_brass': tf_dict[line[8]],
            'composer': id_split[0],
            'work': '-'.join(id_split[1:-1]),
            'excerpt': id_split[-1][2:],
        }

    metadata_index['data_home'] = data_home
    return metadata_index
DATA = utils.LargeData('orchset_index.json', _load_metadata)
class Track(track.Track):
    """orchset Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        alternating_melody (bool): True if the melody alternates between instruments
        audio_path_mono (str): path to the mono audio file
        audio_path_stereo (str): path to the stereo audio file
        composer (str): the work's composer
        contains_brass (bool): True if the track contains any brass instrument

    """
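# NOTE (added example, not mirdata source): an illustration of the metadata
# fields built above. 'tf_dict' is assumed to map the CSV's 'T'/'F' flags to
# booleans, and the track id format below ('composer-work-exN') is an
# assumption used only for this sketch.
tf_dict = {'T': True, 'F': False}

example_id = 'Beethoven-S3-I-ex1'
id_split = example_id.split('-')

example_entry = {
    'composer': id_split[0],            # 'Beethoven'
    'work': '-'.join(id_split[1:-1]),   # 'S3-I'
    'excerpt': id_split[-1][2:],        # '1' (drops the leading 'ex')
    'contains_strings': tf_dict['T'],   # True
}
print(example_entry)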
import json
import logging
import os

from mirdata import track
from mirdata import utils

DATASET_DIR = 'DALI'


def _load_metadata(data_home):
    metadata_path = os.path.join(data_home, 'dali_metadata.json')
    if not os.path.exists(metadata_path):
        logging.info('Metadata file {} not found.'.format(metadata_path))
        return None
    with open(metadata_path, 'r') as fhandle:
        metadata_index = json.load(fhandle)
    metadata_index['data_home'] = data_home
    return metadata_index
DATA = utils.LargeData('dali_index.json', _load_metadata)
class Track(track.Track):
    """DALI melody Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        album (str): the track's album
        annotation_path (str): path to the track's annotation file
        artist (str): the track's artist
        audio_path (str): path to the track's audio file
        audio_url (str): youtube ID

    """
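# NOTE (added example, not mirdata source): a hedged usage sketch of the Track
# class documented above. The track id is made up, the dataset is assumed to be
# downloaded under data_home, and audio_url is documented as a YouTube video ID
# rather than a full URL, so a watchable link has to be assembled from it.
#
# track = Track('4b196e6c99574dd49ad00d56e132712b', data_home='/path/to/mir_datasets/DALI')
# print(track.artist, track.album)
# print('https://www.youtube.com/watch?v={}'.format(track.audio_url))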
)

AUDIO_MIX_REMOTE = download_utils.RemoteFileMetadata(
    filename='audio_mono-pickup_mix.zip',
    url='https://zenodo.org/record/3371780/files/audio_mono-pickup_mix.zip?download=1',
    checksum='aecce79f425a44e2055e46f680e10f6a',
    destination_dir='audio_mono-pickup_mix',
)
_STYLE_DICT = {
    'Jazz': 'Jazz',
    'BN': 'Bossa Nova',
    'Rock': 'Rock',
    'SS': 'Singer-Songwriter',
    'Funk': 'Funk',
}
_GUITAR_STRINGS = ['E', 'A', 'D', 'G', 'B', 'e']
DATA = utils.LargeData('guitarset_index.json')
class Track(track.Track):
    """guitarset Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        audio_hex_cln_path (str): path to the debleeded hex wave file
        audio_hex_path (str): path to the original hex wave file
        audio_mic_path (str): path to the mono wave via microphone
        audio_mix_path (str): path to the mono wave via downmixing hex pickup
        jams_path (str): path to the jams file

    """
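# NOTE (added example, not mirdata source): _STYLE_DICT above expands the style
# abbreviation embedded in a GuitarSet track id. The id format assumed below
# ('<player>_<style><progression>-<bpm>-<key>_<take>') is an illustration, not
# taken from this excerpt.
import re

example_id = '00_BN1-129-Eb_comp'
style_abbr = re.split(r'\d', example_id.split('_')[1])[0]  # 'BN'
print(_STYLE_DICT[style_abbr])  # 'Bossa Nova'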
            'piece_number': line[0],
            'suffix': line[1],
            'track_number': line[2],
            'title': line[3],
            'artist': line[4],
            'duration': _duration_to_sec(line[5]),
            'variation': line[6],
            'instruments': line[7],
        }

    metadata_index['data_home'] = data_home
    return metadata_index
DATA = utils.LargeData('rwc_jazz_index.json', _load_metadata)
class Track(track.Track):
    """rwc_jazz Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        artist (str): Artist name
        audio_path (str): path of the audio file
        beats_path (str): path of the beat annotation file
        duration (float): Duration of the track in seconds
        instruments (str): list of used instruments.

    """
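# NOTE (added example, not mirdata source): _duration_to_sec is called in the
# metadata parsing above but its definition is not shown in this excerpt. A
# plausible sketch, assuming the RWC metadata lists durations as 'M:SS' strings:
def _duration_to_sec_sketch(duration_str):
    minutes, seconds = duration_str.split(':')
    return 60 * int(minutes) + int(seconds)


print(_duration_to_sec_sketch('3:47'))  # 227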
        return None

    with open(id_map_path, 'r') as fhandle:
        reader = csv.reader(fhandle, delimiter='\t')
        singer_map = {}
        for line in reader:
            if line[0] == 'singer':
                continue
            singer_map[line[1]] = line[0]

    singer_map['data_home'] = data_home
    return singer_map
DATA = utils.LargeData('ikala_index.json', _load_metadata)
class Track(track.Track):
    """ikala Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        audio_path (str): path to the track's audio file
        f0_path (str): path to the track's f0 annotation file
        lyrics_path (str): path to the track's lyric annotation file
        section (str): section. Either 'verse' or 'chorus'
        singer_id (str): singer id

    """
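# NOTE (added example, not mirdata source): a self-contained rerun of the
# id-mapping parsing above on an in-memory table. The column header and the two
# rows are made up; the real file is assumed to be tab-separated with a
# 'singer' header row, as the loop implies.
import csv
import io

fhandle = io.StringIO('singer\tsong\n1\t10161\n1\t10164\n')
reader = csv.reader(fhandle, delimiter='\t')
singer_map = {}
for line in reader:
    if line[0] == 'singer':
        continue
    singer_map[line[1]] = line[0]
print(singer_map)  # {'10161': '1', '10164': '1'}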
import os

from mirdata import download_utils
from mirdata import jams_utils
from mirdata import track
from mirdata import utils

DATASET_DIR = 'Beatles'

ANNOTATIONS_REMOTE = download_utils.RemoteFileMetadata(
    filename='The Beatles Annotations.tar.gz',
    url='http://isophonics.net/files/annotations/The%20Beatles%20Annotations.tar.gz',
    checksum='62425c552d37c6bb655a78e4603828cc',
    destination_dir='annotations',
)

DATA = utils.LargeData('beatles_index.json')
class Track(track.Track):
    """Beatles track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored.
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        audio_path (str): track audio path
        beats_path (str): beat annotation path
        chords_path (str): chord annotation path
        keys_path (str): key annotation path
        sections_path (str): sections annotation path

    """
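# NOTE (added example, not mirdata source): the 32-character checksum recorded
# in ANNOTATIONS_REMOTE above looks like an MD5 digest, which download_utils is
# assumed to verify after fetching the archive. The helper below is a
# stand-alone illustration of that check, not the library implementation.
import hashlib


def _md5_sketch(path):
    hasher = hashlib.md5()
    with open(path, 'rb') as fhandle:
        for chunk in iter(lambda: fhandle.read(8192), b''):
            hasher.update(chunk)
    return hasher.hexdigest()


# after downloading the archive to local_path:
# assert _md5_sketch(local_path) == ANNOTATIONS_REMOTE.checksum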
            'style': str(style),
            'tempo': int(bpm),
            'beat_type': str(beat_type),
            'time_signature': str(time_signature),
            'midi_filename': str(midi_filename),
            'audio_filename': str(audio_filename),
            'duration': float(duration),
            'split': str(split),
        }

    metadata_index['data_home'] = data_home
    return metadata_index
DATA = utils.LargeData('groove_midi_index.json', _load_metadata)
class Track(track.Track):
    """Groove MIDI Track class

    Args:
        track_id (str): track id of the track
        data_home (str): Local path where the dataset is stored. default=None
            If `None`, looks for the data in the default directory, `~/mir_datasets`

    Attributes:
        drummer (str): Drummer id of the track (ex. 'drummer1')
        session (str): Type of session (ex. 'session1', 'eval_session')
        track_id (str): track id of the track (ex. 'drummer1/eval_session/1')
        style (str): Style (genre, groove type) of the track (ex. 'funk/groove1')
        tempo (int): Track tempo in beats per minute (ex. 138)

    """
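# NOTE (added example, not mirdata source): a sketch of how one metadata CSV
# row could be coerced into the typed dict built above. The column names and
# values here are assumptions made for illustration, not taken from the
# dataset's info file.
import csv
import io

row = next(csv.DictReader(io.StringIO(
    'style,bpm,beat_type,time_signature,midi_filename,audio_filename,duration,split\n'
    'funk/groove1,138,beat,4-4,drummer1/session1/1.mid,drummer1/session1/1.wav,27.87,train\n'
)))
example_entry = {
    'style': str(row['style']),
    'tempo': int(row['bpm']),
    'beat_type': str(row['beat_type']),
    'time_signature': str(row['time_signature']),
    'midi_filename': str(row['midi_filename']),
    'audio_filename': str(row['audio_filename']),
    'duration': float(row['duration']),
    'split': str(row['split']),
}
print(example_entry['tempo'], example_entry['duration'])  # 138 27.87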