// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Initialize the MusicVAE checkpoint, run one throwaway inference so the
 * first real request is fast, then start the HTTP server.
 *
 * Reads module-level `mvae`, `mmcore`, `drumify`, `app`, and `PORT`.
 * Errors during warm-up are logged instead of becoming an unhandled
 * rejection; the server is started regardless so static assets stay served.
 */
async function warmUpModel() {
  try {
    await mvae.initialize();
    // Warm up the model with a single quantized note; the output is discarded.
    const ns = {notes: [{pitch: 60, velocity: 100, startTime: 0, endTime: 1}]};
    const quantizedNS = mmcore.sequences.quantizeNoteSequence(ns, 4);
    await drumify(quantizedNS, 80);
  } catch (err) {
    // Don't let a failed checkpoint download/inference kill startup silently.
    console.error('Model warm-up failed:', err);
  }
  app.listen(PORT, function() {
    console.log('Your app is listening on port ' + PORT);
  });
}
/**
 * Round-trips a NoteSequence through quantization
 * (unquantized -> quantized -> unquantized) and strips the quantization
 * bookkeeping fields so the result reads as a plain unquantized sequence.
 * @param {Object} ns - The NoteSequence to normalize.
 * @returns {Object} A new, re-unquantized sequence without quantization fields.
 */
function fixSequence(ns) {
  const roundTripped = mmcore.sequences.unquantizeSequence(
      mmcore.sequences.quantizeNoteSequence(ns, 4));
  for (const note of roundTripped.notes) {
    delete note.quantizedStartStep;
    delete note.quantizedEndStep;
  }
  delete roundTripped.totalQuantizedSteps;
  delete roundTripped.quantizationInfo;
  return roundTripped;
}
const express = require('express');
const bodyParser = require('body-parser');
const path = require('path');
const mm = require('@magenta/music/node/music_vae');
const mmcore = require('@magenta/music/node/core');
//const tf = require('@tensorflow/tfjs-node');

// magenta.js uses performance.now() for timing logging and fetch for the
// checkpoint download, so polyfill both in Node. :/
if (!global.performance) global.performance = require('perf_hooks').performance;
global.fetch = require('node-fetch');

// If you run this locally, and not on Glitch, you might not have the
// env variables set.
const PORT = process.env && process.env.PORT ? process.env.PORT : 9876;

const mvae = new mm.MusicVAE('https://storage.googleapis.com/magentadata/js/checkpoints/groovae/tap2drum_2bar');
// Kick off warm-up eagerly; attach a .catch so a failure is logged instead
// of surfacing as an unhandled promise rejection.
warmUpModel().catch(function(err) {
  console.error('warmUpModel failed:', err);
});

// http://expressjs.com/en/starter/static-files.html
const app = express();
app.use(express.static('client'));
app.use(bodyParser.json());

app.get('/', function(request, response) {
  // path.join builds an absolute, platform-safe path (sendFile requires an
  // absolute path when no root option is given).
  response.sendFile(path.join(__dirname, 'client', 'index.html'));
});
app.post('/drumify', async function(request, response) {
if (!mvae.isInitialized()) {
await mvae.initialize();
}
const original_ns = request.body;
require.ensure(['@magenta/music'], async () => {
try {
await this.piano.load()
const { OnsetsAndFrames } = require('@magenta/music')
this.model = new OnsetsAndFrames('/assets/model')
await this.model.initialize()
this.loading = false
this._enabled = true
//add a notification
if (WebMidi.supported){
document.querySelector('acc-snackbar').setAttribute('message', 'Choose an audio file to transcribe, or play live with a MIDI keyboard.')
} else {
document.querySelector('acc-snackbar').setAttribute('message', 'Choose an audio file to transcribe.')
}
} catch (e){
this.loading = false
this.emit('error', e)
console.log(e)
document.querySelector('#error-snack').setAttribute('message', 'Transcription not supported')
}
})
initChordRNN(chordProgression) {
const modelCheckPoint = './checkpoints/chord_pitches_improv';
const model = new MusicRNN(modelCheckPoint);
model.initialize()
.then(() => {
console.log('initialized!');
return model.continueSequence(
presetMelodies['Twinkle'],
this.nOfBars * 16,
1.0,
chordProgression,
);
})
.then((i) => {
console.log(i);
this.setMelodies([i], chordProgression);
this.model = model;
this.setState({
loadingModel: false,
initRNN() {
const modelCheckPoint = './checkpoints/basic_rnn';
const model = new MusicRNN(modelCheckPoint);
model.initialize()
.then(() => {
console.log('initialized!');
return model.continueSequence(
presetMelodies['Twinkle'],
this.nOfBars * 16,
1.0)
})
.then((i) => {
this.setMelodies([i]);
this.model = model;
this.setState({
loadingModel: false,
});
this.sound.triggerSoundEffect(4);