How to use @magenta/music - 6 common examples

To help you get started, we’ve selected a few @magenta/music examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github magenta / drumbot / server / app.js View on Github external
/**
 * Initializes the MusicVAE checkpoint, primes it with a throwaway
 * one-note sequence (so the first real request doesn't pay the
 * cold-start cost), and only then starts the HTTP server.
 */
async function warmUpModel() {
  await mvae.initialize();

  // Run one dummy drumify pass so the model weights are hot.
  const warmupSequence = {
    notes: [{pitch: 60, velocity: 100, startTime: 0, endTime: 1}],
  };
  const quantized = mmcore.sequences.quantizeNoteSequence(warmupSequence, 4);
  await drumify(quantized, 80);

  // Server starts only after the warm-up completes.
  app.listen(PORT, () => {
    console.log('Your app is listening on port ' + PORT);
  });
}
github magenta / drumbot / server / app.js View on Github external
/**
 * Normalizes a NoteSequence's timing by round-tripping it through
 * quantization (unquantized -> quantized -> unquantized), then strips
 * the leftover quantization metadata so the result reads as a plain
 * unquantized sequence again.
 *
 * @param {Object} ns - NoteSequence to normalize.
 * @returns {Object} The normalized, unquantized NoteSequence.
 */
function fixSequence(ns) {
  const roundTripped = mmcore.sequences.unquantizeSequence(
      mmcore.sequences.quantizeNoteSequence(ns, 4));

  // Drop per-note quantization fields left behind by the round trip.
  for (const note of roundTripped.notes) {
    delete note.quantizedStartStep;
    delete note.quantizedEndStep;
  }
  delete roundTripped.totalQuantizedSteps;
  delete roundTripped.quantizationInfo;

  return roundTripped;
}
github magenta / drumbot / server / app.js View on Github external
const express = require('express');
const bodyParser = require('body-parser');

// Node-side builds of magenta: MusicVAE model + core sequence utilities.
const mm = require('@magenta/music/node/music_vae');
const mmcore = require('@magenta/music/node/core');
//const tf = require('@tensorflow/tfjs-node');

// Fix: magenta.js uses performance.now() for timing logging
// And fetch for the checkpoint, so fake both :/
if (!global.performance) global.performance = require('perf_hooks').performance;
global.fetch = require('node-fetch');

// If you run this locally, and not on Glitch, you might not have the
// env variables set.
const PORT = process.env && process.env.PORT ? process.env.PORT : 9876;
// GrooVAE "tap2drum" checkpoint: converts a tapped rhythm into a drum pattern.
const mvae = new mm.MusicVAE('https://storage.googleapis.com/magentadata/js/checkpoints/groovae/tap2drum_2bar');
// Kick off model download/warm-up immediately; warmUpModel() also calls
// app.listen() once the model is ready, so the server only accepts
// traffic after initialization. Note: the returned promise is not
// awaited — initialization errors surface as an unhandled rejection.
warmUpModel();

// http://expressjs.com/en/starter/static-files.html
const app = express();
app.use(express.static('client'));
app.use(bodyParser.json());

// Serve the client entry page.
app.get('/', function(request, response) {
  response.sendFile(__dirname + '/client/index.html');
});
app.post('/drumify', async function(request, response) {
  if (!mvae.isInitialized()) {
    await mvae.initialize();
  }
  const original_ns = request.body;
github googlecreativelab / creatability-seeing-music / src / piano / Model.js View on Github external
// Lazily load @magenta/music (webpack code-split via require.ensure),
// then set up the OnsetsAndFrames transcription model. Runs inside a
// method of the enclosing class — `this.piano`, `this.model`,
// `this.loading`, `this._enabled`, and `this.emit` come from there.
require.ensure(['@magenta/music'], async () => {
				try {
					// Load piano samples first, then the transcription model.
					await this.piano.load()
					const { OnsetsAndFrames } = require('@magenta/music')
					this.model = new OnsetsAndFrames('/assets/model')
					await this.model.initialize()
					this.loading = false
					this._enabled = true
					//add a notification
					// Tailor the hint depending on whether MIDI input is available.
					if (WebMidi.supported){
						document.querySelector('acc-snackbar').setAttribute('message', 'Choose an audio file to transcribe, or play live with a MIDI keyboard.')
					} else {
						document.querySelector('acc-snackbar').setAttribute('message', 'Choose an audio file to transcribe.')
					}
				} catch (e){
					// Any failure (sample load, model download, init) lands here:
					// clear the loading flag and surface the error to listeners/UI.
					this.loading = false
					this.emit('error', e)
					console.log(e)
					document.querySelector('#error-snack').setAttribute('message', 'Transcription not supported')
				}
			})
github vibertthio / runn / src / index.js View on Github external
initChordRNN(chordProgression) {
    const modelCheckPoint = './checkpoints/chord_pitches_improv';
    const model = new MusicRNN(modelCheckPoint);
    model.initialize()
      .then(() => {
        console.log('initialized!');
        return model.continueSequence(
          presetMelodies['Twinkle'],
          this.nOfBars * 16,
          1.0,
          chordProgression,
          );
      })
      .then((i) => {
        console.log(i);
        this.setMelodies([i], chordProgression);
        this.model = model;
        this.setState({
          loadingModel: false,
github vibertthio / runn / src / index.js View on Github external
initRNN() {
    const modelCheckPoint = './checkpoints/basic_rnn';
    const model = new MusicRNN(modelCheckPoint);

    model.initialize()
      .then(() => {
        console.log('initialized!');
        return model.continueSequence(
          presetMelodies['Twinkle'],
          this.nOfBars * 16,
          1.0)
      })
      .then((i) => {
        this.setMelodies([i]);
        this.model = model;
        this.setState({
          loadingModel: false,
        });
        this.sound.triggerSoundEffect(4);

@magenta/music

Make music with machine learning, in the browser.

Apache-2.0
Latest version published 2 years ago

Package Health Score

53 / 100
Full package analysis

Similar packages