How to use tone.context in tone

To help you get started, we’ve selected a few tone.context examples, drawn from popular ways the library is used in public projects.
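
Tone.context is Tone.js’s handle on the shared Web Audio AudioContext. The examples below use it to create and decode buffers, check and resume the context after a user gesture, swap in an externally managed context, and adjust context-wide settings such as latencyHint. For orientation, here is a minimal sketch in the same v13-style global API these snippets use (Tone.context, toMaster()); the #play button and the buffer parameters are illustrative assumptions, not taken from any of the projects below.

import Tone from 'tone';

// Browsers keep an AudioContext suspended until a user gesture,
// so resume Tone.context from a click handler before playing anything.
document.querySelector('#play').addEventListener('click', async () => {
  if (Tone.context.state !== 'running') {
    await Tone.context.resume();
  }

  // Allocate one second of mono audio at the context's sample rate
  // (left silent here; real code would fill it with samples).
  const seconds = 1;
  const buffer = Tone.context.createBuffer(
      1, seconds * Tone.context.sampleRate, Tone.context.sampleRate);

  // Tone.Player accepts an AudioBuffer as its source, as in the
  // GANSynth demo below.
  const player = new Tone.Player({ url: buffer, loop: true }).toMaster();
  player.start();
});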

github: tensorflow/magenta-js (music/demos/gansynth.ts)
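GANSynth produces raw audio samples; Tone.context.createBuffer wraps them in a mono AudioBuffer that a looping Tone.Player can play back.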
async function runGANSynth() {
  const gansynth = new mm.GANSynth(GANSYNTH_CHECKPOINT);
  await gansynth.initialize();

  const start = performance.now();
  const specgrams = await gansynth.randomSample(60);
  const audio = await gansynth.specgramsToAudio(specgrams);
  await writeTimer('single-sample-gen-time', start);

  // Play sound.
  const T = 4.0;
  const SR = 16000;

  const audioBuffer = Tone.context.createBuffer(1, T * SR, SR);
  audioBuffer.copyToChannel(audio, 0, 0);
  const options = {'url': audioBuffer, 'loop': true, 'volume': -24};
  const player = new Tone.Player(options).toMaster();

  // Plotting.
  await Promise.all([
    plotSpectra(specgrams, 'mag-canvas', 0),
    plotSpectra(specgrams, 'ifreq-canvas', 1)
  ]);

  // Connect GUI actions.
  document.getElementById('start-button').addEventListener('click', () => {
    player.start();
  });
  document.getElementById('stop-button').addEventListener('click', () => {
    player.stop();
  });
}

github: googlecreativelab/creatability-seeing-music (src/interface/FileDrop.js)
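When a dropped file should be decoded rather than referenced by object URL, Tone.context.decodeAudioData converts the file’s ArrayBuffer into a playable AudioBuffer.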
async _getFile(f){
		this.emit('dropped')
		if (this._canPlayFile(f)){
			if (this.returnType === 'url'){
				const src = window.URL.createObjectURL(f)
				this.emit('file', f, src)
			} else {
				// assumes a promise-based FileReader wrapper; the native
				// DOM FileReader API is callback-based
				const results = await FileReader.readAsArrayBuffer(f)
				const buffer = await Tone.context.decodeAudioData(results)
				this.emit('file', f, buffer)
			}
		} else {
			this.emit('error', `Sorry, cannot play file type ${f.name}.`)
		}
	}
}

github: generative-music/pieces-alex-bainter (packages/piece-little-bells/src/piece.js)
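Tone.context can also be compared against an externally managed context and replaced with Tone.setContext before any instruments are built.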
.then(sampleSpec => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      return getGlock(sampleSpec, preferredFormat);
    })
    .then(glock => {

github: stc/HackPact (Day-17-Cellular-Structure/src/index.js)
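StartAudioContext resumes Tone.context on a user gesture, which browsers require before any audio can play.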
p.mousePressed = () => {
        StartAudioContext(Tone.context).then(function(){});
    }

github: generative-music/pieces-alex-bainter (packages/piece-timbral-oscillations/src/piece.js)
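Before scheduling notes, the piece checks Tone.context.state and resumes the context if the browser has suspended it.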
phrase.forEach((note, i) => {
          if (Tone.context.state !== 'running') {
            Tone.context.resume();
          }
          piano.triggerAttack(note, `+${i * 1.5}`);
        });
        Tone.Transport.scheduleOnce(() => {

github: generative-music/pieces-alex-bainter (packages/piece-quarter-eyes/src/piece.js)
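Tone.context also exposes context-wide settings such as latencyHint; this teardown callback resets it to 'balanced' after disposing the piece’s nodes.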
return () => {
            [
              reverb,
              strings,
              bassdrum1,
              bassdrum2,
              sleighbells,
              fingerCymbals,
              tom,
            ].forEach(node => node.dispose());
            Tone.context.latencyHint = 'balanced';
          };
        }

github: generative-music/pieces-alex-bainter (packages/piece-timbral-oscillations/src/piece.js)
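The same context-swapping pattern as in piece-little-bells above, applied before building a sampled piano.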
.then(({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      return getSampledInstrument(samples['vsco2-piano-mf'][preferredFormat]);
    })
    .then(piano => {

github: SonyCSLParis/NONOTO (src/renderer/playback.ts)
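Resuming Tone.context before stopping the Transport ensures the stop call takes effect even if the context was suspended.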
export function stop() {
    Tone.context.resume().then(() => {
        Tone.Transport.stop();
    });
}

github: stc/HackPact (Day-24-Noise-Pianist/src/index.js)
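The same StartAudioContext unlock pattern, here in the Noise Pianist sketch.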
p.mousePressed = () => {
        StartAudioContext(Tone.context).then(function(){});
    }
}