How to use the `Tone.setContext` function in the `tone` package

To help you get started, we’ve selected a few tone examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github ISNIT0 / webaudio-generator / src / nodes / inputs / midi.ts View on Github external
// Binds Tone.js to the caller-supplied AudioContext, then wires the first
// available MIDI input to both a Tone.Synth and the first MIDI output.
// NOTE(review): snippet is truncated — closing braces are not shown.
initWANode(audioCtx: AudioContext, node: NodeDef) {
        // Point Tone.js at the shared context BEFORE creating any nodes,
        // so the synth below is built on audioCtx rather than Tone's default.
        Tone.setContext(audioCtx);
        const synth = new Tone.Synth();

        WebMidi.enable(function (err: any) {
            if (err) {
                // NOTE(review): alerts on failure but does not return —
                // the code below still runs; confirm this is intended.
                alert("WebMidi could not be enabled.");
            }

            // Assumes at least one MIDI output and input exist — TODO confirm
            // (WebMidi.outputs/inputs may be empty arrays).
            const output = WebMidi.outputs[0];
            const input = WebMidi.inputs[0];

            // On every note-on from any channel: log it, echo the note to the
            // hardware output, and trigger the software synth.
            input.addListener('noteon', "all",
                function (e: any) {
                    console.log("Received 'noteon' message (" + e.note.name + e.note.octave + ").");
                    output.playNote(`${e.note.name}${e.note.octave}`, 'all', { velocity: 1 });
                    synth.triggerAttack(`${e.note.name}${e.note.octave}`);
                }
github generative-music / pieces-alex-bainter / packages / piece-peace / src / piece.js View on Github external
// Truncated example: rebinds Tone.js to a shared audioContext (only when it
// differs), then loads flute sample buffers and a reverb in parallel.
({ samples }) => {
      // Avoid redundantly resetting the context if Tone already uses it.
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      const samplesByNote =
        samples['native-american-flute-susvib'][preferredFormat];
      return Promise.all([
        getBuffers(samplesByNote),
        // Reverb impulse response is generated asynchronously; generate()
        // resolves once the reverb is ready to use.
        new Tone.Reverb({
          decay: 10,
        }).generate(),
      ]).then(([flute, reverb]) => {
        const bufferSources = [];
        reverb.connect(destination);
        // Long (1 s) feedback delay, 30% wet, feeding into the reverb chain.
        const delay = new Tone.FeedbackDelay({
          feedback: 0.7,
          delayTime: 1,
          wet: 0.3,
        }).connect(reverb);
github generative-music / pieces-alex-bainter / packages / piece-trees / src / piece.js View on Github external
// Truncated example: ensure Tone.js uses the shared audioContext, then load
// a sampled piano instrument for the next stage of the chain.
.then(({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      return getSampledInstrument(samples['vsco2-piano-mf'][preferredFormat]);
    })
    .then(piano => {
github generative-music / pieces-alex-bainter / packages / piece-apoapsis / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, then build a brown-noise drone
// shaped by a 3-band EQ whose low band is slowly modulated by an LFO.
({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      // Only the low band passes (-15 dB); mid and high are fully cut.
      const noise = new Tone.Noise('brown');
      const eq = new Tone.EQ3(-15, -Infinity, -Infinity).connect(destination);
      eq.lowFrequency.value = Note.freq('C1');
      // Very slow LFO (random frequency below 0.01 Hz, i.e. a period of
      // 100+ seconds) sweeping the low-band gain between -50 and -15 dB.
      const lfo = new Tone.LFO({
        min: -50,
        max: -15,
        frequency: Math.random() / 100,
        phase: 45,
      });
      lfo.connect(eq.low);
      noise.connect(eq);
      lfo.start();

      // Snippet is cut off here; delay configuration continues below.
      const delay1 = new Tone.FeedbackDelay({
        feedback: 0.7,
github generative-music / pieces-alex-bainter / packages / piece-otherness / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, then load the 'otherness'
// sampled instrument in the caller's preferred audio format.
.then(({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      return getSampledInstrument(samples.otherness[preferredFormat]);
    })
    .then(instrument => {
github generative-music / pieces-alex-bainter / packages / piece-bhairav / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, then load piano and cello
// samplers plus a reverb in parallel before wiring the audio graph.
({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      return Promise.all([
        getSampler(samples['vsco2-piano-mf'][preferredFormat]),
        // Cellos use a slow (2 s) linear attack/release envelope.
        getSampler(samples['vsco2-cellos-susvib-mp'][preferredFormat], {
          attack: 2,
          curve: 'linear',
          release: 2,
        }),
        // 15 s reverb decay, 60% wet; generate() resolves when the impulse
        // response is ready.
        new Tone.Reverb(15).set({ wet: 0.6 }).generate(),
      ]).then(([pianoSampler, cellos, reverb]) => {
        reverb.connect(destination);
        pianoSampler.connect(reverb);

        // Randomly pick which octave the C# tonic sits in.
        let tonic = Math.random() < 0.5 ? 'C#4' : 'C#5';

        // Snippet is cut off here; playNote body is not shown.
        const playNote = (note, time = 0, velocity = 1) =>
github generative-music / pieces-alex-bainter / packages / piece-spring-again / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, request low-latency scheduling,
// and load three sampled instruments in parallel.
.then(({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      // 'interactive' hints the context to favor low latency.
      Tone.context.latencyHint = 'interactive';
      return Promise.all([
        getInstrument(samples['vsco2-piano-mf'][preferredFormat]),
        getInstrument(samples['vsco2-violins-susvib'][preferredFormat]),
        getInstrument(samples['vsco2-cello-susvib-f'][preferredFormat]),
      ]);
    })
    .then(([piano, violins, cello]) => {
github generative-music / pieces-alex-bainter / packages / piece-drones-2 / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, then build a violin drone
// generator routed through a steep low-pass filter.
({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }

      const samplesByNote = samples['vsco2-violins-susvib'][preferredFormat];

      return getBuffers(samplesByNote).then(buffers => {
        // 6 kHz low-pass with a -48 dB/octave rolloff into the destination.
        const filter = new Tone.Filter(6000, 'lowpass', -48).connect(
          destination
        );
        // Nodes collected here are presumably disposed on teardown — the
        // disposal code is outside this snippet; verify against the caller.
        const disposableNodes = [filter, buffers];

        // Starts one drone voice; the body is cut off in this snippet.
        const drone = (
          note,
          droneDestination,
          pitchShift = 0,
          reverse = false
        ) => {
github generative-music / pieces-alex-bainter / packages / piece-townsend / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, then load flute and guitar
// sounds alongside a fully-wet 50 s reverb, all in parallel.
.then(samplesSpec => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      const fluteReverb = new Tone.Reverb(50).set({ wet: 1 });
      return Promise.all([
        getFlute(samplesSpec, preferredFormat),
        getGuitarSounds(samplesSpec, preferredFormat),
        // The impulse response must be generated before the reverb is usable.
        fluteReverb.generate(),
      ]);
    })
    .then(([flute, guitarBuffers, fluteReverb]) => {
github generative-music / pieces-alex-bainter / packages / piece-quarter-eyes / src / piece.js View on Github external
// Truncated example: rebind Tone.js context, set low-latency mode, then load
// a reverb, cello strings, and a set of percussion instruments in parallel.
({ samples }) => {
      if (Tone.context !== audioContext) {
        Tone.setContext(audioContext);
      }
      Tone.context.latencyHint = 'interactive';
      return Promise.all([
        new Tone.Reverb({ decay: 15, wet: 0.5 }).generate(),
        getStrings(samples['vsco2-cellos-susvib-mp'][preferredFormat]),
        // One percussion instrument per named sample set, spread into the
        // Promise.all so each loads concurrently.
        ...[
          'vcsl-bassdrum-hit-f',
          'vcsl-bassdrum-hit-ff',
          'vcsl-sleighbells',
          'vcsl-finger-cymbals',
          'vcsl-tom',
        ].map(instrumentName =>
          getPercussionInstrument(samples[instrumentName][preferredFormat])
        ),
      ]).then(
        ([