How to use the concat function in @tensorflow/tfjs

To help you get started, we’ve selected a few @tensorflow/tfjs examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github shaqian / tfjs-yolo / src / postprocess.js View on Github external
/**
 * Converts the raw feature map of one YOLO detection layer into box
 * centers, sizes, objectness confidence and class probabilities.
 * NOTE(review): this snippet is truncated — the return statement and the
 * function's closing brace are not visible in this excerpt.
 */
function yoloHead(
  isV3,
  feats,
  anchors,
  numClasses,
  inputShape
) {
  const numAnchors = anchors.shape[0];
  // Reshape to height, width, num_anchors, box_params.
  const anchorsTensor = tf.reshape(anchors, [1, 1, numAnchors, 2]);

  const gridShape = feats.shape.slice(1, 3); // height, width

  // Build a (height, width, 1, 2) grid of cell coordinates so each box
  // center can be offset by the cell it belongs to.
  const gridY = tf.tile(tf.reshape(tf.range(0, gridShape[0]), [-1, 1, 1, 1]), [1, gridShape[1], 1, 1]);
  const gridX = tf.tile(tf.reshape(tf.range(0, gridShape[1]), [1, -1, 1, 1]), [gridShape[0], 1, 1, 1]);
  const grid = tf.concat([gridX, gridY], 3).cast(feats.dtype);

  feats = feats.reshape([gridShape[0], gridShape[1], numAnchors, numClasses + 5]);

  // Split the last axis into center (2), size (2), objectness (1) and
  // per-class scores (numClasses) — the "5" in numClasses + 5 above.
  const [xy, wh, con, probs] = tf.split(feats, [2, 2, 1, numClasses], 3);
  // Adjust predictions to each spatial grid point and anchor size.
  const boxXy = tf.div(tf.add(tf.sigmoid(xy), grid), gridShape.reverse());
  const boxWh = tf.div(tf.mul(tf.exp(wh), anchorsTensor), inputShape.reverse());
  const boxConfidence = tf.sigmoid(con);

  let boxClassProbs;
  // v3 scores classes with independent sigmoids; earlier versions softmax.
  if (isV3) {
    boxClassProbs = tf.sigmoid(probs);
  } else {
    boxClassProbs = tf.softmax(probs);
  }
github tensorflow / tfjs-examples / date-conversion-attention / train.js View on Github external
// NOTE(review): snippet truncated — the enclosing function's signature is
// above this excerpt. tf.tidy disposes intermediate tensors on exit;
// only the returned tensors survive.
return tf.tidy(() => {
      // One string list per input date format, each covering all tuples.
      const inputs =
          dateFormat.INPUT_FNS.map(fn => dateTuples.map(tuple => fn(tuple)));
      // Flatten to a single list: one entry per (format, tuple) pair.
      const inputStrings = [];
      inputs.forEach(inputs => inputStrings.push(...inputs));
      const encoderInput =
          dateFormat.encodeInputDateStrings(inputStrings);
      const trainTargetStrings = dateTuples.map(
          tuple => dateFormat.dateTupleToYYYYDashMMDashDD(tuple));
      let decoderInput =
          dateFormat.encodeOutputDateStrings(trainTargetStrings)
          .asType('float32');
      // One-step time shift: The decoder input is shifted to the left by
      // one time step with respect to the encoder input. This accounts for
      // the step-by-step decoding that happens during inference time.
      decoderInput = tf.concat([
        tf.ones([decoderInput.shape[0], 1]).mul(dateFormat.START_CODE),
        decoderInput.slice(
            [0, 0], [decoderInput.shape[0], decoderInput.shape[1] - 1])
      ], 1).tile([dateFormat.INPUT_FNS.length, 1]);
      // Targets: one-hot over the output vocabulary, repeated once per
      // input format to line up with the tiled decoder input.
      const decoderOutput = tf.oneHot(
          dateFormat.encodeOutputDateStrings(trainTargetStrings),
          dateFormat.OUTPUT_VOCAB.length).tile(
              [dateFormat.INPUT_FNS.length, 1, 1]);
      return {encoderInput, decoderInput, decoderOutput};
    });
  }
github tensorflow / tfjs-examples / cart-pole / index.js View on Github external
// NOTE(review): snippet starts mid-expression — the map call that this
// arrow function belongs to, and the declarations of varGradients and
// normalizedRewards, are above this excerpt.
varGameGradients => tf.stack(varGameGradients));
        // Expand dimensions of reward tensors to prepare for multiplication
        // with broadcasting.
        const expandedDims = [];
        for (let i = 0; i < varGradients[0].rank - 1; ++i) {
          expandedDims.push(1);
        }
        // Append the trailing singleton dims so each reward broadcasts
        // against the matching gradient tensor.
        const reshapedNormalizedRewards = normalizedRewards.map(
            rs => rs.reshape(rs.shape.concat(expandedDims)));
        for (let g = 0; g < varGradients.length; ++g) {
          // This mul() call uses broadcasting.
          varGradients[g] = varGradients[g].mul(reshapedNormalizedRewards[g]);
        }
        // Concatenate the scaled gradients together, then average them across
        // all the steps of all the games.
        return tf.mean(tf.concat(varGradients, 0), 0);
      });
    }
github charliegerard / gestures-ml-js / arduino-mkr1000 / training-refactor-hp.js View on Github external
// NOTE(review): snippet starts mid-function — xTrains/yTrains and the
// surrounding callback are declared above this excerpt.
const xTests = [];
        const yTests = [];
        // Split each gesture class separately; 0.20 is presumably the
        // test fraction — confirm against convertToTensors.
        for (let i = 0; i < gestureClasses.length; ++i) {
            const [xTrain, yTrain, xTest, yTest] =
                convertToTensors(features[i], labels[i], 0.20); // create tensors from our data
            xTrains.push(xTrain);
            yTrains.push(yTrain);
            xTests.push(xTest);
            yTests.push(yTest);
        }

        const concatAxis = 0;

        // Concat all training labels together, all training features, all test labels and all test features
        return [
            tf.concat(xTrains, concatAxis), tf.concat(yTrains, concatAxis),
            tf.concat(xTests, concatAxis), tf.concat(yTests, concatAxis)
        ];
    })
}
github tensorflow / magenta-js / music / src / gansynth / audio_utils.ts View on Github external
// NOTE(review): snippet starts mid tensor literal — the tf call that opens
// this array, and the declarations of mag and ifreqSlice, are above this
// excerpt.
1, -1, -1, 1
                         ]).reshape([1, 128, 1024]);
    const ifreq = ifreqSlice as tf.Tensor3D;
    const phase = ifreqToPhase(ifreq);

    // Reflect all frequencies except for the Nyquist, which is shared between
    // positive and negative frequencies for even nFft.
    let real = mag.mul(tf.cos(phase));
    const mirrorReal = tf.reverse(real.slice([0, 0, 0], [1, 128, 1023]), 2);
    real = tf.concat([real, mirrorReal], 2);

    // Reflect all frequencies except for the Nyquist, take complex conjugate of
    // the negative frequencies.
    let imag = mag.mul(tf.sin(phase));
    const mirrorImag = tf.reverse(imag.slice([0, 0, 0], [1, 128, 1023]), 2);
    imag = tf.concat([imag, tf.mul(mirrorImag, -1.0)], 2);
    // Real/imaginary spectrum halves, ready for an inverse transform.
    return [real, imag];
  });
github cstefanache / tfjs-model-view / app / iris / data.js View on Github external
// NOTE(review): snippet starts mid-function — dataByClass, targetsByClass
// and testSplit come from the enclosing scope, not visible here.
const xTrains = [];
    const yTrains = [];
    const xTests = [];
    const yTests = [];
    // Split each iris class separately — presumably so the train/test
    // split stays balanced per class; confirm in convertToTensors.
    for (let i = 0; i < IRIS_CLASSES.length; ++i) {
      const [xTrain, yTrain, xTest, yTest] =
      convertToTensors(dataByClass[i], targetsByClass[i], testSplit);
      xTrains.push(xTrain);
      yTrains.push(yTrain);
      xTests.push(xTest);
      yTests.push(yTest);
    }

    const concatAxis = 0;
    // Stack per-class pieces back into single train/test tensors.
    return [
      tf.concat(xTrains, concatAxis), tf.concat(yTrains, concatAxis),
      tf.concat(xTests, concatAxis), tf.concat(yTests, concatAxis)
    ];
  });
}
github charliegerard / gestures-ml-js / arduino-mkr1000 / training-old-hp.js View on Github external
// NOTE(review): snippet starts mid-function — xTrains and the enclosing
// callback are declared above this excerpt.
const yTrains = [];
    const xTests = [];
    const yTests = [];
    // Per-class split; 0.20 is presumably the test fraction — confirm
    // against convertToTensors.
    for (let i = 0; i < gestureClasses.length; ++i) {
      const [xTrain, yTrain, xTest, yTest] =
          convertToTensors(features[i], labels[i], 0.20);
      xTrains.push(xTrain);
      yTrains.push(yTrain);
      xTests.push(xTest);
      yTests.push(yTest);
    }

    const concatAxis = 0;
    // Stack per-class pieces back into single train/test tensors.
    return [
      tf.concat(xTrains, concatAxis), tf.concat(yTrains, concatAxis),
      tf.concat(xTests, concatAxis), tf.concat(yTests, concatAxis)
    ];
  })
}
github googlecreativelab / teachablemachine-community / libraries / pose / src / custom-posenet.ts View on Github external
/**
 * Flattens PoseNet head outputs into one Float32Array by joining the
 * heatmap scores and offsets along the channel axis and reading the
 * values back synchronously.
 *
 * Note: displacementFwd / displacementBwd are accepted but not used;
 * the returned array covers only scores and offsets. (The name "Aray"
 * is a typo kept for caller compatibility.)
 */
public poseOutputsToAray(
		heatmapScores: tf.Tensor3D,
		offsets: tf.Tensor3D,
		displacementFwd: tf.Tensor3D,
		displacementBwd: tf.Tensor3D
	) {
		// Join along dimension 2 (channels), then pull the data out before
		// releasing the temporary tensor.
		const merged = tf.concat([heatmapScores, offsets], 2);
		try {
			return merged.dataSync() as Float32Array;
		} finally {
			merged.dispose();
		}
	}
github machinelearnjs / machinelearnjs / src / lib / preprocessing / data.ts View on Github external
/**
 * Prepends a constant "dummy" feature column (intercept term) to a 2-D
 * matrix, returning a plain nested number array.
 *
 * @param X - 2-D input matrix; validated by validateMatrix2D.
 * @param value - constant placed in the new first column (default 1.0).
 * @returns X with an extra leading column filled with `value`.
 * @throws TypeError when X is an empty array.
 */
export function add_dummy_feature(
  X: Type2DMatrix = null,
  value: number = 1.0
): number[][] {
  if (Array.isArray(X) && X.length === 0) {
    throw new TypeError('X cannot be empty');
  }
  validateMatrix2D(X);
  // tf.tidy disposes every intermediate tensor created in the callback;
  // the previous version leaked tensorX, ones, sValue, multipledOnes and
  // hStacked on every call.
  return tf.tidy(() => {
    const tensorX = tf.tensor2d(X) as tf.Tensor;
    const [nSamples] = tensorX.shape;
    // Column of `value` repeated once per sample.
    const dummyColumn = tf.mul(tf.ones([nSamples, 1]), tf.scalar(value));
    const hStacked = tf.concat([dummyColumn, tensorX], 1);
    // dataSync copies values out of the tensor, so disposing afterwards
    // (via tidy) is safe.
    return reshape(Array.from(hStacked.dataSync()), hStacked.shape) as number[][];
  });
}
github shaqian / tfjs-yolo / src / postprocess.js View on Github external
// NOTE(review): snippet starts mid-function — numLayers, outputs, boxes,
// boxScores, anchorsTensor, anchorMask and the surrounding tf.tidy are
// declared above this excerpt.
for (let i = 0; i < numLayers; i++) {
      const [_boxes, _boxScores] = yoloBoxesAndScores(
        isV3,
        // v3 has one output tensor per layer and a per-layer anchor mask;
        // earlier versions use a single output with all anchors.
        isV3 ? outputs[i] : outputs,
        isV3 ? anchorsTensor.gather(tf.tensor1d(anchorMask[i], 'int32')) : anchorsTensor,
        numClasses,
        inputShape,
        imageShape
      );

      boxes.push(_boxes);
      boxScores.push(_boxScores);
    }; // NOTE(review): stray semicolon after the for block — harmless, could be removed.

    // Merge per-layer results along the default axis (0).
    boxes = tf.concat(boxes);
    boxScores = tf.concat(boxScores);

    return [boxes, boxScores];
  });
}