Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
);
// We parameterize batch size as a fraction of the entire dataset because the
// number of examples that are collected depends on how many examples the user
// collects. This allows us to have a flexible batch size.
const batchSize = Math.floor(
dataset.images.shape[0] * trainingParams.batchSizeFraction
);
if (!(batchSize > 0)) {
throw new Error(
`Batch size is 0 or NaN. Please choose a non-zero fraction.`
);
}
const shuffledIndices = new Int32Array(
tf.util.createShuffledIndices(dataset.labels.shape[0])
);
// Train the model! Model.fit() will shuffle xs & ys so we don't have to.
console.time("Training Time");
return this.model.fit(
dataset.images.gather(shuffledIndices),
dataset.labels.gather(shuffledIndices),
{
batchSize,
epochs: trainingParams.epochs,
validationSplit: 0.15,
callbacks: {
onBatchEnd: async (batch, logs) => {
trainingParams.trainStatus(
"Loss: " + logs.loss.toFixed(5)
);
async loadTrainingData(model) {
const numClasses = this.labelsAndImages.length;
const numImages = this.labelsAndImages.reduce(
(acc, item) => acc + item.images.length,
0
);
const embeddingsShape = model.outputs[0].shape.slice(1);
const embeddingsFlatSize = tf.util.sizeFromShape(embeddingsShape);
embeddingsShape.unshift(numImages);
const embeddings = new Float32Array(
tf.util.sizeFromShape(embeddingsShape)
);
const labels = new Int32Array(numImages);
// Loop through the files and populate the 'images' and 'labels' arrays
let embeddingsOffset = 0;
let labelsOffset = 0;
console.log("Loading Training Data");
console.time("Loading Training Data");
for (const element of this.labelsAndImages) {
let labelIndex = this.labelIndex(element.label);
for (const image of element.images) {
let t = await fileToTensor(image);
tf.tidy(() => {
async loadTrainingData(model) {
const numClasses = this.labelsAndImages.length;
const numImages = this.labelsAndImages.reduce(
(acc, item) => acc + item.images.length,
0
);
const embeddingsShape = model.outputs[0].shape.slice(1);
const embeddingsFlatSize = tf.util.sizeFromShape(embeddingsShape);
embeddingsShape.unshift(numImages);
const embeddings = new Float32Array(
tf.util.sizeFromShape(embeddingsShape)
);
const labels = new Int32Array(numImages);
// Loop through the files and populate the 'images' and 'labels' arrays
let embeddingsOffset = 0;
let labelsOffset = 0;
console.log("Loading Training Data");
console.time("Loading Training Data");
for (const element of this.labelsAndImages) {
let labelIndex = this.labelIndex(element.label);
for (const image of element.images) {
let t = await fileToTensor(image);
tf.tidy(() => {
let prediction = model.predict(t);
embeddings.set(prediction.dataSync(), embeddingsOffset);
labels.set([labelIndex], labelsOffset);
.then(imageTensor => {
const input = imageTensor.expandDims(0);
// Feed the image tensor into the model for inference.
const startTime = tf.util.now();
let outputTensor = model.predict({ 'x': input });
// Parse the model output to get meaningful result(get detection class and
// object location).
const scores = outputTensor['detection_scores'].arraySync();
const boxes = outputTensor['detection_boxes'].arraySync();
const names = outputTensor['detection_classes'].arraySync();
const endTime = tf.util.now();
outputTensor['detection_scores'].dispose();
outputTensor['detection_boxes'].dispose();
outputTensor['detection_classes'].dispose();
outputTensor['num_detections'].dispose();
const detectedBoxes = [];
const detectedNames = [];
for (let i = 0; i < scores[0].length; i++) {
if (scores[0][i] > 0.3) {
detectedBoxes.push(boxes[0][i]);
detectedNames.push(labels[names[0][i]]);
}
}
var res = {
boxes: detectedBoxes,
names: detectedNames,
inferenceTime: endTime - startTime
.then(imageTensor => {
const input = imageTensor.expandDims(0);
// Feed the image tensor into the model for inference.
const startTime = tf.util.now();
let outputTensor = model.predict({ 'x': input });
// Parse the model output to get meaningful result(get detection class and
// object location).
const scores = outputTensor['detection_scores'].arraySync();
const boxes = outputTensor['detection_boxes'].arraySync();
const names = outputTensor['detection_classes'].arraySync();
const endTime = tf.util.now();
outputTensor['detection_scores'].dispose();
outputTensor['detection_boxes'].dispose();
outputTensor['detection_classes'].dispose();
outputTensor['num_detections'].dispose();
const detectedBoxes = [];
const detectedNames = [];
for (let i = 0; i < scores[0].length; i++) {
if (scores[0][i] > 0.3) {
/**
 * Shuffles the feature/label pairs in unison so each feature keeps its label.
 *
 * @param {Array} features - Array of per-sample feature arrays.
 * @param {Array} labels - Array of labels, parallel to `features`.
 * @returns {[Array, Array]} Pair of [shuffledFeatures, shuffledLabels],
 *   both reordered by the same random permutation.
 */
const shuffleData = (features, labels) => {
  // Size the permutation from the actual sample count rather than the
  // module-level `numSamplesPerGesture`, so a partial dataset cannot
  // index out of bounds and yield `undefined` entries.
  const indices = [...Array(features.length).keys()];
  tf.util.shuffle(indices);
  // Build both outputs directly from the shuffled index order instead of
  // using `.map` for side effects.
  const shuffledFeatures = indices.map((i) => features[i]);
  const shuffledLabels = indices.map((i) => labels[i]);
  return [shuffledFeatures, shuffledLabels];
};
/**
 * Shuffles the feature/label pairs in unison so each feature keeps its label.
 *
 * @param {Array} features - Array of per-sample feature arrays.
 * @param {Array} labels - Array of labels, parallel to `features`.
 * @returns {[Array, Array]} Pair of [shuffledFeatures, shuffledLabels],
 *   both reordered by the same random permutation.
 */
const shuffleData = (features, labels) => {
  // Size the permutation from the actual sample count rather than the
  // module-level `numSamplesPerGesture`, so a partial dataset cannot
  // index out of bounds and yield `undefined` entries.
  const indices = [...Array(features.length).keys()];
  tf.util.shuffle(indices);
  // Build both outputs directly from the shuffled index order instead of
  // using `.map` for side effects.
  const shuffledFeatures = indices.map((i) => features[i]);
  const shuffledLabels = indices.map((i) => labels[i]);
  return [shuffledFeatures, shuffledLabels];
};
/**
 * Shuffles the feature/label pairs in unison so each feature keeps its label.
 *
 * @param {Array} features - Array of per-sample feature arrays.
 * @param {Array} labels - Array of labels, parallel to `features`.
 * @returns {[Array, Array]} Pair of [shuffledFeatures, shuffledLabels],
 *   both reordered by the same random permutation.
 */
const shuffleData = (features, labels) => {
  // Size the permutation from the actual sample count rather than the
  // module-level `numSamplesPerGesture`, so a partial dataset cannot
  // index out of bounds and yield `undefined` entries.
  const indices = [...Array(features.length).keys()];
  tf.util.shuffle(indices);
  // Build both outputs directly from the shuffled index order instead of
  // using `.map` for side effects.
  const shuffledFeatures = indices.map((i) => features[i]);
  const shuffledLabels = indices.map((i) => labels[i]);
  return [shuffledFeatures, shuffledLabels];
};
/**
 * Shuffles the feature/label pairs in unison so each feature keeps its label.
 *
 * @param {Array} features - Array of per-sample feature arrays.
 * @param {Array} labels - Array of labels, parallel to `features`.
 * @returns {[Array, Array]} Pair of [shuffledFeatures, shuffledLabels],
 *   both reordered by the same random permutation.
 */
const shuffleData = (features, labels) => {
  // Size the permutation from the actual sample count rather than the
  // module-level `numSamplesPerGesture`, so a partial dataset cannot
  // index out of bounds and yield `undefined` entries.
  const indices = [...Array(features.length).keys()];
  tf.util.shuffle(indices);
  // Build both outputs directly from the shuffled index order instead of
  // using `.map` for side effects.
  const shuffledFeatures = indices.map((i) => features[i]);
  const shuffledLabels = indices.map((i) => labels[i]);
  return [shuffledFeatures, shuffledLabels];
};