How to use the train function in @tensorflow/tfjs-node

To help you get started, we’ve selected a few @tensorflow/tfjs-node examples based on popular ways it is used in public projects. Each one creates an optimizer through the tf.train factories (tf.train.sgd, tf.train.adam, tf.train.rmsprop) and passes it to model.compile() before training with model.fit().

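Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: a tf.train factory builds an optimizer, model.compile() attaches it, and model.fit() runs training. The layer sizes, learning rate, and random data below are illustrative placeholders, not taken from any of the projects.

const tf = require('@tensorflow/tfjs-node');

async function run() {
    // A tiny regression model; the shapes and units are arbitrary.
    const model = tf.sequential();
    model.add(tf.layers.dense({ inputShape: [4], units: 8, activation: 'relu' }));
    model.add(tf.layers.dense({ units: 1 }));

    // tf.train.adam() returns an Optimizer instance consumed by compile().
    model.compile({ optimizer: tf.train.adam(0.01), loss: 'meanSquaredError' });

    // Synthetic data, just to make the sketch runnable.
    const xs = tf.randomNormal([64, 4]);
    const ys = tf.randomNormal([64, 1]);

    const history = await model.fit(xs, ys, { epochs: 5, batchSize: 16, verbose: 0 });
    console.log('final loss:', history.history.loss.pop());
}

run();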

github victordibia / anomagram / experiments / ecg.js (View on GitHub)
// Reload a previously saved model and fine-tune it on the same data.
// `model`, `modelSavePath`, `modelParams`, `xs`, `numSteps`, `numEpochs` and
// `batchSize` are defined at module level in ecg.js.
async function loadSavedModel() {
    model = await tf.loadLayersModel(modelSavePath + "/model.json");
    console.log("model loaded");

    // Recompile with a fresh Adam optimizer before continuing training.
    const optimizer = tf.train.adam(modelParams.learningRate, modelParams.adamBeta1);

    model.compile({ optimizer: optimizer, loss: "meanSquaredError" });

    // Autoencoder: the inputs are also the reconstruction targets.
    for (let i = 0; i < numSteps; i++) {
        const res = await model.fit(xs,
            xs, { epochs: numEpochs, verbose: 0, batchSize: batchSize });
        console.log("Step loss", i, res.history.loss[0]);
    }

    await model.save(modelSavePath);
    await model.save("file://../app/public/webmodel/ecg");
}
github charliegerard / gestures-ml-js / daydream / examples / harry-potter / train.js (View on GitHub)
// `numClasses` is defined at module level in train.js.
const createModel = async (xTrain, yTrain, xTest, yTest) => {
  const params = {learningRate: 0.1, epochs: 40};
  // Define the topology of the model: two dense layers.
  const model = tf.sequential();
  model.add(tf.layers.dense({units: 10, activation: 'sigmoid', inputShape: [xTrain.shape[1]]}));
  model.add(tf.layers.dense({units: numClasses, activation: 'softmax'}));
  model.summary();

  const optimizer = tf.train.adam(params.learningRate);
  model.compile({
    optimizer: optimizer,
    loss: 'categoricalCrossentropy',
    metrics: ['accuracy'],
  });

  await model.fit(xTrain, yTrain, {
    epochs: params.epochs,
    validationData: [xTest, yTest],
  });
  
  await model.save('file://model');
  return model;
}
github charliegerard / gestures-ml-js / phone / examples / harry-potter / train.js (View on GitHub; the code is identical to the daydream train.js above)
github loretoparisi / tensorflow-node-examples / lstm / index.js (View on GitHub)
return new Promise((resolve, reject) => {
        // Define the topology of the model. `sequenceLength` and
        // `generateDataset` are defined elsewhere in index.js.
        const model = tf.sequential();
        model.add(tf.layers.lstm({ units: 8, inputShape: [sequenceLength, 2] }));
        model.add(tf.layers.dense({ units: 1, activation: 'sigmoid' }));

        // Compile model to prepare for training.
        const learningRate = 4e-3;
        const optimizer = tf.train.rmsprop(learningRate);
        model.compile({
            loss: 'binaryCrossentropy',
            optimizer: optimizer,
            metrics: ['acc']
        });

        // Generate a number of examples for training.
        const numTrainExamples = 500;
        console.log('Generating training data...');
        const [trainSequences, trainLabels] = generateDataset(numTrainExamples, 10);

        let status = {
            train_epoch: 0,
            train_loss: 0,
            train_acc: 0,
            val_loss: 0,
github DiscreetAI / dml-library-js / dml-library-js / library / runner.js (View on GitHub)
static _compileModel(model, optimization_data) {
        let optimizer;
        const optimizer_config = optimization_data['optimizer_config'];
        if (optimizer_config['class_name'] == 'SGD') {
            // SGD: only the learning rate is used.
            optimizer = tfjs_1.train.sgd(optimizer_config['config']['learning_rate']);
        } else if (optimizer_config['class_name'] == 'Adam') {
            // Adam: learning rate plus the two beta decay rates.
            optimizer = tfjs_1.train.adam(
                optimizer_config['config']['learning_rate'],
                optimizer_config['config']['beta1'],
                optimizer_config['config']['beta2']);
        } else {
            throw new Error("Optimizer not supported!");
        }
        model.compile({
            optimizer: optimizer,
            loss: Runner._lowerCaseToCamelCase(optimization_data['loss']),
            metrics: optimization_data['metrics']
        });
        return model;
    }
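For context, the optimization_data object this helper expects mirrors a Keras-style optimizer config. The call below is a hypothetical sketch with made-up values, assuming the static method belongs to the Runner class referenced inside it and that a tf.LayersModel named model is already in scope.

// Hypothetical usage; the config values are illustrative, not from the library.
const compiled = Runner._compileModel(model, {
    optimizer_config: {
        class_name: 'Adam',
        config: { learning_rate: 0.001, beta1: 0.9, beta2: 0.999 }
    },
    loss: 'mean_squared_error',   // presumably mapped to 'meanSquaredError'
    metrics: ['mse']
});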
github victordibia / anomagram / experiments / models / ae.js (View on GitHub)
    // Build the decoder by stacking the hidden layers in reverse order.
    let j = hiddenDim.length - 1;
    while (j > 0) {
        j--;
        decoderHidden = tf.layers.dense({ units: hiddenDim[j], activation: "relu" }).apply(decoderHidden);
    }

    const decoderOutput = tf.layers.dense({ units: numFeatures, activation: outputActivation }).apply(decoderHidden);
    const decoder = tf.model({ inputs: latentInput, outputs: decoderOutput, name: "decoder" });

    // Link the output of the encoder to the decoder.
    output = decoder.apply(encoder.apply(input));

    // Construct the autoencoder from both encoder and decoder.
    const ae = tf.model({ inputs: input, outputs: output, name: "autoencoder" });
    const optimizer = tf.train.adam(learningRate, adamBeta1);

    ae.compile({ optimizer: optimizer, loss: "meanSquaredError" });

    return [ae, encoder, decoder];
}
github victordibia / anomagram / experiments / iris.js (View on GitHub)
// `model` is created earlier in iris.js with tf.sequential().
model.add(tf.layers.dense({
    inputShape: [4],
    activation: "sigmoid",
    units: 10,
    name: "layer1"
}))

model.add(tf.layers.dense({
    inputShape: [10],
    activation: "softmax",
    units: 3,
    name: "layer2"
}))

model.compile({
    loss: "categoricalCrossentropy",
    optimizer: tf.train.adam(),
    metrics: ['accuracy'],
})

model.summary()

// `trainingData` and `outputData` are tensors built earlier in iris.js.
async function train_data() {
    for (let i = 0; i < 15; i++) {
        const res = await model.fit(trainingData,
            outputData, { epochs: 60 });
        console.log(res.history.loss[0]);
    }
}

async function main() {
    let train = await train_data();
github bobiblazeski / js-gym / dist / agents.node.js (View on GitHub)
minBufferSize=MIN_BUFFER_SIZE, updateEvery=UPDATE_EVERY,
      bufferSize=BUFFER_SIZE, batchSize=BATCH_SIZE} = {},
      buffer) {
    this.epsilon = epsilon;
    this.epsilonDecay = epsilonDecay;
    this.minEpsilon = minEpsilon;
    this.minBufferSize = minBufferSize;
    this.updateEvery = updateEvery;
    this.noise = new OUNoise(actionSize);
    this.buffer = buffer || new ReplayBuffer(bufferSize, batchSize);

    // Actor/critic networks, their target copies, and one Adam optimizer each.
    this.actor = makeActor();
    this.actorTarget = makeActor();
    this.critic = makeCritic();
    this.criticTarget = makeCritic();
    this.actorOptimizer = tf.train.adam(lrActor);
    this.criticOptimizer = tf.train.adam(lrCritic);

    hardUpdate(this.actor, this.actorTarget);
    hardUpdate(this.critic, this.criticTarget);
  }
github tensorflow / tfjs-examples / abalone-node / model.js (View on GitHub)
function createModel(inputShape) {
  const model = tf.sequential();
  model.add(tf.layers.dense({
    inputShape: inputShape,
    activation: 'sigmoid',
    units: 50,
  }));
  model.add(tf.layers.dense({
    activation: 'sigmoid',
    units: 50,
  }));
  model.add(tf.layers.dense({
    units: 1,
  }));
  model.compile({optimizer: tf.train.sgd(0.01), loss: 'meanSquaredError'});
  return model;
}
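A hedged usage sketch for createModel above: the input width, random tensors, and fit options are placeholders rather than values from the abalone example, which trains on CSV data.

// Illustrative only; assumes 8 numeric features per sample.
async function demo() {
    const model = createModel([8]);
    const xs = tf.randomNormal([100, 8]);   // placeholder inputs
    const ys = tf.randomNormal([100, 1]);   // placeholder targets
    await model.fit(xs, ys, { epochs: 10, batchSize: 32, validationSplit: 0.2 });
}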