How to use the @tensorflow/tfjs-node.loadLayersModel function in @tensorflow/tfjs-node

To help you get started, we’ve selected a few @tensorflow/tfjs-node examples, based on popular ways it is used in public projects.

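tf.loadLayersModel accepts a URL-like string (including the file:// scheme that @tensorflow/tfjs-node registers for local files) or an IOHandler, and resolves to a tf.LayersModel you can use for predict, fit, or further saving. A minimal sketch, assuming a hypothetical ./my-model directory previously written by model.save('file://./my-model'):

const tf = require('@tensorflow/tfjs-node');

async function main() {
    // Load a Layers-format model from disk; the file:// scheme is handled by tfjs-node.
    const model = await tf.loadLayersModel('file://./my-model/model.json');
    model.summary();

    // Run inference; the input shape here is a hypothetical [1, 10] placeholder.
    const input = tf.zeros([1, 10]);
    model.predict(input).print();
}

main();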

github victordibia / anomagram / experiments / ecg.js
async function loadSavedModel() {
    model = await tf.loadLayersModel(modelSavePath + "/model.json");
    console.log("model loaded");

    // const ae = tf.model({ inputs: input, outputs: output, name: "autoencoder" })
    const optimizer = tf.train.adam(modelParams.learningRate, modelParams.adamBeta1);

    model.compile({ optimizer: optimizer, loss: "meanSquaredError" });

    // Continue training the autoencoder: inputs and targets are both xs (reconstruction loss).
    for (let i = 0; i < numSteps; i++) {
        const res = await model.fit(xs,
            xs, { epochs: numEpochs, verbose: 0, batchSize: batchSize });
        console.log("Step loss", i, res.history.loss[0]);
    }

    // Save the retrained model back to its original location and export a copy for the web app.
    await model.save(modelSavePath);
    await model.save("file://../app/public/webmodel/ecg");
}
github tensorflow / tfjs-examples / abalone-node / train.js
  const datasetObj = await createDataset('file://' + csvPath);
  const model = createModel([datasetObj.numOfColumns]);
  // The dataset has 4177 rows. Split them into 2 groups, one for training and
  // one for validation. Take about 3500 rows as train dataset, and the rest as
  // validation dataset.
  const trainBatches = Math.floor(3500 / batchSize);
  const dataset = datasetObj.dataset.shuffle(1000).batch(batchSize);
  const trainDataset = dataset.take(trainBatches);
  const validationDataset = dataset.skip(trainBatches);

  await model.fitDataset(
      trainDataset, {epochs: epochs, validationData: validationDataset});

  await model.save(savePath);

  const loadedModel = await tf.loadLayersModel(savePath + '/model.json');
  const result = loadedModel.predict(
      tf.tensor2d([[0, 0.625, 0.495, 0.165, 1.262, 0.507, 0.318, 0.39]]));
  console.log(
      'The actual test abalone age is 10, the inference result from the model is ' +
      result.dataSync());
}
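createDataset and createModel are defined elsewhere in train.js. A minimal sketch of what a createModel for this single-output regression could look like, with hypothetical layer sizes rather than the repository's actual code:

const tf = require('@tensorflow/tfjs-node');

// Hypothetical sketch: a small dense regression network over the abalone feature columns.
function createModel(inputShape) {
  const model = tf.sequential();
  model.add(tf.layers.dense({ inputShape: inputShape, units: 64, activation: 'relu' }));
  model.add(tf.layers.dense({ units: 1 }));  // single continuous output (abalone age)
  model.compile({ optimizer: tf.train.adam(), loss: 'meanSquaredError' });
  return model;
}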
github adblockradio / adblockradio / predictor-ml / ml-worker.js
(async function() {
	const handler = tf.io.fileSystem(process.env.modelFile); // see https://stackoverflow.com/a/53766926/5317732
	model = await tf.loadLayersModel(handler);

	// load model from remote file
	//const path = 'https://www.adblockradio.com/models/' + canonical + '/model.json';
	//model = await tf.loadModel(path);
	log.info(process.env.canonical + ': ML model loaded');
	send({ type: 'loading', err: null, loaded: true });
})();
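tf.io.fileSystem wraps a plain filesystem path in an IOHandler, as the linked Stack Overflow answer describes. With @tensorflow/tfjs-node loaded, an equivalent one-liner (a sketch, assuming modelFile is an absolute path to a model.json on disk) passes a file:// URL instead:

// Inside an async function; the file:// scheme is registered by @tensorflow/tfjs-node.
const model = await tf.loadLayersModel('file://' + process.env.modelFile);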
github adwellj / node-tfjs-retrain / model.js
async function loadDecapitatedMobilenet() {
    const mobilenet = await tf.loadLayersModel(
        "https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_1.0_224/model.json"
    );

    // Return a model that outputs an internal activation.
    const layer = mobilenet.getLayer("conv_pw_13_relu");
    return tf.model({ inputs: mobilenet.inputs, outputs: layer.output });
}
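The truncated model maps an image batch to the conv_pw_13_relu activation, which makes it a feature extractor for transfer learning. A hedged usage sketch, run inside an async function, where the zero tensor stands in for a real, preprocessed 224x224 RGB image:

const truncatedMobilenet = await loadDecapitatedMobilenet();

// Placeholder for a real image tensor normalized the way MobileNet expects.
const fakeImage = tf.zeros([1, 224, 224, 3]);
const embedding = truncatedMobilenet.predict(fakeImage);
console.log(embedding.shape); // activation of the conv_pw_13_relu layer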
github loretoparisi / tensorflow-node-examples / translation / index.js
this.loadModel = () => new Promise((resolve, reject) => {
        let self = this;
        tf.loadLayersModel(model_path)
            .then(model => {
                model.summary();
                self.loadMetadata();
                resolve(model);
            })
            .catch(error => {
                console.error(error)
                reject(error)
            })
    })
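Since tf.loadLayersModel already returns a promise, the explicit new Promise wrapper here mainly exists to run loadMetadata before resolving. A hypothetical call site:

// Hypothetical usage of the wrapper above, inside an async method:
const translationModel = await this.loadModel();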
github DiscreetAI / dml-library-js / dml-library-js / library / runner.js
static async _getModel(request, callback) {
        const model_url = request.cloud_url + "/model/model.json";
        var model = await tfjs_1.loadLayersModel(model_url);
        fetch(model_url)
        .then(res => res.json())
        .then((out) => {
            model = Runner._compileModel(model, out["modelTopology"]["training_config"]);
            DMLDB._get(request, callback, model);
        }).catch(err => console.error(err));
    }
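This snippet loads the architecture and weights with loadLayersModel, then fetches model.json separately to read the Keras training_config and recompile the model before training. A hypothetical sketch of what such a compile step might do (not DiscreetAI's actual _compileModel):

// Hypothetical sketch: recompile a loaded model from a Keras training_config block.
function compileFromTrainingConfig(model, trainingConfig) {
    // Keras stores names like "mean_squared_error"; tf.js expects "meanSquaredError".
    const toCamel = s => s.replace(/_([a-z])/g, (_, c) => c.toUpperCase());
    model.compile({
        optimizer: trainingConfig.optimizer_config.class_name.toLowerCase(), // e.g. "adam"
        loss: toCamel(trainingConfig.loss),
        metrics: trainingConfig.metrics
    });
    return model;
}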
github loretoparisi / tensorflow-node-examples / lstm-text-generator / index.js
Donec laoreet leo ligula, ut condimentum mi placerat ut. Sed pretium sollicitudin nisl quis tincidunt. Proin id nisl ornare, interdum lorem quis, posuere lacus. Cras cursus mollis scelerisque. Mauris mattis mi sed orci feugiat, et blandit velit tincidunt. Donec ultrices leo vel tellus tincidunt, id vehicula mi commodo. Nulla egestas mollis massa. Etiam blandit nisl eu risus luctus viverra. Mauris eget mi sem.
`;

function createTextDataForTest(sampleLen, sampleStep = 1) {
    return new TextData('LoremIpsum', FAKE_TEXT, sampleLen, sampleStep);
}

function readTextData(localTextDataPath, sampleLen, sampleStep = 1) {
    const text = fs.readFileSync(localTextDataPath, { encoding: 'utf-8' });
    const textData =
        new TextData('text-data', text, sampleLen, sampleStep);
    return textData;
}

let model_path = 'file://./model/nietzsche.json';
tf.loadLayersModel(model_path)
    .then(model => {

        model.summary();

        const sampleLen = 1000;
        const generateLength = 250;
        const temperature = 0.6;

        const textData = readTextData('./data/nietzsche.txt', sampleLen);

        let seedSentence;
        let seedSentenceIndices;
        [seedSentence, seedSentenceIndices] = textData.getRandomSlice();

        let generated = generateText(model, textData, seedSentenceIndices, generateLength, temperature,
            onTextGenerationChar);
    });
github bobiblazeski / js-gym / dist / agents.node.js
async load(infix) {
    const aPath = `${SAVE_METHOD}${infix}_actor/model.json`;
    const cPath = `${SAVE_METHOD}${infix}_critic/model.json`;
    this.actor = await tf.loadLayersModel(aPath);
    this.critic = await tf.loadLayersModel(cPath);
    hardUpdate(this.actor, this.actorTarget);
    hardUpdate(this.critic, this.criticTarget);
  }
}
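hardUpdate and SAVE_METHOD are defined elsewhere in agents.node.js. In actor-critic agents a hard update typically copies the online network's weights straight into its target network; a minimal sketch with the Layers API (not the repository's exact implementation):

// Hypothetical sketch of a hard target-network update.
function hardUpdate(source, target) {
    target.setWeights(source.getWeights());
}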