How to use the tf.model function in @tensorflow/tfjs-node

To help you get started, we’ve selected a few @tensorflow/tfjs-node examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github victordibia / anomagram / experiments / models / ae.js View on Github external
// Fragment (victordibia/anomagram ae.js): builds the encoder and decoder of an
// autoencoder from hyperparameters in `params`.
// NOTE(review): excerpt starts mid-function — `params` and `numFeatures` are
// defined above this excerpt; confirm against the full source.
const latentDim = params.latentDim
    const hiddenDim = params.hiddenDim  // array of hidden-layer sizes, widest first
    const learningRate = params.learningRate, adamBeta1 = params.adamBeta1
    const outputActivation = "sigmoid"
    // console.log(numFeatures);

    // Specify encoder: a stack of dense ReLU layers narrowing toward the latent code
    const input = tf.input({ shape: [numFeatures] })
    let encoderHidden = tf.layers.dense({ units: hiddenDim[0], activation: "relu" }).apply(input);
    let i = 1
    while (i < hiddenDim.length) {
        encoderHidden = tf.layers.dense({ units: hiddenDim[i], activation: "relu" }).apply(encoderHidden);
        i++
    }
    // Latent bottleneck layer (no explicit activation → linear by default)
    const z_ = tf.layers.dense({ units: latentDim }).apply(encoderHidden);
    const encoder = tf.model({ inputs: input, outputs: z_, name: "encoder" })


    // Specify decoder: mirrors the encoder, walking the hidden sizes in reverse
    const latentInput = tf.input({ shape: [latentDim] })
    let decoderHidden = tf.layers.dense({ units: hiddenDim[hiddenDim.length - 1], activation: "relu" }).apply(latentInput);
    let j = hiddenDim.length - 1
    while (j > 0) {
        j--;
        decoderHidden = tf.layers.dense({ units: hiddenDim[j], activation: "relu" }).apply(decoderHidden);

    }

    // Output layer reconstructs the input dimensionality with sigmoid activation
    const decoderOutput = tf.layers.dense({ units: numFeatures, activation: outputActivation }).apply(decoderHidden);
    const decoder = tf.model({ inputs: latentInput, outputs: decoderOutput, name: "decoder" })

    // link output of encoder to decoder
github victordibia / anomagram / experiments / models / ae.js View on Github external
// Fragment (victordibia/anomagram ae.js): decoder half plus final autoencoder
// assembly. NOTE(review): excerpt starts mid-function — `latentInput`,
// `hiddenDim`, `numFeatures`, `outputActivation`, `encoder`, `input`,
// `learningRate` and `adamBeta1` are defined above this excerpt.
let decoderHidden = tf.layers.dense({ units: hiddenDim[hiddenDim.length - 1], activation: "relu" }).apply(latentInput);
    // Walk the hidden sizes in reverse to mirror the encoder stack.
    let j = hiddenDim.length - 1
    while (j > 0) {
        j--;
        decoderHidden = tf.layers.dense({ units: hiddenDim[j], activation: "relu" }).apply(decoderHidden);

    }

    const decoderOutput = tf.layers.dense({ units: numFeatures, activation: outputActivation }).apply(decoderHidden);
    const decoder = tf.model({ inputs: latentInput, outputs: decoderOutput, name: "decoder" })

    // link output of encoder to decoder
    // NOTE(review): `output` is assigned without let/const — presumably
    // declared earlier, otherwise it leaks as a global; verify in full source.
    output = decoder.apply(encoder.apply(input))

    // Construct AE with both encoder and decoder
    const ae = tf.model({ inputs: input, outputs: output, name: "autoencoder" })
    const optimizer = tf.train.adam(learningRate, adamBeta1)

    ae.compile({ optimizer: optimizer, loss: "meanSquaredError" })

    // Return the full autoencoder plus the standalone encoder/decoder halves.
    return [ae, encoder, decoder]
}
github urish / ml-comments-gen / tfjs / src / comment-predictor.ts View on Github external
// Fragment (urish/ml-comments-gen comment-predictor.ts): splits a trained
// seq2seq LayersModel into separate encoder and decoder models for
// step-by-step inference. NOTE(review): excerpt is cut off before the
// constructor's closing brace.
constructor(model: tf.LayersModel, private tokenizers: ITokenizersJson) {
    const { lstm_layer_size } = tokenizers.params;
    // Encoder model: first model input -> (h, c) states of the encoder LSTM.
    const encoderInputs = (model.input as tf.SymbolicTensor[])[0];
    const [, stateHEnc, stateCEnc] = model.layers[2].output as tf.SymbolicTensor[]; // lstm_1
    const encoderStates = [stateHEnc, stateCEnc];
    this.encoderModel = tf.model({ inputs: encoderInputs, outputs: encoderStates });

    // Decoder model: re-wire the trained decoder LSTM so its initial state
    // can be fed in explicitly on every decoding step.
    const decoder_inputs = (model.input as tf.SymbolicTensor[])[1]; // input_2
    const decoder_state_input_h = tf.input({ shape: [lstm_layer_size], name: 'input_3' });
    const decoder_state_input_c = tf.input({ shape: [lstm_layer_size], name: 'input_4' });
    const decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c];
    const decoder_lstm = model.layers[3];
    // apply() with initialState yields [sequenceOutput, stateH, stateC].
    const [decoder_lstm_outputs, state_h_dec, state_c_dec] = decoder_lstm.apply(decoder_inputs, {
      initialState: decoder_states_inputs
    }) as tf.SymbolicTensor[];
    const decoder_states = [state_h_dec, state_c_dec];
    const decoder_dense = model.layers[4];
    const decoder_outputs = decoder_dense.apply(decoder_lstm_outputs) as tf.SymbolicTensor;
    this.decoderModel = tf.model({
      inputs: [decoder_inputs, ...decoder_states_inputs],
      outputs: [decoder_outputs, ...decoder_states]
    });
github victordibia / anomagram / experiments / models / ae.js View on Github external
// Fragment (victordibia/anomagram ae.js): latent layer, decoder, and final
// autoencoder assembly. NOTE(review): excerpt starts mid-function —
// `latentDim`, `hiddenDim`, `numFeatures`, `outputActivation`, `input`,
// `encoderHidden`, `learningRate` and `adamBeta1` are defined above this
// excerpt.
const z_ = tf.layers.dense({ units: latentDim }).apply(encoderHidden);
    const encoder = tf.model({ inputs: input, outputs: z_, name: "encoder" })


    // Specify decoder: mirrors the encoder, walking the hidden sizes in reverse
    const latentInput = tf.input({ shape: [latentDim] })
    let decoderHidden = tf.layers.dense({ units: hiddenDim[hiddenDim.length - 1], activation: "relu" }).apply(latentInput);
    let j = hiddenDim.length - 1
    while (j > 0) {
        j--;
        decoderHidden = tf.layers.dense({ units: hiddenDim[j], activation: "relu" }).apply(decoderHidden);

    }

    const decoderOutput = tf.layers.dense({ units: numFeatures, activation: outputActivation }).apply(decoderHidden);
    const decoder = tf.model({ inputs: latentInput, outputs: decoderOutput, name: "decoder" })

    // link output of encoder to decoder
    // NOTE(review): `output` is assigned without let/const — presumably
    // declared earlier, otherwise it leaks as a global; verify in full source.
    output = decoder.apply(encoder.apply(input))

    // Construct AE with both encoder and decoder
    const ae = tf.model({ inputs: input, outputs: output, name: "autoencoder" })
    const optimizer = tf.train.adam(learningRate, adamBeta1)

    ae.compile({ optimizer: optimizer, loss: "meanSquaredError" })

    // Return the full autoencoder plus the standalone encoder/decoder halves.
    return [ae, encoder, decoder]
}
github adwellj / node-tfjs-retrain / model.js View on Github external
/**
 * Loads pretrained MobileNet v1 and truncates ("decapitates") it at the
 * "conv_pw_13_relu" layer, so the returned model emits that internal
 * activation instead of the final classification scores.
 */
async function loadDecapitatedMobilenet() {
    const MOBILENET_URL =
        "https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_1.0_224/model.json";
    const fullModel = await tf.loadLayersModel(MOBILENET_URL);

    // Expose an internal layer's activation as the new model output.
    const activationLayer = fullModel.getLayer("conv_pw_13_relu");
    return tf.model({ inputs: fullModel.inputs, outputs: activationLayer.output });
}
github urish / ml-comments-gen / tfjs / src / comment-predictor.ts View on Github external
// Fragment (urish/ml-comments-gen comment-predictor.ts): builds the inference
// encoder/decoder pair from a trained seq2seq model. NOTE(review): excerpt
// starts mid-constructor — `model`, `lstm_layer_size`, and the enclosing
// class fields are defined above this excerpt.
const [, stateHEnc, stateCEnc] = model.layers[2].output as tf.SymbolicTensor[]; // lstm_1
    const encoderStates = [stateHEnc, stateCEnc];
    // Encoder model: first model input -> the encoder LSTM's (h, c) states.
    this.encoderModel = tf.model({ inputs: encoderInputs, outputs: encoderStates });

    // Decoder model: re-wire the trained decoder LSTM so its initial state
    // can be fed in explicitly on every decoding step.
    const decoder_inputs = (model.input as tf.SymbolicTensor[])[1]; // input_2
    const decoder_state_input_h = tf.input({ shape: [lstm_layer_size], name: 'input_3' });
    const decoder_state_input_c = tf.input({ shape: [lstm_layer_size], name: 'input_4' });
    const decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c];
    const decoder_lstm = model.layers[3];
    // apply() with initialState yields [sequenceOutput, stateH, stateC].
    const [decoder_lstm_outputs, state_h_dec, state_c_dec] = decoder_lstm.apply(decoder_inputs, {
      initialState: decoder_states_inputs
    }) as tf.SymbolicTensor[];
    const decoder_states = [state_h_dec, state_c_dec];
    const decoder_dense = model.layers[4];
    const decoder_outputs = decoder_dense.apply(decoder_lstm_outputs) as tf.SymbolicTensor;
    this.decoderModel = tf.model({
      inputs: [decoder_inputs, ...decoder_states_inputs],
      outputs: [decoder_outputs, ...decoder_states]
    });
  }
github loretoparisi / tensorflow-node-examples / translation / index.js View on Github external
// Builds the inference-time encoder from a trained seq2seq model: it maps the
// model's first input to the (h, c) state outputs of the LSTM at layers[3 - 1].
// NOTE(review): assumes model.layers[2] is the encoder LSTM — confirm against
// the training script.
this.prepareEncoderModel = (model) => {
        const encoderInputs = model.input[0];
        this.numEncoderTokens = encoderInputs.shape[2];
        console.log('numEncoderTokens = ' + this.numEncoderTokens);

        // The encoder LSTM's output is [sequence, stateH, stateC]; only the
        // states are needed to seed the decoder.
        const lstmOutput = model.layers[2].output;
        const encoderStates = [lstmOutput[1], lstmOutput[2]];

        this.encoderModel = tf.model({
            inputs: encoderInputs,
            outputs: encoderStates
        });
    }
github loretoparisi / tensorflow-node-examples / translation / index.js View on Github external
// Fragment (loretoparisi/tensorflow-node-examples translation/index.js):
// builds the inference-time decoder. NOTE(review): excerpt starts
// mid-function — `decoderStateInputH`, `latentDim`, and `model` are defined
// above this excerpt.
const decoderStateInputC =
            tf.input({ shape: [latentDim], name: 'decoder_state_input_c' });
        const decoderStateInputs = [decoderStateInputH, decoderStateInputC];

        // Re-apply the trained decoder LSTM with explicitly fed initial state
        // so decoding can proceed one step at a time.
        const decoderLSTM = model.layers[3];
        const decoderInputs = decoderLSTM.input[0];
        // apply() with initialState yields [sequenceOutput, stateH, stateC].
        const applyOutputs =
            decoderLSTM.apply(decoderInputs, { initialState: decoderStateInputs });
        let decoderOutputs = applyOutputs[0];
        const decoderStateH = applyOutputs[1];
        const decoderStateC = applyOutputs[2];
        const decoderStates = [decoderStateH, decoderStateC];

        // Final dense layer projects LSTM outputs to token probabilities.
        const decoderDense = model.layers[4];
        decoderOutputs = decoderDense.apply(decoderOutputs);
        this.decoderModel = tf.model({
            inputs: [decoderInputs].concat(decoderStateInputs),
            outputs: [decoderOutputs].concat(decoderStates)
        });
    }