Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Loads a previously saved Layers model from `modelSavePath`, recompiles it,
 * and continues training it on the module-level dataset `xs` for `numSteps`
 * rounds (each round runs `numEpochs` epochs). The retrained model is saved
 * back to `modelSavePath` and also exported for the web client.
 *
 * Relies on module-level state: `model`, `tf`, `modelSavePath`,
 * `modelParams`, `numSteps`, `numEpochs`, `batchSize`, `xs`.
 */
async function loadSavedModel() {
  model = await tf.loadLayersModel(modelSavePath + "/model.json");
  console.log("model loaded");
  // A loaded model must be recompiled before further training.
  const optimizer = tf.train.adam(modelParams.learningRate, modelParams.adamBeta1);
  model.compile({ optimizer: optimizer, loss: "meanSquaredError" });
  for (let i = 0; i < numSteps; i++) {
    // Autoencoder-style fit: the input tensor is also the reconstruction target.
    const res = await model.fit(xs, xs, {
      epochs: numEpochs,
      verbose: 0,
      batchSize: batchSize,
    });
    console.log("Step loss", i, res.history.loss[0]);
  }
  await model.save(modelSavePath);
  // Export a copy for the web app to load at runtime.
  await model.save("file://../app/public/webmodel/ecg");
}
/**
 * Builds, trains, and persists a small two-layer dense classifier.
 * @param {tf.Tensor} xTrain - training features, shape [n, numFeatures].
 * @param {tf.Tensor} yTrain - one-hot training labels.
 * @param {tf.Tensor} xTest - validation features.
 * @param {tf.Tensor} yTest - one-hot validation labels.
 * @returns {Promise<tf.Sequential>} the trained model.
 */
const createModel = async (xTrain, yTrain, xTest, yTest) => {
  const params = { learningRate: 0.1, epochs: 40 };
  // Topology: a 10-unit sigmoid hidden layer plus a softmax output head.
  const model = tf.sequential();
  model.add(
    tf.layers.dense({
      units: 10,
      activation: 'sigmoid',
      inputShape: [xTrain.shape[1]],
    })
  );
  model.add(tf.layers.dense({ units: numClasses, activation: 'softmax' }));
  model.summary();

  model.compile({
    optimizer: tf.train.adam(params.learningRate),
    loss: 'categoricalCrossentropy',
    metrics: ['accuracy'],
  });

  await model.fit(xTrain, yTrain, {
    epochs: params.epochs,
    validationData: [xTest, yTest],
  });
  await model.save('file://model');
  return model;
};
// NOTE(review): this re-declares `createModel` with a body identical to the
// definition above — a second `const createModel` in the same module scope is
// a SyntaxError, so one of the two copies should be removed.
/**
 * Builds, trains, and persists a small two-layer dense classifier.
 * @param {tf.Tensor} xTrain - training features, shape [n, numFeatures].
 * @param {tf.Tensor} yTrain - one-hot training labels.
 * @param {tf.Tensor} xTest - validation features.
 * @param {tf.Tensor} yTest - one-hot validation labels.
 * @returns {Promise<tf.Sequential>} the trained model.
 */
const createModel = async (xTrain, yTrain, xTest, yTest) => {
  const params = { learningRate: 0.1, epochs: 40 };
  const model = tf.sequential();
  // Hidden sigmoid layer sized from the feature dimension, softmax output.
  const layers = [
    tf.layers.dense({ units: 10, activation: 'sigmoid', inputShape: [xTrain.shape[1]] }),
    tf.layers.dense({ units: numClasses, activation: 'softmax' }),
  ];
  for (const layer of layers) {
    model.add(layer);
  }
  model.summary();
  const optimizer = tf.train.adam(params.learningRate);
  model.compile({ optimizer, loss: 'categoricalCrossentropy', metrics: ['accuracy'] });
  await model.fit(xTrain, yTrain, { epochs: params.epochs, validationData: [xTest, yTest] });
  await model.save('file://model');
  return model;
};
// NOTE(review): fragment — the function enclosing this `return`, the rest of
// the `status` object, and the Promise body's closing lines are outside this
// view; `sequenceLength` and `generateDataset` are defined elsewhere.
return new Promise((resolve, reject) => {
// Define the topology of the model: an LSTM over [sequenceLength, 2] inputs
// feeding a single sigmoid unit (binary classification).
const model = tf.sequential();
model.add(tf.layers.lstm({ units: 8, inputShape: [sequenceLength, 2] }));
model.add(tf.layers.dense({ units: 1, activation: 'sigmoid' }));
// Compile model to prepare for training.
const learningRate = 4e-3;
const optimizer = tf.train.rmsprop(learningRate);
model.compile({
loss: 'binaryCrossentropy',
optimizer: optimizer,
metrics: ['acc']
});
// Generate a number of examples for training.
const numTrainExamples = 500;
console.log('Generating training data...');
const [trainSequences, trainLabels] = generateDataset(numTrainExamples, 10);
// Training-progress record; its remaining fields are truncated in this view.
let status = {
train_epoch: 0,
train_loss: 0,
train_acc: 0,
val_loss: 0,
static _compileModel(model, optimization_data) {
var optimizer;
var optimizer_config = optimization_data['optimizer_config']
if (optimizer_config['class_name'] == 'SGD') {
// SGD
optimizer = tfjs_1.train.sgd(optimizer_config['config']['learning_rate']);
} else if (optimizer_config['class_name'] == 'Adam') {
optimizer = tfjs_1.train.adam(optimizer_config['config']['learning_rate'], optimizer_config['config']['beta1'], optimizer_config['config']['beta2'])
} else {
// Not supported!
throw "Optimizer not supported!";
}
model.compile({
optimizer: optimizer,
loss: Runner._lowerCaseToCamelCase(optimization_data['loss']),
metrics: optimization_data['metrics']
});
return model;
}
// NOTE(review): fragment — `hiddenDim`, `decoderHidden`, `latentInput`,
// `encoder`, `input`, `numFeatures`, `outputActivation`, `learningRate`, and
// `adamBeta1` are defined earlier in the enclosing (unseen) builder function.
let j = hiddenDim.length - 1
// Stack dense ReLU layers with decreasing index, walking the hidden sizes
// back down through hiddenDim[j-1] ... hiddenDim[0].
while (j > 0) {
j--;
decoderHidden = tf.layers.dense({ units: hiddenDim[j], activation: "relu" }).apply(decoderHidden);
}
// Final reconstruction layer back to the original feature width.
const decoderOutput = tf.layers.dense({ units: numFeatures, activation: outputActivation }).apply(decoderHidden);
const decoder = tf.model({ inputs: latentInput, outputs: decoderOutput, name: "decoder" })
// Link output of encoder to decoder.
output = decoder.apply(encoder.apply(input))
// Construct AE with both encoder and decoder.
const ae = tf.model({ inputs: input, outputs: output, name: "autoencoder" })
const optimizer = tf.train.adam(learningRate, adamBeta1)
ae.compile({ optimizer: optimizer, loss: "meanSquaredError" })
return [ae, encoder, decoder]
}
// NOTE(review): fragment — the opening `model.add(tf.layers.dense({` of this
// first layer (4 inputs -> 10 sigmoid units) is outside this view.
inputShape: [4],
activation: "sigmoid",
units: 10,
name: "layer1"
}))
// Softmax output layer mapping the 10 hidden units onto 3 classes.
model.add(tf.layers.dense({
inputShape: [10],
activation: "softmax",
units: 3,
name: "layer2"
}))
// Adam with its default learning rate; one-hot targets => categorical CE.
model.compile({
loss: "categoricalCrossentropy",
optimizer: tf.train.adam(),
metrics: ['accuracy'],
})
model.summary()
/**
 * Runs 15 training rounds of 60 epochs each on the module-level `model`
 * with `trainingData`/`outputData`, logging the first recorded loss of
 * every round.
 */
async function train_data() {
  let round = 0;
  while (round < 15) {
    const result = await model.fit(trainingData, outputData, { epochs: 60 });
    console.log(result.history.loss[0]);
    round++;
  }
}
// NOTE(review): fragment — `main` is truncated here; its closing brace and
// any code after the await are outside this view. `train_data` above has no
// return value, so `train` will be undefined.
async function main() {
let train = await train_data();
// NOTE(review): fragment — this is the tail of a constructor's destructured
// options parameter and body; the opening signature (with `epsilon`,
// `actionSize`, `lrActor`, `lrCritic`, etc.) is outside this view.
minBufferSize=MIN_BUFFER_SIZE, updateEvery=UPDATE_EVERY,
bufferSize=BUFFER_SIZE, batchSize=BATCH_SIZE} = {},
buffer) {
// Exploration / schedule parameters.
this.epsilon = epsilon;
this.epsilonDecay = epsilonDecay;
this.minEpsilon = minEpsilon;
this.minBufferSize = minBufferSize;
this.updateEvery = updateEvery;
// Ornstein-Uhlenbeck noise sized to the action dimension — presumably an
// actor-critic (DDPG-style) agent; confirm against the enclosing class.
this.noise = new OUNoise(actionSize);
// Reuse an injected replay buffer when provided, otherwise build a new one.
this.buffer = buffer || new ReplayBuffer(bufferSize, batchSize);
// Local and target networks for both actor and critic.
this.actor = makeActor();
this.actorTarget = makeActor();
this.critic =makeCritic();
this.criticTarget =makeCritic();
this.actorOptimizer = tf.train.adam(lrActor);
this.criticOptimizer = tf.train.adam(lrCritic);
// Initialize targets as exact copies of the local networks.
hardUpdate(this.actor, this.actorTarget);
hardUpdate(this.critic, this.criticTarget);
}
// NOTE(review): fragment — tail of a constructor nearly identical to the one
// above (the opening signature is outside this view); likely a duplicated
// snippet from the same or a sibling agent class.
bufferSize=BUFFER_SIZE, batchSize=BATCH_SIZE} = {},
buffer) {
// Exploration / schedule parameters.
this.epsilon = epsilon;
this.epsilonDecay = epsilonDecay;
this.minEpsilon = minEpsilon;
this.minBufferSize = minBufferSize;
this.updateEvery = updateEvery;
// Ornstein-Uhlenbeck noise sized to the action dimension.
this.noise = new OUNoise(actionSize);
// Reuse an injected replay buffer when provided, otherwise build a new one.
this.buffer = buffer || new ReplayBuffer(bufferSize, batchSize);
// Local and target networks for both actor and critic.
this.actor = makeActor();
this.actorTarget = makeActor();
this.critic =makeCritic();
this.criticTarget =makeCritic();
this.actorOptimizer = tf.train.adam(lrActor);
this.criticOptimizer = tf.train.adam(lrCritic);
// Initialize targets as exact copies of the local networks.
hardUpdate(this.actor, this.actorTarget);
hardUpdate(this.critic, this.criticTarget);
}
/**
 * Creates a small regression network: two 50-unit sigmoid layers followed
 * by a single linear output unit, compiled with SGD (lr = 0.01) and MSE loss.
 * @param {number[]} inputShape - shape of a single input sample.
 * @returns {tf.Sequential} the compiled (untrained) model.
 */
function createModel(inputShape) {
  const model = tf.sequential();
  const hiddenConfig = { activation: 'sigmoid', units: 50 };
  model.add(tf.layers.dense({ ...hiddenConfig, inputShape: inputShape }));
  model.add(tf.layers.dense({ ...hiddenConfig }));
  // Single linear output unit for regression.
  model.add(tf.layers.dense({ units: 1 }));
  model.compile({ optimizer: tf.train.sgd(0.01), loss: 'meanSquaredError' });
  return model;
}