async loadPretrained(url) {
  console.log('load model start');
  // 'https://foo.bar/tfjs_artifacts/model.json'
  this.preTrainedModel = await tf.loadModel(url);
  console.log('load model ok');
}
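A minimal sketch of how loadPretrained might be used from another method of the same class once it resolves, assuming the model takes a flattened 28x28 grayscale input; the input shape and the dummy tensor are illustrative, not taken from the original project.

// Hypothetical usage: load the model, then run a prediction on dummy input.
// The [1, 784] input shape is an assumption for illustration only.
await this.loadPretrained('https://foo.bar/tfjs_artifacts/model.json');
const input = tf.zeros([1, 784]);
const output = this.preTrainedModel.predict(input);
output.print();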
componentDidMount() {
  // init SignaturePad
  this.drawElement = document.getElementById('draw-area');
  this.signaturePad = new SignaturePad(this.drawElement, {
    minWidth: 6,
    maxWidth: 6,
    penColor: 'white',
    backgroundColor: 'black',
  });

  // load pre-trained model
  tf.loadModel('./model/model.json')
    .then(pretrainedModel => {
      document.getElementById('predict-button').classList.remove('is-loading');
      this.model = pretrainedModel;
    });
}
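The snippet above enables a predict button once the model is ready. A sketch of what the matching click handler could look like, assuming the model expects a 28x28 grayscale image drawn on the SignaturePad canvas; the preprocessing steps and shapes are assumptions, not taken from the original component. tf.fromPixels is the pre-1.0 API that matches tf.loadModel above.

handlePredict = () => {
  // Read the drawn image from the canvas and preprocess it for the model
  // (grayscale, resized, normalized to [0, 1]; shapes are assumptions).
  const tensor = tf.tidy(() => {
    const img = tf.fromPixels(this.drawElement, 1);
    const resized = tf.image.resizeBilinear(img, [28, 28]);
    const normalized = resized.toFloat().div(tf.scalar(255));
    return normalized.reshape([1, 28, 28, 1]);
  });
  const prediction = this.model.predict(tensor);
  prediction.data().then(probs => {
    console.log('scores:', probs);
    prediction.dispose();
    tensor.dispose();
  });
};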
async function loadMobilenet(): Promise<tf.Model> {
  // TODO(nsthorat): Move these to GCP when they are no longer JSON.
  const model = await tf.loadModel(
      // tslint:disable-next-line:max-line-length
      'https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.25_224/model.json');

  // Return a model that outputs an internal activation.
  const layer = model.getLayer('conv_pw_13_relu');
  return tf.model({inputs: model.inputs, outputs: layer.output});
}
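A sketch of how the truncated MobileNet returned above is typically used as a fixed feature extractor, assuming a 224x224 RGB frame taken from an image or video element; the element id and the normalization constants are assumptions for illustration.

// Hypothetical usage of loadMobilenet(): compute an internal activation
// ("embedding") for a frame, e.g. as input to a small classifier head.
const mobilenet = await loadMobilenet();
const embedding = tf.tidy(() => {
  const img = tf.fromPixels(document.getElementById('webcam'));  // element id is an assumption
  const resized = tf.image.resizeBilinear(img, [224, 224]);
  // Normalize to [-1, 1], the usual MobileNet input range, and add a batch dim.
  const batched = resized.toFloat()
      .sub(tf.scalar(127.5))
      .div(tf.scalar(127.5))
      .expandDims(0);
  return mobilenet.predict(batched);
});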
const data = snap.data();
if (data.label) return;

// Download the model artifacts from Cloud Storage into the function's temp dir.
const tempJSONPath = path.join(os.tmpdir(), "model.json");
const tempBINPath = path.join(os.tmpdir(), "weights.bin");
const existJSON = await bucket.file("model.json").exists().then(ex => ex[0]);
const existBIN = await bucket.file("weights.bin").exists().then(ex => ex[0]);
if (!existJSON || !existBIN) throw new Error("Missing artifacts.");
await bucket.file("model.json").download({ destination: tempJSONPath });
await bucket.file("weights.bin").download({ destination: tempBINPath });

// Load the model from the local filesystem (the file:// handler requires
// @tensorflow/tfjs-node) and score the comment text.
const modelPath = "file://" + tempJSONPath;
const model = await tf.loadModel(modelPath);
const test_x = [fitData(data.text)];
const score = model.predict(tf.tensor2d(test_x)).dataSync()[0];
data.label = score < 0.5 ? "negative" : "positive";
await db.collection("comments").doc(snap.id).set(data);

// Clean up the downloaded artifacts.
fs.unlinkSync(tempJSONPath);
fs.unlinkSync(tempBINPath);
});
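For context, a sketch of the trigger registration that would surround the body above, assuming the Firebase Functions v1 API; the export name, document path, and require pattern are assumptions, and db, bucket, and fitData are expected to come from the surrounding project.

// Hypothetical wrapper around the snippet above (names are illustrative).
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const tf = require("@tensorflow/tfjs");
require("@tensorflow/tfjs-node");  // registers the Node backend and filesystem IO handler
const path = require("path");
const os = require("os");
const fs = require("fs");

admin.initializeApp();
const db = admin.firestore();
const bucket = admin.storage().bucket();

exports.labelComment = functions.firestore
  .document("comments/{commentId}")
  .onCreate(async (snap, context) => {
    // ... body as in the snippet above: check for an existing label, download
    // model.json / weights.bin, load with tf.loadModel("file://..."),
    // predict, and write the label back to Firestore.
  });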
async init(backend, modelPath, imageSize) {
  this.imageSize = imageSize;
  tf.disposeVariables();
  if (backend) {
    console.log(`Setting the backend to ${backend}`);
    tf.setBackend(backend);
  }
  this.model = await tf.loadModel(modelPath);
  console.log('Model loaded');
}

async runModel(data) {
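  // The original snippet is truncated at this point. What follows is a
  // hypothetical continuation, not the project's actual code: preprocess
  // `data` (assumed to be an image-like element) to the model's input size
  // and return the raw prediction values.
  const input = tf.tidy(() => {
    const img = tf.fromPixels(data);
    const resized = tf.image.resizeBilinear(img, [this.imageSize, this.imageSize]);
    return resized.toFloat().div(tf.scalar(255)).expandDims(0);
  });
  const output = this.model.predict(input);
  const values = await output.data();
  input.dispose();
  output.dispose();
  return values;
}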
async loadModel(lstmLayerSizes) {
  const modelsInfo = await tf.io.listModels();
  if (this.modelSavePath_ in modelsInfo) {
    console.log(`Loading existing model...`);
    this.model = await tf.loadModel(this.modelSavePath_);
    console.log(`Loaded model from ${this.modelSavePath_}`);
  } else {
    throw new Error(
        `Cannot find model at ${this.modelSavePath_}. ` +
        `Train and save the model first.`);
  }
}
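This method only loads a model that was previously saved. A sketch of the matching save step that would make the path appear in tf.io.listModels(), assuming the model is kept in browser storage; the indexeddb:// path is illustrative, not the project's actual save location.

// Hypothetical counterpart: build or train this.model, then persist it so
// that tf.io.listModels() reports it under this.modelSavePath_,
// e.g. this.modelSavePath_ = 'indexeddb://lstm-text-model'  (an assumption).
await this.model.save(this.modelSavePath_);
const saved = await tf.io.listModels();
console.log(Object.keys(saved));  // should now include this.modelSavePath_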
async loadModel() {
  this.model = await tfjs.loadModel("https://raw.githubusercontent.com/lukasy09/IchLerneCNN.py/master/MNIST/model/model.json");
};
async loadModel() {
  this.model = await tf.loadModel("https://raw.githubusercontent.com/lukasy09/KernelBase.py/master/Objects/src/models/model_40.json/model.json");
};
async loadModel() {
  this.model = await tf.loadModel('/assets/model.json');
}
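Whichever of these loadModel variants is used, the pattern afterwards is the same. A minimal sketch of calling the method and then warming the model up with a throwaway prediction, assuming a 28x28 grayscale MNIST-style input; the input shape is an assumption and may differ for the other models.

// Hypothetical usage: load, then run a dummy prediction so the first real
// request does not pay the one-time warm-up cost. Shape is an assumption.
await this.loadModel();
tf.tidy(() => {
  const warmup = this.model.predict(tf.zeros([1, 28, 28, 1]));
  warmup.dataSync();
});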