How to use onnxjs - 10 common examples

To help you get started, we’ve selected a few onnxjs examples based on popular ways the library is used in public projects.
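
Before the project-specific snippets below, here is a minimal sketch of the core onnxjs workflow: create an InferenceSession, load a model, build a Tensor, and run it. The model path, input data and shape are placeholders, not values from any of the projects shown here.

const onnx = require('onnxjs');

async function runExample() {
  // Create a session; backendHint can be 'webgl', 'wasm' or 'cpu'.
  const session = new onnx.InferenceSession({ backendHint: 'webgl' });

  // Load an ONNX model (placeholder path).
  await session.loadModel('./model.onnx');

  // Build an input tensor (placeholder data and shape) and run the model.
  const input = new onnx.Tensor(new Float32Array([1, 2, 3, 4]), 'float32', [2, 2]);
  const outputMap = await session.run([input]);

  // session.run resolves to a map of output name -> Tensor.
  const outputTensor = outputMap.values().next().value;
  console.log(outputTensor.data);
}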


github microsoft/onnxjs: benchmark/src/index.js (view on GitHub)
preprocess(data, width, height) {
  // Wrap the RGBA pixel data in an ndarray and allocate the output buffer
  // in NCHW layout ([1, 3, width, height]).
  const dataTensor = ndarray(new Float32Array(data), [width, height, 4]);
  const dataProcessedTensor = ndarray(new Float32Array(width * height * 3), [1, 3, width, height]);

  // Normalize pixel values from [0, 255] to roughly [-1, 1].
  ops.divseq(dataTensor, 128.0);
  ops.subseq(dataTensor, 1.0);

  // Copy the color channels in reverse order (RGB -> BGR), dropping alpha.
  ops.assign(dataProcessedTensor.pick(0, 0, null, null), dataTensor.pick(null, null, 2));
  ops.assign(dataProcessedTensor.pick(0, 1, null, null), dataTensor.pick(null, null, 1));
  ops.assign(dataProcessedTensor.pick(0, 2, null, null), dataTensor.pick(null, null, 0));

  const tensor = new onnx.Tensor(dataProcessedTensor.data, 'float32', [1, 3, width, height]);
  return tensor;
}
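
In the browser, the data, width and height arguments usually come from a canvas, as the Yolo.vue preprocess further down shows. A hedged sketch of that glue code, assuming preprocess is reachable as shown above and session is an already-loaded onnx.InferenceSession:

// Hypothetical glue code: read pixels from a canvas, preprocess them and run the session.
async function classifyCanvas(session, canvas) {
  const ctx = canvas.getContext('2d');
  const { data, width, height } = ctx.getImageData(0, 0, canvas.width, canvas.height);

  const inputTensor = preprocess(data, width, height);
  const outputMap = await session.run([inputTensor]);
  return outputMap.values().next().value;   // the first (often only) output tensor
}
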
github microsoft/onnxjs-demo: src/components/models/Yolo.vue (view on GitHub)
async postprocess(tensor: Tensor, inferenceTime: number) {
    try {
      // Reinterpret the raw output as [1, 125, 13, 13] and transpose it to
      // [1, 13, 13, 125] so boxes can be decoded per grid cell.
      const originalOutput = new Tensor(tensor.data as Float32Array, 'float32', [1, 125, 13, 13]);
      const outputTensor = yoloTransforms.transpose(originalOutput, [0, 2, 3, 1]);

      // Decode the bounding boxes, then draw each one with its class label,
      // confidence and inference time.
      const boxes = await yolo.postprocess(outputTensor, 20);
      boxes.forEach(box => {
        const {
          top, left, bottom, right, classProb, className,
        } = box;

        this.drawRect(left, top, right - left, bottom - top,
          `${className} Confidence: ${Math.round(classProb * 100)}% Time: ${inferenceTime.toFixed(1)}ms`);
      });
    } catch (e) {
      alert('Model is not valid!');
    }
  }
github microsoft/onnxjs-demo: src/components/models/Yolo.vue (view on GitHub)
preprocess(ctx: CanvasRenderingContext2D): Tensor {
    const imageData = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height);
    const { data, width, height } = imageData;

    // Wrap the RGBA pixel data in an ndarray and allocate the output buffer
    // in NCHW layout ([1, 3, width, height]).
    const dataTensor = ndarray(new Float32Array(data), [width, height, 4]);
    const dataProcessedTensor = ndarray(new Float32Array(width * height * 3), [1, 3, width, height]);

    // Copy the R, G and B channels (dropping alpha); no normalization is applied here.
    ops.assign(dataProcessedTensor.pick(0, 0, null, null), dataTensor.pick(null, null, 0));
    ops.assign(dataProcessedTensor.pick(0, 1, null, null), dataTensor.pick(null, null, 1));
    ops.assign(dataProcessedTensor.pick(0, 2, null, null), dataTensor.pick(null, null, 2));

    const tensor = new Tensor(new Float32Array(width * height * 3), 'float32', [1, 3, width, height]);
    (tensor.data as Float32Array).set(dataProcessedTensor.data);
    return tensor;
  }
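
Putting the two Yolo.vue methods together, a detection pass preprocesses a frame, runs the session, and hands the raw output to postprocess. A rough sketch of that glue code, assuming a loaded InferenceSession stored on the component as this.session (the method name detectFrame is made up):

// Hypothetical component method tying preprocess, session.run and postprocess together.
async detectFrame(ctx) {
  const inputTensor = this.preprocess(ctx);

  const start = performance.now();
  const outputMap = await this.session.run([inputTensor]);
  const inferenceTime = performance.now() - start;

  // session.run resolves to a map of output name -> Tensor; take the first output.
  const outputTensor = outputMap.values().next().value;
  await this.postprocess(outputTensor, inferenceTime);
}
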
github microsoft/onnxjs: benchmark/src/index.js (view on GitHub)
async init(backend, modelPath, imageSize) {
        this.imageSize = imageSize;
        // Create an inference session with the requested backend ('webgl', 'wasm' or 'cpu')
        // and load the ONNX model from the given path.
        const hint = { backendHint: backend };
        this.model = new onnx.InferenceSession(hint);
        await this.model.loadModel(modelPath);
    }
    async runModel(data) {
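
The snippet is cut off at runModel. A minimal sketch of what such a method might look like, assuming the benchmark simply times a single session.run call over the preprocessed input (the timing details are an assumption, not the repository's exact code):

async runModel(data) {
  // Preprocess the raw image data into an input tensor.
  const inputTensor = this.preprocess(data, this.imageSize, this.imageSize);

  // Time a single inference call; session.run resolves to a map of output tensors.
  const start = performance.now();
  const outputMap = await this.model.run([inputTensor]);
  const duration = performance.now() - start;

  return { outputMap, duration };
}
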
github microsoft/onnxjs-demo: src/components/common/DrawingModelUI.vue (view on GitHub)
this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }
    if (this.sessionBackend === 'wasm') {        
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }    
    
    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e){
      this.modelLoading = false;
      this.modelInitializing = false;
      if (this.sessionBackend === 'webgl') {
        this.gpuSession = undefined;
      } else {
        this.cpuSession = undefined;
      }
      throw new Error('Error: Backend not supported. ');
    }
    this.modelLoading = false;
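
The catch branch above clears the failed session and rethrows, leaving any fallback to the caller. A rough sketch of such a caller, retrying on the WebAssembly backend when WebGL initialization fails (onBackendChange and the retry policy are assumptions, not code from the demo):

// Hypothetical caller: switch backends and fall back to 'wasm' if WebGL fails.
async onBackendChange(backend) {
  this.sessionBackend = backend;
  try {
    await this.initSession();
  } catch (e) {
    this.sessionBackend = 'wasm';
    await this.initSession();
  }
}
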
github microsoft/onnxjs-demo: src/components/common/ImageModelUI.vue (view on GitHub)
async initSession() {
    this.sessionRunning = false;
    this.modelLoadingError = false;

    // Reuse a cached WebGL (GPU) session if one exists; otherwise create it.
    if (this.sessionBackend === 'webgl') {
      if (this.gpuSession) {
        this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }

    // Reuse a cached WebAssembly (CPU) session if one exists; otherwise create it.
    if (this.sessionBackend === 'wasm') {
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }

    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e) {
github microsoft/onnxjs-demo: src/components/common/DrawingModelUI.vue (view on GitHub)
async initSession() {
    this.sessionRunning = false;
    this.modelLoadingError = false;
    if (this.sessionBackend === 'webgl') {        
      if (this.gpuSession) {
        this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }
    if (this.sessionBackend === 'wasm') {        
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }    
    
    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e){
github microsoft/onnxjs-demo: src/components/common/WebcamModelUI.vue (view on GitHub)
async initSession() {
    this.sessionRunning = false;
    this.modelLoadingError = false;
    if (this.sessionBackend === 'webgl') {        
      if (this.gpuSession) {
        this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }
    if (this.sessionBackend === 'wasm') {        
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }    
    
    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e){
github microsoft/onnxjs-demo: src/components/common/ImageModelUI.vue (view on GitHub)
this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }
    if (this.sessionBackend === 'wasm') {        
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }    
    
    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e){
      this.modelLoading = false;
      this.modelInitializing = false;
      if (this.sessionBackend === 'webgl') {
        this.gpuSession = undefined;
      } else {
        this.cpuSession = undefined;
      }
      throw new Error('Error: Backend not supported. ');
    }
    this.modelLoading = false;
github microsoft/onnxjs-demo: src/components/common/WebcamModelUI.vue (view on GitHub)
this.session = this.gpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.gpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.gpuSession;
    }
    if (this.sessionBackend === 'wasm') {        
      if (this.cpuSession) {
        this.session = this.cpuSession;
        return;
      }
      this.modelLoading = true;
      this.modelInitializing = true;  
      this.cpuSession = new InferenceSession({backendHint: this.sessionBackend});
      this.session = this.cpuSession;
    }    
    
    try {
      await this.session!.loadModel(this.modelFile);
    } catch (e){
      this.modelLoading = false;
      this.modelInitializing = false;
      if (this.sessionBackend === 'webgl') {
        this.gpuSession = undefined;
      } else {
        this.cpuSession = undefined;
      }
      throw new Error('Error: Backend not supported. ');
    }
    this.modelLoading = false;
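
For the webcam demo, the session is typically run once per captured frame. A rough sketch of such a loop, assuming preprocess/postprocess functions in the style of the Yolo.vue snippets above and a <video> element that is already streaming (all names here are illustrative):

// Hypothetical frame loop: draw the video into a canvas, run the model, repeat.
async function runLoop(session, video, canvas, preprocess, postprocess) {
  const ctx = canvas.getContext('2d');

  async function step() {
    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);

    const inputTensor = preprocess(ctx);
    const start = performance.now();
    const outputMap = await session.run([inputTensor]);
    const outputTensor = outputMap.values().next().value;
    await postprocess(outputTensor, performance.now() - start);

    requestAnimationFrame(step);
  }

  requestAnimationFrame(step);
}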

onnxjs

A JavaScript library for running ONNX models in browsers and on Node.js

License: MIT
Latest version published 4 years ago
Package Health Score: 56 / 100
