How to use kinect2 - 10 common examples

To help you get started, we’ve selected a few kinect2 examples based on popular ways it is used in public projects. The snippets below all come from the kinectron project and show how to choose frame types, open a multi-source reader, and work with body-frame data such as joints and the floor clip plane.

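Before looking at the excerpts, here is a minimal sketch of the flow they all share with kinect2: construct the sensor object, open it, register a frame listener, and start a multi-source reader. The method names and FrameType flags appear in the excerpts below; the 'multiSourceFrame' event name is taken from the library's own examples, so treat the exact handler wiring as an assumption.

var Kinect2 = require('kinect2');
var kinect = new Kinect2();

if (kinect.open()) {
  // Called for every delivered multi-source frame; which properties exist on
  // `frame` (frame.color, frame.body, ...) depends on the frameTypes below.
  kinect.on('multiSourceFrame', function(frame) {
    if (frame.body) {
      // e.g. inspect frame.body.bodies here
    }
  });

  // Request the streams you need by OR-ing FrameType flags together.
  kinect.openMultiSourceReader({
    frameTypes: Kinect2.FrameType.color | Kinect2.FrameType.body
  });
}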

github kinectron / kinectron / app / kinectron-app.js
//busy = false;

      // limit raw depth to 25 fps
      if (Date.now() > sentTime + 40) {
        packageData("rgbd", rgbdImg);
        sentTime = Date.now();
      }

      setTimeout(function() {
        busy = false;
      });
    }); // kinect.on
  } // open
  kinect.openMultiSourceReader({
    frameTypes: Kinect2.FrameType.depth | Kinect2.FrameType.depthColor
  });
}
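
The 40 in sentTime + 40 above is simply 1000 ms / 25 fps = 40 ms between packets. The same rate-limiting idea can be factored into a small helper; this is a sketch, not code from kinectron (packageData and rgbdImg are the names used in the excerpt):

// Returns a sender that forwards at most one payload per intervalMs.
function makeThrottledSender(intervalMs, send) {
  var sentTime = 0;
  return function(name, payload) {
    var now = Date.now();
    if (now > sentTime + intervalMs) {
      send(name, payload);
      sentTime = now;
    }
  };
}

// Usage mirroring the excerpt: cap 'rgbd' packets at roughly 25 fps.
// var sendRgbd = makeThrottledSender(40, packageData);
// sendRgbd('rgbd', rgbdImg);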
github kinectron / kinectron / app / kinectron-app.js
currentFrames = frames;

  // TO DO Simplify the case and result per Shawn
  for (var j = 0; j < frames.length; j++) {
    var frameName;
    var tempName;

    frameName = frames[j];

    switch (frameName) {
      case "color":
        multiFrames.push(Kinect2.FrameType.color);
        break;

      case "depth":
        multiFrames.push(Kinect2.FrameType.depth);
        break;

      case "body":
        multiFrames.push(Kinect2.FrameType.body);
        break;

      case "raw-depth":
        multiFrames.push(Kinect2.FrameType.rawDepth);
        break;

      // case 'bodyIndexColor':
      //   multiFrames.push(Kinect2.FrameType.bodyIndexColor);
      // break;

      // case 'depthColor':
      //   multiFrames.push(Kinect2.FrameType.depthColor);
github kinectron / kinectron / app / kinectron-app.js
var frameName;
    var tempName;

    frameName = frames[j];

    switch (frameName) {
      case "color":
        multiFrames.push(Kinect2.FrameType.color);
        break;

      case "depth":
        multiFrames.push(Kinect2.FrameType.depth);
        break;

      case "body":
        multiFrames.push(Kinect2.FrameType.body);
        break;

      case "raw-depth":
        multiFrames.push(Kinect2.FrameType.rawDepth);
        break;

      // case 'bodyIndexColor':
      //   multiFrames.push(Kinect2.FrameType.bodyIndexColor);
      // break;

      // case 'depthColor':
      //   multiFrames.push(Kinect2.FrameType.depthColor);
      // break;

      //infrared is not implemented for multiframe yet
      // case 'infrared':
github kinectron / kinectron / app / kinectron-app.js
switch (frameName) {
      case "color":
        multiFrames.push(Kinect2.FrameType.color);
        break;

      case "depth":
        multiFrames.push(Kinect2.FrameType.depth);
        break;

      case "body":
        multiFrames.push(Kinect2.FrameType.body);
        break;

      case "raw-depth":
        multiFrames.push(Kinect2.FrameType.rawDepth);
        break;

      // case 'bodyIndexColor':
      //   multiFrames.push(Kinect2.FrameType.bodyIndexColor);
      // break;

      // case 'depthColor':
      //   multiFrames.push(Kinect2.FrameType.depthColor);
      // break;

      //infrared is not implemented for multiframe yet
      // case 'infrared':
      //    multiFrames.push(Kinect2.FrameType.infrared);
      // break;

      // case 'le-infrared':
github kinectron / kinectron / app / kinectron-app.js
return;
  }

  // Set global frames variable for use in preview message
  currentFrames = frames;

  // TO DO Simplify the case and result per Shawn
  for (var j = 0; j < frames.length; j++) {
    var frameName;
    var tempName;

    frameName = frames[j];

    switch (frameName) {
      case "color":
        multiFrames.push(Kinect2.FrameType.color);
        break;

      case "depth":
        multiFrames.push(Kinect2.FrameType.depth);
        break;

      case "body":
        multiFrames.push(Kinect2.FrameType.body);
        break;

      case "raw-depth":
        multiFrames.push(Kinect2.FrameType.rawDepth);
        break;

      // case 'bodyIndexColor':
      //   multiFrames.push(Kinect2.FrameType.bodyIndexColor);
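
All four excerpts above contain the same switch, which translates frame names coming from the client into Kinect2.FrameType flags collected in multiFrames. One way to address the "TO DO Simplify" comment is a lookup table; the following is only a sketch of that idea, not code from the project:

// Frame names handled by the excerpts above, mapped to FrameType flags.
// The commented-out cases (bodyIndexColor, depthColor, infrared) are
// omitted here too, since the original marks them as not implemented.
var FRAME_TYPE_BY_NAME = {
  'color': Kinect2.FrameType.color,
  'depth': Kinect2.FrameType.depth,
  'body': Kinect2.FrameType.body,
  'raw-depth': Kinect2.FrameType.rawDepth
};

// Collect the flags for the requested frames, skipping unknown names.
var multiFrames = [];
for (var j = 0; j < frames.length; j++) {
  var frameType = FRAME_TYPE_BY_NAME[frames[j]];
  if (frameType !== undefined) {
    multiFrames.push(frameType);
  }
}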
github kinectron / kinectron / index.html
// get closest body
            var closestBodyIndex = getClosestBodyIndex(frame.body.bodies);
            if(closestBodyIndex !== trackedBodyIndex) {
              if(closestBodyIndex > -1) {
                kinect.trackPixelsForBodyIndices([closestBodyIndex]);
              } else {
                kinect.trackPixelsForBodyIndices(false);
              }
            }
            else {
              if(closestBodyIndex > -1) {
                //measure distance from floor
                if(frame.body.floorClipPlane)
                {
                  //get position of left hand
                  var joint = frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.handLeft];

                  //https://social.msdn.microsoft.com/Forums/en-US/594cf9ed-3fa6-4700-872c-68054cac5bf0/angle-of-kinect-device-and-effect-on-xyz-positional-data?forum=kinectv2sdk
                  var cameraAngleRadians= Math.atan(frame.body.floorClipPlane.z / frame.body.floorClipPlane.y);
                  var cosCameraAngle = Math.cos(cameraAngleRadians);
                  var sinCameraAngle = Math.sin(cameraAngleRadians);
                  var yprime = joint.cameraY * cosCameraAngle + joint.cameraZ * sinCameraAngle;
                  var jointDistanceFromFloor = frame.body.floorClipPlane.w + yprime;

                  //show height in canvas
                  showHeight(context, joint, jointDistanceFromFloor);
                  showHeight(outputContext, joint, jointDistanceFromFloor);

                  //send height data to remote
                  var jointDataToSend = {joint: joint, distance: jointDistanceFromFloor};

                  sendToPeer('floorHeightTracker', jointDataToSend);
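
The height measurement above corrects for the sensor's tilt: floorClipPlane describes the floor as (x, y, z, w), where w is the sensor's height above it; atan(z / y) gives the tilt angle, and rotating the joint's camera-space Y and Z by that angle before adding w yields the joint's distance above the floor. Pulled out into a standalone sketch (the function name is mine, not the project's):

// Distance of a joint above the floor, using the body frame's floorClipPlane.
function jointDistanceFromFloor(floorClipPlane, joint) {
  // Tilt of the sensor relative to the floor plane.
  var cameraAngleRadians = Math.atan(floorClipPlane.z / floorClipPlane.y);
  // Rotate the joint's camera-space position into the floor's frame.
  var yprime = joint.cameraY * Math.cos(cameraAngleRadians) +
               joint.cameraZ * Math.sin(cameraAngleRadians);
  // w is the sensor's height above the floor.
  return floorClipPlane.w + yprime;
}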
github kinectron / kinectron / examples.js
showHeight(outputContext, joint, jointDistanceFromFloor);

            //send height data to remote
            var jointDataToSend = {joint: joint, distance: jointDistanceFromFloor};

            sendToPeer('floorHeightTracker', jointDataToSend);
          }
        }
      }

      trackedBodyIndex = closestBodyIndex;
      busy = false;
    });

    kinect.openMultiSourceReader({
      frameTypes: Kinect2.FrameType.body | Kinect2.FrameType.color
    });
  }
}
github kinectron / kinectron / index.html
showHeight(outputContext, joint, jointDistanceFromFloor);

                  //send height data to remote
                  var jointDataToSend = {joint: joint, distance: jointDistanceFromFloor};

                  sendToPeer('floorHeightTracker', jointDataToSend);
                }
              }
            }

            trackedBodyIndex = closestBodyIndex;
            busy = false;
          });

          kinect.openMultiSourceReader({
            frameTypes: Kinect2.FrameType.body | Kinect2.FrameType.color
          });
        }
      }
github kinectron / kinectron / examples.js
function getClosestBodyIndex(bodies) {
  var closestZ = Number.MAX_VALUE;
  var closestBodyIndex = -1;
  for(var i = 0; i < bodies.length; i++) {
    if(bodies[i].tracked && bodies[i].joints[Kinect2.JointType.spineMid].cameraZ < closestZ) {
      closestZ = bodies[i].joints[Kinect2.JointType.spineMid].cameraZ;
      closestBodyIndex = i;
    }
  }
  return closestBodyIndex;
}
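
getClosestBodyIndex walks every tracked body and keeps the one whose spineMid joint has the smallest camera-space Z, i.e. the body nearest the sensor. A minimal usage sketch inside a body-frame handler (the handler wiring is assumed, as in the earlier excerpts):

kinect.on('multiSourceFrame', function(frame) {
  if (!frame.body) {
    return;
  }
  var closestBodyIndex = getClosestBodyIndex(frame.body.bodies);
  if (closestBodyIndex > -1) {
    var body = frame.body.bodies[closestBodyIndex];
    // e.g. read body.joints[Kinect2.JointType.handLeft] from here
  }
});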
github kinectron / kinectron / index.html
}
                }

                var pixelWidth = calculatePixelWidth(frame.bodyIndexColor.horizontalFieldOfView, frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.spineMid].cameraZ * 1000);
                scale = 0.3 * pixelWidth;

                //head joint is in middle of head, add area (y-distance from neck to head joint) above
                topJoint = {
                  colorX: topJoint.colorX,
                  colorY: Math.min(topJoint.colorY, frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.head].colorY - (frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.neck].colorY - frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.head].colorY))
                };
                var srcRect = {
                  x: leftJoint.colorX * canvas.width,
                  y: topJoint.colorY * canvas.height,
                  width: (rightJoint.colorX - leftJoint.colorX) * canvas.width,
                  height: (frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.spineMid].floorColorY - topJoint.colorY) * canvas.height
                };
                var dstRect = {
                  x: outputCanvas.width * 0.5,
                  y: outputCanvas.height - (srcRect.height * scale),
                  width: srcRect.width * scale,
                  height: srcRect.height * scale
                };
                //center the user horizontally - is not minus half width of image as user might reach to one side or the other
                //do minus the space on the left size of the spine
                var spaceLeft = frame.body.bodies[closestBodyIndex].joints[Kinect2.JointType.spineMid].colorX - leftJoint.colorX;
                dstRect.x -= (spaceLeft * canvas.width * scale);
                
                newPixelData = frame.bodyIndexColor.bodies[closestBodyIndex].buffer;

                for (var i = 0; i < imageDataSize; i++) {
                  imageDataArray[i] = newPixelData[i];
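
The excerpt is cut off mid-copy: it builds a source rectangle around the tracked user in the color frame, a scaled destination rectangle on the output canvas, and then copies the body-index-masked pixels into an image-data buffer. The final draw step is not shown; under the assumption that imageDataArray backs an ImageData for the hidden canvas, it would typically look like this (a sketch, not the project's code):

// Push the masked body pixels onto the hidden canvas...
context.putImageData(imageData, 0, 0);
// ...then crop srcRect out of it and draw it scaled into dstRect on the output.
outputContext.drawImage(canvas,
  srcRect.x, srcRect.y, srcRect.width, srcRect.height,
  dstRect.x, dstRect.y, dstRect.width, dstRect.height);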

kinect2

Node.js library to access Kinect 2 data from the official Microsoft SDK

License: MIT
Latest version published 2 months ago

Package Health Score: 63 / 100