How to use the MediaStreamTrack.getSources function from react-native-webrtc

To help you get started, we’ve selected a few react-native-webrtc examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from github.com/atyenoria/react-native-webrtc-janus-gateway — src/janus.mobile.js (view on GitHub)
};
                                  constraints.video.mandatory.chromeMediaSourceId = event.data.sourceId;
                                  getScreenMedia(constraints, callback);
                              }
                          } else if (event.data.type == 'janusGetScreenPending') {
                              window.clearTimeout(event.data.id);
                          }
                      });
                      return;
                  }
              }
              // If we got here, we're not screensharing
              if(media === null || media === undefined || media.video !== 'screen') {
                  // Check whether all media sources are actually available or not

                  MediaStreamTrack.getSources(sourceInfos => {
                      console.log(sourceInfos);
                      getUserMedia({
                          audio: true,
                          video: {
                            facingMode: (true ? "user" : "environment"),
                          }
                      },  (stream) => {
                        localStream = stream
                        console.log("Succeeded to get the local camera!")
                        streamsDone(handleId, jsep, media, callbacks, stream)
                      }, (error) => {
                        console.log("Failed to get the local camera!")
                        console.log(error)
                      }
                      )
                  });
Example from github.com/WorldViews/JanusMobile — app/lib/JanusClient.js (view on GitHub)
initWebRTC(cb) {
        let isFront = true;
        let self = this;
        MediaStreamTrack.getSources(sourceInfos => {
          console.log(sourceInfos);
          let videoSourceId = self.options.useOTG ? "UVCCamera" : undefined;
          // for (const i = 0; i < sourceInfos.length; i++) {
          //     const sourceInfo = sourceInfos[i];
          //     if (sourceInfo.kind == "video" && sourceInfo.facing == (isFront ? "front" : "back")) {
          //     videoSourceId = sourceInfo.id;
          //     }
          // }
          let constraints = {
              audio: true,
              video: {
                mandatory: {
                    minWidth: 1280, // Provide your own width, height and frame rate here
                    minHeight: 720,
                    minFrameRate: 30
              },
Example from github.com/WorldViews/JanusMobile — app/lib/janus.js (view on GitHub)
// Enumerate the media capture devices available to the app and hand the
// resulting source-info array to `callback` (defaults to a no-op when the
// caller passes a non-function).
Janus.listDevices = function (callback) {
            callback = (typeof callback == "function") ? callback : Janus.noop;
            // NOTE: react-native-webrtc's legacy MediaStreamTrack.getSources
            // takes a single callback argument (see the other getSources
            // calls in this codebase). The previous code passed a constraints
            // object first and the callback second, so the callback was
            // never invoked with the device list.
            MediaStreamTrack.getSources(function (devices) {
                Janus.debug(devices);
                callback(devices);
            });
        }
        // Helper methods to attach/reattach a stream to a video element (previously part of adapter.js)
Example from github.com/sieuhuflit/react-native-live-stream-webrtc-example — src/Utils.js (view on GitHub)
const getLocalStreamDevice = (isFront, callback) => {
  let videoSourceId;
  if (Platform.OS === 'ios') {
    MediaStreamTrack.getSources(sourceInfos => {
      console.log('sourceInfos: ', sourceInfos);
      for (const i = 0; i < sourceInfos.length; i++) {
        const sourceInfo = sourceInfos[i];
        if (
          sourceInfo.kind == 'video' &&
          sourceInfo.facing == (isFront ? 'front' : 'back')
        ) {
          videoSourceId = sourceInfo.id;
        }
      }
    });
  }
  getUserMedia(
    {
      audio: true,
      video: {