File: source/stream-media.js

  1. /**
  2. * <blockquote class="info">
  3. * Note that if the video codec is not supported, the SDK will not configure the local <code>"offer"</code> or
  4. * <code>"answer"</code> session description to prefer the codec.
  5. * </blockquote>
  6. * The list of available video codecs to set as the preferred video codec to use to encode
  7. * sending video data when available encoded video codec for Peer connections
  8. * configured in the <a href="#method_init"><code>init()</code> method</a>.
  9. * @attribute VIDEO_CODEC
  10. * @param {String} AUTO <small>Value <code>"auto"</code></small>
  11. * The value of the option to not prefer any video codec but rather use the created
  12. * local <code>"offer"</code> / <code>"answer"</code> session description video codec preference.
  13. * @param {String} VP8 <small>Value <code>"VP8"</code></small>
  14. * The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/VP8">VP8</a> video codec.
  15. * @param {String} H264 <small>Value <code>"H264"</code></small>
  16. * The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/H.264/MPEG-4_AVC">H264</a> video codec.
  17. * @type JSON
  18. * @readOnly
  19. * @for Skylink
  20. * @since 0.5.10
  21. */
  22. Skylink.prototype.VIDEO_CODEC = {
  23. AUTO: 'auto',
  24. VP8: 'VP8',
  25. H264: 'H264'
  26. //H264UC: 'H264UC'
  27. };
  28.  
  29. /**
  30. * <blockquote class="info">
  31. * Note that if the audio codec is not supported, the SDK will not configure the local <code>"offer"</code> or
  32. * <code>"answer"</code> session description to prefer the codec.
  33. * </blockquote>
  34. * The list of available audio codecs to set as the preferred audio codec to use to encode
  35. * sending audio data when available encoded audio codec for Peer connections
  36. * configured in the <a href="#method_init"><code>init()</code> method</a>.
  37. * @attribute AUDIO_CODEC
  38. * @param {String} AUTO <small>Value <code>"auto"</code></small>
  39. * The value of the option to not prefer any audio codec but rather use the created
  40. * local <code>"offer"</code> / <code>"answer"</code> session description audio codec preference.
  41. * @param {String} OPUS <small>Value <code>"opus"</code></small>
  42. * The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/Opus_(audio_format)">OPUS</a> audio codec.
  43. * @param {String} ISAC <small>Value <code>"ISAC"</code></small>
  44. * The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/Internet_Speech_Audio_Codec">ISAC</a> audio codec.
  45. * @type JSON
  46. * @readOnly
  47. * @for Skylink
  48. * @since 0.5.10
  49. */
  50. Skylink.prototype.AUDIO_CODEC = {
  51. AUTO: 'auto',
  52. ISAC: 'ISAC',
  53. OPUS: 'opus',
  54. //ILBC: 'ILBC',
  55. //G711: 'G711',
  56. //G722: 'G722',
  57. //SILK: 'SILK'
  58. };
  59.  
  60. /**
  61. * <blockquote class="info">
  62. * Note that currently <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> only configures
  63. * the maximum resolution of the Stream due to browser interopability and support.
  64. * </blockquote>
  65. * The list of <a href="https://en.wikipedia.org/wiki/Graphics_display_resolution#Video_Graphics_Array">
  66. * video resolutions</a> sets configured in the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
  67. * @attribute VIDEO_RESOLUTION
  68. * @param {JSON} QQVGA <small>Value <code>{ width: 160, height: 120 }</code></small>
  69. * The value of the option to configure QQVGA resolution.
  70. * <small>Aspect ratio: <code>4:3</code></small>
  71. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  72. * @param {JSON} HQVGA <small>Value <code>{ width: 240, height: 160 }</code></small>
  73. * The value of the option to configure HQVGA resolution.
  74. * <small>Aspect ratio: <code>3:2</code></small>
  75. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  76. * @param {JSON} QVGA <small>Value <code>{ width: 320, height: 240 }</code></small>
  77. * The value of the option to configure QVGA resolution.
  78. * <small>Aspect ratio: <code>4:3</code></small>
  79. * @param {JSON} WQVGA <small>Value <code>{ width: 384, height: 240 }</code></small>
  80. * The value of the option to configure WQVGA resolution.
  81. * <small>Aspect ratio: <code>16:10</code></small>
  82. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  83. * @param {JSON} HVGA <small>Value <code>{ width: 480, height: 320 }</code></small>
  84. * The value of the option to configure HVGA resolution.
  85. * <small>Aspect ratio: <code>3:2</code></small>
  86. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  87. * @param {JSON} VGA <small>Value <code>{ width: 640, height: 480 }</code></small>
  88. * The value of the option to configure VGA resolution.
  89. * <small>Aspect ratio: <code>4:3</code></small>
  90. * @param {JSON} WVGA <small>Value <code>{ width: 768, height: 480 }</code></small>
  91. * The value of the option to configure WVGA resolution.
  92. * <small>Aspect ratio: <code>16:10</code></small>
  93. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  94. * @param {JSON} FWVGA <small>Value <code>{ width: 854, height: 480 }</code></small>
  95. * The value of the option to configure FWVGA resolution.
  96. * <small>Aspect ratio: <code>16:9</code></small>
  97. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  98. * @param {JSON} SVGA <small>Value <code>{ width: 800, height: 600 }</code></small>
  99. * The value of the option to configure SVGA resolution.
  100. * <small>Aspect ratio: <code>4:3</code></small>
  101. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  102. * @param {JSON} DVGA <small>Value <code>{ width: 960, height: 640 }</code></small>
  103. * The value of the option to configure DVGA resolution.
  104. * <small>Aspect ratio: <code>3:2</code></small>
  105. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  106. * @param {JSON} WSVGA <small>Value <code>{ width: 1024, height: 576 }</code></small>
  107. * The value of the option to configure WSVGA resolution.
  108. * <small>Aspect ratio: <code>16:9</code></small>
  109. * @param {JSON} HD <small>Value <code>{ width: 1280, height: 720 }</code></small>
  110. * The value of the option to configure HD resolution.
  111. * <small>Aspect ratio: <code>16:9</code></small>
  112. * <small>Note that configurating this resolution may not be supported depending on device supports.</small>
  113. * @param {JSON} HDPLUS <small>Value <code>{ width: 1600, height: 900 }</code></small>
  114. * The value of the option to configure HDPLUS resolution.
  115. * <small>Aspect ratio: <code>16:9</code></small>
  116. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  117. * @param {JSON} FHD <small>Value <code>{ width: 1920, height: 1080 }</code></small>
  118. * The value of the option to configure FHD resolution.
  119. * <small>Aspect ratio: <code>16:9</code></small>
  120. * <small>Note that configurating this resolution may not be supported depending on device supports.</small>
  121. * @param {JSON} QHD <small>Value <code>{ width: 2560, height: 1440 }</code></small>
  122. * The value of the option to configure QHD resolution.
  123. * <small>Aspect ratio: <code>16:9</code></small>
  124. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  125. * @param {JSON} WQXGAPLUS <small>Value <code>{ width: 3200, height: 1800 }</code></small>
  126. * The value of the option to configure WQXGAPLUS resolution.
  127. * <small>Aspect ratio: <code>16:9</code></small>
  128. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  129. * @param {JSON} UHD <small>Value <code>{ width: 3840, height: 2160 }</code></small>
  130. * The value of the option to configure UHD resolution.
  131. * <small>Aspect ratio: <code>16:9</code></small>
  132. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  133. * @param {JSON} UHDPLUS <small>Value <code>{ width: 5120, height: 2880 }</code></small>
  134. * The value of the option to configure UHDPLUS resolution.
  135. * <small>Aspect ratio: <code>16:9</code></small>
  136. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  137. * @param {JSON} FUHD <small>Value <code>{ width: 7680, height: 4320 }</code></small>
  138. * The value of the option to configure FUHD resolution.
  139. * <small>Aspect ratio: <code>16:9</code></small>
  140. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  141. * @param {JSON} QUHD <small>Value <code>{ width: 15360, height: 8640 }</code></small>
  142. * The value of the option to configure QUHD resolution.
  143. * <small>Aspect ratio: <code>16:9</code></small>
  144. * <small>Note that configurating this resolution may not be supported depending on browser and device supports.</small>
  145. * @type JSON
  146. * @readOnly
  147. * @for Skylink
  148. * @since 0.5.6
  149. */
  150. Skylink.prototype.VIDEO_RESOLUTION = {
  151. QQVGA: { width: 160, height: 120 /*, aspectRatio: '4:3'*/ },
  152. HQVGA: { width: 240, height: 160 /*, aspectRatio: '3:2'*/ },
  153. QVGA: { width: 320, height: 240 /*, aspectRatio: '4:3'*/ },
  154. WQVGA: { width: 384, height: 240 /*, aspectRatio: '16:10'*/ },
  155. HVGA: { width: 480, height: 320 /*, aspectRatio: '3:2'*/ },
  156. VGA: { width: 640, height: 480 /*, aspectRatio: '4:3'*/ },
  157. WVGA: { width: 768, height: 480 /*, aspectRatio: '16:10'*/ },
  158. FWVGA: { width: 854, height: 480 /*, aspectRatio: '16:9'*/ },
  159. SVGA: { width: 800, height: 600 /*, aspectRatio: '4:3'*/ },
  160. DVGA: { width: 960, height: 640 /*, aspectRatio: '3:2'*/ },
  161. WSVGA: { width: 1024, height: 576 /*, aspectRatio: '16:9'*/ },
  162. HD: { width: 1280, height: 720 /*, aspectRatio: '16:9'*/ },
  163. HDPLUS: { width: 1600, height: 900 /*, aspectRatio: '16:9'*/ },
  164. FHD: { width: 1920, height: 1080 /*, aspectRatio: '16:9'*/ },
  165. QHD: { width: 2560, height: 1440 /*, aspectRatio: '16:9'*/ },
  166. WQXGAPLUS: { width: 3200, height: 1800 /*, aspectRatio: '16:9'*/ },
  167. UHD: { width: 3840, height: 2160 /*, aspectRatio: '16:9'*/ },
  168. UHDPLUS: { width: 5120, height: 2880 /*, aspectRatio: '16:9'*/ },
  169. FUHD: { width: 7680, height: 4320 /*, aspectRatio: '16:9'*/ },
  170. QUHD: { width: 15360, height: 8640 /*, aspectRatio: '16:9'*/ }
  171. };
  172.  
/**
 * The list of <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> or
 * <a href="#method_shareScreen"><code>shareScreen()</code> method</a> Stream fallback states.
 * @attribute MEDIA_ACCESS_FALLBACK_STATE
 * @param {Number} FALLBACKING <small>Value <code>0</code></small>
 * The value of the state when <code>getUserMedia()</code> will retrieve audio track only
 * when retrieving audio and video tracks failed.
 * <small>This can be configured by <a href="#method_init"><code>init()</code> method</a>
 * <code>audioFallback</code> option.</small>
 * @param {Number} FALLBACKED <small>Value <code>1</code></small>
 * The value of the state when <code>getUserMedia()</code> or <code>shareScreen()</code>
 * retrieves camera / screensharing Stream successfully but with missing originally required audio or video tracks.
 * @param {Number} ERROR <small>Value <code>-1</code></small>
 * The value of the state when <code>getUserMedia()</code> failed to retrieve audio track only
 * after retrieving audio and video tracks failed.
 * @type JSON
 * @readOnly
 * @for Skylink
 * @since 0.6.14
 */
Skylink.prototype.MEDIA_ACCESS_FALLBACK_STATE = {
  FALLBACKING: 0,
  FALLBACKED: 1,
  ERROR: -1
};
  197.  
/**
 * Stores the flag that indicates if <code>getUserMedia()</code> should fallback to retrieve
 * audio only Stream after retrieval of audio and video Stream had failed.
 * <small>NOTE(review): presumably set from the <a href="#method_init"><code>init()</code>
 * method</a> <code>audioFallback</code> option — confirm against <code>init()</code>.</small>
 * @attribute _audioFallback
 * @type Boolean
 * @default false
 * @private
 * @for Skylink
 * @since 0.5.4
 */
Skylink.prototype._audioFallback = false;
  209.  
  210. /**
  211. * Stores the Streams.
  212. * @attribute _streams
  213. * @type JSON
  214. * @private
  215. * @for Skylink
  216. * @since 0.6.15
  217. */
  218. Skylink.prototype._streams = {
  219. userMedia: null,
  220. screenshare: null
  221. };
  222.  
  223. /**
  224. * Stores the default camera Stream settings.
  225. * @attribute _streamsDefaultSettings
  226. * @type JSON
  227. * @private
  228. * @for Skylink
  229. * @since 0.6.15
  230. */
  231. Skylink.prototype._streamsDefaultSettings = {
  232. userMedia: {
  233. audio: {
  234. stereo: false
  235. },
  236. video: {
  237. resolution: {
  238. width: 640,
  239. height: 480
  240. },
  241. frameRate: 50
  242. }
  243. },
  244. screenshare: {
  245. video: true
  246. }
  247. };
  248.  
  249. /**
  250. * Stores all the Stream required muted settings.
  251. * @attribute _streamsMutedSettings
  252. * @type JSON
  253. * @private
  254. * @for Skylink
  255. * @since 0.6.15
  256. */
  257. Skylink.prototype._streamsMutedSettings = {
  258. audioMuted: false,
  259. videoMuted: false
  260. };
  261.  
/**
 * Stores all the Stream sending maximum bandwidth settings.
 * <small>NOTE(review): presumably keyed by media type with numeric maximum bandwidth
 * values — confirm against where this is populated.</small>
 * @attribute _streamsBandwidthSettings
 * @type JSON
 * @private
 * @for Skylink
 * @since 0.6.15
 */
Skylink.prototype._streamsBandwidthSettings = {};
  271.  
/**
 * Stores all the Stream stopped callbacks.
 * <small>NOTE(review): looks like a map of callbacks fired when a Stream is stopped,
 * presumably keyed by Stream id — confirm where entries are added.</small>
 * @attribute _streamsStoppedCbs
 * @type JSON
 * @private
 * @for Skylink
 * @since 0.6.15
 */
Skylink.prototype._streamsStoppedCbs = {};
  281.  
  282. /**
  283. * Function that retrieves camera Stream.
  284. * @method getUserMedia
  285. * @param {JSON} [options] The camera Stream configuration options.
  286. * - When not provided, the value is set to <code>{ audio: true, video: true }</code>.
  287. * <small>To fallback to retrieve audio track only when retrieving of audio and video tracks failed,
  288. * enable the <code>audioFallback</code> flag in the <a href="#method_init"><code>init()</code> method</a>.</small>
  289. * @param {Boolean} [options.useExactConstraints=false] <blockquote class="info">
  290. * Note that by enabling this flag, exact values will be requested when retrieving camera Stream,
  291. * but it does not prevent constraints related errors. By default when not enabled,
* expected mandatory maximum values (or optional values for source ID) will be requested to prevent constraints related
  293. * errors, with an exception for <code>options.video.frameRate</code> option in Safari and IE (plugin-enabled) browsers,
  294. * where the expected maximum value will not be requested due to the lack of support.</blockquote>
  295. * The flag if <code>getUserMedia()</code> should request for camera Stream to match exact requested values of
  296. * <code>options.audio.deviceId</code> and <code>options.video.deviceId</code>, <code>options.video.resolution</code>
  297. * and <code>options.video.frameRate</code> when provided.
  298. * @param {Boolean|JSON} [options.audio=false] The audio configuration options.
  299. * @param {Boolean} [options.audio.stereo=false] The flag if stereo band should be configured
  300. * when encoding audio codec is <a href="#attr_AUDIO_CODEC"><code>OPUS</code></a> for sending audio data.
  301. * @param {Boolean} [options.audio.mute=false] The flag if audio tracks should be muted upon receiving them.
  302. * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.audioMuted</code>,
  303. * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.audioMuted</code> value to
  304. * <code>true</code> and mutes any existing <a href="#method_shareScreen">
  305. * <code>shareScreen()</code> Stream</a> audio tracks as well.</small>
  306. * @param {Array} [options.audio.optional] <blockquote class="info">
  307. * Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
  308. * <code>true</code>. If you are looking to set the requested source ID of the audio track,
  309. * use <code>options.audio.deviceId</code> instead.</blockquote>
  310. * The <code>navigator.getUserMedia()</code> API <code>audio: { optional [..] }</code> property.
  311. * @param {String} [options.audio.deviceId] <blockquote class="info">
  312. * Note this is currently not supported in Firefox browsers.
  313. * </blockquote> The audio track source ID of the device to use.
  314. * <small>The list of available audio source ID can be retrieved by the <a href="https://developer.
  315. * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
  316. * API</a>.</small>
  317. * @param {Boolean|JSON} [options.video=false] The video configuration options.
  318. * @param {Boolean} [options.video.mute=false] The flag if video tracks should be muted upon receiving them.
  319. * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.videoMuted</code>,
  320. * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.videoMuted</code> value to
  321. * <code>true</code> and mutes any existing <a href="#method_shareScreen">
  322. * <code>shareScreen()</code> Stream</a> video tracks as well.</small>
  323. * @param {JSON} [options.video.resolution] The video resolution.
  324. * <small>By default, <a href="#attr_VIDEO_RESOLUTION"><code>VGA</code></a> resolution option
  325. * is selected when not provided.</small>
  326. * [Rel: Skylink.VIDEO_RESOLUTION]
  327. * @param {Number} [options.video.resolution.width] The video resolution width.
  328. * @param {Number} [options.video.resolution.height] The video resolution height.
  329. * @param {Number} [options.video.frameRate] The video <a href="https://en.wikipedia.org/wiki/Frame_rate">
  330. * frameRate</a> per second (fps).
  331. * @param {Array} [options.video.optional] <blockquote class="info">
  332. * Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
  333. * <code>true</code>. If you are looking to set the requested source ID of the video track,
  334. * use <code>options.video.deviceId</code> instead.</blockquote>
  335. * The <code>navigator.getUserMedia()</code> API <code>video: { optional [..] }</code> property.
  336. * @param {String} [options.video.deviceId] <blockquote class="info">
  337. * Note this is currently not supported in Firefox browsers.
  338. * </blockquote> The video track source ID of the device to use.
  339. * <small>The list of available video source ID can be retrieved by the <a href="https://developer.
  340. * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
  341. * API</a>.</small>
  342. * @param {Function} [callback] The callback function fired when request has completed.
  343. * <small>Function parameters signature is <code>function (error, success)</code></small>
  344. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  345. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter
  346. * payload value as <code>false</code> for request success.</small>
  347. * @param {Error|String} callback.error The error result in request.
  348. * <small>Defined as <code>null</code> when there are no errors in request</small>
  349. * <small>Object signature is the <code>getUserMedia()</code> error when retrieving camera Stream.</small>
  350. * @param {MediaStream} callback.success The success result in request.
  351. * <small>Defined as <code>null</code> when there are errors in request</small>
  352. * <small>Object signature is the camera Stream object.</small>
  353. * @example
  354. * // Example 1: Get both audio and video.
  355. * skylinkDemo.getUserMedia(function (error, success) {
  356. * if (error) return;
  357. * attachMediaStream(document.getElementById("my-video"), success);
  358. * });
  359. *
  360. * // Example 2: Get only audio.
  361. * skylinkDemo.getUserMedia({
  362. * audio: true
  363. * }, function (error, success) {
  364. * if (error) return;
  365. * attachMediaStream(document.getElementById("my-audio"), success);
  366. * });
  367. *
  368. * // Example 3: Configure resolution for video
  369. * skylinkDemo.getUserMedia({
  370. * audio: true,
  371. * video: {
  372. * resolution: skylinkDemo.VIDEO_RESOLUTION.HD
  373. * }
  374. * }, function (error, success) {
  375. * if (error) return;
  376. * attachMediaStream(document.getElementById("my-video"), success);
  377. * });
  378. *
  379. * // Example 4: Configure stereo flag for OPUS codec audio (OPUS is always used by default)
  380. * skylinkDemo.init({
  381. * appKey: "xxxxxx",
  382. * audioCodec: skylinkDemo.AUDIO_CODEC.OPUS
  383. * }, function (initErr, initSuccess) {
  384. * skylinkDemo.getUserMedia({
  385. * audio: {
  386. * stereo: true
  387. * },
  388. * video: true
  389. * }, function (error, success) {
  390. * if (error) return;
  391. * attachMediaStream(document.getElementById("my-video"), success);
  392. * });
  393. * });
  394. *
  395. * // Example 5: Configure frameRate for video
  396. * skylinkDemo.getUserMedia({
  397. * audio: true,
  398. * video: {
  399. * frameRate: 50
  400. * }
  401. * }, function (error, success) {
  402. * if (error) return;
  403. * attachMediaStream(document.getElementById("my-video"), success);
  404. * });
  405. *
  406. * // Example 6: Configure video and audio based on selected sources. Does not work for Firefox currently.
  407. * var sources = { audio: [], video: [] };
  408. *
  409. * function selectStream (audioSourceId, videoSourceId) {
  410. * if (window.webrtcDetectedBrowser === 'firefox') {
  411. * console.warn("Currently this feature is not supported by Firefox browsers!");
  412. * return;
  413. * }
  414. * skylinkDemo.getUserMedia({
  415. * audio: {
  416. * optional: [{ sourceId: audioSourceId }]
  417. * },
  418. * video: {
  419. * optional: [{ sourceId: videoSourceId }]
  420. * }
  421. * }, function (error, success) {
  422. * if (error) return;
  423. * attachMediaStream(document.getElementById("my-video"), success);
  424. * });
  425. * }
  426. *
  427. * navigator.mediaDevices.enumerateDevices().then(function(devices) {
  428. * var selectedAudioSourceId = "";
  429. * var selectedVideoSourceId = "";
  430. * devices.forEach(function(device) {
  431. * console.log(device.kind + ": " + device.label + " source ID = " + device.deviceId);
* if (device.kind === "audioinput") {
* selectedAudioSourceId = device.deviceId;
* } else if (device.kind === "videoinput") {
* selectedVideoSourceId = device.deviceId;
* }
  437. * });
  438. * selectStream(selectedAudioSourceId, selectedVideoSourceId);
  439. * }).catch(function (error) {
  440. * console.error("Failed", error);
  441. * });
  442. * @trigger <ol class="desc-seq">
  443. * <li>If <code>options.audio</code> value is <code>false</code> and <code>options.video</code>
  444. * value is <code>false</code>: <ol><li><b>ABORT</b> and return error.</li></ol></li>
* <li>Retrieve camera Stream. <ol><li>If retrieval was successful: <ol>
  446. * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
  447. * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
  448. * <li>If there are missing audio or video tracks requested: <ol>
  449. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
  450. * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>false</code> and
  451. * <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
  452. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
  453. * <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  454. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers parameter payload
  455. * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
  456. * value as <code>false</code>.</li></ol></li><li>Else: <ol>
  457. * <li>If <code>options.audioFallback</code> is enabled in the <a href="#method_init"><code>init()</code> method</a>,
  458. * <code>options.audio</code> value is <code>true</code> and <code>options.video</code> value is <code>true</code>: <ol>
  459. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
  460. * parameter payload <code>state</code> as <code>FALLBACKING</code>, <code>isScreensharing</code>
  461. * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  462. * <li>Retrieve camera Stream with audio tracks only. <ol><li>If retrieval was successful: <ol>
  463. * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
  464. * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
  465. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
  466. * parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  467. * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  468. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
  469. * <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  470. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  471. * parameter payload <code>isScreensharing</code> value as <code>false</code> and
  472. * <code>isAudioFallback</code> value as <code>true</code>.</li></ol></li><li>Else: <ol>
  473. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers
  474. * parameter payload <code>isScreensharing</code> value as <code>false</code> and
  475. * <code>isAudioFallbackError</code> value as <code>true</code>.</li>
  476. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
  477. * parameter payload <code>state</code> as <code>ERROR</code>, <code>isScreensharing</code> value as
  478. * <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  479. * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  480. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
  481. * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallbackError</code> value as
  482. * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li></ol>
  483. * @for Skylink
  484. * @since 0.5.6
  485. */
Skylink.prototype.getUserMedia = function(options,callback) {
  var self = this;

  // getUserMedia(callback) form: no options given, capture both audio and video.
  if (typeof options === 'function'){
    callback = options;
    options = {
      audio: true,
      video: true
    };

  } else if (typeof options !== 'object' || options === null) {
    // Omitted options also fall back to the audio + video defaults; any other
    // non-object value (string, number, ...) is rejected as invalid.
    if (typeof options === 'undefined') {
      options = {
        audio: true,
        video: true
      };

    } else {
      var invalidOptionsError = 'Please provide a valid options';
      log.error(invalidOptionsError, options);
      if (typeof callback === 'function') {
        callback(new Error(invalidOptionsError), null);
      }
      return;
    }

  } else if (!options.audio && !options.video) {
    // At least one of audio / video has to be requested.
    var noConstraintOptionsSelectedError = 'Please select audio or video';
    log.error(noConstraintOptionsSelectedError, options);
    if (typeof callback === 'function') {
      callback(new Error(noConstraintOptionsSelectedError), null);
    }
    return;
  }

  // NOTE: a disabled HTTPS-only guard used to live here as commented-out code
  // (rejecting getUserMedia() on non-https:// origins for Chrome > 46).

  // Resolve the callback exactly once from whichever non-screensharing media
  // access event fires first, and unhook the listener for the other outcome.
  if (typeof callback === 'function') {
    var mediaAccessSuccessFn = function (stream) {
      self.off('mediaAccessError', mediaAccessErrorFn);
      callback(null, stream);
    };
    var mediaAccessErrorFn = function (error) {
      self.off('mediaAccessSuccess', mediaAccessSuccessFn);
      callback(error, null);
    };

    self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
      return !isScreensharing;
    });

    self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
      return !isScreensharing;
    });
  }

  // Parse stream settings
  var settings = self._parseStreamSettings(options);

  navigator.getUserMedia(settings.getUserMediaSettings, function (stream) {
    // Persist the requested mute state so the new tracks are muted accordingly.
    // NOTE(review): shouldAudioMuted / shouldVideoMuted presumably come from
    // options.audio.mute / options.video.mute — confirm in _parseStreamSettings().
    if (settings.mutedSettings.shouldAudioMuted) {
      self._streamsMutedSettings.audioMuted = true;
    }

    if (settings.mutedSettings.shouldVideoMuted) {
      self._streamsMutedSettings.videoMuted = true;
    }

    // NOTE(review): the trailing flags look like (isScreensharing, isAudioFallback)
    // — confirm against _onStreamAccessSuccess() / _onStreamAccessError().
    self._onStreamAccessSuccess(stream, settings, false, false);

  }, function (error) {
    self._onStreamAccessError(error, settings, false, false);
  });
};
  568.  
  569. /**
  570. * <blockquote class="info">
  571. * Note that if <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is available despite having
  572. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> available, the
  573. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is sent instead of the
  574. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> to Peers.
  575. * </blockquote>
  576. * Function that sends a new <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>
  577. * to all connected Peers in the Room.
  578. * @method sendStream
  579. * @param {JSON|MediaStream} options The <a href="#method_getUserMedia"><code>getUserMedia()</code>
  580. * method</a> <code>options</code> parameter settings.
  581. * - When provided as a <code>MediaStream</code> object, this configures the <code>options.audio</code> and
  582. * <code>options.video</code> based on the tracks available in the <code>MediaStream</code> object,
  583. * and configures the <code>options.audio.mute</code> and <code>options.video.mute</code> based on the tracks
  584. * <code>.enabled</code> flags in the tracks provided in the <code>MediaStream</code> object without
  585. * invoking <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
  586. * <small>Object signature matches the <code>options</code> parameter in the
  587. * <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.</small>
  588. * @param {Function} [callback] The callback function fired when request has completed.
  589. * <small>Function parameters signature is <code>function (error, success)</code></small>
  590. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  591. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
  592. * as <code>false</code> for request success when User is in Room without Peers,
  593. * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
  594. * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
  595. * for request success when User is in Room with Peers.</small>
  596. * @param {Error|String} callback.error The error result in request.
  597. * <small>Defined as <code>null</code> when there are no errors in request</small>
  598. * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> error or
  599. * when invalid <code>options</code> is provided.</small>
  600. * @param {MediaStream} callback.success The success result in request.
  601. * <small>Defined as <code>null</code> when there are errors in request</small>
  602. * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>
  603. * Stream object.</small>
  604. * @example
  605. * // Example 1: Send MediaStream object
  606. * function retrieveStreamBySourceForFirefox (sourceId) {
  607. * navigator.mediaDevices.getUserMedia({
  608. * audio: true,
  609. * video: {
  610. * sourceId: { exact: sourceId }
  611. * }
  612. * }).then(function (stream) {
  613. * skylinkDemo.sendStream(stream, function (error, success) {
 *     if (error) return;
  615. * if (stream === success) {
  616. * console.info("Same MediaStream has been sent");
  617. * }
  618. * console.log("Stream is now being sent to Peers");
  619. * attachMediaStream(document.getElementById("my-video"), success);
  620. * });
  621. * });
  622. * }
  623. *
  624. * // Example 2: Send video later
  625. * var inRoom = false;
  626. *
  627. * function sendVideo () {
  628. * if (!inRoom) return;
  629. * skylinkDemo.sendStream({
  630. * audio: true,
  631. * video: true
  632. * }, function (error, success) {
  633. * if (error) return;
  634. * console.log("getUserMedia() Stream with video is now being sent to Peers");
  635. * attachMediaStream(document.getElementById("my-video"), success);
  636. * });
  637. * }
  638. *
  639. * skylinkDemo.joinRoom({
  640. * audio: true
  641. * }, function (jRError, jRSuccess) {
  642. * if (jRError) return;
  643. * inRoom = true;
  644. * });
  645. * @trigger <ol class="desc-seq">
  646. * <li>If User is not in Room: <ol><li><b>ABORT</b> and return error.</li></ol></li>
  647. * <li>Checks <code>options</code> provided. <ol><li>If provided parameter <code>options</code> is not valid: <ol>
  648. * <li><b>ABORT</b> and return error.</li></ol></li>
  649. * <li>Else if provided parameter <code>options</code> is a Stream object: <ol>
  650. * <li>Checks if there is any audio or video tracks. <ol><li>If there is no tracks: <ol>
  651. * <li><b>ABORT</b> and return error.</li></ol></li><li>Else: <ol>
  652. * <li>Set <code>options.audio</code> value as <code>true</code> if Stream has audio tracks.</li>
 * <li>Set <code>options.video</code> value as <code>true</code> if Stream has video tracks.</li>
  654. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in
  655. * <code>peerInfo.mediaStatus</code>. <small>This can be retrieved with
  656. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  657. * <li>If there is any previous <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>:
  658. * <ol><li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a> to stop previous Stream.</li></ol></li>
  659. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  660. * parameter payload <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
  661. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  662. * <li>Invoke <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> with
  663. * <code>options</code> provided in <code>sendStream()</code>. <ol><li>If request has errors: <ol>
  664. * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li>
  665. * <li>If there is currently no <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
  666. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  667. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
  668. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
  669. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  670. * <code>isSelf</code> value as <code>true</code>.</li>
  671. * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
  672. * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code>
  673. * method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li>
  674. * <li>Else: <ol><li>If there are connected Peers in the Room: <ol>
  675. * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>. <ol>
  676. * <li>If request has errors: <ol><li><b>ABORT</b> and return error.
  677. * </li></ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
  678. * @for Skylink
  679. * @since 0.5.6
  680. */
  681.  
  682. Skylink.prototype.sendStream = function(options, callback) {
  683. var self = this;
  684.  
  685. var restartFn = function (stream) {
  686. if (self._inRoom) {
  687. if (!self._streams.screenshare) {
  688. self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo());
  689. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  690. }
  691.  
  692. if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
  693. self._refreshPeerConnection(Object.keys(self._peerConnections), false, function (err, success) {
  694. if (err) {
  695. log.error('Failed refreshing connections for sendStream() ->', err);
  696. if (typeof callback === 'function') {
  697. callback(new Error('Failed refreshing connections.'), null);
  698. }
  699. return;
  700. }
  701. if (typeof callback === 'function') {
  702. callback(null, stream);
  703. }
  704. });
  705. } else if (typeof callback === 'function') {
  706. callback(null, stream);
  707. }
  708. } else {
  709. var notInRoomAgainError = 'Unable to send stream as user is not in the Room.';
  710. log.error(notInRoomAgainError, stream);
  711. if (typeof callback === 'function') {
  712. callback(new Error(notInRoomAgainError), null);
  713. }
  714. }
  715. };
  716.  
  717. if (typeof options !== 'object' || options === null) {
  718. var invalidOptionsError = 'Provided stream settings is invalid';
  719. log.error(invalidOptionsError, options);
  720. if (typeof callback === 'function'){
  721. callback(new Error(invalidOptionsError),null);
  722. }
  723. return;
  724. }
  725.  
  726. if (!self._inRoom) {
  727. var notInRoomError = 'Unable to send stream as user is not in the Room.';
  728. log.error(notInRoomError, options);
  729. if (typeof callback === 'function'){
  730. callback(new Error(notInRoomError),null);
  731. }
  732. return;
  733. }
  734.  
  735. if (typeof options.getAudioTracks === 'function' || typeof options.getVideoTracks === 'function') {
  736. var checkActiveTracksFn = function (tracks) {
  737. for (var t = 0; t < tracks.length; t++) {
  738. if (!(tracks[t].ended || (typeof tracks[t].readyState === 'string' ?
  739. tracks[t].readyState !== 'live' : false))) {
  740. return true;
  741. }
  742. }
  743. return false;
  744. };
  745.  
  746. if (!checkActiveTracksFn( options.getAudioTracks() ) && !checkActiveTracksFn( options.getVideoTracks() )) {
  747. var invalidStreamError = 'Provided stream object does not have audio or video tracks.';
  748. log.error(invalidStreamError, options);
  749. if (typeof callback === 'function'){
  750. callback(new Error(invalidStreamError),null);
  751. }
  752. return;
  753. }
  754.  
  755. self._onStreamAccessSuccess(options, {
  756. settings: {
  757. audio: true,
  758. video: true
  759. },
  760. getUserMediaSettings: {
  761. audio: true,
  762. video: true
  763. }
  764. }, false, false);
  765.  
  766. restartFn(options);
  767.  
  768. } else {
  769. self.getUserMedia(options, function (err, stream) {
  770. if (err) {
  771. if (typeof callback === 'function') {
  772. callback(err, null);
  773. }
  774. return;
  775. }
  776. restartFn(stream);
  777. });
  778. }
  779. };
  780.  
  781. /**
  782. * <blockquote class="info">
  783. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  784. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  785. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  786. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  787. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  788. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  789. * sent within less than an interval.
  790. * </blockquote>
  791. * Function that stops <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.
  792. * @method stopStream
  793. * @example
  794. * function stopStream () {
  795. * skylinkDemo.stopStream();
  796. * }
  797. *
  798. * skylinkDemo.getUserMedia();
  799. * @trigger <ol class="desc-seq">
  800. * <li>Checks if there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>. <ol>
  801. * <li>If there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
  802. * <li>Stop <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> Stream. <ol>
  803. * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a> triggers
  804. * parameter payload <code>isScreensharing</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
  805. * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter
 * payload <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>false</code>
  807. * .</li><li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  808. * <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol></li></ol>
  809. * @for Skylink
  810. * @since 0.5.6
  811. */
  812. Skylink.prototype.stopStream = function () {
  813. if (this._streams.userMedia) {
  814. this._stopStreams({
  815. userMedia: true
  816. });
  817. }
  818. };
  819.  
  820. /**
  821. * <blockquote class="info">
  822. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  823. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  824. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  825. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  826. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  827. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  828. * sent within less than an interval.
  829. * </blockquote>
  830. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  831. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio or video tracks.
  832. * @method muteStream
  833. * @param {JSON} options The Streams muting options.
  834. * @param {Boolean} [options.audioMuted=true] The flag if all Streams audio
  835. * tracks should be muted or not.
  836. * @param {Boolean} [options.videoMuted=true] The flag if all Streams video
  837. * tracks should be muted or not.
  838. * @example
  839. * // Example 1: Mute both audio and video tracks in all Streams
  840. * skylinkDemo.muteStream({
  841. * audioMuted: true,
  842. * videoMuted: true
  843. * });
  844. *
  845. * // Example 2: Mute only audio tracks in all Streams
  846. * skylinkDemo.muteStream({
  847. * audioMuted: true,
  848. * videoMuted: false
  849. * });
  850. *
  851. * // Example 3: Mute only video tracks in all Streams
  852. * skylinkDemo.muteStream({
  853. * audioMuted: false,
  854. * videoMuted: true
  855. * });
  856. * @trigger <ol class="desc-seq">
  857. * <li>If provided parameter <code>options</code> is invalid: <ol><li><b>ABORT</b> and return error.</li></ol></li>
  858. * <li>Checks if there is any available Streams: <ol><li>If there is no available Streams: <ol>
  859. * <li><b>ABORT</b> and return error.</li></ol></li><li>If User is in Room: <ol>
  860. * <li>Checks if there is audio tracks to mute / unmute: <ol><li>If there is audio tracks to mute / unmute: <ol>
  861. * <li>If <code>options.audioMuted</code> value is not the same as the current
  862. * <code>peerInfo.mediaStatus.audioMuted</code>: <small>This can be retrieved with
  863. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
  864. * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
  865. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
  866. * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a>
  867. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li>
  868. * <li>Checks if there is video tracks to mute / unmute: <ol><li>If there is video tracks to mute / unmute: <ol>
  869. * <li>If <code>options.videoMuted</code> value is not the same as the current
  870. * <code>peerInfo.mediaStatus.videoMuted</code>: <small>This can be retrieved with
  871. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
  872. * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
  873. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
  874. * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a> triggers with
  875. * parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li></ol></li>
  876. * <li>If <code>options.audioMuted</code> value is not the same as the current
  877. * <code>peerInfo.mediaStatus.audioMuted</code> or <code>options.videoMuted</code> value is not
  878. * the same as the current <code>peerInfo.mediaStatus.videoMuted</code>: <ol>
  879. * <li><a href="#event_localMediaMuted"><code>localMediaMuted</code> event</a> triggers.</li>
  880. * <li>If User is in Room: <ol><li><a href="#event_streamMuted"><code>streamMuted</code> event</a>
  881. * triggers with parameter payload <code>isSelf</code> value as <code>true</code>.</li>
  882. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers with
  883. * parameter payload <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol>
  884. * @for Skylink
  885. * @since 0.5.7
  886. */
  887. Skylink.prototype.muteStream = function(options) {
  888. var self = this;
  889.  
  890. if (typeof options !== 'object') {
  891. log.error('Provided settings is not an object');
  892. return;
  893. }
  894.  
  895. if (!(self._streams.userMedia && self._streams.userMedia.stream) &&
  896. !(self._streams.screenshare && self._streams.screenshare.stream)) {
  897. log.warn('No streams are available to mute / unmute!');
  898. return;
  899. }
  900.  
  901. var audioMuted = typeof options.audioMuted === 'boolean' ? options.audioMuted : true;
  902. var videoMuted = typeof options.videoMuted === 'boolean' ? options.videoMuted : true;
  903. var hasToggledAudio = false;
  904. var hasToggledVideo = false;
  905.  
  906. if (self._streamsMutedSettings.audioMuted !== audioMuted) {
  907. self._streamsMutedSettings.audioMuted = audioMuted;
  908. hasToggledAudio = true;
  909. }
  910.  
  911. if (self._streamsMutedSettings.videoMuted !== videoMuted) {
  912. self._streamsMutedSettings.videoMuted = videoMuted;
  913. hasToggledVideo = true;
  914. }
  915.  
  916. if (hasToggledVideo || hasToggledAudio) {
  917. var streamTracksAvailability = self._muteStreams();
  918.  
  919. if (hasToggledVideo && self._inRoom) {
  920. self._sendChannelMessage({
  921. type: self._SIG_MESSAGE_TYPE.MUTE_VIDEO,
  922. mid: self._user.sid,
  923. rid: self._room.id,
  924. muted: self._streamsMutedSettings.videoMuted,
  925. stamp: (new Date()).getTime()
  926. });
  927. }
  928.  
  929. if (hasToggledAudio && self._inRoom) {
  930. setTimeout(function () {
  931. self._sendChannelMessage({
  932. type: self._SIG_MESSAGE_TYPE.MUTE_AUDIO,
  933. mid: self._user.sid,
  934. rid: self._room.id,
  935. muted: self._streamsMutedSettings.audioMuted,
  936. stamp: (new Date()).getTime()
  937. });
  938. }, hasToggledVideo ? 1050 : 0);
  939. }
  940.  
  941. if ((streamTracksAvailability.hasVideo && hasToggledVideo) ||
  942. (streamTracksAvailability.hasAudio && hasToggledAudio)) {
  943.  
  944. self._trigger('localMediaMuted', {
  945. audioMuted: streamTracksAvailability.hasAudio ? self._streamsMutedSettings.audioMuted : true,
  946. videoMuted: streamTracksAvailability.hasVideo ? self._streamsMutedSettings.videoMuted : true
  947. });
  948.  
  949. if (self._inRoom) {
  950. self._trigger('streamMuted', self._user.sid, self.getPeerInfo(), true,
  951. self._streams.screenshare && self._streams.screenshare.stream);
  952. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  953. }
  954. }
  955. }
  956. };
  957.  
  958. /**
  959. * <blockquote class="info"><b>Deprecation Warning!</b>
  960. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  961. * </blockquote>
  962. * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  963. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
  964. * @method enableAudio
  965. * @deprecated true
  966. * @example
  967. * function unmuteAudio () {
  968. * skylinkDemo.enableAudio();
  969. * }
  970. * @trigger <ol class="desc-seq">
  971. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  972. * <code>options.audioMuted</code> value as <code>false</code> and
  973. * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
  974. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  975. * @for Skylink
  976. * @since 0.5.5
  977. */
  978. Skylink.prototype.enableAudio = function() {
  979. this.muteStream({
  980. audioMuted: false,
  981. videoMuted: this._streamsMutedSettings.videoMuted
  982. });
  983. };
  984.  
  985. /**
  986. * <blockquote class="info"><b>Deprecation Warning!</b>
  987. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  988. * </blockquote>
  989. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  990. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
  991. * @method disableAudio
  992. * @deprecated true
  993. * @example
  994. * function muteAudio () {
  995. * skylinkDemo.disableAudio();
  996. * }
  997. * @trigger <ol class="desc-seq">
  998. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  999. * <code>options.audioMuted</code> value as <code>true</code> and
  1000. * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
  1001. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  1002. * @for Skylink
  1003. * @since 0.5.5
  1004. */
  1005. Skylink.prototype.disableAudio = function() {
  1006. this.muteStream({
  1007. audioMuted: true,
  1008. videoMuted: this._streamsMutedSettings.videoMuted
  1009. });
  1010. };
  1011.  
  1012. /**
  1013. * <blockquote class="info"><b>Deprecation Warning!</b>
  1014. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  1015. * </blockquote>
  1016. * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  1017. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
  1018. * @method enableVideo
  1019. * @deprecated true
  1020. * @example
  1021. * function unmuteVideo () {
  1022. * skylinkDemo.enableVideo();
  1023. * }
  1024. * @trigger <ol class="desc-seq">
  1025. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  1026. * <code>options.videoMuted</code> value as <code>false</code> and
  1027. * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
  1028. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  1029. * @for Skylink
  1030. * @since 0.5.5
  1031. */
  1032. Skylink.prototype.enableVideo = function() {
  1033. this.muteStream({
  1034. videoMuted: false,
  1035. audioMuted: this._streamsMutedSettings.audioMuted
  1036. });
  1037. };
  1038.  
  1039. /**
  1040. * <blockquote class="info"><b>Deprecation Warning!</b>
  1041. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  1042. * </blockquote>
  1043. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  1044. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
  1045. * @method disableVideo
  1046. * @deprecated true
  1047. * @example
  1048. * function muteVideo () {
  1049. * skylinkDemo.disableVideo();
  1050. * }
  1051. * @trigger <ol class="desc-seq">
  1052. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  1053. * <code>options.videoMuted</code> value as <code>true</code> and
  1054. * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
  1055. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  1056. * @for Skylink
  1057. * @since 0.5.5
  1058. */
  1059. Skylink.prototype.disableVideo = function() {
  1060. this.muteStream({
  1061. videoMuted: true,
  1062. audioMuted: this._streamsMutedSettings.audioMuted
  1063. });
  1064. };
  1065.  
  1066. /**
  1067. * Function that retrieves screensharing Stream.
  1068. * @method shareScreen
 * @param {Boolean} [enableAudio=true] The flag if audio tracks should be retrieved.
  1070. * @param {Function} [callback] The callback function fired when request has completed.
  1071. * <small>Function parameters signature is <code>function (error, success)</code></small>
  1072. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  1073. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
  1074. * as <code>true</code> for request success when User is not in the Room or is in Room without Peers,
  1075. * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
  1076. * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
  1077. * for request success when User is in Room with Peers.</small>
  1078. * @param {Error|String} callback.error The error result in request.
  1079. * <small>Defined as <code>null</code> when there are no errors in request</small>
  1080. * <small>Object signature is the <code>shareScreen()</code> error when retrieving screensharing Stream.</small>
  1081. * @param {MediaStream} callback.success The success result in request.
  1082. * <small>Defined as <code>null</code> when there are errors in request</small>
  1083. * <small>Object signature is the screensharing Stream object.</small>
  1084. * @example
  1085. * // Example 1: Share screen with audio
  1086. * skylinkDemo.shareScreen(function (error, success) {
  1087. * if (error) return;
  1088. * attachMediaStream(document.getElementById("my-screen"), success);
  1089. * });
  1090. *
  1091. * // Example 2: Share screen without audio
  1092. * skylinkDemo.shareScreen(false, function (error, success) {
  1093. * if (error) return;
  1094. * attachMediaStream(document.getElementById("my-screen"), success);
  1095. * });
  1096. * @trigger <ol class="desc-seq">
  1097. * <li>Retrieves screensharing Stream. <ol><li>If retrieval was successful: <ol><li>If browser is Firefox: <ol>
  1098. * <li>If there are missing audio or video tracks requested: <ol>
  1099. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1100. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1101. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1102. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1103. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
  1104. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1105. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1106. * value as <code>false</code>.</li></ol></li><li>Else: <ol>
  1107. * <li>If audio is requested: <small>Chrome, Safari and IE currently doesn't support retrieval of
  1108. * audio track together with screensharing video track.</small> <ol><li>Retrieves audio Stream: <ol>
  1109. * <li>If retrieval was successful: <ol><li>Attempts to attach screensharing Stream video track to audio Stream. <ol>
  1110. * <li>If attachment was successful: <ol><li><a href="#event_mediaAccessSuccess">
  1111. * <code>mediaAccessSuccess</code> event</a> triggers parameter payload <code>isScreensharing</code>
  1112. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li><li>Else: <ol>
  1113. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1114. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1115. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
  1116. * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>true</code> and
  1117. * <code>isAudioFallback</code> value as <code>false</code>.</li>
  1118. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1119. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1120. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1121. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1122. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1123. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1124. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1125. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li>
  1126. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1127. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1128. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1129. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a>
  1130. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code>
  1131. * and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1132. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
  1133. * <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code> value as
  1134. * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li><li>If User is in Room: <ol>
  1135. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1136. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as <code>shareScreen()</code> Stream.</li>
  1137. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1138. * <code>isSelf</code> value as <code>true</code>.</li>
  1139. * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
  1140. * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.
  1141. * <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1142. * <li>If there are connected Peers in the Room: <ol><li>Invoke <a href="#method_refreshConnection">
  1143. * <code>refreshConnection()</code> method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li>
  1144. * </ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
  1145. * @for Skylink
  1146. * @since 0.6.0
  1147. */
  1148. Skylink.prototype.shareScreen = function (enableAudio, callback) {
  1149. var self = this;
  1150.  
  1151. if (typeof enableAudio === 'function') {
  1152. callback = enableAudio;
  1153. enableAudio = true;
  1154. }
  1155.  
  1156. if (typeof enableAudio !== 'boolean') {
  1157. enableAudio = true;
  1158. }
  1159.  
  // Throttles invocations of `fn`: a call arriving within `wait` ms of the
  // previous accepted call is silently dropped, unless the
  // `self._timestamp.screen` flag is set, which bypasses the throttle window
  // entirely (presumably set elsewhere to force a screen re-retrieval —
  // verify against the other `_timestamp.screen` writers in this file).
  var throttleFn = function (fn, wait) {
    if (!self._timestamp.func){
      //First time run, need to force timestamp to skip condition
      // NOTE(review): seeds the last-accepted timestamp `wait` ms in the
      // past so the very first call always passes the check below. Assumes
      // `self._timestamp.now` holds a baseline epoch-ms value initialised
      // elsewhere — TODO confirm.
      self._timestamp.func = self._timestamp.now - wait;
    }
    var now = Date.now();

    if (!self._timestamp.screen) {
      if (now - self._timestamp.func < wait) {
        // Called again too soon — drop this invocation without error.
        return;
      }
    }
    // Accept the call: run it, clear the bypass flag and record the time.
    fn();
    self._timestamp.screen = false;
    self._timestamp.func = now;
  };
  1176.  
  1177. throttleFn(function () {
  1178. var settings = {
  1179. settings: {
  1180. audio: enableAudio,
  1181. video: {
  1182. screenshare: true
  1183. }
  1184. },
  1185. getUserMediaSettings: {
  1186. video: {
  1187. mediaSource: 'window'
  1188. }
  1189. }
  1190. };
  1191.  
  1192. var mediaAccessSuccessFn = function (stream) {
  1193. self.off('mediaAccessError', mediaAccessErrorFn);
  1194.  
  1195. if (self._inRoom) {
  1196. self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo());
  1197. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  1198.  
  1199. if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
  1200. self._refreshPeerConnection(Object.keys(self._peerConnections), false, function (err, success) {
  1201. if (err) {
  1202. log.error('Failed refreshing connections for shareScreen() ->', err);
  1203. if (typeof callback === 'function') {
  1204. callback(new Error('Failed refreshing connections.'), null);
  1205. }
  1206. return;
  1207. }
  1208. if (typeof callback === 'function') {
  1209. callback(null, stream);
  1210. }
  1211. });
  1212. } else if (typeof callback === 'function') {
  1213. callback(null, stream);
  1214. }
  1215. } else if (typeof callback === 'function') {
  1216. callback(null, stream);
  1217. }
  1218. };
  1219.  
  1220. var mediaAccessErrorFn = function (error) {
  1221. self.off('mediaAccessSuccess', mediaAccessSuccessFn);
  1222.  
  1223. if (typeof callback === 'function') {
  1224. callback(error, null);
  1225. }
  1226. };
  1227.  
  1228. self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
  1229. return isScreensharing;
  1230. });
  1231.  
  1232. self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
  1233. return isScreensharing;
  1234. });
  1235.  
  1236. try {
  1237. if (enableAudio && window.webrtcDetectedBrowser === 'firefox') {
  1238. settings.getUserMediaSettings.audio = true;
  1239. }
  1240.  
  1241. navigator.getUserMedia(settings.getUserMediaSettings, function (stream) {
  1242. if (window.webrtcDetectedBrowser === 'firefox' || !enableAudio) {
  1243. self._onStreamAccessSuccess(stream, settings, true, false);
  1244. return;
  1245. }
  1246.  
  1247. navigator.getUserMedia({
  1248. audio: true
  1249.  
  1250. }, function (audioStream) {
  1251. try {
  1252. audioStream.addTrack(stream.getVideoTracks()[0]);
  1253.  
  1254. self.once('mediaAccessSuccess', function () {
  1255. self._streams.screenshare.streamClone = stream;
  1256. }, function (stream, isScreensharing) {
  1257. return isScreensharing;
  1258. });
  1259.  
  1260. self._onStreamAccessSuccess(audioStream, settings, true, false);
  1261.  
  1262. } catch (error) {
  1263. log.error('Failed retrieving audio stream for screensharing stream', error);
  1264. self._onStreamAccessSuccess(stream, settings, true, false);
  1265. }
  1266. }, function (error) {
  1267. log.error('Failed retrieving audio stream for screensharing stream', error);
  1268. self._onStreamAccessSuccess(stream, settings, true, false);
  1269. });
  1270.  
  1271. }, function (error) {
  1272. self._onStreamAccessError(error, settings, true, false);
  1273. });
  1274.  
  1275. } catch (error) {
  1276. self._onStreamAccessError(error, settings, true, false);
  1277. }
  1278.  
  1279. }, 10000);
  1280. };
  1281.  
  1282. /**
  1283. * <blockquote class="info">
  1284. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  1285. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  1286. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  1287. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  1288. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  1289. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  1290. * sent within less than an interval.
  1291. * </blockquote>
  1292. * Function that stops <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>.
  1293. * @method stopScreen
  1294. * @example
  1295. * function stopScreen () {
  1296. * skylinkDemo.stopScreen();
  1297. * }
  1298. *
  1299. * skylinkDemo.shareScreen();
  1300. * @trigger <ol class="desc-seq">
  1301. * <li>Checks if there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
  1302. * <li>If there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
  1303. * <li>Stop <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> Stream. <ol>
  1304. * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a>
  1305. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code> and
  1306. * <code>isAudioFallback</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
  1307. * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter payload
  1308. * <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>true</code>.</li>
  1309. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1310. * <code>isSelf</code> value as <code>true</code>.</li>
  1311. * </ol></li></ol></li><li>If User is in Room: <small><b>SKIP</b> this step if <code>stopScreen()</code>
  1312. * was invoked from <a href="#method_shareScreen"><code>shareScreen()</code> method</a>.</small> <ol>
 * <li>If there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
  1314. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1315. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
  1316. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
  1317. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1318. * <code>isSelf</code> value as <code>true</code>.</li></ol></li>
  1319. * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.</li>
  1320. * </ol></li></ol></li></ol></li></ol>
  1321. * @for Skylink
  1322. * @since 0.6.0
  1323. */
  1324. Skylink.prototype.stopScreen = function () {
  1325. if (this._streams.screenshare) {
  1326. this._stopStreams({
  1327. screenshare: true
  1328. });
  1329.  
  1330. if (this._inRoom) {
  1331. if (this._streams.userMedia && this._streams.userMedia.stream) {
  1332. this._trigger('incomingStream', this._user.sid, this._streams.userMedia.stream, true, this.getPeerInfo());
  1333. this._trigger('peerUpdated', this._user.sid, this.getPeerInfo(), true);
  1334. }
  1335. this._refreshPeerConnection(Object.keys(this._peerConnections), false);
  1336. }
  1337. }
  1338. };
  1339.  
  1340. /**
  1341. * Function that handles the muting of Stream audio and video tracks.
  1342. * @method _muteStreams
  1343. * @private
  1344. * @for Skylink
  1345. * @since 0.6.15
  1346. */
  1347. Skylink.prototype._muteStreams = function () {
  1348. var self = this;
  1349. var hasVideo = false;
  1350. var hasAudio = false;
  1351.  
  1352. var muteFn = function (stream) {
  1353. var audioTracks = stream.getAudioTracks();
  1354. var videoTracks = stream.getVideoTracks();
  1355.  
  1356. for (var a = 0; a < audioTracks.length; a++) {
  1357. audioTracks[a].enabled = !self._streamsMutedSettings.audioMuted;
  1358. hasAudio = true;
  1359. }
  1360.  
  1361. for (var v = 0; v < videoTracks.length; v++) {
  1362. videoTracks[v].enabled = !self._streamsMutedSettings.videoMuted;
  1363. hasVideo = true;
  1364. }
  1365. };
  1366.  
  1367. if (self._streams.userMedia && self._streams.userMedia.stream) {
  1368. muteFn(self._streams.userMedia.stream);
  1369. }
  1370.  
  1371. if (self._streams.screenshare && self._streams.screenshare.stream) {
  1372. muteFn(self._streams.screenshare.stream);
  1373. }
  1374.  
  1375. if (self._streams.screenshare && self._streams.screenshare.streamClone) {
  1376. muteFn(self._streams.screenshare.streamClone);
  1377. }
  1378.  
  1379. log.debug('Updated Streams muted status ->', self._streamsMutedSettings);
  1380.  
  1381. return {
  1382. hasVideo: hasVideo,
  1383. hasAudio: hasAudio
  1384. };
  1385. };
  1386.  
  1387. /**
  1388. * Function that handles stopping the Stream streaming.
  1389. * @method _stopStreams
  1390. * @private
  1391. * @for Skylink
  1392. * @since 0.6.15
  1393. */
  1394. Skylink.prototype._stopStreams = function (options) {
  1395. var self = this;
  1396. var stopFn = function (stream) {
  1397. var streamId = stream.id || stream.label;
  1398. log.debug([null, 'MediaStream', streamId, 'Stopping Stream ->'], stream);
  1399.  
  1400. try {
  1401. var audioTracks = stream.getAudioTracks();
  1402. var videoTracks = stream.getVideoTracks();
  1403.  
  1404. for (var a = 0; a < audioTracks.length; a++) {
  1405. audioTracks[a].stop();
  1406. }
  1407.  
  1408. for (var v = 0; v < videoTracks.length; v++) {
  1409. videoTracks[v].stop();
  1410. }
  1411.  
  1412. } catch (error) {
  1413. stream.stop();
  1414. }
  1415.  
  1416. if (self._streamsStoppedCbs[streamId]) {
  1417. self._streamsStoppedCbs[streamId]();
  1418. }
  1419. };
  1420.  
  1421. var stopUserMedia = false;
  1422. var stopScreenshare = false;
  1423. var hasStoppedMedia = false;
  1424.  
  1425. if (typeof options === 'object') {
  1426. stopUserMedia = options.userMedia === true;
  1427. stopScreenshare = options.screenshare === true;
  1428. }
  1429.  
  1430. if (stopUserMedia && self._streams.userMedia) {
  1431. if (self._streams.userMedia.stream) {
  1432. stopFn(self._streams.userMedia.stream);
  1433. }
  1434.  
  1435. self._streams.userMedia = null;
  1436. hasStoppedMedia = true;
  1437. }
  1438.  
  1439. if (stopScreenshare && self._streams.screenshare) {
  1440. if (self._streams.screenshare.streamClone) {
  1441. stopFn(self._streams.screenshare.streamClone);
  1442. }
  1443.  
  1444. if (self._streams.screenshare.stream) {
  1445. stopFn(self._streams.screenshare.stream);
  1446. }
  1447.  
  1448. self._streams.screenshare = null;
  1449. hasStoppedMedia = true;
  1450. }
  1451.  
  1452. if (self._inRoom && hasStoppedMedia) {
  1453. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  1454. }
  1455.  
  1456. log.log('Stopping Streams with settings ->', options);
  1457. };
  1458.  
  1459. /**
  1460. * Function that parses the <code>getUserMedia()</code> settings provided.
  1461. * @method _parseStreamSettings
  1462. * @private
  1463. * @for Skylink
  1464. * @since 0.6.15
  1465. */
Skylink.prototype._parseStreamSettings = function(options) {
  // Normalised output: `settings` is the SDK-facing description of the
  // session, `mutedSettings` the initial mute flags, and
  // `getUserMediaSettings` the constraints object that is handed to
  // navigator.getUserMedia().
  var settings = {
    settings: { audio: false, video: false },
    mutedSettings: { shouldAudioMuted: false, shouldVideoMuted: false },
    getUserMediaSettings: { audio: false, video: false }
  };

  if (options.audio) {
    settings.settings.audio = {
      stereo: false,
      exactConstraints: !!options.useExactConstraints
    };
    settings.getUserMediaSettings.audio = {};

    if (typeof options.audio.stereo === 'boolean') {
      settings.settings.audio.stereo = options.audio.stereo;
    }

    if (typeof options.audio.mute === 'boolean') {
      settings.mutedSettings.shouldAudioMuted = options.audio.mute;
    }

    // Copy (not alias) caller-provided optional constraints.
    if (Array.isArray(options.audio.optional)) {
      settings.settings.audio.optional = clone(options.audio.optional);
      settings.getUserMediaSettings.audio.optional = clone(options.audio.optional);
    }

    // Device selection is skipped on Firefox (see browser check below).
    if (options.audio.deviceId && typeof options.audio.deviceId === 'string' &&
      window.webrtcDetectedBrowser !== 'firefox') {
      settings.settings.audio.deviceId = options.audio.deviceId;

      if (options.useExactConstraints) {
        settings.getUserMediaSettings.audio.deviceId = { exact: options.audio.deviceId };

      } else {
        // Legacy (pre-standard) style: select the device with an optional
        // `sourceId` entry instead of an exact deviceId constraint.
        if (!Array.isArray(settings.getUserMediaSettings.audio.optional)) {
          settings.getUserMediaSettings.audio.optional = [];
        }

        settings.getUserMediaSettings.audio.optional.push({
          sourceId: options.audio.deviceId
        });
      }
    }

    // For Edge to work since they do not support the advanced constraints yet
    if (window.webrtcDetectedBrowser === 'edge') {
      settings.getUserMediaSettings.audio = true;
    }
  }

  if (options.video) {
    settings.settings.video = {
      resolution: clone(this.VIDEO_RESOLUTION.VGA),
      screenshare: false,
      exactConstraints: !!options.useExactConstraints
    };
    settings.getUserMediaSettings.video = {};

    if (typeof options.video.mute === 'boolean') {
      settings.mutedSettings.shouldVideoMuted = options.video.mute;
    }

    // Copy (not alias) caller-provided optional constraints.
    if (Array.isArray(options.video.optional)) {
      settings.settings.video.optional = clone(options.video.optional);
      settings.getUserMediaSettings.video.optional = clone(options.video.optional);
    }

    // Device selection is skipped on Firefox (see browser check below).
    if (options.video.deviceId && typeof options.video.deviceId === 'string' &&
      window.webrtcDetectedBrowser !== 'firefox') {
      settings.settings.video.deviceId = options.video.deviceId;

      if (options.useExactConstraints) {
        settings.getUserMediaSettings.video.deviceId = { exact: options.video.deviceId };

      } else {
        // Legacy (pre-standard) style: select the device with an optional
        // `sourceId` entry instead of an exact deviceId constraint.
        if (!Array.isArray(settings.getUserMediaSettings.video.optional)) {
          settings.getUserMediaSettings.video.optional = [];
        }

        settings.getUserMediaSettings.video.optional.push({
          sourceId: options.video.deviceId
        });
      }
    }

    // Caller overrides of the default (VGA) resolution.
    if (options.video.resolution && typeof options.video.resolution === 'object') {
      if (typeof options.video.resolution.width === 'number') {
        settings.settings.video.resolution.width = options.video.resolution.width;
      }
      if (typeof options.video.resolution.height === 'number') {
        settings.settings.video.resolution.height = options.video.resolution.height;
      }
    }

    if (options.useExactConstraints) {
      // Standard constraint style: exact width / height / frameRate.
      settings.getUserMediaSettings.video.width = { exact: settings.settings.video.resolution.width };
      settings.getUserMediaSettings.video.height = { exact: settings.settings.video.resolution.height };

      if (typeof options.video.frameRate === 'number') {
        settings.settings.video.frameRate = options.video.frameRate;
        settings.getUserMediaSettings.video.frameRate = { exact: options.video.frameRate };
      }

    } else {
      // Legacy constraint style: mandatory max* bounds. frameRate is not
      // applied for IE / safari here.
      settings.getUserMediaSettings.video.mandatory = {
        maxWidth: settings.settings.video.resolution.width,
        maxHeight: settings.settings.video.resolution.height
      };

      if (typeof options.video.frameRate === 'number' && ['IE', 'safari'].indexOf(window.webrtcDetectedBrowser) === -1) {
        settings.settings.video.frameRate = options.video.frameRate;
        settings.getUserMediaSettings.video.mandatory.maxFrameRate = options.video.frameRate;
      }
    }

    // For Edge to work since they do not support the advanced constraints yet
    if (window.webrtcDetectedBrowser === 'edge') {
      settings.getUserMediaSettings.video = true;
    }
  }

  return settings;
};
  1590.  
  1591. /**
  1592. * Function that handles the native <code>navigator.getUserMedia()</code> API success callback result.
  1593. * @method _onStreamAccessSuccess
  1594. * @private
  1595. * @for Skylink
  1596. * @since 0.3.0
  1597. */
Skylink.prototype._onStreamAccessSuccess = function(stream, settings, isScreenSharing, isAudioFallback) {
  var self = this;
  var streamId = stream.id || stream.label;

  log.log([null, 'MediaStream', streamId, 'Has access to stream ->'], stream);

  // Stop previous stream
  if (!isScreenSharing && self._streams.userMedia) {
    self._stopStreams({
      userMedia: true,
      screenshare: false
    });

  } else if (isScreenSharing && self._streams.screenshare) {
    self._stopStreams({
      userMedia: false,
      screenshare: true
    });
  }

  // Registered stopped-callback: fires mediaAccessStopped, tells Peers the
  // Stream ended, and clears the matching self._streams slot.
  self._streamsStoppedCbs[streamId] = function () {
    log.log([null, 'MediaStream', streamId, 'Stream has ended']);

    self._trigger('mediaAccessStopped', !!isScreenSharing, !!isAudioFallback, streamId);

    if (self._inRoom) {
      log.debug([null, 'MediaStream', streamId, 'Sending Stream ended status to Peers']);

      self._sendChannelMessage({
        type: self._SIG_MESSAGE_TYPE.STREAM,
        mid: self._user.sid,
        rid: self._room.id,
        cid: self._key,
        sessionType: !!isScreenSharing ? 'screensharing' : 'stream',
        streamId: streamId,
        status: 'ended'
      });

      self._trigger('streamEnded', self._user.sid, self.getPeerInfo(), true, !!isScreenSharing, streamId);

      // Only clear the slot if it still holds this exact stream (a newer
      // stream may have replaced it in the meantime).
      if (isScreenSharing && self._streams.screenshare && self._streams.screenshare.stream &&
        (self._streams.screenshare.stream.id || self._streams.screenshare.stream.label) === streamId) {
        self._streams.screenshare = null;

      } else if (!isScreenSharing && self._streams.userMedia && self._streams.userMedia.stream &&
        (self._streams.userMedia.stream.id || self._streams.userMedia.stream.label) === streamId) {
        self._streams.userMedia = null;
      }
    }
  };

  // Handle event for Chrome / Opera
  if (['chrome', 'opera'].indexOf(window.webrtcDetectedBrowser) > -1) {
    stream.oninactive = function () {
      if (self._streamsStoppedCbs[streamId]) {
        self._streamsStoppedCbs[streamId]();
      }
    };

    // Handle event for Firefox (use an interval)
  } else if (window.webrtcDetectedBrowser === 'firefox') {
    // Polls once a second: when currentTime stops advancing the stream is
    // considered ended.
    stream.endedInterval = setInterval(function () {
      if (typeof stream.recordedTime === 'undefined') {
        stream.recordedTime = 0;
      }
      if (stream.recordedTime === stream.currentTime) {
        clearInterval(stream.endedInterval);

        if (self._streamsStoppedCbs[streamId]) {
          self._streamsStoppedCbs[streamId]();
        }

      } else {
        stream.recordedTime = stream.currentTime;
      }
    }, 1000);

  } else {
    // Other browsers: rely on the stream's onended event.
    stream.onended = function () {
      if (self._streamsStoppedCbs[streamId]) {
        self._streamsStoppedCbs[streamId]();
      }
    };
  }

  // Fallback handling: fewer tracks were received than requested (e.g.
  // audio requested but no audio track granted). Downgrade the recorded
  // settings and notify via mediaAccessFallback.
  if ((settings.settings.audio && stream.getAudioTracks().length === 0) ||
    (settings.settings.video && stream.getVideoTracks().length === 0)) {

    var tracksNotSameError = 'Expected audio tracks length with ' +
      (settings.settings.audio ? '1' : '0') + ' and video tracks length with ' +
      (settings.settings.video ? '1' : '0') + ' but received audio tracks length ' +
      'with ' + stream.getAudioTracks().length + ' and video ' +
      'tracks length with ' + stream.getVideoTracks().length;

    log.warn([null, 'MediaStream', streamId, tracksNotSameError]);

    var requireAudio = !!settings.settings.audio;
    var requireVideo = !!settings.settings.video;

    if (settings.settings.audio && stream.getAudioTracks().length === 0) {
      settings.settings.audio = false;
    }

    if (settings.settings.video && stream.getVideoTracks().length === 0) {
      settings.settings.video = false;
    }

    self._trigger('mediaAccessFallback', {
      error: new Error(tracksNotSameError),
      diff: {
        video: { expected: requireVideo ? 1 : 0, received: stream.getVideoTracks().length },
        audio: { expected: requireAudio ? 1 : 0, received: stream.getAudioTracks().length }
      }
    }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKED, !!isScreenSharing, !!isAudioFallback, streamId);
  }

  // Record the new active stream, re-apply mute state, and announce it.
  self._streams[ isScreenSharing ? 'screenshare' : 'userMedia' ] = {
    stream: stream,
    settings: settings.settings,
    constraints: settings.getUserMediaSettings
  };
  self._muteStreams();
  self._trigger('mediaAccessSuccess', stream, !!isScreenSharing, !!isAudioFallback, streamId);
};
  1722.  
  1723. /**
  1724. * Function that handles the native <code>navigator.getUserMedia()</code> API failure callback result.
  1725. * @method _onStreamAccessError
  1726. * @private
  1727. * @for Skylink
  1728. * @since 0.6.15
  1729. */
  1730. Skylink.prototype._onStreamAccessError = function(error, settings, isScreenSharing) {
  1731. var self = this;
  1732.  
  1733. if (!isScreenSharing && settings.settings.audio && settings.settings.video && self._audioFallback) {
  1734. log.debug('Fallbacking to retrieve audio only Stream');
  1735.  
  1736. self._trigger('mediaAccessFallback', {
  1737. error: error,
  1738. diff: null
  1739. }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKING, false, true);
  1740.  
  1741. navigator.getUserMedia({
  1742. audio: true
  1743. }, function (stream) {
  1744. self._onStreamAccessSuccess(stream, settings, false, true);
  1745.  
  1746. }, function (error) {
  1747. log.error('Failed fallbacking to retrieve audio only Stream ->', error);
  1748.  
  1749. self._trigger('mediaAccessError', error, false, true);
  1750. self._trigger('mediaAccessFallback', {
  1751. error: error,
  1752. diff: null
  1753. }, self.MEDIA_ACCESS_FALLBACK_STATE.ERROR, false, true);
  1754. });
  1755. return;
  1756. }
  1757.  
  1758. log.error('Failed retrieving ' + (isScreenSharing ? 'screensharing' : 'camera') + ' Stream ->', error);
  1759.  
  1760. self._trigger('mediaAccessError', error, !!isScreenSharing, false);
  1761. };
  1762.  
  1763. /**
  1764. * Function that handles the <code>RTCPeerConnection.onaddstream</code> remote MediaStream received.
  1765. * @method _onRemoteStreamAdded
  1766. * @private
  1767. * @for Skylink
  1768. * @since 0.5.2
  1769. */
  1770. Skylink.prototype._onRemoteStreamAdded = function(targetMid, stream, isScreenSharing) {
  1771. var self = this;
  1772.  
  1773. if (!self._peerInformations[targetMid]) {
  1774. log.warn([targetMid, 'MediaStream', stream.id,
  1775. 'Received remote stream when peer is not connected. ' +
  1776. 'Ignoring stream ->'], stream);
  1777. return;
  1778. }
  1779.  
  1780. /*if (!self._peerInformations[targetMid].settings.audio &&
  1781. !self._peerInformations[targetMid].settings.video && !isScreenSharing) {
  1782. log.log([targetMid, 'MediaStream', stream.id,
  1783. 'Receive remote stream but ignoring stream as it is empty ->'
  1784. ], stream);
  1785. return;
  1786. }*/
  1787. log.log([targetMid, 'MediaStream', stream.id, 'Received remote stream ->'], stream);
  1788.  
  1789. if (isScreenSharing) {
  1790. log.log([targetMid, 'MediaStream', stream.id, 'Peer is having a screensharing session with user']);
  1791. }
  1792.  
  1793. self._trigger('incomingStream', targetMid, stream, false, self.getPeerInfo(targetMid));
  1794. self._trigger('peerUpdated', targetMid, self.getPeerInfo(targetMid), false);
  1795. };
  1796.  
  1797. /**
  1798. * Function that sets User's Stream to send to Peer connection.
  1799. * Priority for <code>shareScreen()</code> Stream over <code>getUserMedia()</code> Stream.
  1800. * @method _addLocalMediaStreams
  1801. * @private
  1802. * @for Skylink
  1803. * @since 0.5.2
  1804. */
Skylink.prototype._addLocalMediaStreams = function(peerId) {
  var self = this;

  // NOTE ALEX: here we could do something smarter
  // a mediastream is mainly a container, most of the info
  // are attached to the tracks. We should iterates over track and print
  try {
    log.log([peerId, null, null, 'Adding local stream']);

    var pc = self._peerConnections[peerId];

    if (pc) {
      if (pc.signalingState !== self.PEER_CONNECTION_STATE.CLOSED) {
        // Updates the streams accordingly
        // Removes every local stream from the connection except
        // updatedStream, then adds updatedStream if it was not already
        // attached. Passing null removes everything (receive-only).
        var updateStreamFn = function (updatedStream) {
          var hasStream = false;

          // remove streams
          var streams = pc.getLocalStreams();
          for (var i = 0; i < streams.length; i++) {
            if (updatedStream !== null && streams[i].id === updatedStream.id) {
              hasStream = true;
              continue;
            }
            // try removeStream
            pc.removeStream(streams[i]);
          }

          if (updatedStream !== null && !hasStream) {
            pc.addStream(updatedStream);
          }
        };

        // Screensharing Stream takes priority over the getUserMedia() Stream.
        if (self._streams.screenshare && self._streams.screenshare.stream) {
          log.debug([peerId, 'MediaStream', null, 'Sending screen'], self._streams.screenshare.stream);

          updateStreamFn(self._streams.screenshare.stream);

        } else if (self._streams.userMedia && self._streams.userMedia.stream) {
          log.debug([peerId, 'MediaStream', null, 'Sending stream'], self._streams.userMedia.stream);

          updateStreamFn(self._streams.userMedia.stream);

        } else {
          log.warn([peerId, 'MediaStream', null, 'No media to send. Will be only receiving']);

          updateStreamFn(null);
        }

      } else {
        log.warn([peerId, 'MediaStream', null,
          'Not adding any stream as signalingState is closed']);
      }
    } else {
      log.warn([peerId, 'MediaStream', self._mediaStream,
        'Not adding stream as peerconnection object does not exists']);
    }
  } catch (error) {
    if ((error.message || '').indexOf('already added') > -1) {
      log.warn([peerId, null, null, 'Not re-adding stream as LocalMediaStream is already added'], error);
    } else {
      // Fix errors thrown like NS_ERROR_UNEXPECTED
      log.error([peerId, null, null, 'Failed adding local stream'], error);
    }
  }

  // Delayed signaling message advertising the currently-sent stream id so
  // Peers can verify which Stream is active.
  // NOTE(review): the 3500ms delay appears to wait for renegotiation to
  // settle — confirm before tuning.
  setTimeout(function () {
    var streamId = null;

    if (self._streams.screenshare && self._streams.screenshare.stream) {
      streamId = self._streams.screenshare.stream.id || self._streams.screenshare.stream.label;
    } else if (self._streams.userMedia && self._streams.userMedia.stream) {
      streamId = self._streams.userMedia.stream.id || self._streams.userMedia.stream.label;
    }

    if (self._inRoom) {
      self._sendChannelMessage({
        type: self._SIG_MESSAGE_TYPE.STREAM,
        mid: self._user.sid,
        rid: self._room.id,
        cid: self._key,
        sessionType: self._streams.screenshare && self._streams.screenshare.stream ? 'screensharing' : 'stream',
        streamId: streamId,
        status: 'check'
      });
    }
  }, 3500);
};