not really known
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

6162 lines
188 KiB

  1. 'use strict';
  2. // Last time updated: 2019-06-24 7:56:58 AM UTC
  3. // ________________
  4. // RecordRTC v5.5.9
  5. // Open-Sourced: https://github.com/muaz-khan/RecordRTC
  6. // --------------------------------------------------
  7. // Muaz Khan - www.MuazKhan.com
  8. // MIT License - www.WebRTC-Experiment.com/licence
  9. // --------------------------------------------------
  10. // ____________
  11. // RecordRTC.js
  12. /**
  13. * {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows.
  14. * @summary Record audio, video or screen inside the browser.
  15. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  16. * @author {@link https://MuazKhan.com|Muaz Khan}
  17. * @typedef RecordRTC
  18. * @class
  19. * @example
  20. * var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], {
  21. * type: 'video', // audio or video or gif or canvas
  22. * recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc
  23. * });
  24. * recorder.startRecording();
  25. * @see For further information:
  26. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  27. * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
  28. * @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.}
  29. */
  30. function RecordRTC(mediaStream, config) {
  31. if (!mediaStream) {
  32. throw 'First parameter is required.';
  33. }
  34. config = config || {
  35. type: 'video'
  36. };
  37. config = new RecordRTCConfiguration(mediaStream, config);
  38. // a reference to user's recordRTC object
  39. var self = this;
  40. function startRecording(config2) {
  41. if (!config.disableLogs) {
  42. console.log('RecordRTC version: ', self.version);
  43. }
  44. if (!!config2) {
  45. // allow users to set options using startRecording method
  46. // config2 is similar to main "config" object (second parameter over RecordRTC constructor)
  47. config = new RecordRTCConfiguration(mediaStream, config2);
  48. }
  49. if (!config.disableLogs) {
  50. console.log('started recording ' + config.type + ' stream.');
  51. }
  52. if (mediaRecorder) {
  53. mediaRecorder.clearRecordedData();
  54. mediaRecorder.record();
  55. setState('recording');
  56. if (self.recordingDuration) {
  57. handleRecordingDuration();
  58. }
  59. return self;
  60. }
  61. initRecorder(function() {
  62. if (self.recordingDuration) {
  63. handleRecordingDuration();
  64. }
  65. });
  66. return self;
  67. }
  68. function initRecorder(initCallback) {
  69. if (initCallback) {
  70. config.initCallback = function() {
  71. initCallback();
  72. initCallback = config.initCallback = null; // recorder.initRecorder should be call-backed once.
  73. };
  74. }
  75. var Recorder = new GetRecorderType(mediaStream, config);
  76. mediaRecorder = new Recorder(mediaStream, config);
  77. mediaRecorder.record();
  78. setState('recording');
  79. if (!config.disableLogs) {
  80. console.log('Initialized recorderType:', mediaRecorder.constructor.name, 'for output-type:', config.type);
  81. }
  82. }
  83. function stopRecording(callback) {
  84. callback = callback || function() {};
  85. if (!mediaRecorder) {
  86. warningLog();
  87. return;
  88. }
  89. if (self.state === 'paused') {
  90. self.resumeRecording();
  91. setTimeout(function() {
  92. stopRecording(callback);
  93. }, 1);
  94. return;
  95. }
  96. if (self.state !== 'recording' && !config.disableLogs) {
  97. console.warn('Recording state should be: "recording", however current state is: ', self.state);
  98. }
  99. if (!config.disableLogs) {
  100. console.log('Stopped recording ' + config.type + ' stream.');
  101. }
  102. if (config.type !== 'gif') {
  103. mediaRecorder.stop(_callback);
  104. } else {
  105. mediaRecorder.stop();
  106. _callback();
  107. }
  108. setState('stopped');
  109. function _callback(__blob) {
  110. if (!mediaRecorder) {
  111. if (typeof callback.call === 'function') {
  112. callback.call(self, '');
  113. } else {
  114. callback('');
  115. }
  116. return;
  117. }
  118. Object.keys(mediaRecorder).forEach(function(key) {
  119. if (typeof mediaRecorder[key] === 'function') {
  120. return;
  121. }
  122. self[key] = mediaRecorder[key];
  123. });
  124. var blob = mediaRecorder.blob;
  125. if (!blob) {
  126. if (__blob) {
  127. mediaRecorder.blob = blob = __blob;
  128. } else {
  129. throw 'Recording failed.';
  130. }
  131. }
  132. if (blob && !config.disableLogs) {
  133. console.log(blob.type, '->', bytesToSize(blob.size));
  134. }
  135. if (callback) {
  136. var url;
  137. try {
  138. url = URL.createObjectURL(blob);
  139. } catch (e) {}
  140. if (typeof callback.call === 'function') {
  141. callback.call(self, url);
  142. } else {
  143. callback(url);
  144. }
  145. }
  146. if (!config.autoWriteToDisk) {
  147. return;
  148. }
  149. getDataURL(function(dataURL) {
  150. var parameter = {};
  151. parameter[config.type + 'Blob'] = dataURL;
  152. DiskStorage.Store(parameter);
  153. });
  154. }
  155. }
  156. function pauseRecording() {
  157. if (!mediaRecorder) {
  158. warningLog();
  159. return;
  160. }
  161. if (self.state !== 'recording') {
  162. if (!config.disableLogs) {
  163. console.warn('Unable to pause the recording. Recording state: ', self.state);
  164. }
  165. return;
  166. }
  167. setState('paused');
  168. mediaRecorder.pause();
  169. if (!config.disableLogs) {
  170. console.log('Paused recording.');
  171. }
  172. }
  173. function resumeRecording() {
  174. if (!mediaRecorder) {
  175. warningLog();
  176. return;
  177. }
  178. if (self.state !== 'paused') {
  179. if (!config.disableLogs) {
  180. console.warn('Unable to resume the recording. Recording state: ', self.state);
  181. }
  182. return;
  183. }
  184. setState('recording');
  185. // not all libs have this method yet
  186. mediaRecorder.resume();
  187. if (!config.disableLogs) {
  188. console.log('Resumed recording.');
  189. }
  190. }
  191. function readFile(_blob) {
  192. postMessage(new FileReaderSync().readAsDataURL(_blob));
  193. }
  194. function getDataURL(callback, _mediaRecorder) {
  195. if (!callback) {
  196. throw 'Pass a callback function over getDataURL.';
  197. }
  198. var blob = _mediaRecorder ? _mediaRecorder.blob : (mediaRecorder || {}).blob;
  199. if (!blob) {
  200. if (!config.disableLogs) {
  201. console.warn('Blob encoder did not finish its job yet.');
  202. }
  203. setTimeout(function() {
  204. getDataURL(callback, _mediaRecorder);
  205. }, 1000);
  206. return;
  207. }
  208. if (typeof Worker !== 'undefined' && !navigator.mozGetUserMedia) {
  209. var webWorker = processInWebWorker(readFile);
  210. webWorker.onmessage = function(event) {
  211. callback(event.data);
  212. };
  213. webWorker.postMessage(blob);
  214. } else {
  215. var reader = new FileReader();
  216. reader.readAsDataURL(blob);
  217. reader.onload = function(event) {
  218. callback(event.target.result);
  219. };
  220. }
  221. function processInWebWorker(_function) {
  222. try {
  223. var blob = URL.createObjectURL(new Blob([_function.toString(),
  224. 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
  225. ], {
  226. type: 'application/javascript'
  227. }));
  228. var worker = new Worker(blob);
  229. URL.revokeObjectURL(blob);
  230. return worker;
  231. } catch (e) {}
  232. }
  233. }
  234. function handleRecordingDuration(counter) {
  235. counter = counter || 0;
  236. if (self.state === 'paused') {
  237. setTimeout(function() {
  238. handleRecordingDuration(counter);
  239. }, 1000);
  240. return;
  241. }
  242. if (self.state === 'stopped') {
  243. return;
  244. }
  245. if (counter >= self.recordingDuration) {
  246. stopRecording(self.onRecordingStopped);
  247. return;
  248. }
  249. counter += 1000; // 1-second
  250. setTimeout(function() {
  251. handleRecordingDuration(counter);
  252. }, 1000);
  253. }
  254. function setState(state) {
  255. if (!self) {
  256. return;
  257. }
  258. self.state = state;
  259. if (typeof self.onStateChanged.call === 'function') {
  260. self.onStateChanged.call(self, state);
  261. } else {
  262. self.onStateChanged(state);
  263. }
  264. }
  265. var WARNING = 'It seems that recorder is destroyed or "startRecording" is not invoked for ' + config.type + ' recorder.';
  266. function warningLog() {
  267. if (config.disableLogs === true) {
  268. return;
  269. }
  270. console.warn(WARNING);
  271. }
  272. var mediaRecorder;
  273. var returnObject = {
  274. /**
  275. * This method starts the recording.
  276. * @method
  277. * @memberof RecordRTC
  278. * @instance
  279. * @example
  280. * var recorder = RecordRTC(mediaStream, {
  281. * type: 'video'
  282. * });
  283. * recorder.startRecording();
  284. */
  285. startRecording: startRecording,
  286. /**
  287. * This method stops the recording. It is strongly recommended to get "blob" or "URI" inside the callback to make sure all recorders finished their job.
  288. * @param {function} callback - Callback to get the recorded blob.
  289. * @method
  290. * @memberof RecordRTC
  291. * @instance
  292. * @example
  293. * recorder.stopRecording(function() {
  294. * // use either "this" or "recorder" object; both are identical
  295. * video.src = this.toURL();
  296. * var blob = this.getBlob();
  297. * });
  298. */
  299. stopRecording: stopRecording,
  300. /**
  301. * This method pauses the recording. You can resume recording using "resumeRecording" method.
  302. * @method
  303. * @memberof RecordRTC
  304. * @instance
  305. * @todo Firefox is unable to pause the recording. Fix it.
  306. * @example
  307. * recorder.pauseRecording(); // pause the recording
  308. * recorder.resumeRecording(); // resume again
  309. */
  310. pauseRecording: pauseRecording,
  311. /**
  312. * This method resumes the recording.
  313. * @method
  314. * @memberof RecordRTC
  315. * @instance
  316. * @example
  317. * recorder.pauseRecording(); // first of all, pause the recording
  318. * recorder.resumeRecording(); // now resume it
  319. */
  320. resumeRecording: resumeRecording,
  321. /**
  322. * This method initializes the recording.
  323. * @method
  324. * @memberof RecordRTC
  325. * @instance
  326. * @todo This method should be deprecated.
  327. * @example
  328. * recorder.initRecorder();
  329. */
  330. initRecorder: initRecorder,
  331. /**
  332. * Ask RecordRTC to auto-stop the recording after 5 minutes.
  333. * @method
  334. * @memberof RecordRTC
  335. * @instance
  336. * @example
  337. * var fiveMinutes = 5 * 1000 * 60;
  338. * recorder.setRecordingDuration(fiveMinutes, function() {
  339. * var blob = this.getBlob();
  340. * video.src = this.toURL();
  341. * });
  342. *
  343. * // or otherwise
  344. * recorder.setRecordingDuration(fiveMinutes).onRecordingStopped(function() {
  345. * var blob = this.getBlob();
  346. * video.src = this.toURL();
  347. * });
  348. */
  349. setRecordingDuration: function(recordingDuration, callback) {
  350. if (typeof recordingDuration === 'undefined') {
  351. throw 'recordingDuration is required.';
  352. }
  353. if (typeof recordingDuration !== 'number') {
  354. throw 'recordingDuration must be a number.';
  355. }
  356. self.recordingDuration = recordingDuration;
  357. self.onRecordingStopped = callback || function() {};
  358. return {
  359. onRecordingStopped: function(callback) {
  360. self.onRecordingStopped = callback;
  361. }
  362. };
  363. },
  364. /**
  365. * This method can be used to clear/reset all the recorded data.
  366. * @method
  367. * @memberof RecordRTC
  368. * @instance
  369. * @todo Figure out the difference between "reset" and "clearRecordedData" methods.
  370. * @example
  371. * recorder.clearRecordedData();
  372. */
  373. clearRecordedData: function() {
  374. if (!mediaRecorder) {
  375. warningLog();
  376. return;
  377. }
  378. mediaRecorder.clearRecordedData();
  379. if (!config.disableLogs) {
  380. console.log('Cleared old recorded data.');
  381. }
  382. },
  383. /**
  384. * Get the recorded blob. Use this method inside the "stopRecording" callback.
  385. * @method
  386. * @memberof RecordRTC
  387. * @instance
  388. * @example
  389. * recorder.stopRecording(function() {
  390. * var blob = this.getBlob();
  391. *
  392. * var file = new File([blob], 'filename.webm', {
  393. * type: 'video/webm'
  394. * });
  395. *
  396. * var formData = new FormData();
  397. * formData.append('file', file); // upload "File" object rather than a "Blob"
  398. * uploadToServer(formData);
  399. * });
  400. * @returns {Blob} Returns recorded data as "Blob" object.
  401. */
  402. getBlob: function() {
  403. if (!mediaRecorder) {
  404. warningLog();
  405. return;
  406. }
  407. return mediaRecorder.blob;
  408. },
  409. /**
  410. * Get data-URI instead of Blob.
  411. * @param {function} callback - Callback to get the Data-URI.
  412. * @method
  413. * @memberof RecordRTC
  414. * @instance
  415. * @example
  416. * recorder.stopRecording(function() {
  417. * recorder.getDataURL(function(dataURI) {
  418. * video.src = dataURI;
  419. * });
  420. * });
  421. */
  422. getDataURL: getDataURL,
  423. /**
  424. * Get virtual/temporary URL. Usage of this URL is limited to current tab.
  425. * @method
  426. * @memberof RecordRTC
  427. * @instance
  428. * @example
  429. * recorder.stopRecording(function() {
  430. * video.src = this.toURL();
  431. * });
  432. * @returns {String} Returns a virtual/temporary URL for the recorded "Blob".
  433. */
  434. toURL: function() {
  435. if (!mediaRecorder) {
  436. warningLog();
  437. return;
  438. }
  439. return URL.createObjectURL(mediaRecorder.blob);
  440. },
  441. /**
  442. * Get internal recording object (i.e. internal module) e.g. MutliStreamRecorder, MediaStreamRecorder, StereoAudioRecorder or WhammyRecorder etc.
  443. * @method
  444. * @memberof RecordRTC
  445. * @instance
  446. * @example
  447. * var internalRecorder = recorder.getInternalRecorder();
  448. * if(internalRecorder instanceof MultiStreamRecorder) {
  449. * internalRecorder.addStreams([newAudioStream]);
  450. * internalRecorder.resetVideoStreams([screenStream]);
  451. * }
  452. * @returns {Object} Returns internal recording object.
  453. */
  454. getInternalRecorder: function() {
  455. return mediaRecorder;
  456. },
  457. /**
  458. * Invoke save-as dialog to save the recorded blob into your disk.
  459. * @param {string} fileName - Set your own file name.
  460. * @method
  461. * @memberof RecordRTC
  462. * @instance
  463. * @example
  464. * recorder.stopRecording(function() {
  465. * this.save('file-name');
  466. *
  467. * // or manually:
  468. * invokeSaveAsDialog(this.getBlob(), 'filename.webm');
  469. * });
  470. */
  471. save: function(fileName) {
  472. if (!mediaRecorder) {
  473. warningLog();
  474. return;
  475. }
  476. invokeSaveAsDialog(mediaRecorder.blob, fileName);
  477. },
  478. /**
  479. * This method gets a blob from indexed-DB storage.
  480. * @param {function} callback - Callback to get the recorded blob.
  481. * @method
  482. * @memberof RecordRTC
  483. * @instance
  484. * @example
  485. * recorder.getFromDisk(function(dataURL) {
  486. * video.src = dataURL;
  487. * });
  488. */
  489. getFromDisk: function(callback) {
  490. if (!mediaRecorder) {
  491. warningLog();
  492. return;
  493. }
  494. RecordRTC.getFromDisk(config.type, callback);
  495. },
  496. /**
  497. * This method appends an array of webp images to the recorded video-blob. It takes an "array" object.
  498. * @type {Array.<Array>}
  499. * @param {Array} arrayOfWebPImages - Array of webp images.
  500. * @method
  501. * @memberof RecordRTC
  502. * @instance
  503. * @todo This method should be deprecated.
  504. * @example
  505. * var arrayOfWebPImages = [];
  506. * arrayOfWebPImages.push({
  507. * duration: index,
  508. * image: 'data:image/webp;base64,...'
  509. * });
  510. * recorder.setAdvertisementArray(arrayOfWebPImages);
  511. */
  512. setAdvertisementArray: function(arrayOfWebPImages) {
  513. config.advertisement = [];
  514. var length = arrayOfWebPImages.length;
  515. for (var i = 0; i < length; i++) {
  516. config.advertisement.push({
  517. duration: i,
  518. image: arrayOfWebPImages[i]
  519. });
  520. }
  521. },
  522. /**
  523. * It is equivalent to <code class="str">"recorder.getBlob()"</code> method. Usage of "getBlob" is recommended, though.
  524. * @property {Blob} blob - Recorded Blob can be accessed using this property.
  525. * @memberof RecordRTC
  526. * @instance
  527. * @readonly
  528. * @example
  529. * recorder.stopRecording(function() {
  530. * var blob = this.blob;
  531. *
  532. * // below one is recommended
  533. * var blob = this.getBlob();
  534. * });
  535. */
  536. blob: null,
  537. /**
  538. * This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates.
  539. * @property {number} bufferSize - Buffer-size used to encode the WAV container
  540. * @memberof RecordRTC
  541. * @instance
  542. * @readonly
  543. * @example
  544. * recorder.stopRecording(function() {
  545. * alert('Recorder used this buffer-size: ' + this.bufferSize);
  546. * });
  547. */
  548. bufferSize: 0,
  549. /**
  550. * This works only with {recorderType:StereoAudioRecorder}. Use this property on "stopRecording" to verify the encoder's sample-rates.
  551. * @property {number} sampleRate - Sample-rates used to encode the WAV container
  552. * @memberof RecordRTC
  553. * @instance
  554. * @readonly
  555. * @example
  556. * recorder.stopRecording(function() {
  557. * alert('Recorder used these sample-rates: ' + this.sampleRate);
  558. * });
  559. */
  560. sampleRate: 0,
  561. /**
  562. * {recorderType:StereoAudioRecorder} returns ArrayBuffer object.
  563. * @property {ArrayBuffer} buffer - Audio ArrayBuffer, supported only in Chrome.
  564. * @memberof RecordRTC
  565. * @instance
  566. * @readonly
  567. * @example
  568. * recorder.stopRecording(function() {
  569. * var arrayBuffer = this.buffer;
  570. * alert(arrayBuffer.byteLength);
  571. * });
  572. */
  573. buffer: null,
  574. /**
  575. * This method resets the recorder. So that you can reuse single recorder instance many times.
  576. * @method
  577. * @memberof RecordRTC
  578. * @instance
  579. * @example
  580. * recorder.reset();
  581. * recorder.startRecording();
  582. */
  583. reset: function() {
  584. if (self.state === 'recording' && !config.disableLogs) {
  585. console.warn('Stop an active recorder.');
  586. }
  587. if (mediaRecorder && typeof mediaRecorder.clearRecordedData === 'function') {
  588. mediaRecorder.clearRecordedData();
  589. }
  590. mediaRecorder = null;
  591. setState('inactive');
  592. self.blob = null;
  593. },
  594. /**
  595. * This method is called whenever recorder's state changes. Use this as an "event".
  596. * @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.
  597. * @method
  598. * @memberof RecordRTC
  599. * @instance
  600. * @example
  601. * recorder.onStateChanged = function(state) {
  602. * console.log('Recorder state: ', state);
  603. * };
  604. */
  605. onStateChanged: function(state) {
  606. if (!config.disableLogs) {
  607. console.log('Recorder state changed:', state);
  608. }
  609. },
  610. /**
  611. * A recorder can have inactive, recording, paused or stopped states.
  612. * @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.
  613. * @memberof RecordRTC
  614. * @static
  615. * @readonly
  616. * @example
  617. * // this looper function will keep you updated about the recorder's states.
  618. * (function looper() {
  619. * document.querySelector('h1').innerHTML = 'Recorder\'s state is: ' + recorder.state;
  620. * if(recorder.state === 'stopped') return; // ignore+stop
  621. * setTimeout(looper, 1000); // update after every 3-seconds
  622. * })();
  623. * recorder.startRecording();
  624. */
  625. state: 'inactive',
  626. /**
  627. * Get recorder's readonly state.
  628. * @method
  629. * @memberof RecordRTC
  630. * @example
  631. * var state = recorder.getState();
  632. * @returns {String} Returns recording state.
  633. */
  634. getState: function() {
  635. return self.state;
  636. },
  637. /**
  638. * Destroy RecordRTC instance. Clear all recorders and objects.
  639. * @method
  640. * @memberof RecordRTC
  641. * @example
  642. * recorder.destroy();
  643. */
  644. destroy: function() {
  645. var disableLogsCache = config.disableLogs;
  646. config = {
  647. disableLogs: true
  648. };
  649. self.reset();
  650. setState('destroyed');
  651. returnObject = self = null;
  652. if (Storage.AudioContextConstructor) {
  653. Storage.AudioContextConstructor.close();
  654. Storage.AudioContextConstructor = null;
  655. }
  656. config.disableLogs = disableLogsCache;
  657. if (!config.disableLogs) {
  658. console.log('RecordRTC is destroyed.');
  659. }
  660. },
  661. /**
  662. * RecordRTC version number
  663. * @property {String} version - Release version number.
  664. * @memberof RecordRTC
  665. * @static
  666. * @readonly
  667. * @example
  668. * alert(recorder.version);
  669. */
  670. version: '5.5.9'
  671. };
  672. if (!this) {
  673. self = returnObject;
  674. return returnObject;
  675. }
  676. // if someone wants to use RecordRTC with the "new" keyword.
  677. for (var prop in returnObject) {
  678. this[prop] = returnObject[prop];
  679. }
  680. self = this;
  681. return returnObject;
  682. }
  683. RecordRTC.version = '5.5.9';
  684. if (typeof module !== 'undefined' /* && !!module.exports*/ ) {
  685. module.exports = RecordRTC;
  686. }
  687. if (typeof define === 'function' && define.amd) {
  688. define('RecordRTC', [], function() {
  689. return RecordRTC;
  690. });
  691. }
  692. RecordRTC.getFromDisk = function(type, callback) {
  693. if (!callback) {
  694. throw 'callback is mandatory.';
  695. }
  696. console.log('Getting recorded ' + (type === 'all' ? 'blobs' : type + ' blob ') + ' from disk!');
  697. DiskStorage.Fetch(function(dataURL, _type) {
  698. if (type !== 'all' && _type === type + 'Blob' && callback) {
  699. callback(dataURL);
  700. }
  701. if (type === 'all' && callback) {
  702. callback(dataURL, _type.replace('Blob', ''));
  703. }
  704. });
  705. };
  706. /**
  707. * This method can be used to store recorded blobs into IndexedDB storage.
  708. * @param {object} options - {audio: Blob, video: Blob, gif: Blob}
  709. * @method
  710. * @memberof RecordRTC
  711. * @example
  712. * RecordRTC.writeToDisk({
  713. * audio: audioBlob,
  714. * video: videoBlob,
  715. * gif : gifBlob
  716. * });
  717. */
  718. RecordRTC.writeToDisk = function(options) {
  719. console.log('Writing recorded blob(s) to disk!');
  720. options = options || {};
  721. if (options.audio && options.video && options.gif) {
  722. options.audio.getDataURL(function(audioDataURL) {
  723. options.video.getDataURL(function(videoDataURL) {
  724. options.gif.getDataURL(function(gifDataURL) {
  725. DiskStorage.Store({
  726. audioBlob: audioDataURL,
  727. videoBlob: videoDataURL,
  728. gifBlob: gifDataURL
  729. });
  730. });
  731. });
  732. });
  733. } else if (options.audio && options.video) {
  734. options.audio.getDataURL(function(audioDataURL) {
  735. options.video.getDataURL(function(videoDataURL) {
  736. DiskStorage.Store({
  737. audioBlob: audioDataURL,
  738. videoBlob: videoDataURL
  739. });
  740. });
  741. });
  742. } else if (options.audio && options.gif) {
  743. options.audio.getDataURL(function(audioDataURL) {
  744. options.gif.getDataURL(function(gifDataURL) {
  745. DiskStorage.Store({
  746. audioBlob: audioDataURL,
  747. gifBlob: gifDataURL
  748. });
  749. });
  750. });
  751. } else if (options.video && options.gif) {
  752. options.video.getDataURL(function(videoDataURL) {
  753. options.gif.getDataURL(function(gifDataURL) {
  754. DiskStorage.Store({
  755. videoBlob: videoDataURL,
  756. gifBlob: gifDataURL
  757. });
  758. });
  759. });
  760. } else if (options.audio) {
  761. options.audio.getDataURL(function(audioDataURL) {
  762. DiskStorage.Store({
  763. audioBlob: audioDataURL
  764. });
  765. });
  766. } else if (options.video) {
  767. options.video.getDataURL(function(videoDataURL) {
  768. DiskStorage.Store({
  769. videoBlob: videoDataURL
  770. });
  771. });
  772. } else if (options.gif) {
  773. options.gif.getDataURL(function(gifDataURL) {
  774. DiskStorage.Store({
  775. gifBlob: gifDataURL
  776. });
  777. });
  778. }
  779. };
  780. // __________________________
  781. // RecordRTC-Configuration.js
  782. /**
  783. * {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.
  784. * @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid "config" object.
  785. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  786. * @author {@link https://MuazKhan.com|Muaz Khan}
  787. * @typedef RecordRTCConfiguration
  788. * @class
  789. * @example
  790. * var options = RecordRTCConfiguration(mediaStream, options);
  791. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  792. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  793. * @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}
  794. */
  795. function RecordRTCConfiguration(mediaStream, config) {
  796. if (!config.recorderType && !config.type) {
  797. if (!!config.audio && !!config.video) {
  798. config.type = 'video';
  799. } else if (!!config.audio && !config.video) {
  800. config.type = 'audio';
  801. }
  802. }
  803. if (config.recorderType && !config.type) {
  804. if (config.recorderType === WhammyRecorder || config.recorderType === CanvasRecorder || (typeof WebAssemblyRecorder !== 'undefined' && config.recorderType === WebAssemblyRecorder)) {
  805. config.type = 'video';
  806. } else if (config.recorderType === GifRecorder) {
  807. config.type = 'gif';
  808. } else if (config.recorderType === StereoAudioRecorder) {
  809. config.type = 'audio';
  810. } else if (config.recorderType === MediaStreamRecorder) {
  811. if (getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) {
  812. config.type = 'video';
  813. } else if (!getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) {
  814. config.type = 'video';
  815. } else if (getTracks(mediaStream, 'audio').length && !getTracks(mediaStream, 'video').length) {
  816. config.type = 'audio';
  817. } else {
  818. // config.type = 'UnKnown';
  819. }
  820. }
  821. }
  822. if (typeof MediaStreamRecorder !== 'undefined' && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {
  823. if (!config.mimeType) {
  824. config.mimeType = 'video/webm';
  825. }
  826. if (!config.type) {
  827. config.type = config.mimeType.split('/')[0];
  828. }
  829. if (!config.bitsPerSecond) {
  830. // config.bitsPerSecond = 128000;
  831. }
  832. }
  833. // consider default type=audio
  834. if (!config.type) {
  835. if (config.mimeType) {
  836. config.type = config.mimeType.split('/')[0];
  837. }
  838. if (!config.type) {
  839. config.type = 'audio';
  840. }
  841. }
  842. return config;
  843. }
  844. // __________________
  845. // GetRecorderType.js
  846. /**
  847. * {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.
  848. * @summary It returns best recorder-type available for your browser.
  849. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  850. * @author {@link https://MuazKhan.com|Muaz Khan}
  851. * @typedef GetRecorderType
  852. * @class
  853. * @example
  854. * var RecorderType = GetRecorderType(options);
  855. * var recorder = new RecorderType(options);
  856. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  857. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  858. * @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
  859. */
  860. function GetRecorderType(mediaStream, config) {
  861. var recorder;
  862. // StereoAudioRecorder can work with all three: Edge, Firefox and Chrome
  863. // todo: detect if it is Edge, then auto use: StereoAudioRecorder
  864. if (isChrome || isEdge || isOpera) {
  865. // Media Stream Recording API has not been implemented in chrome yet;
  866. // That's why using WebAudio API to record stereo audio in WAV format
  867. recorder = StereoAudioRecorder;
  868. }
  869. if (typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype && !isChrome) {
  870. recorder = MediaStreamRecorder;
  871. }
  872. // video recorder (in WebM format)
  873. if (config.type === 'video' && (isChrome || isOpera)) {
  874. recorder = WhammyRecorder;
  875. if (typeof WebAssemblyRecorder !== 'undefined' && typeof ReadableStream !== 'undefined') {
  876. recorder = WebAssemblyRecorder;
  877. }
  878. }
  879. // video recorder (in Gif format)
  880. if (config.type === 'gif') {
  881. recorder = GifRecorder;
  882. }
  883. // html2canvas recording!
  884. if (config.type === 'canvas') {
  885. recorder = CanvasRecorder;
  886. }
  887. if (isMediaRecorderCompatible() && recorder !== CanvasRecorder && recorder !== GifRecorder && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {
  888. if (getTracks(mediaStream, 'video').length || getTracks(mediaStream, 'audio').length) {
  889. // audio-only recording
  890. if (config.type === 'audio') {
  891. if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('audio/webm')) {
  892. recorder = MediaStreamRecorder;
  893. }
  894. // else recorder = StereoAudioRecorder;
  895. } else {
  896. // video or screen tracks
  897. if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('video/webm')) {
  898. recorder = MediaStreamRecorder;
  899. }
  900. }
  901. }
  902. }
  903. if (mediaStream instanceof Array && mediaStream.length) {
  904. recorder = MultiStreamRecorder;
  905. }
  906. if (config.recorderType) {
  907. recorder = config.recorderType;
  908. }
  909. if (!config.disableLogs && !!recorder && !!recorder.name) {
  910. console.log('Using recorderType:', recorder.name || recorder.constructor.name);
  911. }
  912. if (!recorder && isSafari) {
  913. recorder = MediaStreamRecorder;
  914. }
  915. return recorder;
  916. }
  917. // _____________
  918. // MRecordRTC.js
  919. /**
  920. * MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings in a single place, by providing simple API.
  921. * @summary MRecordRTC stands for "Multiple-RecordRTC".
  922. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  923. * @author {@link https://MuazKhan.com|Muaz Khan}
  924. * @typedef MRecordRTC
  925. * @class
  926. * @example
  927. * var recorder = new MRecordRTC();
  928. * recorder.addStream(MediaStream);
  929. * recorder.mediaType = {
  930. * audio: true, // or StereoAudioRecorder or MediaStreamRecorder
  931. * video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder
  932. * gif: true // or GifRecorder
  933. * };
  934. * // mimeType is optional and should be set only in advance cases.
  935. * recorder.mimeType = {
  936. * audio: 'audio/wav',
  937. * video: 'video/webm',
  938. * gif: 'image/gif'
  939. * };
  940. * recorder.startRecording();
  941. * @see For further information:
  942. * @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}
  943. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  944. * @requires {@link RecordRTC}
  945. */
function MRecordRTC(mediaStream) {
    /**
     * This method attaches MediaStream object to {@link MRecordRTC}.
     * @param {MediaStream} mediaStream - A MediaStream object, either fetched using getUserMedia API, or generated using captureStreamUntilEnded or WebAudio API.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.addStream(MediaStream);
     */
    this.addStream = function(_mediaStream) {
        if (_mediaStream) {
            mediaStream = _mediaStream;
        }
    };
    /**
     * This property can be used to set the recording type e.g. audio, or video, or gif, or canvas.
     * @property {object} mediaType - {audio: true, video: true, gif: true}
     * @memberof MRecordRTC
     * @example
     * var recorder = new MRecordRTC();
     * recorder.mediaType = {
     *     audio: true, // TRUE or StereoAudioRecorder or MediaStreamRecorder
     *     video: true, // TRUE or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder
     *     gif: true    // TRUE or GifRecorder
     * };
     */
    this.mediaType = {
        audio: true,
        video: true
    };
    /**
     * This method starts recording.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.startRecording();
     */
    this.startRecording = function() {
        var mediaType = this.mediaType;
        var recorderType;
        var mimeType = this.mimeType || {
            audio: null,
            video: null,
            gif: null
        };
        // Auto-disable each media kind whose tracks are absent from the stream,
        // unless the caller supplied a custom recorder constructor for it.
        if (typeof mediaType.audio !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'audio').length) {
            mediaType.audio = false;
        }
        if (typeof mediaType.video !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) {
            mediaType.video = false;
        }
        if (typeof mediaType.gif !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) {
            mediaType.gif = false;
        }
        if (!mediaType.audio && !mediaType.video && !mediaType.gif) {
            throw 'MediaStream must have either audio or video tracks.';
        }
        if (!!mediaType.audio) {
            recorderType = null;
            // mediaType.audio may be TRUE or a recorder constructor (e.g. StereoAudioRecorder)
            if (typeof mediaType.audio === 'function') {
                recorderType = mediaType.audio;
            }
            this.audioRecorder = new RecordRTC(mediaStream, {
                type: 'audio',
                bufferSize: this.bufferSize,
                sampleRate: this.sampleRate,
                numberOfAudioChannels: this.numberOfAudioChannels || 2,
                disableLogs: this.disableLogs,
                recorderType: recorderType,
                mimeType: mimeType.audio,
                timeSlice: this.timeSlice,
                onTimeStamp: this.onTimeStamp
            });
            // When video is also requested, recording is started later (below)
            // so audio and video begin in sync.
            if (!mediaType.video) {
                this.audioRecorder.startRecording();
            }
        }
        if (!!mediaType.video) {
            recorderType = null;
            if (typeof mediaType.video === 'function') {
                recorderType = mediaType.video;
            }
            var newStream = mediaStream;
            // When audio uses a custom recorder, record the video track on a
            // separate single-track stream so the two recorders don't conflict.
            if (isMediaRecorderCompatible() && !!mediaType.audio && typeof mediaType.audio === 'function') {
                var videoTrack = getTracks(mediaStream, 'video')[0];
                if (isFirefox) {
                    newStream = new MediaStream();
                    newStream.addTrack(videoTrack);
                    if (recorderType && recorderType === WhammyRecorder) {
                        // Firefox does NOT support webp-encoding yet,
                        // but Firefox does support WebAssemblyRecorder
                        recorderType = MediaStreamRecorder;
                    }
                } else {
                    newStream = new MediaStream();
                    newStream.addTrack(videoTrack);
                }
            }
            this.videoRecorder = new RecordRTC(newStream, {
                type: 'video',
                video: this.video,
                canvas: this.canvas,
                frameInterval: this.frameInterval || 10,
                disableLogs: this.disableLogs,
                recorderType: recorderType,
                mimeType: mimeType.video,
                timeSlice: this.timeSlice,
                onTimeStamp: this.onTimeStamp,
                workerPath: this.workerPath,
                webAssemblyPath: this.webAssemblyPath,
                frameRate: this.frameRate, // used by WebAssemblyRecorder; values: usually 30; accepts any.
                bitrate: this.bitrate // used by WebAssemblyRecorder; values: 0 to 1000+
            });
            if (!mediaType.audio) {
                this.videoRecorder.startRecording();
            }
        }
        // Both audio and video requested: either use one MediaRecorder-backed
        // video recorder for both tracks, or synchronize two recorders.
        if (!!mediaType.audio && !!mediaType.video) {
            var self = this;
            var isSingleRecorder = isMediaRecorderCompatible() === true;
            if (mediaType.audio instanceof StereoAudioRecorder && !!mediaType.video) {
                isSingleRecorder = false;
            } else if (mediaType.audio !== true && mediaType.video !== true && mediaType.audio !== mediaType.video) {
                // Different custom recorder constructors for audio and video
                isSingleRecorder = false;
            }
            if (isSingleRecorder === true) {
                self.audioRecorder = null;
                self.videoRecorder.startRecording();
            } else {
                self.videoRecorder.initRecorder(function() {
                    self.audioRecorder.initRecorder(function() {
                        // Both recorders are ready to record things accurately
                        self.videoRecorder.startRecording();
                        self.audioRecorder.startRecording();
                    });
                });
            }
        }
        if (!!mediaType.gif) {
            recorderType = null;
            if (typeof mediaType.gif === 'function') {
                recorderType = mediaType.gif;
            }
            this.gifRecorder = new RecordRTC(mediaStream, {
                type: 'gif',
                frameRate: this.frameRate || 200,
                quality: this.quality || 10,
                disableLogs: this.disableLogs,
                recorderType: recorderType,
                mimeType: mimeType.gif
            });
            this.gifRecorder.startRecording();
        }
    };
    /**
     * This method stops recording.
     * @param {function} callback - Invoked once per finished recorder with
     * the recording's blob-URL and its kind ('audio', 'video' or 'gif').
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.stopRecording(function(blobURL, type) {
     *     // type is 'audio', 'video' or 'gif'
     *     console.log(type, blobURL);
     * });
     */
    this.stopRecording = function(callback) {
        callback = callback || function() {};
        if (this.audioRecorder) {
            this.audioRecorder.stopRecording(function(blobURL) {
                callback(blobURL, 'audio');
            });
        }
        if (this.videoRecorder) {
            this.videoRecorder.stopRecording(function(blobURL) {
                callback(blobURL, 'video');
            });
        }
        if (this.gifRecorder) {
            this.gifRecorder.stopRecording(function(blobURL) {
                callback(blobURL, 'gif');
            });
        }
    };
    /**
     * This method pauses recording.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.pauseRecording();
     */
    this.pauseRecording = function() {
        if (this.audioRecorder) {
            this.audioRecorder.pauseRecording();
        }
        if (this.videoRecorder) {
            this.videoRecorder.pauseRecording();
        }
        if (this.gifRecorder) {
            this.gifRecorder.pauseRecording();
        }
    };
    /**
     * This method resumes recording.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.resumeRecording();
     */
    this.resumeRecording = function() {
        if (this.audioRecorder) {
            this.audioRecorder.resumeRecording();
        }
        if (this.videoRecorder) {
            this.videoRecorder.resumeRecording();
        }
        if (this.gifRecorder) {
            this.gifRecorder.resumeRecording();
        }
    };
    /**
     * This method can be used to manually get all recorded blobs.
     * @param {function} callback - All recorded blobs are passed back to the "callback" function.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.getBlob(function(recording){
     *     var audioBlob = recording.audio;
     *     var videoBlob = recording.video;
     *     var gifBlob   = recording.gif;
     * });
     * // or
     * var audioBlob = recorder.getBlob().audio;
     * var videoBlob = recorder.getBlob().video;
     */
    this.getBlob = function(callback) {
        var output = {};
        if (this.audioRecorder) {
            output.audio = this.audioRecorder.getBlob();
        }
        if (this.videoRecorder) {
            output.video = this.videoRecorder.getBlob();
        }
        if (this.gifRecorder) {
            output.gif = this.gifRecorder.getBlob();
        }
        if (callback) {
            callback(output);
        }
        return output;
    };
    /**
     * Destroy all recorder instances.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.destroy();
     */
    this.destroy = function() {
        if (this.audioRecorder) {
            this.audioRecorder.destroy();
            this.audioRecorder = null;
        }
        if (this.videoRecorder) {
            this.videoRecorder.destroy();
            this.videoRecorder = null;
        }
        if (this.gifRecorder) {
            this.gifRecorder.destroy();
            this.gifRecorder = null;
        }
    };
    /**
     * This method can be used to manually get all recorded blobs' DataURLs.
     * @param {function} callback - All recorded blobs' DataURLs are passed back to the "callback" function.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.getDataURL(function(recording){
     *     var audioDataURL = recording.audio;
     *     var videoDataURL = recording.video;
     *     var gifDataURL   = recording.gif;
     * });
     */
    this.getDataURL = function(callback) {
        this.getBlob(function(blob) {
            if (blob.audio && blob.video) {
                getDataURL(blob.audio, function(_audioDataURL) {
                    getDataURL(blob.video, function(_videoDataURL) {
                        callback({
                            audio: _audioDataURL,
                            video: _videoDataURL
                        });
                    });
                });
            } else if (blob.audio) {
                getDataURL(blob.audio, function(_audioDataURL) {
                    callback({
                        audio: _audioDataURL
                    });
                });
            } else if (blob.video) {
                getDataURL(blob.video, function(_videoDataURL) {
                    callback({
                        video: _videoDataURL
                    });
                });
            }
        });
        // Converts a blob to a DataURL, preferring a Web Worker (FileReaderSync)
        // to keep the conversion off the UI thread.
        function getDataURL(blob, callback00) {
            if (typeof Worker !== 'undefined') {
                var webWorker = processInWebWorker(function readFile(_blob) {
                    postMessage(new FileReaderSync().readAsDataURL(_blob));
                });
                webWorker.onmessage = function(event) {
                    callback00(event.data);
                };
                webWorker.postMessage(blob);
            } else {
                var reader = new FileReader();
                reader.readAsDataURL(blob);
                reader.onload = function(event) {
                    callback00(event.target.result);
                };
            }
        }
        // Builds a Worker from the given function's source via a blob-URL;
        // the URL is revoked immediately after the Worker is created.
        function processInWebWorker(_function) {
            var blob = URL.createObjectURL(new Blob([_function.toString(),
                'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
            ], {
                type: 'application/javascript'
            }));
            var worker = new Worker(blob);
            var url;
            if (typeof URL !== 'undefined') {
                url = URL;
            } else if (typeof webkitURL !== 'undefined') {
                url = webkitURL;
            } else {
                throw 'Neither URL nor webkitURL detected.';
            }
            url.revokeObjectURL(blob);
            return worker;
        }
    };
    /**
     * This method can be used to ask {@link MRecordRTC} to write all recorded blobs into IndexedDB storage.
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.writeToDisk();
     */
    this.writeToDisk = function() {
        RecordRTC.writeToDisk({
            audio: this.audioRecorder,
            video: this.videoRecorder,
            gif: this.gifRecorder
        });
    };
    /**
     * This method can be used to invoke a save-as dialog for all recorded blobs.
     * @param {object} args - {audio: 'audio-name', video: 'video-name', gif: 'gif-name'}
     * @method
     * @memberof MRecordRTC
     * @example
     * recorder.save({
     *     audio: 'audio-file-name',
     *     video: 'video-file-name',
     *     gif  : 'gif-file-name'
     * });
     */
    this.save = function(args) {
        args = args || {
            audio: true,
            video: true,
            gif: true
        };
        if (!!args.audio && this.audioRecorder) {
            this.audioRecorder.save(typeof args.audio === 'string' ? args.audio : '');
        }
        if (!!args.video && this.videoRecorder) {
            this.videoRecorder.save(typeof args.video === 'string' ? args.video : '');
        }
        if (!!args.gif && this.gifRecorder) {
            this.gifRecorder.save(typeof args.gif === 'string' ? args.gif : '');
        }
    };
}
/**
 * This method can be used to get all recorded blobs from IndexedDB storage.
 * @param {string} type - 'all' or 'audio' or 'video' or 'gif'
 * @param {function} callback - Callback function to get all stored blobs.
 * @method
 * @memberof MRecordRTC
 * @example
 * MRecordRTC.getFromDisk('all', function(dataURL, type){
 *     if(type === 'audio') { }
 *     if(type === 'video') { }
 *     if(type === 'gif')   { }
 * });
 */
MRecordRTC.getFromDisk = RecordRTC.getFromDisk;
/**
 * This method can be used to store recorded blobs into IndexedDB storage.
 * @param {object} options - {audio: Blob, video: Blob, gif: Blob}
 * @method
 * @memberof MRecordRTC
 * @example
 * MRecordRTC.writeToDisk({
 *     audio: audioBlob,
 *     video: videoBlob,
 *     gif  : gifBlob
 * });
 */
MRecordRTC.writeToDisk = RecordRTC.writeToDisk;
// Expose MRecordRTC on the RecordRTC namespace.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.MRecordRTC = MRecordRTC;
}
// Placeholder UA string installed when RecordRTC loads outside a real
// browser (e.g. under Node.js) so the UA-sniffing code below doesn't crash.
var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';
(function(that) {
    // Installs minimal fakes for browser globals (navigator, document,
    // location, screen, URL, window) when running under Node.js.
    // This is a no-op in real browsers ("window" is defined there).
    if (!that) {
        return;
    }
    if (typeof window !== 'undefined') {
        return; // real browser: nothing to fake
    }
    if (typeof global === 'undefined') {
        return;
    }
    global.navigator = {
        userAgent: browserFakeUserAgent,
        getUserMedia: function() {}
    };
    if (!global.console) {
        global.console = {};
    }
    if (typeof global.console.log === 'undefined' || typeof global.console.error === 'undefined') {
        global.console.error = global.console.log = global.console.log || function() {
            console.log(arguments);
        };
    }
    if (typeof document === 'undefined') {
        /*global document:true */
        that.document = {
            documentElement: {
                appendChild: function() {
                    return '';
                }
            }
        };
        // Stub element factory: every created "element" is a chainable object
        // with no-op canvas/video methods.
        document.createElement = document.captureStream = document.mozCaptureStream = function() {
            var obj = {
                getContext: function() {
                    return obj;
                },
                play: function() {},
                pause: function() {},
                drawImage: function() {},
                toDataURL: function() {
                    return '';
                },
                style: {}
            };
            return obj;
        };
        that.HTMLVideoElement = function() {};
    }
    if (typeof location === 'undefined') {
        /*global location:true */
        that.location = {
            protocol: 'file:',
            href: '',
            hash: ''
        };
    }
    if (typeof screen === 'undefined') {
        /*global screen:true */
        that.screen = {
            width: 0,
            height: 0
        };
    }
    if (typeof URL === 'undefined') {
        /*global URL:true */
        that.URL = {
            createObjectURL: function() {
                return '';
            },
            revokeObjectURL: function() {
                return '';
            }
        };
    }
    /*global window:true */
    that.window = global;
})(typeof global !== 'undefined' ? global : null);
// _____________________________
// Cross-Browser-Declarations.js
// animation-frame used in WebM recording
// Resolve requestAnimationFrame across vendor prefixes, falling back to a
// setTimeout-based ~60fps polyfill when no native implementation exists.
/*jshint -W079 */
var requestAnimationFrame = window.requestAnimationFrame;
if (typeof requestAnimationFrame === 'undefined') {
    if (typeof webkitRequestAnimationFrame !== 'undefined') {
        /*global requestAnimationFrame:true */
        requestAnimationFrame = webkitRequestAnimationFrame;
    } else if (typeof mozRequestAnimationFrame !== 'undefined') {
        /*global requestAnimationFrame:true */
        requestAnimationFrame = mozRequestAnimationFrame;
    } else if (typeof msRequestAnimationFrame !== 'undefined') {
        /*global requestAnimationFrame:true */
        requestAnimationFrame = msRequestAnimationFrame;
    } else if (typeof requestAnimationFrame === 'undefined') {
        // via: https://gist.github.com/paulirish/1579671
        var lastTime = 0;
        /*global requestAnimationFrame:true */
        requestAnimationFrame = function(callback, element) {
            var currTime = new Date().getTime();
            // schedule the next tick ~16ms after the previous one
            var timeToCall = Math.max(0, 16 - (currTime - lastTime));
            var id = setTimeout(function() {
                callback(currTime + timeToCall);
            }, timeToCall);
            lastTime = currTime + timeToCall;
            return id;
        };
    }
}
// Resolve cancelAnimationFrame the same way; the fallback pairs with the
// setTimeout-based requestAnimationFrame polyfill above.
/*jshint -W079 */
var cancelAnimationFrame = window.cancelAnimationFrame;
if (typeof cancelAnimationFrame === 'undefined') {
    if (typeof webkitCancelAnimationFrame !== 'undefined') {
        /*global cancelAnimationFrame:true */
        cancelAnimationFrame = webkitCancelAnimationFrame;
    } else if (typeof mozCancelAnimationFrame !== 'undefined') {
        /*global cancelAnimationFrame:true */
        cancelAnimationFrame = mozCancelAnimationFrame;
    } else if (typeof msCancelAnimationFrame !== 'undefined') {
        /*global cancelAnimationFrame:true */
        cancelAnimationFrame = msCancelAnimationFrame;
    } else if (typeof cancelAnimationFrame === 'undefined') {
        /*global cancelAnimationFrame:true */
        cancelAnimationFrame = function(id) {
            clearTimeout(id);
        };
    }
}
// WebAudio API representer
var AudioContext = window.AudioContext;
if (typeof AudioContext === 'undefined') {
    if (typeof webkitAudioContext !== 'undefined') {
        /*global AudioContext:true */
        AudioContext = webkitAudioContext;
    }
    if (typeof mozAudioContext !== 'undefined') {
        /*global AudioContext:true */
        AudioContext = mozAudioContext;
    }
}
/*jshint -W079 */
var URL = window.URL;
if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {
    /*global URL:true */
    URL = webkitURL;
}
// Normalize the legacy callback-style getUserMedia across vendor prefixes.
if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?
    if (typeof navigator.webkitGetUserMedia !== 'undefined') {
        navigator.getUserMedia = navigator.webkitGetUserMedia;
    }
    if (typeof navigator.mozGetUserMedia !== 'undefined') {
        navigator.getUserMedia = navigator.mozGetUserMedia;
    }
}
// UA-based browser-detection flags used throughout this file.
var isEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob);
var isOpera = !!window.opera || navigator.userAgent.indexOf('OPR/') !== -1;
var isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1 && ('netscape' in window) && / rv:/.test(navigator.userAgent);
var isChrome = (!isOpera && !isEdge && !!navigator.webkitGetUserMedia) || isElectron() || navigator.userAgent.toLowerCase().indexOf('chrome/') !== -1;
var isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
// Chrome on iOS reports a Safari-like UA plus the "CriOS" token.
if (isSafari && !isChrome && navigator.userAgent.indexOf('CriOS') !== -1) {
    isSafari = false;
    isChrome = true;
}
var MediaStream = window.MediaStream;
if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
    MediaStream = webkitMediaStream;
}
/*global MediaStream:true */
if (typeof MediaStream !== 'undefined') {
    // override "stop" method for all browsers
    if (typeof MediaStream.prototype.stop === 'undefined') {
        MediaStream.prototype.stop = function() {
            this.getTracks().forEach(function(track) {
                track.stop();
            });
        };
    }
}
  1541. // below function via: http://goo.gl/B3ae8c
  1542. /**
  1543. * Return human-readable file size.
  1544. * @param {number} bytes - Pass bytes and get formatted string.
  1545. * @returns {string} - formatted string
  1546. * @example
  1547. * bytesToSize(1024*1024*5) === '5 GB'
  1548. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  1549. */
  1550. function bytesToSize(bytes) {
  1551. var k = 1000;
  1552. var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  1553. if (bytes === 0) {
  1554. return '0 Bytes';
  1555. }
  1556. var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
  1557. return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
  1558. }
  1559. /**
  1560. * @param {Blob} file - File or Blob object. This parameter is required.
  1561. * @param {string} fileName - Optional file name e.g. "Recorded-Video.webm"
  1562. * @example
  1563. * invokeSaveAsDialog(blob or file, [optional] fileName);
  1564. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  1565. */
  1566. function invokeSaveAsDialog(file, fileName) {
  1567. if (!file) {
  1568. throw 'Blob object is required.';
  1569. }
  1570. if (!file.type) {
  1571. try {
  1572. file.type = 'video/webm';
  1573. } catch (e) {}
  1574. }
  1575. var fileExtension = (file.type || 'video/webm').split('/')[1];
  1576. if (fileName && fileName.indexOf('.') !== -1) {
  1577. var splitted = fileName.split('.');
  1578. fileName = splitted[0];
  1579. fileExtension = splitted[1];
  1580. }
  1581. var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
  1582. if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
  1583. return navigator.msSaveOrOpenBlob(file, fileFullName);
  1584. } else if (typeof navigator.msSaveBlob !== 'undefined') {
  1585. return navigator.msSaveBlob(file, fileFullName);
  1586. }
  1587. var hyperlink = document.createElement('a');
  1588. hyperlink.href = URL.createObjectURL(file);
  1589. hyperlink.download = fileFullName;
  1590. hyperlink.style = 'display:none;opacity:0;color:transparent;';
  1591. (document.body || document.documentElement).appendChild(hyperlink);
  1592. if (typeof hyperlink.click === 'function') {
  1593. hyperlink.click();
  1594. } else {
  1595. hyperlink.target = '_blank';
  1596. hyperlink.dispatchEvent(new MouseEvent('click', {
  1597. view: window,
  1598. bubbles: true,
  1599. cancelable: true
  1600. }));
  1601. }
  1602. URL.revokeObjectURL(hyperlink.href);
  1603. }
  1604. /**
  1605. * from: https://github.com/cheton/is-electron/blob/master/index.js
  1606. **/
  1607. function isElectron() {
  1608. // Renderer process
  1609. if (typeof window !== 'undefined' && typeof window.process === 'object' && window.process.type === 'renderer') {
  1610. return true;
  1611. }
  1612. // Main process
  1613. if (typeof process !== 'undefined' && typeof process.versions === 'object' && !!process.versions.electron) {
  1614. return true;
  1615. }
  1616. // Detect the user agent when the `nodeIntegration` option is set to true
  1617. if (typeof navigator === 'object' && typeof navigator.userAgent === 'string' && navigator.userAgent.indexOf('Electron') >= 0) {
  1618. return true;
  1619. }
  1620. return false;
  1621. }
  1622. function getTracks(stream, kind) {
  1623. if (!stream || !stream.getTracks) {
  1624. return [];
  1625. }
  1626. return stream.getTracks().filter(function(t) {
  1627. return t.kind === (kind || 'audio');
  1628. });
  1629. }
  1630. function setSrcObject(stream, element) {
  1631. if ('srcObject' in element) {
  1632. element.srcObject = stream;
  1633. } else if ('mozSrcObject' in element) {
  1634. element.mozSrcObject = stream;
  1635. } else {
  1636. element.srcObject = stream;
  1637. }
  1638. }
  1639. /**
  1640. * @param {Blob} file - File or Blob object.
  1641. * @param {function} callback - Callback function.
  1642. * @example
  1643. * getSeekableBlob(blob or file, callback);
  1644. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  1645. */
  1646. function getSeekableBlob(inputBlob, callback) {
  1647. // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
  1648. if (typeof EBML === 'undefined') {
  1649. throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
  1650. }
  1651. var reader = new EBML.Reader();
  1652. var decoder = new EBML.Decoder();
  1653. var tools = EBML.tools;
  1654. var fileReader = new FileReader();
  1655. fileReader.onload = function(e) {
  1656. var ebmlElms = decoder.decode(this.result);
  1657. ebmlElms.forEach(function(element) {
  1658. reader.read(element);
  1659. });
  1660. reader.stop();
  1661. var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
  1662. var body = this.result.slice(reader.metadataSize);
  1663. var newBlob = new Blob([refinedMetadataBuf, body], {
  1664. type: 'video/webm'
  1665. });
  1666. callback(newBlob);
  1667. };
  1668. fileReader.readAsArrayBuffer(inputBlob);
  1669. }
// Expose the standalone helpers on the RecordRTC namespace.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.invokeSaveAsDialog = invokeSaveAsDialog;
    RecordRTC.getTracks = getTracks;
    RecordRTC.getSeekableBlob = getSeekableBlob;
    RecordRTC.bytesToSize = bytesToSize;
    RecordRTC.isElectron = isElectron;
}
// __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129
// Storage.js
/**
 * Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @example
 * Storage.AudioContext === webkitAudioContext
 * @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 */
var Storage = {};
// Prefer the standard constructor; fall back to the webkit-prefixed one.
if (typeof AudioContext !== 'undefined') {
    Storage.AudioContext = AudioContext;
} else if (typeof webkitAudioContext !== 'undefined') {
    Storage.AudioContext = webkitAudioContext;
}
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.Storage = Storage;
}
  1697. function isMediaRecorderCompatible() {
  1698. if (isFirefox || isSafari || isEdge) {
  1699. return true;
  1700. }
  1701. var nVer = navigator.appVersion;
  1702. var nAgt = navigator.userAgent;
  1703. var fullVersion = '' + parseFloat(navigator.appVersion);
  1704. var majorVersion = parseInt(navigator.appVersion, 10);
  1705. var nameOffset, verOffset, ix;
  1706. if (isChrome || isOpera) {
  1707. verOffset = nAgt.indexOf('Chrome');
  1708. fullVersion = nAgt.substring(verOffset + 7);
  1709. }
  1710. // trim the fullVersion string at semicolon/space if present
  1711. if ((ix = fullVersion.indexOf(';')) !== -1) {
  1712. fullVersion = fullVersion.substring(0, ix);
  1713. }
  1714. if ((ix = fullVersion.indexOf(' ')) !== -1) {
  1715. fullVersion = fullVersion.substring(0, ix);
  1716. }
  1717. majorVersion = parseInt('' + fullVersion, 10);
  1718. if (isNaN(majorVersion)) {
  1719. fullVersion = '' + parseFloat(navigator.appVersion);
  1720. majorVersion = parseInt(navigator.appVersion, 10);
  1721. }
  1722. return majorVersion >= 49;
  1723. }
  1724. // ______________________
  1725. // MediaStreamRecorder.js
  1726. /**
  1727. * MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox.
  1728. * @summary Runs top over {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}.
  1729. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  1730. * @author {@link https://github.com/muaz-khan|Muaz Khan}
  1731. * @typedef MediaStreamRecorder
  1732. * @class
  1733. * @example
  1734. * var config = {
  1735. * mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis
  1736. * audioBitsPerSecond : 256 * 8 * 1024,
  1737. * videoBitsPerSecond : 256 * 8 * 1024,
  1738. * bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two
  1739. * checkForInactiveTracks: true,
  1740. * timeSlice: 1000, // concatenate intervals based blobs
  1741. * ondataavailable: function() {} // get intervals based blobs
  1742. * }
  1743. * var recorder = new MediaStreamRecorder(mediaStream, config);
  1744. * recorder.record();
  1745. * recorder.stop(function(blob) {
  1746. * video.src = URL.createObjectURL(blob);
  1747. *
  1748. * // or
  1749. * var blob = recorder.blob;
  1750. * });
  1751. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  1752. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  1753. * @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", timeSlice: 1000}
  1754. * @throws Will throw an error if first argument "MediaStream" is missing. Also throws error if "MediaRecorder API" are not supported by the browser.
  1755. */
  1756. function MediaStreamRecorder(mediaStream, config) {
  1757. var self = this;
  1758. if (typeof mediaStream === 'undefined') {
  1759. throw 'First argument "MediaStream" is required.';
  1760. }
  1761. if (typeof MediaRecorder === 'undefined') {
  1762. throw 'Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.';
  1763. }
  1764. config = config || {
  1765. // bitsPerSecond: 256 * 8 * 1024,
  1766. mimeType: 'video/webm'
  1767. };
  1768. if (config.type === 'audio') {
  1769. if (getTracks(mediaStream, 'video').length && getTracks(mediaStream, 'audio').length) {
  1770. var stream;
  1771. if (!!navigator.mozGetUserMedia) {
  1772. stream = new MediaStream();
  1773. stream.addTrack(getTracks(mediaStream, 'audio')[0]);
  1774. } else {
  1775. // webkitMediaStream
  1776. stream = new MediaStream(getTracks(mediaStream, 'audio'));
  1777. }
  1778. mediaStream = stream;
  1779. }
  1780. if (!config.mimeType || config.mimeType.toString().toLowerCase().indexOf('audio') === -1) {
  1781. config.mimeType = isChrome ? 'audio/webm' : 'audio/ogg';
  1782. }
  1783. if (config.mimeType && config.mimeType.toString().toLowerCase() !== 'audio/ogg' && !!navigator.mozGetUserMedia) {
  1784. // forcing better codecs on Firefox (via #166)
  1785. config.mimeType = 'audio/ogg';
  1786. }
  1787. }
  1788. var arrayOfBlobs = [];
  1789. /**
  1790. * This method returns array of blobs. Use only with "timeSlice". Its useful to preview recording anytime, without using the "stop" method.
  1791. * @method
  1792. * @memberof MediaStreamRecorder
  1793. * @example
  1794. * var arrayOfBlobs = recorder.getArrayOfBlobs();
  1795. * @returns {Array} Returns array of recorded blobs.
  1796. */
  1797. this.getArrayOfBlobs = function() {
  1798. return arrayOfBlobs;
  1799. };
  1800. /**
  1801. * This method records MediaStream.
  1802. * @method
  1803. * @memberof MediaStreamRecorder
  1804. * @example
  1805. * recorder.record();
  1806. */
  1807. this.record = function() {
  1808. // set defaults
  1809. self.blob = null;
  1810. self.clearRecordedData();
  1811. self.timestamps = [];
  1812. allStates = [];
  1813. arrayOfBlobs = [];
  1814. var recorderHints = config;
  1815. if (!config.disableLogs) {
  1816. console.log('Passing following config over MediaRecorder API.', recorderHints);
  1817. }
  1818. if (mediaRecorder) {
  1819. // mandatory to make sure Firefox doesn't fails to record streams 3-4 times without reloading the page.
  1820. mediaRecorder = null;
  1821. }
  1822. if (isChrome && !isMediaRecorderCompatible()) {
  1823. // to support video-only recording on stable
  1824. recorderHints = 'video/vp8';
  1825. }
  1826. if (typeof MediaRecorder.isTypeSupported === 'function' && recorderHints.mimeType) {
  1827. if (!MediaRecorder.isTypeSupported(recorderHints.mimeType)) {
  1828. if (!config.disableLogs) {
  1829. console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType);
  1830. }
  1831. recorderHints.mimeType = config.type === 'audio' ? 'audio/webm' : 'video/webm';
  1832. }
  1833. }
  1834. // using MediaRecorder API here
  1835. try {
  1836. mediaRecorder = new MediaRecorder(mediaStream, recorderHints);
  1837. // reset
  1838. config.mimeType = recorderHints.mimeType;
  1839. } catch (e) {
  1840. // chrome-based fallback
  1841. mediaRecorder = new MediaRecorder(mediaStream);
  1842. }
  1843. // old hack?
  1844. if (recorderHints.mimeType && !MediaRecorder.isTypeSupported && 'canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(recorderHints.mimeType) === false) {
  1845. if (!config.disableLogs) {
  1846. console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType);
  1847. }
  1848. }
  1849. // Dispatching OnDataAvailable Handler
  1850. mediaRecorder.ondataavailable = function(e) {
  1851. if (e.data) {
  1852. allStates.push('ondataavailable: ' + bytesToSize(e.data.size));
  1853. }
  1854. if (typeof config.timeSlice === 'number') {
  1855. if (e.data && e.data.size && e.data.size > 100) {
  1856. arrayOfBlobs.push(e.data);
  1857. updateTimeStamp();
  1858. if (typeof config.ondataavailable === 'function') {
  1859. // intervals based blobs
  1860. var blob = config.getNativeBlob ? e.data : new Blob([e.data], {
  1861. type: getMimeType(recorderHints)
  1862. });
  1863. config.ondataavailable(blob);
  1864. }
  1865. }
  1866. return;
  1867. }
  1868. if (!e.data || !e.data.size || e.data.size < 100 || self.blob) {
  1869. // make sure that stopRecording always getting fired
  1870. // even if there is invalid data
  1871. if (self.recordingCallback) {
  1872. self.recordingCallback(new Blob([], {
  1873. type: getMimeType(recorderHints)
  1874. }));
  1875. self.recordingCallback = null;
  1876. }
  1877. return;
  1878. }
  1879. self.blob = config.getNativeBlob ? e.data : new Blob([e.data], {
  1880. type: getMimeType(recorderHints)
  1881. });
  1882. if (self.recordingCallback) {
  1883. self.recordingCallback(self.blob);
  1884. self.recordingCallback = null;
  1885. }
  1886. };
  1887. mediaRecorder.onstart = function() {
  1888. allStates.push('started');
  1889. };
  1890. mediaRecorder.onpause = function() {
  1891. allStates.push('paused');
  1892. };
  1893. mediaRecorder.onresume = function() {
  1894. allStates.push('resumed');
  1895. };
  1896. mediaRecorder.onstop = function() {
  1897. allStates.push('stopped');
  1898. };
  1899. mediaRecorder.onerror = function(error) {
  1900. if (!error) {
  1901. return;
  1902. }
  1903. if (!error.name) {
  1904. error.name = 'UnknownError';
  1905. }
  1906. allStates.push('error: ' + error);
  1907. if (!config.disableLogs) {
  1908. // via: https://w3c.github.io/mediacapture-record/MediaRecorder.html#exception-summary
  1909. if (error.name.toString().toLowerCase().indexOf('invalidstate') !== -1) {
  1910. console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.', error);
  1911. } else if (error.name.toString().toLowerCase().indexOf('notsupported') !== -1) {
  1912. console.error('MIME type (', recorderHints.mimeType, ') is not supported.', error);
  1913. } else if (error.name.toString().toLowerCase().indexOf('security') !== -1) {
  1914. console.error('MediaRecorder security error', error);
  1915. }
  1916. // older code below
  1917. else if (error.name === 'OutOfMemory') {
  1918. console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.', error);
  1919. } else if (error.name === 'IllegalStreamModification') {
  1920. console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.', error);
  1921. } else if (error.name === 'OtherRecordingError') {
  1922. console.error('Used for an fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.', error);
  1923. } else if (error.name === 'GenericError') {
  1924. console.error('The UA cannot provide the codec or recording option that has been requested.', error);
  1925. } else {
  1926. console.error('MediaRecorder Error', error);
  1927. }
  1928. }
  1929. (function(looper) {
  1930. if (!self.manuallyStopped && mediaRecorder && mediaRecorder.state === 'inactive') {
  1931. delete config.timeslice;
  1932. // 10 minutes, enough?
  1933. mediaRecorder.start(10 * 60 * 1000);
  1934. return;
  1935. }
  1936. setTimeout(looper, 1000);
  1937. })();
  1938. if (mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {
  1939. mediaRecorder.stop();
  1940. }
  1941. };
  1942. if (typeof config.timeSlice === 'number') {
  1943. updateTimeStamp();
  1944. mediaRecorder.start(config.timeSlice);
  1945. } else {
  1946. // default is 60 minutes; enough?
  1947. // use config => {timeSlice: 1000} otherwise
  1948. mediaRecorder.start(3.6e+6);
  1949. }
  1950. if (config.initCallback) {
  1951. config.initCallback(); // old code
  1952. }
  1953. };
  1954. /**
  1955. * @property {Array} timestamps - Array of time stamps
  1956. * @memberof MediaStreamRecorder
  1957. * @example
  1958. * console.log(recorder.timestamps);
  1959. */
  1960. this.timestamps = [];
  1961. function updateTimeStamp() {
  1962. self.timestamps.push(new Date().getTime());
  1963. if (typeof config.onTimeStamp === 'function') {
  1964. config.onTimeStamp(self.timestamps[self.timestamps.length - 1], self.timestamps);
  1965. }
  1966. }
  1967. function getMimeType(secondObject) {
  1968. if (mediaRecorder && mediaRecorder.mimeType) {
  1969. return mediaRecorder.mimeType;
  1970. }
  1971. return secondObject.mimeType || 'video/webm';
  1972. }
  1973. /**
  1974. * This method stops recording MediaStream.
  1975. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  1976. * @method
  1977. * @memberof MediaStreamRecorder
  1978. * @example
  1979. * recorder.stop(function(blob) {
  1980. * video.src = URL.createObjectURL(blob);
  1981. * });
  1982. */
  1983. this.stop = function(callback) {
  1984. callback = callback || function() {};
  1985. self.manuallyStopped = true; // used inside the mediaRecorder.onerror
  1986. if (!mediaRecorder) {
  1987. return;
  1988. }
  1989. this.recordingCallback = callback;
  1990. if (mediaRecorder.state === 'recording') {
  1991. mediaRecorder.stop();
  1992. }
  1993. if (typeof config.timeSlice === 'number') {
  1994. setTimeout(function() {
  1995. self.blob = new Blob(arrayOfBlobs, {
  1996. type: getMimeType(config)
  1997. });
  1998. self.recordingCallback(self.blob);
  1999. }, 100);
  2000. }
  2001. };
  2002. /**
  2003. * This method pauses the recording process.
  2004. * @method
  2005. * @memberof MediaStreamRecorder
  2006. * @example
  2007. * recorder.pause();
  2008. */
  2009. this.pause = function() {
  2010. if (!mediaRecorder) {
  2011. return;
  2012. }
  2013. if (mediaRecorder.state === 'recording') {
  2014. mediaRecorder.pause();
  2015. }
  2016. };
  2017. /**
  2018. * This method resumes the recording process.
  2019. * @method
  2020. * @memberof MediaStreamRecorder
  2021. * @example
  2022. * recorder.resume();
  2023. */
  2024. this.resume = function() {
  2025. if (!mediaRecorder) {
  2026. return;
  2027. }
  2028. if (mediaRecorder.state === 'paused') {
  2029. mediaRecorder.resume();
  2030. }
  2031. };
  2032. /**
  2033. * This method resets currently recorded data.
  2034. * @method
  2035. * @memberof MediaStreamRecorder
  2036. * @example
  2037. * recorder.clearRecordedData();
  2038. */
  2039. this.clearRecordedData = function() {
  2040. if (mediaRecorder && mediaRecorder.state === 'recording') {
  2041. self.stop(clearRecordedDataCB);
  2042. }
  2043. clearRecordedDataCB();
  2044. };
  2045. function clearRecordedDataCB() {
  2046. arrayOfBlobs = [];
  2047. mediaRecorder = null;
  2048. self.timestamps = [];
  2049. }
  2050. // Reference to "MediaRecorder" object
  2051. var mediaRecorder;
  2052. /**
  2053. * Access to native MediaRecorder API
  2054. * @method
  2055. * @memberof MediaStreamRecorder
  2056. * @instance
  2057. * @example
  2058. * var internal = recorder.getInternalRecorder();
  2059. * internal.ondataavailable = function() {}; // override
  2060. * internal.stream, internal.onpause, internal.onstop, etc.
  2061. * @returns {Object} Returns internal recording object.
  2062. */
  2063. this.getInternalRecorder = function() {
  2064. return mediaRecorder;
  2065. };
  2066. function isMediaStreamActive() {
  2067. if ('active' in mediaStream) {
  2068. if (!mediaStream.active) {
  2069. return false;
  2070. }
  2071. } else if ('ended' in mediaStream) { // old hack
  2072. if (mediaStream.ended) {
  2073. return false;
  2074. }
  2075. }
  2076. return true;
  2077. }
  2078. /**
  2079. * @property {Blob} blob - Recorded data as "Blob" object.
  2080. * @memberof MediaStreamRecorder
  2081. * @example
  2082. * recorder.stop(function() {
  2083. * var blob = recorder.blob;
  2084. * });
  2085. */
  2086. this.blob = null;
  2087. /**
  2088. * Get MediaRecorder readonly state.
  2089. * @method
  2090. * @memberof MediaStreamRecorder
  2091. * @example
  2092. * var state = recorder.getState();
  2093. * @returns {String} Returns recording state.
  2094. */
  2095. this.getState = function() {
  2096. if (!mediaRecorder) {
  2097. return 'inactive';
  2098. }
  2099. return mediaRecorder.state || 'inactive';
  2100. };
  2101. // list of all recording states
  2102. var allStates = [];
  2103. /**
  2104. * Get MediaRecorder all recording states.
  2105. * @method
  2106. * @memberof MediaStreamRecorder
  2107. * @example
  2108. * var state = recorder.getAllStates();
  2109. * @returns {Array} Returns all recording states
  2110. */
  2111. this.getAllStates = function() {
  2112. return allStates;
  2113. };
  2114. // if any Track within the MediaStream is muted or not enabled at any time,
  2115. // the browser will only record black frames
  2116. // or silence since that is the content produced by the Track
  2117. // so we need to stopRecording as soon as any single track ends.
  2118. if (typeof config.checkForInactiveTracks === 'undefined') {
  2119. config.checkForInactiveTracks = false; // disable to minimize CPU usage
  2120. }
  2121. var self = this;
  2122. // this method checks if media stream is stopped
  2123. // or if any track is ended.
  2124. (function looper() {
  2125. if (!mediaRecorder || config.checkForInactiveTracks === false) {
  2126. return;
  2127. }
  2128. if (isMediaStreamActive() === false) {
  2129. if (!config.disableLogs) {
  2130. console.log('MediaStream seems stopped.');
  2131. }
  2132. self.stop();
  2133. return;
  2134. }
  2135. setTimeout(looper, 1000); // check every second
  2136. })();
  2137. // for debugging
  2138. this.name = 'MediaStreamRecorder';
  2139. this.toString = function() {
  2140. return this.name;
  2141. };
  2142. }
  2143. if (typeof RecordRTC !== 'undefined') {
  2144. RecordRTC.MediaStreamRecorder = MediaStreamRecorder;
  2145. }
  2146. // source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
  2147. // https://github.com/mattdiamond/Recorderjs#license-mit
  2148. // ______________________
  2149. // StereoAudioRecorder.js
  2150. /**
  2151. * StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in chrome.
  2152. * @summary JavaScript standalone object for stereo audio recording.
  2153. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  2154. * @author {@link https://MuazKhan.com|Muaz Khan}
  2155. * @typedef StereoAudioRecorder
  2156. * @class
  2157. * @example
  2158. * var recorder = new StereoAudioRecorder(MediaStream, {
  2159. * sampleRate: 44100,
  2160. * bufferSize: 4096
  2161. * });
  2162. * recorder.record();
  2163. * recorder.stop(function(blob) {
  2164. * video.src = URL.createObjectURL(blob);
  2165. * });
  2166. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  2167. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  2168. * @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}
  2169. */
  2170. function StereoAudioRecorder(mediaStream, config) {
  2171. if (!getTracks(mediaStream, 'audio').length) {
  2172. throw 'Your stream has no audio tracks.';
  2173. }
  2174. config = config || {};
  2175. var self = this;
  2176. // variables
  2177. var leftchannel = [];
  2178. var rightchannel = [];
  2179. var recording = false;
  2180. var recordingLength = 0;
  2181. var jsAudioNode;
  2182. var numberOfAudioChannels = 2;
  2183. /**
  2184. * Set sample rates such as 8K or 16K. Reference: http://stackoverflow.com/a/28977136/552182
  2185. * @property {number} desiredSampRate - Desired Bits per sample * 1000
  2186. * @memberof StereoAudioRecorder
  2187. * @instance
  2188. * @example
  2189. * var recorder = StereoAudioRecorder(mediaStream, {
  2190. * desiredSampRate: 16 * 1000 // bits-per-sample * 1000
  2191. * });
  2192. */
  2193. var desiredSampRate = config.desiredSampRate;
  2194. // backward compatibility
  2195. if (config.leftChannel === true) {
  2196. numberOfAudioChannels = 1;
  2197. }
  2198. if (config.numberOfAudioChannels === 1) {
  2199. numberOfAudioChannels = 1;
  2200. }
  2201. if (!numberOfAudioChannels || numberOfAudioChannels < 1) {
  2202. numberOfAudioChannels = 2;
  2203. }
  2204. if (!config.disableLogs) {
  2205. console.log('StereoAudioRecorder is set to record number of channels: ' + numberOfAudioChannels);
  2206. }
  2207. // if any Track within the MediaStream is muted or not enabled at any time,
  2208. // the browser will only record black frames
  2209. // or silence since that is the content produced by the Track
  2210. // so we need to stopRecording as soon as any single track ends.
  2211. if (typeof config.checkForInactiveTracks === 'undefined') {
  2212. config.checkForInactiveTracks = true;
  2213. }
  2214. function isMediaStreamActive() {
  2215. if (config.checkForInactiveTracks === false) {
  2216. // always return "true"
  2217. return true;
  2218. }
  2219. if ('active' in mediaStream) {
  2220. if (!mediaStream.active) {
  2221. return false;
  2222. }
  2223. } else if ('ended' in mediaStream) { // old hack
  2224. if (mediaStream.ended) {
  2225. return false;
  2226. }
  2227. }
  2228. return true;
  2229. }
  2230. /**
  2231. * This method records MediaStream.
  2232. * @method
  2233. * @memberof StereoAudioRecorder
  2234. * @example
  2235. * recorder.record();
  2236. */
  2237. this.record = function() {
  2238. if (isMediaStreamActive() === false) {
  2239. throw 'Please make sure MediaStream is active.';
  2240. }
  2241. resetVariables();
  2242. isAudioProcessStarted = isPaused = false;
  2243. recording = true;
  2244. if (typeof config.timeSlice !== 'undefined') {
  2245. looper();
  2246. }
  2247. };
  2248. function mergeLeftRightBuffers(config, callback) {
  2249. function mergeAudioBuffers(config, cb) {
  2250. var numberOfAudioChannels = config.numberOfAudioChannels;
  2251. // todo: "slice(0)" --- is it causes loop? Should be removed?
  2252. var leftBuffers = config.leftBuffers.slice(0);
  2253. var rightBuffers = config.rightBuffers.slice(0);
  2254. var sampleRate = config.sampleRate;
  2255. var internalInterleavedLength = config.internalInterleavedLength;
  2256. var desiredSampRate = config.desiredSampRate;
  2257. if (numberOfAudioChannels === 2) {
  2258. leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
  2259. rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);
  2260. if (desiredSampRate) {
  2261. leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
  2262. rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);
  2263. }
  2264. }
  2265. if (numberOfAudioChannels === 1) {
  2266. leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);
  2267. if (desiredSampRate) {
  2268. leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);
  2269. }
  2270. }
  2271. // set sample rate as desired sample rate
  2272. if (desiredSampRate) {
  2273. sampleRate = desiredSampRate;
  2274. }
  2275. // for changing the sampling rate, reference:
  2276. // http://stackoverflow.com/a/28977136/552182
  2277. function interpolateArray(data, newSampleRate, oldSampleRate) {
  2278. var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));
  2279. var newData = [];
  2280. var springFactor = Number((data.length - 1) / (fitCount - 1));
  2281. newData[0] = data[0];
  2282. for (var i = 1; i < fitCount - 1; i++) {
  2283. var tmp = i * springFactor;
  2284. var before = Number(Math.floor(tmp)).toFixed();
  2285. var after = Number(Math.ceil(tmp)).toFixed();
  2286. var atPoint = tmp - before;
  2287. newData[i] = linearInterpolate(data[before], data[after], atPoint);
  2288. }
  2289. newData[fitCount - 1] = data[data.length - 1];
  2290. return newData;
  2291. }
  2292. function linearInterpolate(before, after, atPoint) {
  2293. return before + (after - before) * atPoint;
  2294. }
  2295. function mergeBuffers(channelBuffer, rLength) {
  2296. var result = new Float64Array(rLength);
  2297. var offset = 0;
  2298. var lng = channelBuffer.length;
  2299. for (var i = 0; i < lng; i++) {
  2300. var buffer = channelBuffer[i];
  2301. result.set(buffer, offset);
  2302. offset += buffer.length;
  2303. }
  2304. return result;
  2305. }
  2306. function interleave(leftChannel, rightChannel) {
  2307. var length = leftChannel.length + rightChannel.length;
  2308. var result = new Float64Array(length);
  2309. var inputIndex = 0;
  2310. for (var index = 0; index < length;) {
  2311. result[index++] = leftChannel[inputIndex];
  2312. result[index++] = rightChannel[inputIndex];
  2313. inputIndex++;
  2314. }
  2315. return result;
  2316. }
  2317. function writeUTFBytes(view, offset, string) {
  2318. var lng = string.length;
  2319. for (var i = 0; i < lng; i++) {
  2320. view.setUint8(offset + i, string.charCodeAt(i));
  2321. }
  2322. }
  2323. // interleave both channels together
  2324. var interleaved;
  2325. if (numberOfAudioChannels === 2) {
  2326. interleaved = interleave(leftBuffers, rightBuffers);
  2327. }
  2328. if (numberOfAudioChannels === 1) {
  2329. interleaved = leftBuffers;
  2330. }
  2331. var interleavedLength = interleaved.length;
  2332. // create wav file
  2333. var resultingBufferLength = 44 + interleavedLength * 2;
  2334. var buffer = new ArrayBuffer(resultingBufferLength);
  2335. var view = new DataView(buffer);
  2336. // RIFF chunk descriptor/identifier
  2337. writeUTFBytes(view, 0, 'RIFF');
  2338. // RIFF chunk length
  2339. // changed "44" to "36" via #401
  2340. view.setUint32(4, 36 + interleavedLength * 2, true);
  2341. // RIFF type
  2342. writeUTFBytes(view, 8, 'WAVE');
  2343. // format chunk identifier
  2344. // FMT sub-chunk
  2345. writeUTFBytes(view, 12, 'fmt ');
  2346. // format chunk length
  2347. view.setUint32(16, 16, true);
  2348. // sample format (raw)
  2349. view.setUint16(20, 1, true);
  2350. // stereo (2 channels)
  2351. view.setUint16(22, numberOfAudioChannels, true);
  2352. // sample rate
  2353. view.setUint32(24, sampleRate, true);
  2354. // byte rate (sample rate * block align)
  2355. view.setUint32(28, sampleRate * 2, true);
  2356. // block align (channel count * bytes per sample)
  2357. view.setUint16(32, numberOfAudioChannels * 2, true);
  2358. // bits per sample
  2359. view.setUint16(34, 16, true);
  2360. // data sub-chunk
  2361. // data chunk identifier
  2362. writeUTFBytes(view, 36, 'data');
  2363. // data chunk length
  2364. view.setUint32(40, interleavedLength * 2, true);
  2365. // write the PCM samples
  2366. var lng = interleavedLength;
  2367. var index = 44;
  2368. var volume = 1;
  2369. for (var i = 0; i < lng; i++) {
  2370. view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
  2371. index += 2;
  2372. }
  2373. if (cb) {
  2374. return cb({
  2375. buffer: buffer,
  2376. view: view
  2377. });
  2378. }
  2379. postMessage({
  2380. buffer: buffer,
  2381. view: view
  2382. });
  2383. }
  2384. if (config.noWorker) {
  2385. mergeAudioBuffers(config, function(data) {
  2386. callback(data.buffer, data.view);
  2387. });
  2388. return;
  2389. }
  2390. var webWorker = processInWebWorker(mergeAudioBuffers);
  2391. webWorker.onmessage = function(event) {
  2392. callback(event.data.buffer, event.data.view);
  2393. // release memory
  2394. URL.revokeObjectURL(webWorker.workerURL);
  2395. // kill webworker (or Chrome will kill your page after ~25 calls)
  2396. webWorker.terminate();
  2397. };
  2398. webWorker.postMessage(config);
  2399. }
  2400. function processInWebWorker(_function) {
  2401. var workerURL = URL.createObjectURL(new Blob([_function.toString(),
  2402. ';this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
  2403. ], {
  2404. type: 'application/javascript'
  2405. }));
  2406. var worker = new Worker(workerURL);
  2407. worker.workerURL = workerURL;
  2408. return worker;
  2409. }
  2410. /**
  2411. * This method stops recording MediaStream.
  2412. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  2413. * @method
  2414. * @memberof StereoAudioRecorder
  2415. * @example
  2416. * recorder.stop(function(blob) {
  2417. * video.src = URL.createObjectURL(blob);
  2418. * });
  2419. */
  2420. this.stop = function(callback) {
  2421. callback = callback || function() {};
  2422. // stop recording
  2423. recording = false;
  2424. mergeLeftRightBuffers({
  2425. desiredSampRate: desiredSampRate,
  2426. sampleRate: sampleRate,
  2427. numberOfAudioChannels: numberOfAudioChannels,
  2428. internalInterleavedLength: recordingLength,
  2429. leftBuffers: leftchannel,
  2430. rightBuffers: numberOfAudioChannels === 1 ? [] : rightchannel,
  2431. noWorker: config.noWorker
  2432. }, function(buffer, view) {
  2433. /**
  2434. * @property {Blob} blob - The recorded blob object.
  2435. * @memberof StereoAudioRecorder
  2436. * @example
  2437. * recorder.stop(function(){
  2438. * var blob = recorder.blob;
  2439. * });
  2440. */
  2441. self.blob = new Blob([view], {
  2442. type: 'audio/wav'
  2443. });
  2444. /**
  2445. * @property {ArrayBuffer} buffer - The recorded buffer object.
  2446. * @memberof StereoAudioRecorder
  2447. * @example
  2448. * recorder.stop(function(){
  2449. * var buffer = recorder.buffer;
  2450. * });
  2451. */
  2452. self.buffer = new ArrayBuffer(view.buffer.byteLength);
  2453. /**
  2454. * @property {DataView} view - The recorded data-view object.
  2455. * @memberof StereoAudioRecorder
  2456. * @example
  2457. * recorder.stop(function(){
  2458. * var view = recorder.view;
  2459. * });
  2460. */
  2461. self.view = view;
  2462. self.sampleRate = desiredSampRate || sampleRate;
  2463. self.bufferSize = bufferSize;
  2464. // recorded audio length
  2465. self.length = recordingLength;
  2466. isAudioProcessStarted = false;
  2467. if (callback) {
  2468. callback(self.blob);
  2469. }
  2470. });
  2471. };
  2472. if (typeof Storage === 'undefined') {
  2473. var Storage = {
  2474. AudioContextConstructor: null,
  2475. AudioContext: window.AudioContext || window.webkitAudioContext
  2476. };
  2477. }
  2478. if (!Storage.AudioContextConstructor) {
  2479. Storage.AudioContextConstructor = new Storage.AudioContext();
  2480. }
  2481. var context = Storage.AudioContextConstructor;
  2482. // creates an audio node from the microphone incoming stream
  2483. var audioInput = context.createMediaStreamSource(mediaStream);
  2484. var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384];
  2485. /**
  2486. * From the spec: This value controls how frequently the audioprocess event is
  2487. * dispatched and how many sample-frames need to be processed each call.
  2488. * Lower values for buffer size will result in a lower (better) latency.
  2489. * Higher values will be necessary to avoid audio breakup and glitches
  2490. * The size of the buffer (in sample-frames) which needs to
  2491. * be processed each time onprocessaudio is called.
  2492. * Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).
  2493. * @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched.
  2494. * @memberof StereoAudioRecorder
  2495. * @example
  2496. * recorder = new StereoAudioRecorder(mediaStream, {
  2497. * bufferSize: 4096
  2498. * });
  2499. */
  2500. // "0" means, let chrome decide the most accurate buffer-size for current platform.
  2501. var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize;
  2502. if (legalBufferValues.indexOf(bufferSize) === -1) {
  2503. if (!config.disableLogs) {
  2504. console.log('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\t'));
  2505. }
  2506. }
  2507. if (context.createJavaScriptNode) {
  2508. jsAudioNode = context.createJavaScriptNode(bufferSize, numberOfAudioChannels, numberOfAudioChannels);
  2509. } else if (context.createScriptProcessor) {
  2510. jsAudioNode = context.createScriptProcessor(bufferSize, numberOfAudioChannels, numberOfAudioChannels);
  2511. } else {
  2512. throw 'WebAudio API has no support on this browser.';
  2513. }
  2514. // connect the stream to the script processor
  2515. audioInput.connect(jsAudioNode);
  2516. if (!config.bufferSize) {
  2517. bufferSize = jsAudioNode.bufferSize; // device buffer-size
  2518. }
  2519. /**
  2520. * The sample rate (in sample-frames per second) at which the
  2521. * AudioContext handles audio. It is assumed that all AudioNodes
  2522. * in the context run at this rate. In making this assumption,
  2523. * sample-rate converters or "varispeed" processors are not supported
  2524. * in real-time processing.
  2525. * The sampleRate parameter describes the sample-rate of the
  2526. * linear PCM audio data in the buffer in sample-frames per second.
  2527. * An implementation must support sample-rates in at least
  2528. * the range 22050 to 96000.
  2529. * @property {number} sampleRate - Buffer-size for how frequently the audioprocess event is dispatched.
  2530. * @memberof StereoAudioRecorder
  2531. * @example
  2532. * recorder = new StereoAudioRecorder(mediaStream, {
  2533. * sampleRate: 44100
  2534. * });
  2535. */
  2536. var sampleRate = typeof config.sampleRate !== 'undefined' ? config.sampleRate : context.sampleRate || 44100;
  2537. if (sampleRate < 22050 || sampleRate > 96000) {
  2538. // Ref: http://stackoverflow.com/a/26303918/552182
  2539. if (!config.disableLogs) {
  2540. console.log('sample-rate must be under range 22050 and 96000.');
  2541. }
  2542. }
  2543. if (!config.disableLogs) {
  2544. if (config.desiredSampRate) {
  2545. console.log('Desired sample-rate: ' + config.desiredSampRate);
  2546. }
  2547. }
  2548. var isPaused = false;
  2549. /**
  2550. * This method pauses the recording process.
  2551. * @method
  2552. * @memberof StereoAudioRecorder
  2553. * @example
  2554. * recorder.pause();
  2555. */
  2556. this.pause = function() {
  2557. isPaused = true;
  2558. };
  2559. /**
  2560. * This method resumes the recording process.
  2561. * @method
  2562. * @memberof StereoAudioRecorder
  2563. * @example
  2564. * recorder.resume();
  2565. */
  2566. this.resume = function() {
  2567. if (isMediaStreamActive() === false) {
  2568. throw 'Please make sure MediaStream is active.';
  2569. }
  2570. if (!recording) {
  2571. if (!config.disableLogs) {
  2572. console.log('Seems recording has been restarted.');
  2573. }
  2574. this.record();
  2575. return;
  2576. }
  2577. isPaused = false;
  2578. };
  2579. /**
  2580. * This method resets currently recorded data.
  2581. * @method
  2582. * @memberof StereoAudioRecorder
  2583. * @example
  2584. * recorder.clearRecordedData();
  2585. */
  2586. this.clearRecordedData = function() {
  2587. config.checkForInactiveTracks = false;
  2588. if (recording) {
  2589. this.stop(clearRecordedDataCB);
  2590. }
  2591. clearRecordedDataCB();
  2592. };
  2593. function resetVariables() {
  2594. leftchannel = [];
  2595. rightchannel = [];
  2596. recordingLength = 0;
  2597. isAudioProcessStarted = false;
  2598. recording = false;
  2599. isPaused = false;
  2600. context = null;
  2601. self.leftchannel = leftchannel;
  2602. self.rightchannel = rightchannel;
  2603. self.numberOfAudioChannels = numberOfAudioChannels;
  2604. self.desiredSampRate = desiredSampRate;
  2605. self.sampleRate = sampleRate;
  2606. self.recordingLength = recordingLength;
  2607. intervalsBasedBuffers = {
  2608. left: [],
  2609. right: [],
  2610. recordingLength: 0
  2611. };
  2612. }
    // Disconnects and releases the WebAudio nodes, then resets all state.
    function clearRecordedDataCB() {
        // Release the ScriptProcessor node, if any.
        if (jsAudioNode) {
            jsAudioNode.onaudioprocess = null;
            jsAudioNode.disconnect();
            jsAudioNode = null;
        }
        // Release the MediaStream source node, if any.
        if (audioInput) {
            audioInput.disconnect();
            audioInput = null;
        }
        resetVariables();
    }
    // for debugging
    this.name = 'StereoAudioRecorder';
    this.toString = function() {
        return this.name;
    };

    // Flipped to true on the first onaudioprocess callback (see below).
    var isAudioProcessStarted = false;
    // Invoked for every ScriptProcessor audio quantum; clones the incoming
    // channel data into the recording buffers. NOTE: chLeft/chRight are
    // function-scoped via `var` hoisting and reused by the timeSlice branch.
    function onAudioProcessDataAvailable(e) {
        if (isPaused) {
            return;
        }
        // If the stream died, mark recording as stopped so the branch below
        // disconnects the source node and bails out.
        if (isMediaStreamActive() === false) {
            if (!config.disableLogs) {
                console.log('MediaStream seems stopped.');
            }
            jsAudioNode.disconnect();
            recording = false;
        }
        if (!recording) {
            if (audioInput) {
                audioInput.disconnect();
                audioInput = null;
            }
            return;
        }
        /**
         * This method is called on "onaudioprocess" event's first invocation.
         * @method {function} onAudioProcessStarted
         * @memberof StereoAudioRecorder
         * @example
         * recorder.onAudioProcessStarted: function() { };
         */
        if (!isAudioProcessStarted) {
            isAudioProcessStarted = true;
            if (config.onAudioProcessStarted) {
                config.onAudioProcessStarted();
            }
            if (config.initCallback) {
                config.initCallback();
            }
        }
        var left = e.inputBuffer.getChannelData(0);
        // we clone the samples
        var chLeft = new Float32Array(left);
        leftchannel.push(chLeft);
        if (numberOfAudioChannels === 2) {
            var right = e.inputBuffer.getChannelData(1);
            var chRight = new Float32Array(right);
            rightchannel.push(chRight);
        }
        recordingLength += bufferSize;
        // export raw PCM
        self.recordingLength = recordingLength;
        // Also accumulate into the interval buffers consumed by looper().
        if (typeof config.timeSlice !== 'undefined') {
            intervalsBasedBuffers.recordingLength += bufferSize;
            intervalsBasedBuffers.left.push(chLeft);
            if (numberOfAudioChannels === 2) {
                intervalsBasedBuffers.right.push(chRight);
            }
        }
    }
    jsAudioNode.onaudioprocess = onAudioProcessDataAvailable;

    // to prevent self audio to be connected with speakers
    // (a MediaStreamDestination sinks the output without playing it aloud)
    if (context.createMediaStreamDestination) {
        jsAudioNode.connect(context.createMediaStreamDestination());
    } else {
        jsAudioNode.connect(context.destination);
    }

    // export raw PCM
    this.leftchannel = leftchannel;
    this.rightchannel = rightchannel;
    this.numberOfAudioChannels = numberOfAudioChannels;
    this.desiredSampRate = desiredSampRate;
    this.sampleRate = sampleRate;
    self.recordingLength = recordingLength;

    // helper for intervals based blobs
    var intervalsBasedBuffers = {
        left: [],
        right: [],
        recordingLength: 0
    };
  2705. // this looper is used to support intervals based blobs (via timeSlice+ondataavailable)
  2706. function looper() {
  2707. if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') {
  2708. return;
  2709. }
  2710. if (intervalsBasedBuffers.left.length) {
  2711. mergeLeftRightBuffers({
  2712. desiredSampRate: desiredSampRate,
  2713. sampleRate: sampleRate,
  2714. numberOfAudioChannels: numberOfAudioChannels,
  2715. internalInterleavedLength: intervalsBasedBuffers.recordingLength,
  2716. leftBuffers: intervalsBasedBuffers.left,
  2717. rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right
  2718. }, function(buffer, view) {
  2719. var blob = new Blob([view], {
  2720. type: 'audio/wav'
  2721. });
  2722. config.ondataavailable(blob);
  2723. setTimeout(looper, config.timeSlice);
  2724. });
  2725. intervalsBasedBuffers = {
  2726. left: [],
  2727. right: [],
  2728. recordingLength: 0
  2729. };
  2730. } else {
  2731. setTimeout(looper, config.timeSlice);
  2732. }
  2733. }
  2734. }
// Expose on the RecordRTC namespace when RecordRTC.js is bundled alongside.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.StereoAudioRecorder = StereoAudioRecorder;
}
  2738. // _________________
  2739. // CanvasRecorder.js
  2740. /**
  2741. * CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses HTML2Canvas library and runs top over {@link Whammy}.
  2742. * @summary HTML2Canvas recording into video WebM.
  2743. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  2744. * @author {@link https://MuazKhan.com|Muaz Khan}
  2745. * @typedef CanvasRecorder
  2746. * @class
  2747. * @example
  2748. * var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true });
  2749. * recorder.record();
  2750. * recorder.stop(function(blob) {
  2751. * video.src = URL.createObjectURL(blob);
  2752. * });
  2753. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  2754. * @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.
  2755. * @param {object} config - {disableLogs:true, initCallback: function}
  2756. */
  2757. function CanvasRecorder(htmlElement, config) {
  2758. if (typeof html2canvas === 'undefined') {
  2759. throw 'Please link: https://www.webrtc-experiment.com/screenshot.js';
  2760. }
  2761. config = config || {};
  2762. if (!config.frameInterval) {
  2763. config.frameInterval = 10;
  2764. }
  2765. // via DetectRTC.js
  2766. var isCanvasSupportsStreamCapturing = false;
  2767. ['captureStream', 'mozCaptureStream', 'webkitCaptureStream'].forEach(function(item) {
  2768. if (item in document.createElement('canvas')) {
  2769. isCanvasSupportsStreamCapturing = true;
  2770. }
  2771. });
  2772. var _isChrome = (!!window.webkitRTCPeerConnection || !!window.webkitGetUserMedia) && !!window.chrome;
  2773. var chromeVersion = 50;
  2774. var matchArray = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);
  2775. if (_isChrome && matchArray && matchArray[2]) {
  2776. chromeVersion = parseInt(matchArray[2], 10);
  2777. }
  2778. if (_isChrome && chromeVersion < 52) {
  2779. isCanvasSupportsStreamCapturing = false;
  2780. }
  2781. if (config.useWhammyRecorder) {
  2782. isCanvasSupportsStreamCapturing = false;
  2783. }
  2784. var globalCanvas, mediaStreamRecorder;
  2785. if (isCanvasSupportsStreamCapturing) {
  2786. if (!config.disableLogs) {
  2787. console.log('Your browser supports both MediRecorder API and canvas.captureStream!');
  2788. }
  2789. if (htmlElement instanceof HTMLCanvasElement) {
  2790. globalCanvas = htmlElement;
  2791. } else if (htmlElement instanceof CanvasRenderingContext2D) {
  2792. globalCanvas = htmlElement.canvas;
  2793. } else {
  2794. throw 'Please pass either HTMLCanvasElement or CanvasRenderingContext2D.';
  2795. }
  2796. } else if (!!navigator.mozGetUserMedia) {
  2797. if (!config.disableLogs) {
  2798. console.error('Canvas recording is NOT supported in Firefox.');
  2799. }
  2800. }
  2801. var isRecording;
  2802. /**
  2803. * This method records Canvas.
  2804. * @method
  2805. * @memberof CanvasRecorder
  2806. * @example
  2807. * recorder.record();
  2808. */
  2809. this.record = function() {
  2810. isRecording = true;
  2811. if (isCanvasSupportsStreamCapturing && !config.useWhammyRecorder) {
  2812. // CanvasCaptureMediaStream
  2813. var canvasMediaStream;
  2814. if ('captureStream' in globalCanvas) {
  2815. canvasMediaStream = globalCanvas.captureStream(25); // 25 FPS
  2816. } else if ('mozCaptureStream' in globalCanvas) {
  2817. canvasMediaStream = globalCanvas.mozCaptureStream(25);
  2818. } else if ('webkitCaptureStream' in globalCanvas) {
  2819. canvasMediaStream = globalCanvas.webkitCaptureStream(25);
  2820. }
  2821. try {
  2822. var mdStream = new MediaStream();
  2823. mdStream.addTrack(getTracks(canvasMediaStream, 'video')[0]);
  2824. canvasMediaStream = mdStream;
  2825. } catch (e) {}
  2826. if (!canvasMediaStream) {
  2827. throw 'captureStream API are NOT available.';
  2828. }
  2829. // Note: Jan 18, 2016 status is that,
  2830. // Firefox MediaRecorder API can't record CanvasCaptureMediaStream object.
  2831. mediaStreamRecorder = new MediaStreamRecorder(canvasMediaStream, {
  2832. mimeType: config.mimeType || 'video/webm'
  2833. });
  2834. mediaStreamRecorder.record();
  2835. } else {
  2836. whammy.frames = [];
  2837. lastTime = new Date().getTime();
  2838. drawCanvasFrame();
  2839. }
  2840. if (config.initCallback) {
  2841. config.initCallback();
  2842. }
  2843. };
  2844. this.getWebPImages = function(callback) {
  2845. if (htmlElement.nodeName.toLowerCase() !== 'canvas') {
  2846. callback();
  2847. return;
  2848. }
  2849. var framesLength = whammy.frames.length;
  2850. whammy.frames.forEach(function(frame, idx) {
  2851. var framesRemaining = framesLength - idx;
  2852. if (!config.disableLogs) {
  2853. console.log(framesRemaining + '/' + framesLength + ' frames remaining');
  2854. }
  2855. if (config.onEncodingCallback) {
  2856. config.onEncodingCallback(framesRemaining, framesLength);
  2857. }
  2858. var webp = frame.image.toDataURL('image/webp', 1);
  2859. whammy.frames[idx].image = webp;
  2860. });
  2861. if (!config.disableLogs) {
  2862. console.log('Generating WebM');
  2863. }
  2864. callback();
  2865. };
  2866. /**
  2867. * This method stops recording Canvas.
  2868. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  2869. * @method
  2870. * @memberof CanvasRecorder
  2871. * @example
  2872. * recorder.stop(function(blob) {
  2873. * video.src = URL.createObjectURL(blob);
  2874. * });
  2875. */
  2876. this.stop = function(callback) {
  2877. isRecording = false;
  2878. var that = this;
  2879. if (isCanvasSupportsStreamCapturing && mediaStreamRecorder) {
  2880. mediaStreamRecorder.stop(callback);
  2881. return;
  2882. }
  2883. this.getWebPImages(function() {
  2884. /**
  2885. * @property {Blob} blob - Recorded frames in video/webm blob.
  2886. * @memberof CanvasRecorder
  2887. * @example
  2888. * recorder.stop(function() {
  2889. * var blob = recorder.blob;
  2890. * });
  2891. */
  2892. whammy.compile(function(blob) {
  2893. if (!config.disableLogs) {
  2894. console.log('Recording finished!');
  2895. }
  2896. that.blob = blob;
  2897. if (that.blob.forEach) {
  2898. that.blob = new Blob([], {
  2899. type: 'video/webm'
  2900. });
  2901. }
  2902. if (callback) {
  2903. callback(that.blob);
  2904. }
  2905. whammy.frames = [];
  2906. });
  2907. });
  2908. };
  2909. var isPausedRecording = false;
  2910. /**
  2911. * This method pauses the recording process.
  2912. * @method
  2913. * @memberof CanvasRecorder
  2914. * @example
  2915. * recorder.pause();
  2916. */
  2917. this.pause = function() {
  2918. isPausedRecording = true;
  2919. if (mediaStreamRecorder instanceof MediaStreamRecorder) {
  2920. mediaStreamRecorder.pause();
  2921. return;
  2922. }
  2923. };
  2924. /**
  2925. * This method resumes the recording process.
  2926. * @method
  2927. * @memberof CanvasRecorder
  2928. * @example
  2929. * recorder.resume();
  2930. */
  2931. this.resume = function() {
  2932. isPausedRecording = false;
  2933. if (mediaStreamRecorder instanceof MediaStreamRecorder) {
  2934. mediaStreamRecorder.resume();
  2935. return;
  2936. }
  2937. if (!isRecording) {
  2938. this.record();
  2939. }
  2940. };
  2941. /**
  2942. * This method resets currently recorded data.
  2943. * @method
  2944. * @memberof CanvasRecorder
  2945. * @example
  2946. * recorder.clearRecordedData();
  2947. */
  2948. this.clearRecordedData = function() {
  2949. if (isRecording) {
  2950. this.stop(clearRecordedDataCB);
  2951. }
  2952. clearRecordedDataCB();
  2953. };
  2954. function clearRecordedDataCB() {
  2955. whammy.frames = [];
  2956. isRecording = false;
  2957. isPausedRecording = false;
  2958. }
  2959. // for debugging
  2960. this.name = 'CanvasRecorder';
  2961. this.toString = function() {
  2962. return this.name;
  2963. };
  2964. function cloneCanvas() {
  2965. //create a new canvas
  2966. var newCanvas = document.createElement('canvas');
  2967. var context = newCanvas.getContext('2d');
  2968. //set dimensions
  2969. newCanvas.width = htmlElement.width;
  2970. newCanvas.height = htmlElement.height;
  2971. //apply the old canvas to the new one
  2972. context.drawImage(htmlElement, 0, 0);
  2973. //return the new canvas
  2974. return newCanvas;
  2975. }
  2976. function drawCanvasFrame() {
  2977. if (isPausedRecording) {
  2978. lastTime = new Date().getTime();
  2979. return setTimeout(drawCanvasFrame, 500);
  2980. }
  2981. if (htmlElement.nodeName.toLowerCase() === 'canvas') {
  2982. var duration = new Date().getTime() - lastTime;
  2983. // via #206, by Jack i.e. @Seymourr
  2984. lastTime = new Date().getTime();
  2985. whammy.frames.push({
  2986. image: cloneCanvas(),
  2987. duration: duration
  2988. });
  2989. if (isRecording) {
  2990. setTimeout(drawCanvasFrame, config.frameInterval);
  2991. }
  2992. return;
  2993. }
  2994. html2canvas(htmlElement, {
  2995. grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer,
  2996. onrendered: function(canvas) {
  2997. var duration = new Date().getTime() - lastTime;
  2998. if (!duration) {
  2999. return setTimeout(drawCanvasFrame, config.frameInterval);
  3000. }
  3001. // via #206, by Jack i.e. @Seymourr
  3002. lastTime = new Date().getTime();
  3003. whammy.frames.push({
  3004. image: canvas.toDataURL('image/webp', 1),
  3005. duration: duration
  3006. });
  3007. if (isRecording) {
  3008. setTimeout(drawCanvasFrame, config.frameInterval);
  3009. }
  3010. }
  3011. });
  3012. }
  3013. var lastTime = new Date().getTime();
  3014. var whammy = new Whammy.Video(100);
  3015. }
// Expose on the RecordRTC namespace when RecordRTC.js is bundled alongside.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.CanvasRecorder = CanvasRecorder;
}
  3019. // _________________
  3020. // WhammyRecorder.js
  3021. /**
  3022. * WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs top over {@link Whammy}.
  3023. * @summary Video recording feature in Chrome.
  3024. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  3025. * @author {@link https://MuazKhan.com|Muaz Khan}
  3026. * @typedef WhammyRecorder
  3027. * @class
  3028. * @example
  3029. * var recorder = new WhammyRecorder(mediaStream);
  3030. * recorder.record();
  3031. * recorder.stop(function(blob) {
  3032. * video.src = URL.createObjectURL(blob);
  3033. * });
  3034. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  3035. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  3036. * @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}
  3037. */
// Records a MediaStream to WebM by painting video frames onto a hidden
// canvas on a timer and feeding webp snapshots into the Whammy encoder.
// @param {MediaStream} mediaStream - stream from getUserMedia/captureStream.
// @param {object} config - {disableLogs, frameInterval, width, height, video, initCallback, advertisement, ...}
function WhammyRecorder(mediaStream, config) {
    config = config || {};
    if (!config.frameInterval) {
        config.frameInterval = 10; // ms between canvas snapshots
    }
    if (!config.disableLogs) {
        console.log('Using frames-interval:', config.frameInterval);
    }
    /**
     * This method records video.
     * @method
     * @memberof WhammyRecorder
     * @example
     * recorder.record();
     */
    this.record = function() {
        // Resolution defaults: 320x240 unless overridden.
        if (!config.width) {
            config.width = 320;
        }
        if (!config.height) {
            config.height = 240;
        }
        if (!config.video) {
            config.video = {
                width: config.width,
                height: config.height
            };
        }
        if (!config.canvas) {
            config.canvas = {
                width: config.width,
                height: config.height
            };
        }
        canvas.width = config.canvas.width || 320;
        canvas.height = config.canvas.height || 240;
        context = canvas.getContext('2d');
        // setting defaults
        if (config.video && config.video instanceof HTMLVideoElement) {
            // Caller supplied a live <video>; clone it so we don't disturb it.
            video = config.video.cloneNode();
            if (config.initCallback) {
                config.initCallback();
            }
        } else {
            video = document.createElement('video');
            setSrcObject(mediaStream, video);
            video.onloadedmetadata = function() { // "onloadedmetadata" may NOT work in FF?
                if (config.initCallback) {
                    config.initCallback();
                }
            };
            video.width = config.video.width;
            video.height = config.video.height;
        }
        video.muted = true;
        video.play();
        lastTime = new Date().getTime();
        whammy = new Whammy.Video();
        if (!config.disableLogs) {
            console.log('canvas resolutions', canvas.width, '*', canvas.height);
            console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
        }
        drawFrames(config.frameInterval);
    };
    /**
     * Draw and push frames to Whammy
     * @param {integer} frameInterval - set minimum interval (in milliseconds) between each time we push a frame to Whammy
     */
    function drawFrames(frameInterval) {
        frameInterval = typeof frameInterval !== 'undefined' ? frameInterval : 10;
        var duration = new Date().getTime() - lastTime;
        if (!duration) {
            // Zero elapsed ms would produce a zero-length frame; retry later.
            return setTimeout(drawFrames, frameInterval, frameInterval);
        }
        if (isPausedRecording) {
            lastTime = new Date().getTime();
            return setTimeout(drawFrames, 100);
        }
        // via #206, by Jack i.e. @Seymourr
        lastTime = new Date().getTime();
        if (video.paused) {
            // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
            // Tweak for Android Chrome
            video.play();
        }
        context.drawImage(video, 0, 0, canvas.width, canvas.height);
        whammy.frames.push({
            duration: duration,
            image: canvas.toDataURL('image/webp')
        });
        if (!isStopDrawing) {
            setTimeout(drawFrames, frameInterval, frameInterval);
        }
    }
    // Runs o.functionToLoop(next, i) for i in [0, o.length), one tick apart,
    // then invokes o.callback once.
    function asyncLoop(o) {
        var i = -1,
            length = o.length;
        (function loop() {
            i++;
            if (i === length) {
                o.callback();
                return;
            }
            // "setTimeout" added by Jim McLeod
            setTimeout(function() {
                o.functionToLoop(loop, i);
            }, 1);
        })();
    }
    /**
     * remove black frames from the beginning to the specified frame
     * @param {Array} _frames - array of frames to be checked
     * @param {number} _framesToCheck - number of frame until check will be executed (-1 - will drop all frames until frame not matched will be found)
     * @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all
     * @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all
     * @returns {Array} - array of frames
     */
    // pull#293 by @volodalexey
    function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance, callback) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var resultFrames = [];
        var checkUntilNotBlack = _framesToCheck === -1;
        var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
            _framesToCheck : _frames.length;
        // Reference color compared against: pure black.
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        // Length of the RGB color-space diagonal (max possible distance).
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var doNotCheckNext = false;
        asyncLoop({
            length: endCheckFrame,
            functionToLoop: function(loop, f) {
                var matchPixCount, endPixCheck, maxPixCount;
                var finishImage = function() {
                    if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
                        // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
                    } else {
                        // console.log('frame is passed : ' + f);
                        if (checkUntilNotBlack) {
                            // First non-black frame found: keep all later frames.
                            doNotCheckNext = true;
                        }
                        resultFrames.push(_frames[f]);
                    }
                    loop();
                };
                if (!doNotCheckNext) {
                    var image = new Image();
                    image.onload = function() {
                        context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
                        var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
                        matchPixCount = 0;
                        endPixCheck = imageData.data.length;
                        maxPixCount = imageData.data.length / 4;
                        // Count pixels within pixTolerance of black (RGBA stride 4).
                        for (var pix = 0; pix < endPixCheck; pix += 4) {
                            var currentColor = {
                                r: imageData.data[pix],
                                g: imageData.data[pix + 1],
                                b: imageData.data[pix + 2]
                            };
                            var colorDifference = Math.sqrt(
                                Math.pow(currentColor.r - sampleColor.r, 2) +
                                Math.pow(currentColor.g - sampleColor.g, 2) +
                                Math.pow(currentColor.b - sampleColor.b, 2)
                            );
                            // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                            if (colorDifference <= maxColorDifference * pixTolerance) {
                                matchPixCount++;
                            }
                        }
                        finishImage();
                    };
                    image.src = _frames[f].image;
                } else {
                    finishImage();
                }
            },
            callback: function() {
                // Frames beyond the checked range are kept unconditionally.
                resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
                if (resultFrames.length <= 0) {
                    // at least one last frame should be available for next manipulation
                    // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
                    resultFrames.push(_frames[_frames.length - 1]);
                }
                callback(resultFrames);
            }
        });
    }
    var isStopDrawing = false;
    /**
     * This method stops recording video.
     * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
     * @method
     * @memberof WhammyRecorder
     * @example
     * recorder.stop(function(blob) {
     *     video.src = URL.createObjectURL(blob);
     * });
     */
    this.stop = function(callback) {
        callback = callback || function() {};
        isStopDrawing = true;
        var _this = this;
        // analyse of all frames takes some time!
        setTimeout(function() {
            // e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames
            // e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames
            // e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color
            dropBlackFrames(whammy.frames, -1, null, null, function(frames) {
                whammy.frames = frames;
                // to display advertisement images!
                if (config.advertisement && config.advertisement.length) {
                    whammy.frames = config.advertisement.concat(whammy.frames);
                }
                /**
                 * @property {Blob} blob - Recorded frames in video/webm blob.
                 * @memberof WhammyRecorder
                 * @example
                 * recorder.stop(function() {
                 *     var blob = recorder.blob;
                 * });
                 */
                whammy.compile(function(blob) {
                    _this.blob = blob;
                    if (_this.blob.forEach) {
                        _this.blob = new Blob([], {
                            type: 'video/webm'
                        });
                    }
                    if (callback) {
                        callback(_this.blob);
                    }
                });
            });
        }, 10);
    };
    var isPausedRecording = false;
    /**
     * This method pauses the recording process.
     * @method
     * @memberof WhammyRecorder
     * @example
     * recorder.pause();
     */
    this.pause = function() {
        isPausedRecording = true;
    };
    /**
     * This method resumes the recording process.
     * @method
     * @memberof WhammyRecorder
     * @example
     * recorder.resume();
     */
    this.resume = function() {
        isPausedRecording = false;
        if (isStopDrawing) {
            // Recording was fully stopped earlier; start a new session.
            this.record();
        }
    };
    /**
     * This method resets currently recorded data.
     * @method
     * @memberof WhammyRecorder
     * @example
     * recorder.clearRecordedData();
     */
    this.clearRecordedData = function() {
        if (!isStopDrawing) {
            this.stop(clearRecordedDataCB);
        }
        clearRecordedDataCB();
    };
    function clearRecordedDataCB() {
        whammy.frames = [];
        isStopDrawing = true;
        isPausedRecording = false;
    }
    // for debugging
    this.name = 'WhammyRecorder';
    this.toString = function() {
        return this.name;
    };
    // Hoisted shared state: offscreen canvas/context the frames are painted
    // onto, plus the <video> element and Whammy encoder created by record().
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var video;
    var lastTime;
    var whammy;
}
// Expose on the RecordRTC namespace when RecordRTC.js is bundled alongside.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.WhammyRecorder = WhammyRecorder;
}
  3340. // https://github.com/antimatter15/whammy/blob/master/LICENSE
  3341. // _________
  3342. // Whammy.js
  3343. // todo: Firefox now supports webp for webm containers!
  3344. // their MediaRecorder implementation works well!
  3345. // should we provide an option to record via Whammy.js or MediaRecorder API is a better solution?
  3346. /**
  3347. * Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
  3348. * @summary A real time javascript webm encoder based on a canvas hack.
  3349. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  3350. * @author {@link https://MuazKhan.com|Muaz Khan}
  3351. * @typedef Whammy
  3352. * @class
  3353. * @example
  3354. * var recorder = new Whammy().Video(15);
  3355. * recorder.add(context || canvas || dataURL);
  3356. * var output = recorder.compile();
  3357. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  3358. */
  3359. var Whammy = (function() {
  3360. // a more abstract-ish API
  3361. function WhammyVideo(duration) {
  3362. this.frames = [];
  3363. this.duration = duration || 1;
  3364. this.quality = 0.8;
  3365. }
  3366. /**
  3367. * Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
  3368. * @method
  3369. * @memberof Whammy
  3370. * @example
  3371. * recorder = new Whammy().Video(0.8, 100);
  3372. * recorder.add(canvas || context || 'image/webp');
  3373. * @param {string} frame - Canvas || Context || image/webp
  3374. * @param {number} duration - Stick a duration (in milliseconds)
  3375. */
  3376. WhammyVideo.prototype.add = function(frame, duration) {
  3377. if ('canvas' in frame) { //CanvasRenderingContext2D
  3378. frame = frame.canvas;
  3379. }
  3380. if ('toDataURL' in frame) {
  3381. frame = frame.toDataURL('image/webp', this.quality);
  3382. }
  3383. if (!(/^data:image\/webp;base64,/ig).test(frame)) {
  3384. throw 'Input must be formatted properly as a base64 encoded DataURI of type image/webp';
  3385. }
  3386. this.frames.push({
  3387. image: frame,
  3388. duration: duration || this.duration
  3389. });
  3390. };
  3391. function processInWebWorker(_function) {
  3392. var blob = URL.createObjectURL(new Blob([_function.toString(),
  3393. 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'
  3394. ], {
  3395. type: 'application/javascript'
  3396. }));
  3397. var worker = new Worker(blob);
  3398. URL.revokeObjectURL(blob);
  3399. return worker;
  3400. }
  3401. function whammyInWebWorker(frames) {
        // Builds the WebM (EBML) document for the given webp frames: an EBML
        // header plus one Segment containing Info, Tracks and Cluster elements.
        // Returns [] when no usable frame metadata is available.
        function ArrayToWebM(frames) {
            var info = checkFrames(frames);
            if (!info) {
                return [];
            }
            // Each Cluster holds at most ~30 seconds worth of frames.
            var clusterMaxDuration = 30000;
            var EBML = [{
                'id': 0x1a45dfa3, // EBML
                'data': [{
                    'data': 1,
                    'id': 0x4286 // EBMLVersion
                }, {
                    'data': 1,
                    'id': 0x42f7 // EBMLReadVersion
                }, {
                    'data': 4,
                    'id': 0x42f2 // EBMLMaxIDLength
                }, {
                    'data': 8,
                    'id': 0x42f3 // EBMLMaxSizeLength
                }, {
                    'data': 'webm',
                    'id': 0x4282 // DocType
                }, {
                    'data': 2,
                    'id': 0x4287 // DocTypeVersion
                }, {
                    'data': 2,
                    'id': 0x4285 // DocTypeReadVersion
                }]
            }, {
                'id': 0x18538067, // Segment
                'data': [{
                    'id': 0x1549a966, // Info
                    'data': [{
                        'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
                        'id': 0x2ad7b1 // TimecodeScale
                    }, {
                        'data': 'whammy',
                        'id': 0x4d80 // MuxingApp
                    }, {
                        'data': 'whammy',
                        'id': 0x5741 // WritingApp
                    }, {
                        'data': doubleToString(info.duration),
                        'id': 0x4489 // Duration
                    }]
                }, {
                    'id': 0x1654ae6b, // Tracks
                    'data': [{
                        'id': 0xae, // TrackEntry
                        'data': [{
                            'data': 1,
                            'id': 0xd7 // TrackNumber
                        }, {
                            'data': 1,
                            'id': 0x73c5 // TrackUID
                        }, {
                            'data': 0,
                            'id': 0x9c // FlagLacing
                        }, {
                            'data': 'und',
                            'id': 0x22b59c // Language
                        }, {
                            'data': 'V_VP8',
                            'id': 0x86 // CodecID
                        }, {
                            'data': 'VP8',
                            'id': 0x258688 // CodecName
                        }, {
                            'data': 1,
                            'id': 0x83 // TrackType
                        }, {
                            'id': 0xe0, // Video
                            'data': [{
                                'data': info.width,
                                'id': 0xb0 // PixelWidth
                            }, {
                                'data': info.height,
                                'id': 0xba // PixelHeight
                            }]
                        }]
                    }]
                }]
            }];
            //Generate clusters (max duration)
            var frameNumber = 0;
            var clusterTimecode = 0;
            while (frameNumber < frames.length) {
                var clusterFrames = [];
                var clusterDuration = 0;
                // Greedily fill the cluster until the duration cap is reached.
                do {
                    clusterFrames.push(frames[frameNumber]);
                    clusterDuration += frames[frameNumber].duration;
                    frameNumber++;
                } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
                var clusterCounter = 0;
                var cluster = {
                    'id': 0x1f43b675, // Cluster
                    'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
                }; //Add cluster to segment
                EBML[1].data.push(cluster);
                clusterTimecode += clusterDuration;
            }
            return generateEBML(EBML);
        }
  3508. function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
  3509. return [{
  3510. 'data': clusterTimecode,
  3511. 'id': 0xe7 // Timecode
  3512. }].concat(clusterFrames.map(function(webp) {
  3513. var block = makeSimpleBlock({
  3514. discardable: 0,
  3515. frame: webp.data.slice(4),
  3516. invisible: 0,
  3517. keyframe: 1,
  3518. lacing: 0,
  3519. trackNum: 1,
  3520. timecode: Math.round(clusterCounter)
  3521. });
  3522. clusterCounter += webp.duration;
  3523. return {
  3524. data: block,
  3525. id: 0xa3
  3526. };
  3527. }));
  3528. }
  3529. // sums the lengths of all the frames and gets the duration
  3530. function checkFrames(frames) {
  3531. if (!frames[0]) {
  3532. postMessage({
  3533. error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
  3534. });
  3535. return;
  3536. }
  3537. var width = frames[0].width,
  3538. height = frames[0].height,
  3539. duration = frames[0].duration;
  3540. for (var i = 1; i < frames.length; i++) {
  3541. duration += frames[i].duration;
  3542. }
  3543. return {
  3544. duration: duration,
  3545. width: width,
  3546. height: height
  3547. };
  3548. }
  3549. function numToBuffer(num) {
  3550. var parts = [];
  3551. while (num > 0) {
  3552. parts.push(num & 0xff);
  3553. num = num >> 8;
  3554. }
  3555. return new Uint8Array(parts.reverse());
  3556. }
  3557. function strToBuffer(str) {
  3558. return new Uint8Array(str.split('').map(function(e) {
  3559. return e.charCodeAt(0);
  3560. }));
  3561. }
  3562. function bitsToBuffer(bits) {
  3563. var data = [];
  3564. var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
  3565. bits = pad + bits;
  3566. for (var i = 0; i < bits.length; i += 8) {
  3567. data.push(parseInt(bits.substr(i, 8), 2));
  3568. }
  3569. return new Uint8Array(data);
  3570. }
  3571. function generateEBML(json) {
  3572. var ebml = [];
  3573. for (var i = 0; i < json.length; i++) {
  3574. var data = json[i].data;
  3575. if (typeof data === 'object') {
  3576. data = generateEBML(data);
  3577. }
  3578. if (typeof data === 'number') {
  3579. data = bitsToBuffer(data.toString(2));
  3580. }
  3581. if (typeof data === 'string') {
  3582. data = strToBuffer(data);
  3583. }
  3584. var len = data.size || data.byteLength || data.length;
  3585. var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
  3586. var sizeToString = len.toString(2);
  3587. var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
  3588. var size = (new Array(zeroes)).join('0') + '1' + padded;
  3589. ebml.push(numToBuffer(json[i].id));
  3590. ebml.push(bitsToBuffer(size));
  3591. ebml.push(data);
  3592. }
  3593. return new Blob(ebml, {
  3594. type: 'video/webm'
  3595. });
  3596. }
  3597. function toBinStrOld(bits) {
  3598. var data = '';
  3599. var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
  3600. bits = pad + bits;
  3601. for (var i = 0; i < bits.length; i += 8) {
  3602. data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
  3603. }
  3604. return data;
  3605. }
  3606. function makeSimpleBlock(data) {
  3607. var flags = 0;
  3608. if (data.keyframe) {
  3609. flags |= 128;
  3610. }
  3611. if (data.invisible) {
  3612. flags |= 8;
  3613. }
  3614. if (data.lacing) {
  3615. flags |= (data.lacing << 1);
  3616. }
  3617. if (data.discardable) {
  3618. flags |= 1;
  3619. }
  3620. if (data.trackNum > 127) {
  3621. throw 'TrackNumber > 127 not supported';
  3622. }
  3623. var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
  3624. return String.fromCharCode(e);
  3625. }).join('') + data.frame;
  3626. return out;
  3627. }
  3628. function parseWebP(riff) {
  3629. var VP8 = riff.RIFF[0].WEBP[0];
  3630. var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
  3631. for (var i = 0, c = []; i < 4; i++) {
  3632. c[i] = VP8.charCodeAt(frameStart + 3 + i);
  3633. }
  3634. var width, height, tmp;
  3635. //the code below is literally copied verbatim from the bitstream spec
  3636. tmp = (c[1] << 8) | c[0];
  3637. width = tmp & 0x3FFF;
  3638. tmp = (c[3] << 8) | c[2];
  3639. height = tmp & 0x3FFF;
  3640. return {
  3641. width: width,
  3642. height: height,
  3643. data: VP8,
  3644. riff: riff
  3645. };
  3646. }
  3647. function getStrLength(string, offset) {
  3648. return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
  3649. var unpadded = i.charCodeAt(0).toString(2);
  3650. return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
  3651. }).join(''), 2);
  3652. }
  3653. function parseRIFF(string) {
  3654. var offset = 0;
  3655. var chunks = {};
  3656. while (offset < string.length) {
  3657. var id = string.substr(offset, 4);
  3658. var len = getStrLength(string, offset);
  3659. var data = string.substr(offset + 4 + 4, len);
  3660. offset += 4 + 4 + len;
  3661. chunks[id] = chunks[id] || [];
  3662. if (id === 'RIFF' || id === 'LIST') {
  3663. chunks[id].push(parseRIFF(data));
  3664. } else {
  3665. chunks[id].push(data);
  3666. }
  3667. }
  3668. return chunks;
  3669. }
  3670. function doubleToString(num) {
  3671. return [].slice.call(
  3672. new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
  3673. return String.fromCharCode(e);
  3674. }).reverse().join('');
  3675. }
  3676. var webm = new ArrayToWebM(frames.map(function(frame) {
  3677. var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
  3678. webp.duration = frame.duration;
  3679. return webp;
  3680. }));
  3681. postMessage(webm);
  3682. }
/**
 * Encodes the captured frames into a WebM container. It uses a Web Worker
 * to invoke the 'ArrayToWebM' method off the UI thread.
 * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
 * @method
 * @memberof Whammy
 * @example
 * recorder = new Whammy().Video(0.8, 100);
 * recorder.compile(function(blob) {
 *     // blob.size - blob.type
 * });
 */
WhammyVideo.prototype.compile = function(callback) {
    // Spin up a dedicated worker running the whammy encoder.
    var webWorker = processInWebWorker(whammyInWebWorker);
    webWorker.onmessage = function(event) {
        if (event.data.error) {
            // NOTE(review): on encoder failure the callback is never invoked
            // and the worker is not terminated; callers only see the console
            // error — confirm whether an error callback should be added.
            console.error(event.data.error);
            return;
        }
        // event.data is the finished WebM Blob posted by the worker.
        callback(event.data);
    };
    // this.frames: list of {image, duration} entries — presumably pushed
    // by WhammyVideo's frame-capture method; verify against that code.
    webWorker.postMessage(this.frames);
};
  3705. return {
  3706. /**
  3707. * A more abstract-ish API.
  3708. * @method
  3709. * @memberof Whammy
  3710. * @example
  3711. * recorder = new Whammy().Video(0.8, 100);
  3712. * @param {?number} speed - 0.8
  3713. * @param {?number} quality - 100
  3714. */
  3715. Video: WhammyVideo
  3716. };
  3717. })();
// Expose Whammy on the RecordRTC namespace when bundled together.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.Whammy = Whammy;
}
  3721. // ______________ (indexed-db)
  3722. // DiskStorage.js
  3723. /**
  3724. * DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.
  3725. * @summary Writing blobs into IndexedDB.
  3726. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  3727. * @author {@link https://MuazKhan.com|Muaz Khan}
  3728. * @example
  3729. * DiskStorage.Store({
  3730. * audioBlob: yourAudioBlob,
  3731. * videoBlob: yourVideoBlob,
  3732. * gifBlob : yourGifBlob
  3733. * });
  3734. * DiskStorage.Fetch(function(dataURL, type) {
  3735. * if(type === 'audioBlob') { }
  3736. * if(type === 'videoBlob') { }
  3737. * if(type === 'gifBlob') { }
  3738. * });
  3739. * // DiskStorage.dataStoreName = 'recordRTC';
  3740. * // DiskStorage.onError = function(error) { };
  3741. * @property {function} init - This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.
  3742. * @property {function} Fetch - This method fetches stored blobs from IndexedDB.
  3743. * @property {function} Store - This method stores blobs in IndexedDB.
  3744. * @property {function} onError - This function is invoked for any known/unknown error.
  3745. * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
  3746. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  3747. */
var DiskStorage = {
    /**
     * This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.
     * @method
     * @memberof DiskStorage
     * @internal
     * @example
     * DiskStorage.init();
     */
    init: function() {
        var self = this;
        if (typeof indexedDB === 'undefined' || typeof indexedDB.open === 'undefined') {
            console.error('IndexedDB API are not available in this browser.');
            return;
        }
        var dbVersion = 1;
        // Default database name is derived from the page URL with
        // separator characters stripped, so each page gets its own DB.
        var dbName = this.dbName || location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''),
            db;
        var request = indexedDB.open(dbName, dbVersion);
        // Creates the single object store that holds all three blob keys.
        function createObjectStore(dataBase) {
            dataBase.createObjectStore(self.dataStoreName);
        }
        // Writes whichever blobs are currently set (via Store), then reads
        // all three keys back, invoking this.callback (set via Fetch) with
        // each result — so one init() serves both Store and Fetch.
        function putInDB() {
            var transaction = db.transaction([self.dataStoreName], 'readwrite');
            if (self.videoBlob) {
                transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob');
            }
            if (self.gifBlob) {
                transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob');
            }
            if (self.audioBlob) {
                transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob');
            }
            function getFromStore(portionName) {
                transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
                    if (self.callback) {
                        self.callback(event.target.result, portionName);
                    }
                };
            }
            getFromStore('audioBlob');
            getFromStore('videoBlob');
            getFromStore('gifBlob');
        }
        request.onerror = self.onError;
        request.onsuccess = function() {
            db = request.result;
            db.onerror = self.onError;
            // Legacy WebKit path: old implementations exposed setVersion and
            // required an explicit version change before object stores could
            // be created. Modern browsers take the plain putInDB() path and
            // create the store in onupgradeneeded below.
            if (db.setVersion) {
                if (db.version !== dbVersion) {
                    var setVersion = db.setVersion(dbVersion);
                    setVersion.onsuccess = function() {
                        createObjectStore(db);
                        putInDB();
                    };
                } else {
                    putInDB();
                }
            } else {
                putInDB();
            }
        };
        request.onupgradeneeded = function(event) {
            createObjectStore(event.target.result);
        };
    },
    /**
     * This method fetches stored blobs from IndexedDB.
     * @method
     * @memberof DiskStorage
     * @internal
     * @example
     * DiskStorage.Fetch(function(dataURL, type) {
     *     if(type === 'audioBlob') { }
     *     if(type === 'videoBlob') { }
     *     if(type === 'gifBlob')   { }
     * });
     */
    Fetch: function(callback) {
        this.callback = callback;
        this.init();
        return this;
    },
    /**
     * This method stores blobs in IndexedDB.
     * @method
     * @memberof DiskStorage
     * @internal
     * @example
     * DiskStorage.Store({
     *     audioBlob: yourAudioBlob,
     *     videoBlob: yourVideoBlob,
     *     gifBlob  : yourGifBlob
     * });
     */
    Store: function(config) {
        this.audioBlob = config.audioBlob;
        this.videoBlob = config.videoBlob;
        this.gifBlob = config.gifBlob;
        this.init();
        return this;
    },
    /**
     * This function is invoked for any known/unknown error.
     * @method
     * @memberof DiskStorage
     * @internal
     * @example
     * DiskStorage.onError = function(error){
     *     alert( JSON.stringify(error) );
     * };
     */
    onError: function(error) {
        console.error(JSON.stringify(error, null, '\t'));
    },
    /**
     * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
     * @memberof DiskStorage
     * @internal
     * @example
     * DiskStorage.dataStoreName = 'recordRTC';
     */
    dataStoreName: 'recordRTC',
    // Optional override for the database name; defaults to a name derived
    // from location.href (see init).
    dbName: null
};
// Expose DiskStorage on the RecordRTC namespace when bundled together.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.DiskStorage = DiskStorage;
}
  3876. // ______________
  3877. // GifRecorder.js
  3878. /**
  3879. * GifRecorder is standalone calss used by {@link RecordRTC} to record video or canvas into animated gif.
  3880. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  3881. * @author {@link https://MuazKhan.com|Muaz Khan}
  3882. * @typedef GifRecorder
  3883. * @class
  3884. * @example
  3885. * var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 });
  3886. * recorder.record();
  3887. * recorder.stop(function(blob) {
  3888. * img.src = URL.createObjectURL(blob);
  3889. * });
  3890. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  3891. * @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.
  3892. * @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}
  3893. */
  3894. function GifRecorder(mediaStream, config) {
  3895. if (typeof GIFEncoder === 'undefined') {
  3896. var script = document.createElement('script');
  3897. script.src = 'https://www.webrtc-experiment.com/gif-recorder.js';
  3898. (document.body || document.documentElement).appendChild(script);
  3899. }
  3900. config = config || {};
  3901. var isHTMLObject = mediaStream instanceof CanvasRenderingContext2D || mediaStream instanceof HTMLCanvasElement;
  3902. /**
  3903. * This method records MediaStream.
  3904. * @method
  3905. * @memberof GifRecorder
  3906. * @example
  3907. * recorder.record();
  3908. */
  3909. this.record = function() {
  3910. if (typeof GIFEncoder === 'undefined') {
  3911. setTimeout(self.record, 1000);
  3912. return;
  3913. }
  3914. if (!isLoadedMetaData) {
  3915. setTimeout(self.record, 1000);
  3916. return;
  3917. }
  3918. if (!isHTMLObject) {
  3919. if (!config.width) {
  3920. config.width = video.offsetWidth || 320;
  3921. }
  3922. if (!config.height) {
  3923. config.height = video.offsetHeight || 240;
  3924. }
  3925. if (!config.video) {
  3926. config.video = {
  3927. width: config.width,
  3928. height: config.height
  3929. };
  3930. }
  3931. if (!config.canvas) {
  3932. config.canvas = {
  3933. width: config.width,
  3934. height: config.height
  3935. };
  3936. }
  3937. canvas.width = config.canvas.width || 320;
  3938. canvas.height = config.canvas.height || 240;
  3939. video.width = config.video.width || 320;
  3940. video.height = config.video.height || 240;
  3941. }
  3942. // external library to record as GIF images
  3943. gifEncoder = new GIFEncoder();
  3944. // void setRepeat(int iter)
  3945. // Sets the number of times the set of GIF frames should be played.
  3946. // Default is 1; 0 means play indefinitely.
  3947. gifEncoder.setRepeat(0);
  3948. // void setFrameRate(Number fps)
  3949. // Sets frame rate in frames per second.
  3950. // Equivalent to setDelay(1000/fps).
  3951. // Using "setDelay" instead of "setFrameRate"
  3952. gifEncoder.setDelay(config.frameRate || 200);
  3953. // void setQuality(int quality)
  3954. // Sets quality of color quantization (conversion of images to the
  3955. // maximum 256 colors allowed by the GIF specification).
  3956. // Lower values (minimum = 1) produce better colors,
  3957. // but slow processing significantly. 10 is the default,
  3958. // and produces good color mapping at reasonable speeds.
  3959. // Values greater than 20 do not yield significant improvements in speed.
  3960. gifEncoder.setQuality(config.quality || 10);
  3961. // Boolean start()
  3962. // This writes the GIF Header and returns false if it fails.
  3963. gifEncoder.start();
  3964. if (typeof config.onGifRecordingStarted === 'function') {
  3965. config.onGifRecordingStarted();
  3966. }
  3967. startTime = Date.now();
  3968. function drawVideoFrame(time) {
  3969. if (self.clearedRecordedData === true) {
  3970. return;
  3971. }
  3972. if (isPausedRecording) {
  3973. return setTimeout(function() {
  3974. drawVideoFrame(time);
  3975. }, 100);
  3976. }
  3977. lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
  3978. if (typeof lastFrameTime === undefined) {
  3979. lastFrameTime = time;
  3980. }
  3981. // ~10 fps
  3982. if (time - lastFrameTime < 90) {
  3983. return;
  3984. }
  3985. if (!isHTMLObject && video.paused) {
  3986. // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316
  3987. // Tweak for Android Chrome
  3988. video.play();
  3989. }
  3990. if (!isHTMLObject) {
  3991. context.drawImage(video, 0, 0, canvas.width, canvas.height);
  3992. }
  3993. if (config.onGifPreview) {
  3994. config.onGifPreview(canvas.toDataURL('image/png'));
  3995. }
  3996. gifEncoder.addFrame(context);
  3997. lastFrameTime = time;
  3998. }
  3999. lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
  4000. if (config.initCallback) {
  4001. config.initCallback();
  4002. }
  4003. };
  4004. /**
  4005. * This method stops recording MediaStream.
  4006. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  4007. * @method
  4008. * @memberof GifRecorder
  4009. * @example
  4010. * recorder.stop(function(blob) {
  4011. * img.src = URL.createObjectURL(blob);
  4012. * });
  4013. */
  4014. this.stop = function(callback) {
  4015. callback = callback || function() {};
  4016. if (lastAnimationFrame) {
  4017. cancelAnimationFrame(lastAnimationFrame);
  4018. }
  4019. endTime = Date.now();
  4020. /**
  4021. * @property {Blob} blob - The recorded blob object.
  4022. * @memberof GifRecorder
  4023. * @example
  4024. * recorder.stop(function(){
  4025. * var blob = recorder.blob;
  4026. * });
  4027. */
  4028. this.blob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
  4029. type: 'image/gif'
  4030. });
  4031. callback(this.blob);
  4032. // bug: find a way to clear old recorded blobs
  4033. gifEncoder.stream().bin = [];
  4034. };
  4035. var isPausedRecording = false;
  4036. /**
  4037. * This method pauses the recording process.
  4038. * @method
  4039. * @memberof GifRecorder
  4040. * @example
  4041. * recorder.pause();
  4042. */
  4043. this.pause = function() {
  4044. isPausedRecording = true;
  4045. };
  4046. /**
  4047. * This method resumes the recording process.
  4048. * @method
  4049. * @memberof GifRecorder
  4050. * @example
  4051. * recorder.resume();
  4052. */
  4053. this.resume = function() {
  4054. isPausedRecording = false;
  4055. };
  4056. /**
  4057. * This method resets currently recorded data.
  4058. * @method
  4059. * @memberof GifRecorder
  4060. * @example
  4061. * recorder.clearRecordedData();
  4062. */
  4063. this.clearRecordedData = function() {
  4064. self.clearedRecordedData = true;
  4065. clearRecordedDataCB();
  4066. };
  4067. function clearRecordedDataCB() {
  4068. if (gifEncoder) {
  4069. gifEncoder.stream().bin = [];
  4070. }
  4071. }
  4072. // for debugging
  4073. this.name = 'GifRecorder';
  4074. this.toString = function() {
  4075. return this.name;
  4076. };
  4077. var canvas = document.createElement('canvas');
  4078. var context = canvas.getContext('2d');
  4079. if (isHTMLObject) {
  4080. if (mediaStream instanceof CanvasRenderingContext2D) {
  4081. context = mediaStream;
  4082. canvas = context.canvas;
  4083. } else if (mediaStream instanceof HTMLCanvasElement) {
  4084. context = mediaStream.getContext('2d');
  4085. canvas = mediaStream;
  4086. }
  4087. }
  4088. var isLoadedMetaData = true;
  4089. if (!isHTMLObject) {
  4090. var video = document.createElement('video');
  4091. video.muted = true;
  4092. video.autoplay = true;
  4093. isLoadedMetaData = false;
  4094. video.onloadedmetadata = function() {
  4095. isLoadedMetaData = true;
  4096. };
  4097. setSrcObject(mediaStream, video);
  4098. video.play();
  4099. }
  4100. var lastAnimationFrame = null;
  4101. var startTime, endTime, lastFrameTime;
  4102. var gifEncoder;
  4103. var self = this;
  4104. }
// Expose GifRecorder on the RecordRTC namespace when bundled together.
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.GifRecorder = GifRecorder;
}
  4108. // Last time updated: 2019-06-21 4:09:42 AM UTC
  4109. // ________________________
  4110. // MultiStreamsMixer v1.2.2
  4111. // Open-Sourced: https://github.com/muaz-khan/MultiStreamsMixer
  4112. // --------------------------------------------------
  4113. // Muaz Khan - www.MuazKhan.com
  4114. // MIT License - www.WebRTC-Experiment.com/licence
  4115. // --------------------------------------------------
  4116. function MultiStreamsMixer(arrayOfMediaStreams, elementClass) {
  4117. var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';
// Environment shim: when this script runs under Node.js (standalone
// MultiStreamsMixer, no DOM), install minimal fake browser globals so
// the rest of the constructor can execute. All of the early returns
// bail out when a real browser (or bundled RecordRTC) is detected.
(function(that) {
    if (typeof RecordRTC !== 'undefined') {
        return;
    }
    if (!that) {
        return;
    }
    if (typeof window !== 'undefined') {
        return;
    }
    if (typeof global === 'undefined') {
        return;
    }
    global.navigator = {
        userAgent: browserFakeUserAgent,
        getUserMedia: function() {}
    };
    if (!global.console) {
        global.console = {};
    }
    if (typeof global.console.log === 'undefined' || typeof global.console.error === 'undefined') {
        global.console.error = global.console.log = global.console.log || function() {
            console.log(arguments);
        };
    }
    if (typeof document === 'undefined') {
        /*global document:true */
        // Fake document whose created "elements" are inert no-op objects.
        that.document = {
            documentElement: {
                appendChild: function() {
                    return '';
                }
            }
        };
        document.createElement = document.captureStream = document.mozCaptureStream = function() {
            var obj = {
                getContext: function() {
                    return obj;
                },
                play: function() {},
                pause: function() {},
                drawImage: function() {},
                toDataURL: function() {
                    return '';
                },
                style: {}
            };
            return obj;
        };
        that.HTMLVideoElement = function() {};
    }
    if (typeof location === 'undefined') {
        /*global location:true */
        that.location = {
            protocol: 'file:',
            href: '',
            hash: ''
        };
    }
    if (typeof screen === 'undefined') {
        /*global screen:true */
        that.screen = {
            width: 0,
            height: 0
        };
    }
    if (typeof URL === 'undefined') {
        /*global screen:true */
        that.URL = {
            createObjectURL: function() {
                return '';
            },
            revokeObjectURL: function() {
                return '';
            }
        };
    }
    /*global window:true */
    that.window = global;
})(typeof global !== 'undefined' ? global : null);
  4198. // requires: chrome://flags/#enable-experimental-web-platform-features
  4199. elementClass = elementClass || 'multi-streams-mixer';
  4200. var videos = [];
  4201. var isStopDrawingFrames = false;
  4202. var canvas = document.createElement('canvas');
  4203. var context = canvas.getContext('2d');
  4204. canvas.style.opacity = 0;
  4205. canvas.style.position = 'absolute';
  4206. canvas.style.zIndex = -1;
  4207. canvas.style.top = '-1000em';
  4208. canvas.style.left = '-1000em';
  4209. canvas.className = elementClass;
  4210. (document.body || document.documentElement).appendChild(canvas);
  4211. this.disableLogs = false;
  4212. this.frameInterval = 10;
  4213. this.width = 360;
  4214. this.height = 240;
  4215. // use gain node to prevent echo
  4216. this.useGainNode = true;
  4217. var self = this;
  4218. // _____________________________
  4219. // Cross-Browser-Declarations.js
  4220. // WebAudio API representer
  4221. var AudioContext = window.AudioContext;
  4222. if (typeof AudioContext === 'undefined') {
  4223. if (typeof webkitAudioContext !== 'undefined') {
  4224. /*global AudioContext:true */
  4225. AudioContext = webkitAudioContext;
  4226. }
  4227. if (typeof mozAudioContext !== 'undefined') {
  4228. /*global AudioContext:true */
  4229. AudioContext = mozAudioContext;
  4230. }
  4231. }
  4232. /*jshint -W079 */
  4233. var URL = window.URL;
  4234. if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {
  4235. /*global URL:true */
  4236. URL = webkitURL;
  4237. }
  4238. if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?
  4239. if (typeof navigator.webkitGetUserMedia !== 'undefined') {
  4240. navigator.getUserMedia = navigator.webkitGetUserMedia;
  4241. }
  4242. if (typeof navigator.mozGetUserMedia !== 'undefined') {
  4243. navigator.getUserMedia = navigator.mozGetUserMedia;
  4244. }
  4245. }
  4246. var MediaStream = window.MediaStream;
  4247. if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
  4248. MediaStream = webkitMediaStream;
  4249. }
  4250. /*global MediaStream:true */
  4251. if (typeof MediaStream !== 'undefined') {
  4252. // override "stop" method for all browsers
  4253. if (typeof MediaStream.prototype.stop === 'undefined') {
  4254. MediaStream.prototype.stop = function() {
  4255. this.getTracks().forEach(function(track) {
  4256. track.stop();
  4257. });
  4258. };
  4259. }
  4260. }
  4261. var Storage = {};
  4262. if (typeof AudioContext !== 'undefined') {
  4263. Storage.AudioContext = AudioContext;
  4264. } else if (typeof webkitAudioContext !== 'undefined') {
  4265. Storage.AudioContext = webkitAudioContext;
  4266. }
  4267. function setSrcObject(stream, element) {
  4268. if ('srcObject' in element) {
  4269. element.srcObject = stream;
  4270. } else if ('mozSrcObject' in element) {
  4271. element.mozSrcObject = stream;
  4272. } else {
  4273. element.srcObject = stream;
  4274. }
  4275. }
// Public entry point: kicks off the setTimeout-driven canvas render
// loop that composites all attached videos.
this.startDrawingFrames = function() {
    drawVideosToCanvas();
};
  4279. function drawVideosToCanvas() {
  4280. if (isStopDrawingFrames) {
  4281. return;
  4282. }
  4283. var videosLength = videos.length;
  4284. var fullcanvas = false;
  4285. var remaining = [];
  4286. videos.forEach(function(video) {
  4287. if (!video.stream) {
  4288. video.stream = {};
  4289. }
  4290. if (video.stream.fullcanvas) {
  4291. fullcanvas = video;
  4292. } else {
  4293. // todo: video.stream.active or video.stream.live to fix blank frames issues?
  4294. remaining.push(video);
  4295. }
  4296. });
  4297. if (fullcanvas) {
  4298. canvas.width = fullcanvas.stream.width;
  4299. canvas.height = fullcanvas.stream.height;
  4300. } else if (remaining.length) {
  4301. canvas.width = videosLength > 1 ? remaining[0].width * 2 : remaining[0].width;
  4302. var height = 1;
  4303. if (videosLength === 3 || videosLength === 4) {
  4304. height = 2;
  4305. }
  4306. if (videosLength === 5 || videosLength === 6) {
  4307. height = 3;
  4308. }
  4309. if (videosLength === 7 || videosLength === 8) {
  4310. height = 4;
  4311. }
  4312. if (videosLength === 9 || videosLength === 10) {
  4313. height = 5;
  4314. }
  4315. canvas.height = remaining[0].height * height;
  4316. } else {
  4317. canvas.width = self.width || 360;
  4318. canvas.height = self.height || 240;
  4319. }
  4320. if (fullcanvas && fullcanvas instanceof HTMLVideoElement) {
  4321. drawImage(fullcanvas);
  4322. }
  4323. remaining.forEach(function(video, idx) {
  4324. drawImage(video, idx);
  4325. });
  4326. setTimeout(drawVideosToCanvas, self.frameInterval);
  4327. }
  4328. function drawImage(video, idx) {
  4329. if (isStopDrawingFrames) {
  4330. return;
  4331. }
  4332. var x = 0;
  4333. var y = 0;
  4334. var width = video.width;
  4335. var height = video.height;
  4336. if (idx === 1) {
  4337. x = video.width;
  4338. }
  4339. if (idx === 2) {
  4340. y = video.height;
  4341. }
  4342. if (idx === 3) {
  4343. x = video.width;
  4344. y = video.height;
  4345. }
  4346. if (idx === 4) {
  4347. y = video.height * 2;
  4348. }
  4349. if (idx === 5) {
  4350. x = video.width;
  4351. y = video.height * 2;
  4352. }
  4353. if (idx === 6) {
  4354. y = video.height * 3;
  4355. }
  4356. if (idx === 7) {
  4357. x = video.width;
  4358. y = video.height * 3;
  4359. }
  4360. if (typeof video.stream.left !== 'undefined') {
  4361. x = video.stream.left;
  4362. }
  4363. if (typeof video.stream.top !== 'undefined') {
  4364. y = video.stream.top;
  4365. }
  4366. if (typeof video.stream.width !== 'undefined') {
  4367. width = video.stream.width;
  4368. }
  4369. if (typeof video.stream.height !== 'undefined') {
  4370. height = video.stream.height;
  4371. }
  4372. context.drawImage(video, x, y, width, height);
  4373. if (typeof video.stream.onRender === 'function') {
  4374. video.stream.onRender(context, x, y, width, height, idx);
  4375. }
  4376. }
  4377. function getMixedStream() {
  4378. isStopDrawingFrames = false;
  4379. var mixedVideoStream = getMixedVideoStream();
  4380. var mixedAudioStream = getMixedAudioStream();
  4381. if (mixedAudioStream) {
  4382. mixedAudioStream.getTracks().filter(function(t) {
  4383. return t.kind === 'audio';
  4384. }).forEach(function(track) {
  4385. mixedVideoStream.addTrack(track);
  4386. });
  4387. }
  4388. var fullcanvas;
  4389. arrayOfMediaStreams.forEach(function(stream) {
  4390. if (stream.fullcanvas) {
  4391. fullcanvas = true;
  4392. }
  4393. });
  4394. // mixedVideoStream.prototype.appendStreams = appendStreams;
  4395. // mixedVideoStream.prototype.resetVideoStreams = resetVideoStreams;
  4396. // mixedVideoStream.prototype.clearRecordedData = clearRecordedData;
  4397. return mixedVideoStream;
  4398. }
  4399. function getMixedVideoStream() {
  4400. resetVideoStreams();
  4401. var capturedStream;
  4402. if ('captureStream' in canvas) {
  4403. capturedStream = canvas.captureStream();
  4404. } else if ('mozCaptureStream' in canvas) {
  4405. capturedStream = canvas.mozCaptureStream();
  4406. } else if (!self.disableLogs) {
  4407. console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features');
  4408. }
  4409. var videoStream = new MediaStream();
  4410. capturedStream.getTracks().filter(function(t) {
  4411. return t.kind === 'video';
  4412. }).forEach(function(track) {
  4413. videoStream.addTrack(track);
  4414. });
  4415. canvas.stream = videoStream;
  4416. return videoStream;
  4417. }
// Mixes the audio tracks of every input stream through a shared
// AudioContext into one MediaStreamDestination. Returns undefined when
// no input stream carries audio (the graph is then never built).
function getMixedAudioStream() {
    // via: @pehrsons
    // The AudioContext instance is cached in Storage and reused across
    // mixer instances (browsers cap the number of live contexts).
    if (!Storage.AudioContextConstructor) {
        Storage.AudioContextConstructor = new Storage.AudioContext();
    }
    self.audioContext = Storage.AudioContextConstructor;
    self.audioSources = [];
    if (self.useGainNode === true) {
        // A zero-gain node routed to the speakers prevents local echo
        // while keeping the sources connected.
        self.gainNode = self.audioContext.createGain();
        self.gainNode.connect(self.audioContext.destination);
        self.gainNode.gain.value = 0; // don't hear self
    }
    var audioTracksLength = 0;
    arrayOfMediaStreams.forEach(function(stream) {
        if (!stream.getTracks().filter(function(t) {
            return t.kind === 'audio';
        }).length) {
            return;
        }
        audioTracksLength++;
        var audioSource = self.audioContext.createMediaStreamSource(stream);
        if (self.useGainNode === true) {
            audioSource.connect(self.gainNode);
        }
        self.audioSources.push(audioSource);
    });
    if (!audioTracksLength) {
        // because "self.audioContext" is not initialized
        // that's why we've to ignore rest of the code
        return;
    }
    self.audioDestination = self.audioContext.createMediaStreamDestination();
    self.audioSources.forEach(function(audioSource) {
        audioSource.connect(self.audioDestination);
    });
    return self.audioDestination.stream;
}
  4455. function getVideo(stream) {
  4456. var video = document.createElement('video');
  4457. setSrcObject(stream, video);
  4458. video.className = elementClass;
  4459. video.muted = true;
  4460. video.volume = 0;
  4461. video.width = stream.width || self.width || 360;
  4462. video.height = stream.height || self.height || 240;
  4463. video.play();
  4464. return video;
  4465. }
    /**
     * Append extra MediaStream(s) to an in-progress mix.
     * @param {MediaStream|MediaStream[]} streams - stream(s) to add; a single
     * stream is wrapped into an array.
     * @throws {string} When no argument is given.
     */
    this.appendStreams = function(streams) {
        if (!streams) {
            throw 'First parameter is required.';
        }
        if (!(streams instanceof Array)) {
            streams = [streams];
        }
        streams.forEach(function(stream) {
            // collect only the tracks we actually mix into a fresh stream
            var newStream = new MediaStream();
            if (stream.getTracks().filter(function(t) {
                return t.kind === 'video';
            }).length) {
                // register a helper <video> so the canvas loop starts
                // drawing this stream's frames
                var video = getVideo(stream);
                video.stream = stream;
                videos.push(video);
                newStream.addTrack(stream.getTracks().filter(function(t) {
                    return t.kind === 'video';
                })[0]);
            }
            if (stream.getTracks().filter(function(t) {
                return t.kind === 'audio';
            }).length) {
                // NOTE(review): assumes self.audioContext already exists, i.e.
                // the original mix contained audio — confirm against callers.
                var audioSource = self.audioContext.createMediaStreamSource(stream);
                // NOTE(review): this REPLACES self.audioDestination, so sources
                // connected earlier keep feeding the old destination; verify
                // whether that is intended.
                self.audioDestination = self.audioContext.createMediaStreamDestination();
                audioSource.connect(self.audioDestination);
                newStream.addTrack(self.audioDestination.stream.getTracks().filter(function(t) {
                    return t.kind === 'audio';
                })[0]);
            }
            arrayOfMediaStreams.push(newStream);
        });
    };
  4498. this.releaseStreams = function() {
  4499. videos = [];
  4500. isStopDrawingFrames = true;
  4501. if (self.gainNode) {
  4502. self.gainNode.disconnect();
  4503. self.gainNode = null;
  4504. }
  4505. if (self.audioSources.length) {
  4506. self.audioSources.forEach(function(source) {
  4507. source.disconnect();
  4508. });
  4509. self.audioSources = [];
  4510. }
  4511. if (self.audioDestination) {
  4512. self.audioDestination.disconnect();
  4513. self.audioDestination = null;
  4514. }
  4515. if (self.audioContext) {
  4516. self.audioContext.close();
  4517. }
  4518. self.audioContext = null;
  4519. context.clearRect(0, 0, canvas.width, canvas.height);
  4520. if (canvas.stream) {
  4521. canvas.stream.stop();
  4522. canvas.stream = null;
  4523. }
  4524. };
  4525. this.resetVideoStreams = function(streams) {
  4526. if (streams && !(streams instanceof Array)) {
  4527. streams = [streams];
  4528. }
  4529. resetVideoStreams(streams);
  4530. };
  4531. function resetVideoStreams(streams) {
  4532. videos = [];
  4533. streams = streams || arrayOfMediaStreams;
  4534. // via: @adrian-ber
  4535. streams.forEach(function(stream) {
  4536. if (!stream.getTracks().filter(function(t) {
  4537. return t.kind === 'video';
  4538. }).length) {
  4539. return;
  4540. }
  4541. var video = getVideo(stream);
  4542. video.stream = stream;
  4543. videos.push(video);
  4544. });
  4545. }
    // for debugging
    this.name = 'MultiStreamsMixer';
    this.toString = function() {
        return this.name;
    };
    // expose getMixedStream (defined earlier in this constructor) as the
    // public entry point of the mixer
    this.getMixedStream = getMixedStream;
}
// Export MultiStreamsMixer for standalone use (CommonJS and AMD). When this
// file is the RecordRTC bundle, RecordRTC owns the module exports instead.
if (typeof RecordRTC === 'undefined') {
    if (typeof module !== 'undefined' /* && !!module.exports*/ ) {
        module.exports = MultiStreamsMixer;
    }
    if (typeof define === 'function' && define.amd) {
        define('MultiStreamsMixer', [], function() {
            return MultiStreamsMixer;
        });
    }
}
  4563. // ______________________
  4564. // MultiStreamRecorder.js
  4565. /*
  4566. * Video conference recording, using captureStream API along with WebAudio and Canvas2D API.
  4567. */
  4568. /**
  4569. * MultiStreamRecorder can record multiple videos in single container.
  4570. * @summary Multi-videos recorder.
  4571. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  4572. * @author {@link https://MuazKhan.com|Muaz Khan}
  4573. * @typedef MultiStreamRecorder
  4574. * @class
  4575. * @example
  4576. * var options = {
  4577. * mimeType: 'video/webm'
  4578. * }
  4579. * var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);
  4580. * recorder.record();
  4581. * recorder.stop(function(blob) {
  4582. * video.src = URL.createObjectURL(blob);
  4583. *
  4584. * // or
  4585. * var blob = recorder.blob;
  4586. * });
  4587. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  4588. * @param {MediaStreams} mediaStreams - Array of MediaStreams.
  4589. * @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"}
  4590. */
  4591. function MultiStreamRecorder(arrayOfMediaStreams, options) {
  4592. arrayOfMediaStreams = arrayOfMediaStreams || [];
  4593. var self = this;
  4594. var mixer;
  4595. var mediaRecorder;
  4596. options = options || {
  4597. elementClass: 'multi-streams-mixer',
  4598. mimeType: 'video/webm',
  4599. video: {
  4600. width: 360,
  4601. height: 240
  4602. }
  4603. };
  4604. if (!options.frameInterval) {
  4605. options.frameInterval = 10;
  4606. }
  4607. if (!options.video) {
  4608. options.video = {};
  4609. }
  4610. if (!options.video.width) {
  4611. options.video.width = 360;
  4612. }
  4613. if (!options.video.height) {
  4614. options.video.height = 240;
  4615. }
  4616. /**
  4617. * This method records all MediaStreams.
  4618. * @method
  4619. * @memberof MultiStreamRecorder
  4620. * @example
  4621. * recorder.record();
  4622. */
  4623. this.record = function() {
  4624. // github/muaz-khan/MultiStreamsMixer
  4625. mixer = new MultiStreamsMixer(arrayOfMediaStreams, options.elementClass || 'multi-streams-mixer');
  4626. if (getAllVideoTracks().length) {
  4627. mixer.frameInterval = options.frameInterval || 10;
  4628. mixer.width = options.video.width || 360;
  4629. mixer.height = options.video.height || 240;
  4630. mixer.startDrawingFrames();
  4631. }
  4632. if (options.previewStream && typeof options.previewStream === 'function') {
  4633. options.previewStream(mixer.getMixedStream());
  4634. }
  4635. // record using MediaRecorder API
  4636. mediaRecorder = new MediaStreamRecorder(mixer.getMixedStream(), options);
  4637. mediaRecorder.record();
  4638. };
  4639. function getAllVideoTracks() {
  4640. var tracks = [];
  4641. arrayOfMediaStreams.forEach(function(stream) {
  4642. getTracks(stream, 'video').forEach(function(track) {
  4643. tracks.push(track);
  4644. });
  4645. });
  4646. return tracks;
  4647. }
  4648. /**
  4649. * This method stops recording MediaStream.
  4650. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  4651. * @method
  4652. * @memberof MultiStreamRecorder
  4653. * @example
  4654. * recorder.stop(function(blob) {
  4655. * video.src = URL.createObjectURL(blob);
  4656. * });
  4657. */
  4658. this.stop = function(callback) {
  4659. if (!mediaRecorder) {
  4660. return;
  4661. }
  4662. mediaRecorder.stop(function(blob) {
  4663. self.blob = blob;
  4664. callback(blob);
  4665. self.clearRecordedData();
  4666. });
  4667. };
  4668. /**
  4669. * This method pauses the recording process.
  4670. * @method
  4671. * @memberof MultiStreamRecorder
  4672. * @example
  4673. * recorder.pause();
  4674. */
  4675. this.pause = function() {
  4676. if (mediaRecorder) {
  4677. mediaRecorder.pause();
  4678. }
  4679. };
  4680. /**
  4681. * This method resumes the recording process.
  4682. * @method
  4683. * @memberof MultiStreamRecorder
  4684. * @example
  4685. * recorder.resume();
  4686. */
  4687. this.resume = function() {
  4688. if (mediaRecorder) {
  4689. mediaRecorder.resume();
  4690. }
  4691. };
  4692. /**
  4693. * This method resets currently recorded data.
  4694. * @method
  4695. * @memberof MultiStreamRecorder
  4696. * @example
  4697. * recorder.clearRecordedData();
  4698. */
  4699. this.clearRecordedData = function() {
  4700. if (mediaRecorder) {
  4701. mediaRecorder.clearRecordedData();
  4702. mediaRecorder = null;
  4703. }
  4704. if (mixer) {
  4705. mixer.releaseStreams();
  4706. mixer = null;
  4707. }
  4708. };
  4709. /**
  4710. * Add extra media-streams to existing recordings.
  4711. * @method
  4712. * @memberof MultiStreamRecorder
  4713. * @param {MediaStreams} mediaStreams - Array of MediaStreams
  4714. * @example
  4715. * recorder.addStreams([newAudioStream, newVideoStream]);
  4716. */
  4717. this.addStreams = function(streams) {
  4718. if (!streams) {
  4719. throw 'First parameter is required.';
  4720. }
  4721. if (!(streams instanceof Array)) {
  4722. streams = [streams];
  4723. }
  4724. arrayOfMediaStreams.concat(streams);
  4725. if (!mediaRecorder || !mixer) {
  4726. return;
  4727. }
  4728. mixer.appendStreams(streams);
  4729. if (options.previewStream && typeof options.previewStream === 'function') {
  4730. options.previewStream(mixer.getMixedStream());
  4731. }
  4732. };
  4733. /**
  4734. * Reset videos during live recording. Replace old videos e.g. replace cameras with full-screen.
  4735. * @method
  4736. * @memberof MultiStreamRecorder
  4737. * @param {MediaStreams} mediaStreams - Array of MediaStreams
  4738. * @example
  4739. * recorder.resetVideoStreams([newVideo1, newVideo2]);
  4740. */
  4741. this.resetVideoStreams = function(streams) {
  4742. if (!mixer) {
  4743. return;
  4744. }
  4745. if (streams && !(streams instanceof Array)) {
  4746. streams = [streams];
  4747. }
  4748. mixer.resetVideoStreams(streams);
  4749. };
  4750. /**
  4751. * Returns MultiStreamsMixer
  4752. * @method
  4753. * @memberof MultiStreamRecorder
  4754. * @example
  4755. * let mixer = recorder.getMixer();
  4756. * mixer.appendStreams([newStream]);
  4757. */
  4758. this.getMixer = function() {
  4759. return mixer;
  4760. };
  4761. // for debugging
  4762. this.name = 'MultiStreamRecorder';
  4763. this.toString = function() {
  4764. return this.name;
  4765. };
  4766. }
// attach to the RecordRTC namespace when bundled with RecordRTC
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.MultiStreamRecorder = MultiStreamRecorder;
}
  4770. // _____________________
  4771. // RecordRTC.promises.js
  4772. /**
  4773. * RecordRTCPromisesHandler adds promises support in {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here}
  4774. * @summary Promises for {@link RecordRTC}
  4775. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  4776. * @author {@link https://MuazKhan.com|Muaz Khan}
  4777. * @typedef RecordRTCPromisesHandler
  4778. * @class
  4779. * @example
  4780. * var recorder = new RecordRTCPromisesHandler(mediaStream, options);
  4781. * recorder.startRecording()
  4782. * .then(successCB)
  4783. * .catch(errorCB);
  4784. * // Note: You can access all RecordRTC API using "recorder.recordRTC" e.g.
  4785. * recorder.recordRTC.onStateChanged = function(state) {};
  4786. * recorder.recordRTC.setRecordingDuration(5000);
  4787. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  4788. * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
  4789. * @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
  4790. * @throws Will throw an error if "new" keyword is not used to initiate "RecordRTCPromisesHandler". Also throws error if first argument "MediaStream" is missing.
  4791. * @requires {@link RecordRTC}
  4792. */
  4793. function RecordRTCPromisesHandler(mediaStream, options) {
  4794. if (!this) {
  4795. throw 'Use "new RecordRTCPromisesHandler()"';
  4796. }
  4797. if (typeof mediaStream === 'undefined') {
  4798. throw 'First argument "MediaStream" is required.';
  4799. }
  4800. var self = this;
  4801. /**
  4802. * @property {Blob} blob - Access/reach the native {@link RecordRTC} object.
  4803. * @memberof RecordRTCPromisesHandler
  4804. * @example
  4805. * let internal = recorder.recordRTC.getInternalRecorder();
  4806. * alert(internal instanceof MediaStreamRecorder);
  4807. * recorder.recordRTC.onStateChanged = function(state) {};
  4808. */
  4809. self.recordRTC = new RecordRTC(mediaStream, options);
  4810. /**
  4811. * This method records MediaStream.
  4812. * @method
  4813. * @memberof RecordRTCPromisesHandler
  4814. * @example
  4815. * recorder.startRecording()
  4816. * .then(successCB)
  4817. * .catch(errorCB);
  4818. */
  4819. this.startRecording = function() {
  4820. return new Promise(function(resolve, reject) {
  4821. try {
  4822. self.recordRTC.startRecording();
  4823. resolve();
  4824. } catch (e) {
  4825. reject(e);
  4826. }
  4827. });
  4828. };
  4829. /**
  4830. * This method stops the recording.
  4831. * @method
  4832. * @memberof RecordRTCPromisesHandler
  4833. * @example
  4834. * recorder.stopRecording().then(function() {
  4835. * var blob = recorder.getBlob();
  4836. * }).catch(errorCB);
  4837. */
  4838. this.stopRecording = function() {
  4839. return new Promise(function(resolve, reject) {
  4840. try {
  4841. self.recordRTC.stopRecording(function(url) {
  4842. self.blob = self.recordRTC.getBlob();
  4843. if (!self.blob || !self.blob.size) {
  4844. reject('Empty blob.', self.blob);
  4845. return;
  4846. }
  4847. resolve(url);
  4848. });
  4849. } catch (e) {
  4850. reject(e);
  4851. }
  4852. });
  4853. };
  4854. /**
  4855. * This method pauses the recording. You can resume recording using "resumeRecording" method.
  4856. * @method
  4857. * @memberof RecordRTCPromisesHandler
  4858. * @example
  4859. * recorder.pauseRecording()
  4860. * .then(successCB)
  4861. * .catch(errorCB);
  4862. */
  4863. this.pauseRecording = function() {
  4864. return new Promise(function(resolve, reject) {
  4865. try {
  4866. self.recordRTC.pauseRecording();
  4867. resolve();
  4868. } catch (e) {
  4869. reject(e);
  4870. }
  4871. });
  4872. };
  4873. /**
  4874. * This method resumes the recording.
  4875. * @method
  4876. * @memberof RecordRTCPromisesHandler
  4877. * @example
  4878. * recorder.resumeRecording()
  4879. * .then(successCB)
  4880. * .catch(errorCB);
  4881. */
  4882. this.resumeRecording = function() {
  4883. return new Promise(function(resolve, reject) {
  4884. try {
  4885. self.recordRTC.resumeRecording();
  4886. resolve();
  4887. } catch (e) {
  4888. reject(e);
  4889. }
  4890. });
  4891. };
  4892. /**
  4893. * This method returns data-url for the recorded blob.
  4894. * @method
  4895. * @memberof RecordRTCPromisesHandler
  4896. * @example
  4897. * recorder.stopRecording().then(function() {
  4898. * recorder.getDataURL().then(function(dataURL) {
  4899. * window.open(dataURL);
  4900. * }).catch(errorCB);;
  4901. * }).catch(errorCB);
  4902. */
  4903. this.getDataURL = function(callback) {
  4904. return new Promise(function(resolve, reject) {
  4905. try {
  4906. self.recordRTC.getDataURL(function(dataURL) {
  4907. resolve(dataURL);
  4908. });
  4909. } catch (e) {
  4910. reject(e);
  4911. }
  4912. });
  4913. };
  4914. /**
  4915. * This method returns the recorded blob.
  4916. * @method
  4917. * @memberof RecordRTCPromisesHandler
  4918. * @example
  4919. * recorder.stopRecording().then(function() {
  4920. * recorder.getBlob().then(function(blob) {})
  4921. * }).catch(errorCB);
  4922. */
  4923. this.getBlob = function() {
  4924. return new Promise(function(resolve, reject) {
  4925. try {
  4926. resolve(self.recordRTC.getBlob());
  4927. } catch (e) {
  4928. reject(e);
  4929. }
  4930. });
  4931. };
  4932. /**
  4933. * Destroy RecordRTC instance. Clear all recorders and objects.
  4934. * @method
  4935. * @memberof RecordRTCPromisesHandler
  4936. * @example
  4937. * let internalRecorder = await recorder.getInternalRecorder();
  4938. * if(internalRecorder instanceof MultiStreamRecorder) {
  4939. * internalRecorder.addStreams([newAudioStream]);
  4940. * internalRecorder.resetVideoStreams([screenStream]);
  4941. * }
  4942. * @returns {Object} Returns internal recording object.
  4943. */
  4944. this.getInternalRecorder = function() {
  4945. return new Promise(function(resolve, reject) {
  4946. try {
  4947. resolve(self.recordRTC.getInternalRecorder());
  4948. } catch (e) {
  4949. reject(e);
  4950. }
  4951. });
  4952. };
  4953. /**
  4954. * This method resets the recorder. So that you can reuse single recorder instance many times.
  4955. * @method
  4956. * @memberof RecordRTCPromisesHandler
  4957. * @example
  4958. * await recorder.reset();
  4959. * recorder.startRecording(); // record again
  4960. */
  4961. this.reset = function() {
  4962. return new Promise(function(resolve, reject) {
  4963. try {
  4964. resolve(self.recordRTC.reset());
  4965. } catch (e) {
  4966. reject(e);
  4967. }
  4968. });
  4969. };
  4970. /**
  4971. * Destroy RecordRTC instance. Clear all recorders and objects.
  4972. * @method
  4973. * @memberof RecordRTCPromisesHandler
  4974. * @example
  4975. * recorder.destroy().then(successCB).catch(errorCB);
  4976. */
  4977. this.destroy = function() {
  4978. return new Promise(function(resolve, reject) {
  4979. try {
  4980. resolve(self.recordRTC.destroy());
  4981. } catch (e) {
  4982. reject(e);
  4983. }
  4984. });
  4985. };
  4986. /**
  4987. * Get recorder's readonly state.
  4988. * @method
  4989. * @memberof RecordRTCPromisesHandler
  4990. * @example
  4991. * let state = await recorder.getState();
  4992. * // or
  4993. * recorder.getState().then(state => { console.log(state); })
  4994. * @returns {String} Returns recording state.
  4995. */
  4996. this.getState = function() {
  4997. return new Promise(function(resolve, reject) {
  4998. try {
  4999. resolve(self.recordRTC.getState());
  5000. } catch (e) {
  5001. reject(e);
  5002. }
  5003. });
  5004. };
  5005. /**
  5006. * @property {Blob} blob - Recorded data as "Blob" object.
  5007. * @memberof RecordRTCPromisesHandler
  5008. * @example
  5009. * await recorder.stopRecording();
  5010. * let blob = recorder.getBlob(); // or "recorder.recordRTC.blob"
  5011. * invokeSaveAsDialog(blob);
  5012. */
  5013. this.blob = null;
  5014. /**
  5015. * RecordRTC version number
  5016. * @property {String} version - Release version number.
  5017. * @memberof RecordRTCPromisesHandler
  5018. * @static
  5019. * @readonly
  5020. * @example
  5021. * alert(recorder.version);
  5022. */
  5023. this.version = '5.5.9';
  5024. }
// attach to the RecordRTC namespace when bundled with RecordRTC
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.RecordRTCPromisesHandler = RecordRTCPromisesHandler;
}
  5028. // ______________________
  5029. // WebAssemblyRecorder.js
  5030. /**
  5031. * WebAssemblyRecorder lets you create webm videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a webm video with the given framerate and quality. This makes it compatible out-of-the-box with ImageData from a CANVAS. With realtime mode you can also use webm-wasm for streaming webm videos.
  5032. * @summary Video recording feature in Chrome, Firefox and maybe Edge.
  5033. * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
  5034. * @author {@link https://MuazKhan.com|Muaz Khan}
  5035. * @typedef WebAssemblyRecorder
  5036. * @class
  5037. * @example
  5038. * var recorder = new WebAssemblyRecorder(mediaStream);
  5039. * recorder.record();
  5040. * recorder.stop(function(blob) {
  5041. * video.src = URL.createObjectURL(blob);
  5042. * });
  5043. * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
  5044. * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
  5045. * @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024}
  5046. */
  5047. function WebAssemblyRecorder(stream, config) {
  5048. // based on: github.com/GoogleChromeLabs/webm-wasm
  5049. if (typeof ReadableStream === 'undefined' || typeof WritableStream === 'undefined') {
  5050. // because it fixes readable/writable streams issues
  5051. console.error('Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js');
  5052. }
  5053. config = config || {};
  5054. config.width = config.width || 640;
  5055. config.height = config.height || 480;
  5056. config.frameRate = config.frameRate || 30;
  5057. config.bitrate = config.bitrate || 1200;
  5058. function createBufferURL(buffer, type) {
  5059. return URL.createObjectURL(new Blob([buffer], {
  5060. type: type || ''
  5061. }));
  5062. }
  5063. function cameraStream() {
  5064. return new ReadableStream({
  5065. start: function(controller) {
  5066. var cvs = document.createElement('canvas');
  5067. var video = document.createElement('video');
  5068. video.srcObject = stream;
  5069. video.onplaying = function() {
  5070. cvs.width = config.width;
  5071. cvs.height = config.height;
  5072. var ctx = cvs.getContext('2d');
  5073. var frameTimeout = 1000 / config.frameRate;
  5074. setTimeout(function f() {
  5075. ctx.drawImage(video, 0, 0);
  5076. controller.enqueue(
  5077. ctx.getImageData(0, 0, config.width, config.height)
  5078. );
  5079. setTimeout(f, frameTimeout);
  5080. }, frameTimeout);
  5081. };
  5082. video.play();
  5083. }
  5084. });
  5085. }
  5086. var worker;
  5087. function startRecording(stream, buffer) {
  5088. if (!config.workerPath && !buffer) {
  5089. // is it safe to use @latest ?
  5090. fetch(
  5091. 'https://unpkg.com/webm-wasm@latest/dist/webm-worker.js'
  5092. ).then(function(r) {
  5093. r.arrayBuffer().then(function(buffer) {
  5094. startRecording(stream, buffer);
  5095. });
  5096. });
  5097. return;
  5098. }
  5099. if (!config.workerPath && buffer instanceof ArrayBuffer) {
  5100. var blob = new Blob([buffer], {
  5101. type: 'text/javascript'
  5102. });
  5103. config.workerPath = URL.createObjectURL(blob);
  5104. }
  5105. if (!config.workerPath) {
  5106. console.error('workerPath parameter is missing.');
  5107. }
  5108. worker = new Worker(config.workerPath);
  5109. worker.postMessage(config.webAssemblyPath || 'https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm');
  5110. worker.addEventListener('message', function(event) {
  5111. if (event.data === 'READY') {
  5112. worker.postMessage({
  5113. width: config.width,
  5114. height: config.height,
  5115. bitrate: config.bitrate || 1200,
  5116. timebaseDen: config.frameRate || 30,
  5117. realtime: true
  5118. });
  5119. cameraStream().pipeTo(new WritableStream({
  5120. write: function(image) {
  5121. if (!worker) {
  5122. return;
  5123. }
  5124. worker.postMessage(image.data.buffer, [image.data.buffer]);
  5125. }
  5126. }));
  5127. } else if (!!event.data) {
  5128. if (!isPaused) {
  5129. arrayOfBuffers.push(event.data);
  5130. }
  5131. }
  5132. });
  5133. }
  5134. /**
  5135. * This method records video.
  5136. * @method
  5137. * @memberof WebAssemblyRecorder
  5138. * @example
  5139. * recorder.record();
  5140. */
  5141. this.record = function() {
  5142. arrayOfBuffers = [];
  5143. isPaused = false;
  5144. this.blob = null;
  5145. startRecording(stream);
  5146. if (typeof config.initCallback === 'function') {
  5147. config.initCallback();
  5148. }
  5149. };
  5150. var isPaused;
  5151. /**
  5152. * This method pauses the recording process.
  5153. * @method
  5154. * @memberof WebAssemblyRecorder
  5155. * @example
  5156. * recorder.pause();
  5157. */
  5158. this.pause = function() {
  5159. isPaused = true;
  5160. };
  5161. /**
  5162. * This method resumes the recording process.
  5163. * @method
  5164. * @memberof WebAssemblyRecorder
  5165. * @example
  5166. * recorder.resume();
  5167. */
  5168. this.resume = function() {
  5169. isPaused = false;
  5170. };
  5171. function terminate() {
  5172. if (!worker) {
  5173. return;
  5174. }
  5175. worker.postMessage(null);
  5176. worker.terminate();
  5177. worker = null;
  5178. }
  5179. var arrayOfBuffers = [];
  5180. /**
  5181. * This method stops recording video.
  5182. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
  5183. * @method
  5184. * @memberof WebAssemblyRecorder
  5185. * @example
  5186. * recorder.stop(function(blob) {
  5187. * video.src = URL.createObjectURL(blob);
  5188. * });
  5189. */
  5190. this.stop = function(callback) {
  5191. terminate();
  5192. this.blob = new Blob(arrayOfBuffers, {
  5193. type: 'video/webm'
  5194. });
  5195. callback(this.blob);
  5196. };
  5197. // for debugging
  5198. this.name = 'WebAssemblyRecorder';
  5199. this.toString = function() {
  5200. return this.name;
  5201. };
  5202. /**
  5203. * This method resets currently recorded data.
  5204. * @method
  5205. * @memberof WebAssemblyRecorder
  5206. * @example
  5207. * recorder.clearRecordedData();
  5208. */
  5209. this.clearRecordedData = function() {
  5210. arrayOfBuffers = [];
  5211. isPaused = false;
  5212. this.blob = null;
  5213. // todo: if recording-ON then STOP it first
  5214. };
  5215. /**
  5216. * @property {Blob} blob - The recorded blob object.
  5217. * @memberof WebAssemblyRecorder
  5218. * @example
  5219. * recorder.stop(function(){
  5220. * var blob = recorder.blob;
  5221. * });
  5222. */
  5223. this.blob = null;
  5224. }
// attach to the RecordRTC namespace when bundled with RecordRTC
if (typeof RecordRTC !== 'undefined') {
    RecordRTC.WebAssemblyRecorder = WebAssemblyRecorder;
}