\"recorder.getBlob()\"
method. Usage of \"getBlob\" is recommended, though.\r\n * @property {Blob} blob - Recorded Blob can be accessed using this property.\r\n * @memberof RecordRTC\r\n * @instance\r\n * @readonly\r\n * @example\r\n * recorder.stopRecording(function() {\r\n * var blob = this.blob;\r\n *\r\n * // below one is recommended\r\n * var blob = this.getBlob();\r\n * });\r\n */\r\n blob: null,\r\n\r\n /**\r\n * This works only with {recorderType:StereoAudioRecorder}. Use this property on \"stopRecording\" to verify the encoder's sample-rates.\r\n * @property {number} bufferSize - Buffer-size used to encode the WAV container\r\n * @memberof RecordRTC\r\n * @instance\r\n * @readonly\r\n * @example\r\n * recorder.stopRecording(function() {\r\n * alert('Recorder used this buffer-size: ' + this.bufferSize);\r\n * });\r\n */\r\n bufferSize: 0,\r\n\r\n /**\r\n * This works only with {recorderType:StereoAudioRecorder}. Use this property on \"stopRecording\" to verify the encoder's sample-rates.\r\n * @property {number} sampleRate - Sample-rates used to encode the WAV container\r\n * @memberof RecordRTC\r\n * @instance\r\n * @readonly\r\n * @example\r\n * recorder.stopRecording(function() {\r\n * alert('Recorder used these sample-rates: ' + this.sampleRate);\r\n * });\r\n */\r\n sampleRate: 0,\r\n\r\n /**\r\n * {recorderType:StereoAudioRecorder} returns ArrayBuffer object.\r\n * @property {ArrayBuffer} buffer - Audio ArrayBuffer, supported only in Chrome.\r\n * @memberof RecordRTC\r\n * @instance\r\n * @readonly\r\n * @example\r\n * recorder.stopRecording(function() {\r\n * var arrayBuffer = this.buffer;\r\n * alert(arrayBuffer.byteLength);\r\n * });\r\n */\r\n buffer: null,\r\n\r\n /**\r\n * This method resets the recorder. So that you can reuse single recorder instance many times.\r\n * @method\r\n * @memberof RecordRTC\r\n * @instance\r\n * @example\r\n * recorder.reset();\r\n * recorder.startRecording();\r\n */\r\n reset: function() {\r\n if (self.state === 'recording' && !config.disableLogs) {\r\n console.warn('Stop an active recorder.');\r\n }\r\n\r\n if (mediaRecorder && typeof mediaRecorder.clearRecordedData === 'function') {\r\n mediaRecorder.clearRecordedData();\r\n }\r\n mediaRecorder = null;\r\n setState('inactive');\r\n self.blob = null;\r\n },\r\n\r\n /**\r\n * This method is called whenever recorder's state changes. Use this as an \"event\".\r\n * @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.\r\n * @method\r\n * @memberof RecordRTC\r\n * @instance\r\n * @example\r\n * recorder.onStateChanged = function(state) {\r\n * console.log('Recorder state: ', state);\r\n * };\r\n */\r\n onStateChanged: function(state) {\r\n if (!config.disableLogs) {\r\n console.log('Recorder state changed:', state);\r\n }\r\n },\r\n\r\n /**\r\n * A recorder can have inactive, recording, paused or stopped states.\r\n * @property {String} state - A recorder's state can be: recording, paused, stopped or inactive.\r\n * @memberof RecordRTC\r\n * @static\r\n * @readonly\r\n * @example\r\n * // this looper function will keep you updated about the recorder's states.\r\n * (function looper() {\r\n * document.querySelector('h1').innerHTML = 'Recorder\\'s state is: ' + recorder.state;\r\n * if(recorder.state === 'stopped') return; // ignore+stop\r\n * setTimeout(looper, 1000); // update after every 3-seconds\r\n * })();\r\n * recorder.startRecording();\r\n */\r\n state: 'inactive',\r\n\r\n /**\r\n * Get recorder's readonly state.\r\n * @method\r\n * @memberof RecordRTC\r\n * @example\r\n * var state = recorder.getState();\r\n * @returns {String} Returns recording state.\r\n */\r\n getState: function() {\r\n return self.state;\r\n },\r\n\r\n /**\r\n * Destroy RecordRTC instance. Clear all recorders and objects.\r\n * @method\r\n * @memberof RecordRTC\r\n * @example\r\n * recorder.destroy();\r\n */\r\n destroy: function() {\r\n var disableLogsCache = config.disableLogs;\r\n\r\n config = {\r\n disableLogs: true\r\n };\r\n self.reset();\r\n setState('destroyed');\r\n returnObject = self = null;\r\n\r\n if (Storage.AudioContextConstructor) {\r\n Storage.AudioContextConstructor.close();\r\n Storage.AudioContextConstructor = null;\r\n }\r\n\r\n config.disableLogs = disableLogsCache;\r\n\r\n if (!config.disableLogs) {\r\n console.log('RecordRTC is destroyed.');\r\n }\r\n },\r\n\r\n /**\r\n * RecordRTC version number\r\n * @property {String} version - Release version number.\r\n * @memberof RecordRTC\r\n * @static\r\n * @readonly\r\n * @example\r\n * alert(recorder.version);\r\n */\r\n version: '5.6.2'\r\n };\r\n\r\n if (!this) {\r\n self = returnObject;\r\n return returnObject;\r\n }\r\n\r\n // if someone wants to use RecordRTC with the \"new\" keyword.\r\n for (var prop in returnObject) {\r\n this[prop] = returnObject[prop];\r\n }\r\n\r\n self = this;\r\n\r\n return returnObject;\r\n}\r\n\r\nRecordRTC.version = '5.6.2';\r\n\r\nif (typeof module !== 'undefined' /* && !!module.exports*/ ) {\r\n module.exports = RecordRTC;\r\n}\r\n\r\nif (typeof define === 'function' && define.amd) {\r\n define('RecordRTC', [], function() {\r\n return RecordRTC;\r\n });\r\n}\n\r\nRecordRTC.getFromDisk = function(type, callback) {\r\n if (!callback) {\r\n throw 'callback is mandatory.';\r\n }\r\n\r\n console.log('Getting recorded ' + (type === 'all' ? 'blobs' : type + ' blob ') + ' from disk!');\r\n DiskStorage.Fetch(function(dataURL, _type) {\r\n if (type !== 'all' && _type === type + 'Blob' && callback) {\r\n callback(dataURL);\r\n }\r\n\r\n if (type === 'all' && callback) {\r\n callback(dataURL, _type.replace('Blob', ''));\r\n }\r\n });\r\n};\r\n\r\n/**\r\n * This method can be used to store recorded blobs into IndexedDB storage.\r\n * @param {object} options - {audio: Blob, video: Blob, gif: Blob}\r\n * @method\r\n * @memberof RecordRTC\r\n * @example\r\n * RecordRTC.writeToDisk({\r\n * audio: audioBlob,\r\n * video: videoBlob,\r\n * gif : gifBlob\r\n * });\r\n */\r\nRecordRTC.writeToDisk = function(options) {\r\n console.log('Writing recorded blob(s) to disk!');\r\n options = options || {};\r\n if (options.audio && options.video && options.gif) {\r\n options.audio.getDataURL(function(audioDataURL) {\r\n options.video.getDataURL(function(videoDataURL) {\r\n options.gif.getDataURL(function(gifDataURL) {\r\n DiskStorage.Store({\r\n audioBlob: audioDataURL,\r\n videoBlob: videoDataURL,\r\n gifBlob: gifDataURL\r\n });\r\n });\r\n });\r\n });\r\n } else if (options.audio && options.video) {\r\n options.audio.getDataURL(function(audioDataURL) {\r\n options.video.getDataURL(function(videoDataURL) {\r\n DiskStorage.Store({\r\n audioBlob: audioDataURL,\r\n videoBlob: videoDataURL\r\n });\r\n });\r\n });\r\n } else if (options.audio && options.gif) {\r\n options.audio.getDataURL(function(audioDataURL) {\r\n options.gif.getDataURL(function(gifDataURL) {\r\n DiskStorage.Store({\r\n audioBlob: audioDataURL,\r\n gifBlob: gifDataURL\r\n });\r\n });\r\n });\r\n } else if (options.video && options.gif) {\r\n options.video.getDataURL(function(videoDataURL) {\r\n options.gif.getDataURL(function(gifDataURL) {\r\n DiskStorage.Store({\r\n videoBlob: videoDataURL,\r\n gifBlob: gifDataURL\r\n });\r\n });\r\n });\r\n } else if (options.audio) {\r\n options.audio.getDataURL(function(audioDataURL) {\r\n DiskStorage.Store({\r\n audioBlob: audioDataURL\r\n });\r\n });\r\n } else if (options.video) {\r\n options.video.getDataURL(function(videoDataURL) {\r\n DiskStorage.Store({\r\n videoBlob: videoDataURL\r\n });\r\n });\r\n } else if (options.gif) {\r\n options.gif.getDataURL(function(gifDataURL) {\r\n DiskStorage.Store({\r\n gifBlob: gifDataURL\r\n });\r\n });\r\n }\r\n};\n\r\n// __________________________\r\n// RecordRTC-Configuration.js\r\n\r\n/**\r\n * {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.\r\n * @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid \"config\" object.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef RecordRTCConfiguration\r\n * @class\r\n * @example\r\n * var options = RecordRTCConfiguration(mediaStream, options);\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {type:\"video\", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}\r\n */\r\n\r\nfunction RecordRTCConfiguration(mediaStream, config) {\r\n if (!config.recorderType && !config.type) {\r\n if (!!config.audio && !!config.video) {\r\n config.type = 'video';\r\n } else if (!!config.audio && !config.video) {\r\n config.type = 'audio';\r\n }\r\n }\r\n\r\n if (config.recorderType && !config.type) {\r\n if (config.recorderType === WhammyRecorder || config.recorderType === CanvasRecorder || (typeof WebAssemblyRecorder !== 'undefined' && config.recorderType === WebAssemblyRecorder)) {\r\n config.type = 'video';\r\n } else if (config.recorderType === GifRecorder) {\r\n config.type = 'gif';\r\n } else if (config.recorderType === StereoAudioRecorder) {\r\n config.type = 'audio';\r\n } else if (config.recorderType === MediaStreamRecorder) {\r\n if (getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) {\r\n config.type = 'video';\r\n } else if (!getTracks(mediaStream, 'audio').length && getTracks(mediaStream, 'video').length) {\r\n config.type = 'video';\r\n } else if (getTracks(mediaStream, 'audio').length && !getTracks(mediaStream, 'video').length) {\r\n config.type = 'audio';\r\n } else {\r\n // config.type = 'UnKnown';\r\n }\r\n }\r\n }\r\n\r\n if (typeof MediaStreamRecorder !== 'undefined' && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {\r\n if (!config.mimeType) {\r\n config.mimeType = 'video/webm';\r\n }\r\n\r\n if (!config.type) {\r\n config.type = config.mimeType.split('/')[0];\r\n }\r\n\r\n if (!config.bitsPerSecond) {\r\n // config.bitsPerSecond = 128000;\r\n }\r\n }\r\n\r\n // consider default type=audio\r\n if (!config.type) {\r\n if (config.mimeType) {\r\n config.type = config.mimeType.split('/')[0];\r\n }\r\n if (!config.type) {\r\n config.type = 'audio';\r\n }\r\n }\r\n\r\n return config;\r\n}\n\r\n// __________________\r\n// GetRecorderType.js\r\n\r\n/**\r\n * {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.\r\n * @summary It returns best recorder-type available for your browser.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef GetRecorderType\r\n * @class\r\n * @example\r\n * var RecorderType = GetRecorderType(options);\r\n * var recorder = new RecorderType(options);\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {type:\"video\", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}\r\n */\r\n\r\nfunction GetRecorderType(mediaStream, config) {\r\n var recorder;\r\n\r\n // StereoAudioRecorder can work with all three: Edge, Firefox and Chrome\r\n // todo: detect if it is Edge, then auto use: StereoAudioRecorder\r\n if (isChrome || isEdge || isOpera) {\r\n // Media Stream Recording API has not been implemented in chrome yet;\r\n // That's why using WebAudio API to record stereo audio in WAV format\r\n recorder = StereoAudioRecorder;\r\n }\r\n\r\n if (typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype && !isChrome) {\r\n recorder = MediaStreamRecorder;\r\n }\r\n\r\n // video recorder (in WebM format)\r\n if (config.type === 'video' && (isChrome || isOpera)) {\r\n recorder = WhammyRecorder;\r\n\r\n if (typeof WebAssemblyRecorder !== 'undefined' && typeof ReadableStream !== 'undefined') {\r\n recorder = WebAssemblyRecorder;\r\n }\r\n }\r\n\r\n // video recorder (in Gif format)\r\n if (config.type === 'gif') {\r\n recorder = GifRecorder;\r\n }\r\n\r\n // html2canvas recording!\r\n if (config.type === 'canvas') {\r\n recorder = CanvasRecorder;\r\n }\r\n\r\n if (isMediaRecorderCompatible() && recorder !== CanvasRecorder && recorder !== GifRecorder && typeof MediaRecorder !== 'undefined' && 'requestData' in MediaRecorder.prototype) {\r\n if (getTracks(mediaStream, 'video').length || getTracks(mediaStream, 'audio').length) {\r\n // audio-only recording\r\n if (config.type === 'audio') {\r\n if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('audio/webm')) {\r\n recorder = MediaStreamRecorder;\r\n }\r\n // else recorder = StereoAudioRecorder;\r\n } else {\r\n // video or screen tracks\r\n if (typeof MediaRecorder.isTypeSupported === 'function' && MediaRecorder.isTypeSupported('video/webm')) {\r\n recorder = MediaStreamRecorder;\r\n }\r\n }\r\n }\r\n }\r\n\r\n if (mediaStream instanceof Array && mediaStream.length) {\r\n recorder = MultiStreamRecorder;\r\n }\r\n\r\n if (config.recorderType) {\r\n recorder = config.recorderType;\r\n }\r\n\r\n if (!config.disableLogs && !!recorder && !!recorder.name) {\r\n console.log('Using recorderType:', recorder.name || recorder.constructor.name);\r\n }\r\n\r\n if (!recorder && isSafari) {\r\n recorder = MediaStreamRecorder;\r\n }\r\n\r\n return recorder;\r\n}\n\r\n// _____________\r\n// MRecordRTC.js\r\n\r\n/**\r\n * MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings in a single place, by providing simple API.\r\n * @summary MRecordRTC stands for \"Multiple-RecordRTC\".\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef MRecordRTC\r\n * @class\r\n * @example\r\n * var recorder = new MRecordRTC();\r\n * recorder.addStream(MediaStream);\r\n * recorder.mediaType = {\r\n * audio: true, // or StereoAudioRecorder or MediaStreamRecorder\r\n * video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder\r\n * gif: true // or GifRecorder\r\n * };\r\n * // mimeType is optional and should be set only in advance cases.\r\n * recorder.mimeType = {\r\n * audio: 'audio/wav',\r\n * video: 'video/webm',\r\n * gif: 'image/gif'\r\n * };\r\n * recorder.startRecording();\r\n * @see For further information:\r\n * @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @requires {@link RecordRTC}\r\n */\r\n\r\nfunction MRecordRTC(mediaStream) {\r\n\r\n /**\r\n * This method attaches MediaStream object to {@link MRecordRTC}.\r\n * @param {MediaStream} mediaStream - A MediaStream object, either fetched using getUserMedia API, or generated using captureStreamUntilEnded or WebAudio API.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.addStream(MediaStream);\r\n */\r\n this.addStream = function(_mediaStream) {\r\n if (_mediaStream) {\r\n mediaStream = _mediaStream;\r\n }\r\n };\r\n\r\n /**\r\n * This property can be used to set the recording type e.g. audio, or video, or gif, or canvas.\r\n * @property {object} mediaType - {audio: true, video: true, gif: true}\r\n * @memberof MRecordRTC\r\n * @example\r\n * var recorder = new MRecordRTC();\r\n * recorder.mediaType = {\r\n * audio: true, // TRUE or StereoAudioRecorder or MediaStreamRecorder\r\n * video: true, // TRUE or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder\r\n * gif : true // TRUE or GifRecorder\r\n * };\r\n */\r\n this.mediaType = {\r\n audio: true,\r\n video: true\r\n };\r\n\r\n /**\r\n * This method starts recording.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.startRecording();\r\n */\r\n this.startRecording = function() {\r\n var mediaType = this.mediaType;\r\n var recorderType;\r\n var mimeType = this.mimeType || {\r\n audio: null,\r\n video: null,\r\n gif: null\r\n };\r\n\r\n if (typeof mediaType.audio !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'audio').length) {\r\n mediaType.audio = false;\r\n }\r\n\r\n if (typeof mediaType.video !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) {\r\n mediaType.video = false;\r\n }\r\n\r\n if (typeof mediaType.gif !== 'function' && isMediaRecorderCompatible() && !getTracks(mediaStream, 'video').length) {\r\n mediaType.gif = false;\r\n }\r\n\r\n if (!mediaType.audio && !mediaType.video && !mediaType.gif) {\r\n throw 'MediaStream must have either audio or video tracks.';\r\n }\r\n\r\n if (!!mediaType.audio) {\r\n recorderType = null;\r\n if (typeof mediaType.audio === 'function') {\r\n recorderType = mediaType.audio;\r\n }\r\n\r\n this.audioRecorder = new RecordRTC(mediaStream, {\r\n type: 'audio',\r\n bufferSize: this.bufferSize,\r\n sampleRate: this.sampleRate,\r\n numberOfAudioChannels: this.numberOfAudioChannels || 2,\r\n disableLogs: this.disableLogs,\r\n recorderType: recorderType,\r\n mimeType: mimeType.audio,\r\n timeSlice: this.timeSlice,\r\n onTimeStamp: this.onTimeStamp\r\n });\r\n\r\n if (!mediaType.video) {\r\n this.audioRecorder.startRecording();\r\n }\r\n }\r\n\r\n if (!!mediaType.video) {\r\n recorderType = null;\r\n if (typeof mediaType.video === 'function') {\r\n recorderType = mediaType.video;\r\n }\r\n\r\n var newStream = mediaStream;\r\n\r\n if (isMediaRecorderCompatible() && !!mediaType.audio && typeof mediaType.audio === 'function') {\r\n var videoTrack = getTracks(mediaStream, 'video')[0];\r\n\r\n if (isFirefox) {\r\n newStream = new MediaStream();\r\n newStream.addTrack(videoTrack);\r\n\r\n if (recorderType && recorderType === WhammyRecorder) {\r\n // Firefox does NOT supports webp-encoding yet\r\n // But Firefox do supports WebAssemblyRecorder\r\n recorderType = MediaStreamRecorder;\r\n }\r\n } else {\r\n newStream = new MediaStream();\r\n newStream.addTrack(videoTrack);\r\n }\r\n }\r\n\r\n this.videoRecorder = new RecordRTC(newStream, {\r\n type: 'video',\r\n video: this.video,\r\n canvas: this.canvas,\r\n frameInterval: this.frameInterval || 10,\r\n disableLogs: this.disableLogs,\r\n recorderType: recorderType,\r\n mimeType: mimeType.video,\r\n timeSlice: this.timeSlice,\r\n onTimeStamp: this.onTimeStamp,\r\n workerPath: this.workerPath,\r\n webAssemblyPath: this.webAssemblyPath,\r\n frameRate: this.frameRate, // used by WebAssemblyRecorder; values: usually 30; accepts any.\r\n bitrate: this.bitrate // used by WebAssemblyRecorder; values: 0 to 1000+\r\n });\r\n\r\n if (!mediaType.audio) {\r\n this.videoRecorder.startRecording();\r\n }\r\n }\r\n\r\n if (!!mediaType.audio && !!mediaType.video) {\r\n var self = this;\r\n\r\n var isSingleRecorder = isMediaRecorderCompatible() === true;\r\n\r\n if (mediaType.audio instanceof StereoAudioRecorder && !!mediaType.video) {\r\n isSingleRecorder = false;\r\n } else if (mediaType.audio !== true && mediaType.video !== true && mediaType.audio !== mediaType.video) {\r\n isSingleRecorder = false;\r\n }\r\n\r\n if (isSingleRecorder === true) {\r\n self.audioRecorder = null;\r\n self.videoRecorder.startRecording();\r\n } else {\r\n self.videoRecorder.initRecorder(function() {\r\n self.audioRecorder.initRecorder(function() {\r\n // Both recorders are ready to record things accurately\r\n self.videoRecorder.startRecording();\r\n self.audioRecorder.startRecording();\r\n });\r\n });\r\n }\r\n }\r\n\r\n if (!!mediaType.gif) {\r\n recorderType = null;\r\n if (typeof mediaType.gif === 'function') {\r\n recorderType = mediaType.gif;\r\n }\r\n this.gifRecorder = new RecordRTC(mediaStream, {\r\n type: 'gif',\r\n frameRate: this.frameRate || 200,\r\n quality: this.quality || 10,\r\n disableLogs: this.disableLogs,\r\n recorderType: recorderType,\r\n mimeType: mimeType.gif\r\n });\r\n this.gifRecorder.startRecording();\r\n }\r\n };\r\n\r\n /**\r\n * This method stops recording.\r\n * @param {function} callback - Callback function is invoked when all encoders finished their jobs.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.stopRecording(function(recording){\r\n * var audioBlob = recording.audio;\r\n * var videoBlob = recording.video;\r\n * var gifBlob = recording.gif;\r\n * });\r\n */\r\n this.stopRecording = function(callback) {\r\n callback = callback || function() {};\r\n\r\n if (this.audioRecorder) {\r\n this.audioRecorder.stopRecording(function(blobURL) {\r\n callback(blobURL, 'audio');\r\n });\r\n }\r\n\r\n if (this.videoRecorder) {\r\n this.videoRecorder.stopRecording(function(blobURL) {\r\n callback(blobURL, 'video');\r\n });\r\n }\r\n\r\n if (this.gifRecorder) {\r\n this.gifRecorder.stopRecording(function(blobURL) {\r\n callback(blobURL, 'gif');\r\n });\r\n }\r\n };\r\n\r\n /**\r\n * This method pauses recording.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.pauseRecording();\r\n */\r\n this.pauseRecording = function() {\r\n if (this.audioRecorder) {\r\n this.audioRecorder.pauseRecording();\r\n }\r\n\r\n if (this.videoRecorder) {\r\n this.videoRecorder.pauseRecording();\r\n }\r\n\r\n if (this.gifRecorder) {\r\n this.gifRecorder.pauseRecording();\r\n }\r\n };\r\n\r\n /**\r\n * This method resumes recording.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.resumeRecording();\r\n */\r\n this.resumeRecording = function() {\r\n if (this.audioRecorder) {\r\n this.audioRecorder.resumeRecording();\r\n }\r\n\r\n if (this.videoRecorder) {\r\n this.videoRecorder.resumeRecording();\r\n }\r\n\r\n if (this.gifRecorder) {\r\n this.gifRecorder.resumeRecording();\r\n }\r\n };\r\n\r\n /**\r\n * This method can be used to manually get all recorded blobs.\r\n * @param {function} callback - All recorded blobs are passed back to the \"callback\" function.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.getBlob(function(recording){\r\n * var audioBlob = recording.audio;\r\n * var videoBlob = recording.video;\r\n * var gifBlob = recording.gif;\r\n * });\r\n * // or\r\n * var audioBlob = recorder.getBlob().audio;\r\n * var videoBlob = recorder.getBlob().video;\r\n */\r\n this.getBlob = function(callback) {\r\n var output = {};\r\n\r\n if (this.audioRecorder) {\r\n output.audio = this.audioRecorder.getBlob();\r\n }\r\n\r\n if (this.videoRecorder) {\r\n output.video = this.videoRecorder.getBlob();\r\n }\r\n\r\n if (this.gifRecorder) {\r\n output.gif = this.gifRecorder.getBlob();\r\n }\r\n\r\n if (callback) {\r\n callback(output);\r\n }\r\n\r\n return output;\r\n };\r\n\r\n /**\r\n * Destroy all recorder instances.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.destroy();\r\n */\r\n this.destroy = function() {\r\n if (this.audioRecorder) {\r\n this.audioRecorder.destroy();\r\n this.audioRecorder = null;\r\n }\r\n\r\n if (this.videoRecorder) {\r\n this.videoRecorder.destroy();\r\n this.videoRecorder = null;\r\n }\r\n\r\n if (this.gifRecorder) {\r\n this.gifRecorder.destroy();\r\n this.gifRecorder = null;\r\n }\r\n };\r\n\r\n /**\r\n * This method can be used to manually get all recorded blobs' DataURLs.\r\n * @param {function} callback - All recorded blobs' DataURLs are passed back to the \"callback\" function.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.getDataURL(function(recording){\r\n * var audioDataURL = recording.audio;\r\n * var videoDataURL = recording.video;\r\n * var gifDataURL = recording.gif;\r\n * });\r\n */\r\n this.getDataURL = function(callback) {\r\n this.getBlob(function(blob) {\r\n if (blob.audio && blob.video) {\r\n getDataURL(blob.audio, function(_audioDataURL) {\r\n getDataURL(blob.video, function(_videoDataURL) {\r\n callback({\r\n audio: _audioDataURL,\r\n video: _videoDataURL\r\n });\r\n });\r\n });\r\n } else if (blob.audio) {\r\n getDataURL(blob.audio, function(_audioDataURL) {\r\n callback({\r\n audio: _audioDataURL\r\n });\r\n });\r\n } else if (blob.video) {\r\n getDataURL(blob.video, function(_videoDataURL) {\r\n callback({\r\n video: _videoDataURL\r\n });\r\n });\r\n }\r\n });\r\n\r\n function getDataURL(blob, callback00) {\r\n if (typeof Worker !== 'undefined') {\r\n var webWorker = processInWebWorker(function readFile(_blob) {\r\n postMessage(new FileReaderSync().readAsDataURL(_blob));\r\n });\r\n\r\n webWorker.onmessage = function(event) {\r\n callback00(event.data);\r\n };\r\n\r\n webWorker.postMessage(blob);\r\n } else {\r\n var reader = new FileReader();\r\n reader.readAsDataURL(blob);\r\n reader.onload = function(event) {\r\n callback00(event.target.result);\r\n };\r\n }\r\n }\r\n\r\n function processInWebWorker(_function) {\r\n var blob = URL.createObjectURL(new Blob([_function.toString(),\r\n 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'\r\n ], {\r\n type: 'application/javascript'\r\n }));\r\n\r\n var worker = new Worker(blob);\r\n var url;\r\n if (typeof URL !== 'undefined') {\r\n url = URL;\r\n } else if (typeof webkitURL !== 'undefined') {\r\n url = webkitURL;\r\n } else {\r\n throw 'Neither URL nor webkitURL detected.';\r\n }\r\n url.revokeObjectURL(blob);\r\n return worker;\r\n }\r\n };\r\n\r\n /**\r\n * This method can be used to ask {@link MRecordRTC} to write all recorded blobs into IndexedDB storage.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.writeToDisk();\r\n */\r\n this.writeToDisk = function() {\r\n RecordRTC.writeToDisk({\r\n audio: this.audioRecorder,\r\n video: this.videoRecorder,\r\n gif: this.gifRecorder\r\n });\r\n };\r\n\r\n /**\r\n * This method can be used to invoke a save-as dialog for all recorded blobs.\r\n * @param {object} args - {audio: 'audio-name', video: 'video-name', gif: 'gif-name'}\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * recorder.save({\r\n * audio: 'audio-file-name',\r\n * video: 'video-file-name',\r\n * gif : 'gif-file-name'\r\n * });\r\n */\r\n this.save = function(args) {\r\n args = args || {\r\n audio: true,\r\n video: true,\r\n gif: true\r\n };\r\n\r\n if (!!args.audio && this.audioRecorder) {\r\n this.audioRecorder.save(typeof args.audio === 'string' ? args.audio : '');\r\n }\r\n\r\n if (!!args.video && this.videoRecorder) {\r\n this.videoRecorder.save(typeof args.video === 'string' ? args.video : '');\r\n }\r\n if (!!args.gif && this.gifRecorder) {\r\n this.gifRecorder.save(typeof args.gif === 'string' ? args.gif : '');\r\n }\r\n };\r\n}\r\n\r\n/**\r\n * This method can be used to get all recorded blobs from IndexedDB storage.\r\n * @param {string} type - 'all' or 'audio' or 'video' or 'gif'\r\n * @param {function} callback - Callback function to get all stored blobs.\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * MRecordRTC.getFromDisk('all', function(dataURL, type){\r\n * if(type === 'audio') { }\r\n * if(type === 'video') { }\r\n * if(type === 'gif') { }\r\n * });\r\n */\r\nMRecordRTC.getFromDisk = RecordRTC.getFromDisk;\r\n\r\n/**\r\n * This method can be used to store recorded blobs into IndexedDB storage.\r\n * @param {object} options - {audio: Blob, video: Blob, gif: Blob}\r\n * @method\r\n * @memberof MRecordRTC\r\n * @example\r\n * MRecordRTC.writeToDisk({\r\n * audio: audioBlob,\r\n * video: videoBlob,\r\n * gif : gifBlob\r\n * });\r\n */\r\nMRecordRTC.writeToDisk = RecordRTC.writeToDisk;\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.MRecordRTC = MRecordRTC;\r\n}\n\r\nvar browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';\r\n\r\n(function(that) {\r\n if (!that) {\r\n return;\r\n }\r\n\r\n if (typeof window !== 'undefined') {\r\n return;\r\n }\r\n\r\n if (typeof global === 'undefined') {\r\n return;\r\n }\r\n\r\n global.navigator = {\r\n userAgent: browserFakeUserAgent,\r\n getUserMedia: function() {}\r\n };\r\n\r\n if (!global.console) {\r\n global.console = {};\r\n }\r\n\r\n if (typeof global.console.log === 'undefined' || typeof global.console.error === 'undefined') {\r\n global.console.error = global.console.log = global.console.log || function() {\r\n console.log(arguments);\r\n };\r\n }\r\n\r\n if (typeof document === 'undefined') {\r\n /*global document:true */\r\n that.document = {\r\n documentElement: {\r\n appendChild: function() {\r\n return '';\r\n }\r\n }\r\n };\r\n\r\n document.createElement = document.captureStream = document.mozCaptureStream = function() {\r\n var obj = {\r\n getContext: function() {\r\n return obj;\r\n },\r\n play: function() {},\r\n pause: function() {},\r\n drawImage: function() {},\r\n toDataURL: function() {\r\n return '';\r\n },\r\n style: {}\r\n };\r\n return obj;\r\n };\r\n\r\n that.HTMLVideoElement = function() {};\r\n }\r\n\r\n if (typeof location === 'undefined') {\r\n /*global location:true */\r\n that.location = {\r\n protocol: 'file:',\r\n href: '',\r\n hash: ''\r\n };\r\n }\r\n\r\n if (typeof screen === 'undefined') {\r\n /*global screen:true */\r\n that.screen = {\r\n width: 0,\r\n height: 0\r\n };\r\n }\r\n\r\n if (typeof URL === 'undefined') {\r\n /*global screen:true */\r\n that.URL = {\r\n createObjectURL: function() {\r\n return '';\r\n },\r\n revokeObjectURL: function() {\r\n return '';\r\n }\r\n };\r\n }\r\n\r\n /*global window:true */\r\n that.window = global;\r\n})(typeof global !== 'undefined' ? global : null);\n\r\n// _____________________________\r\n// Cross-Browser-Declarations.js\r\n\r\n// animation-frame used in WebM recording\r\n\r\n/*jshint -W079 */\r\nvar requestAnimationFrame = window.requestAnimationFrame;\r\nif (typeof requestAnimationFrame === 'undefined') {\r\n if (typeof webkitRequestAnimationFrame !== 'undefined') {\r\n /*global requestAnimationFrame:true */\r\n requestAnimationFrame = webkitRequestAnimationFrame;\r\n } else if (typeof mozRequestAnimationFrame !== 'undefined') {\r\n /*global requestAnimationFrame:true */\r\n requestAnimationFrame = mozRequestAnimationFrame;\r\n } else if (typeof msRequestAnimationFrame !== 'undefined') {\r\n /*global requestAnimationFrame:true */\r\n requestAnimationFrame = msRequestAnimationFrame;\r\n } else if (typeof requestAnimationFrame === 'undefined') {\r\n // via: https://gist.github.com/paulirish/1579671\r\n var lastTime = 0;\r\n\r\n /*global requestAnimationFrame:true */\r\n requestAnimationFrame = function(callback, element) {\r\n var currTime = new Date().getTime();\r\n var timeToCall = Math.max(0, 16 - (currTime - lastTime));\r\n var id = setTimeout(function() {\r\n callback(currTime + timeToCall);\r\n }, timeToCall);\r\n lastTime = currTime + timeToCall;\r\n return id;\r\n };\r\n }\r\n}\r\n\r\n/*jshint -W079 */\r\nvar cancelAnimationFrame = window.cancelAnimationFrame;\r\nif (typeof cancelAnimationFrame === 'undefined') {\r\n if (typeof webkitCancelAnimationFrame !== 'undefined') {\r\n /*global cancelAnimationFrame:true */\r\n cancelAnimationFrame = webkitCancelAnimationFrame;\r\n } else if (typeof mozCancelAnimationFrame !== 'undefined') {\r\n /*global cancelAnimationFrame:true */\r\n cancelAnimationFrame = mozCancelAnimationFrame;\r\n } else if (typeof msCancelAnimationFrame !== 'undefined') {\r\n /*global cancelAnimationFrame:true */\r\n cancelAnimationFrame = msCancelAnimationFrame;\r\n } else if (typeof cancelAnimationFrame === 'undefined') {\r\n /*global cancelAnimationFrame:true */\r\n cancelAnimationFrame = function(id) {\r\n clearTimeout(id);\r\n };\r\n }\r\n}\r\n\r\n// WebAudio API representer\r\nvar AudioContext = window.AudioContext;\r\n\r\nif (typeof AudioContext === 'undefined') {\r\n if (typeof webkitAudioContext !== 'undefined') {\r\n /*global AudioContext:true */\r\n AudioContext = webkitAudioContext;\r\n }\r\n\r\n if (typeof mozAudioContext !== 'undefined') {\r\n /*global AudioContext:true */\r\n AudioContext = mozAudioContext;\r\n }\r\n}\r\n\r\n/*jshint -W079 */\r\nvar URL = window.URL;\r\n\r\nif (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {\r\n /*global URL:true */\r\n URL = webkitURL;\r\n}\r\n\r\nif (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?\r\n if (typeof navigator.webkitGetUserMedia !== 'undefined') {\r\n navigator.getUserMedia = navigator.webkitGetUserMedia;\r\n }\r\n\r\n if (typeof navigator.mozGetUserMedia !== 'undefined') {\r\n navigator.getUserMedia = navigator.mozGetUserMedia;\r\n }\r\n}\r\n\r\nvar isEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob);\r\nvar isOpera = !!window.opera || navigator.userAgent.indexOf('OPR/') !== -1;\r\nvar isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1 && ('netscape' in window) && / rv:/.test(navigator.userAgent);\r\nvar isChrome = (!isOpera && !isEdge && !!navigator.webkitGetUserMedia) || isElectron() || navigator.userAgent.toLowerCase().indexOf('chrome/') !== -1;\r\n\r\nvar isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);\r\n\r\nif (isSafari && !isChrome && navigator.userAgent.indexOf('CriOS') !== -1) {\r\n isSafari = false;\r\n isChrome = true;\r\n}\r\n\r\nvar MediaStream = window.MediaStream;\r\n\r\nif (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {\r\n MediaStream = webkitMediaStream;\r\n}\r\n\r\n/*global MediaStream:true */\r\nif (typeof MediaStream !== 'undefined') {\r\n // override \"stop\" method for all browsers\r\n if (typeof MediaStream.prototype.stop === 'undefined') {\r\n MediaStream.prototype.stop = function() {\r\n this.getTracks().forEach(function(track) {\r\n track.stop();\r\n });\r\n };\r\n }\r\n}\r\n\r\n// below function via: http://goo.gl/B3ae8c\r\n/**\r\n * Return human-readable file size.\r\n * @param {number} bytes - Pass bytes and get formatted string.\r\n * @returns {string} - formatted string\r\n * @example\r\n * bytesToSize(1024*1024*5) === '5 GB'\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\nfunction bytesToSize(bytes) {\r\n var k = 1000;\r\n var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];\r\n if (bytes === 0) {\r\n return '0 Bytes';\r\n }\r\n var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);\r\n return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];\r\n}\r\n\r\n/**\r\n * @param {Blob} file - File or Blob object. This parameter is required.\r\n * @param {string} fileName - Optional file name e.g. \"Recorded-Video.webm\"\r\n * @example\r\n * invokeSaveAsDialog(blob or file, [optional] fileName);\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\nfunction invokeSaveAsDialog(file, fileName) {\r\n if (!file) {\r\n throw 'Blob object is required.';\r\n }\r\n\r\n if (!file.type) {\r\n try {\r\n file.type = 'video/webm';\r\n } catch (e) {}\r\n }\r\n\r\n var fileExtension = (file.type || 'video/webm').split('/')[1];\r\n if (fileExtension.indexOf(';') !== -1) {\r\n // extended mimetype, e.g. 'video/webm;codecs=vp8,opus'\r\n fileExtension = fileExtension.split(';')[0];\r\n }\r\n if (fileName && fileName.indexOf('.') !== -1) {\r\n var splitted = fileName.split('.');\r\n fileName = splitted[0];\r\n fileExtension = splitted[1];\r\n }\r\n\r\n var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;\r\n\r\n if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {\r\n return navigator.msSaveOrOpenBlob(file, fileFullName);\r\n } else if (typeof navigator.msSaveBlob !== 'undefined') {\r\n return navigator.msSaveBlob(file, fileFullName);\r\n }\r\n\r\n var hyperlink = document.createElement('a');\r\n hyperlink.href = URL.createObjectURL(file);\r\n hyperlink.download = fileFullName;\r\n\r\n hyperlink.style = 'display:none;opacity:0;color:transparent;';\r\n (document.body || document.documentElement).appendChild(hyperlink);\r\n\r\n if (typeof hyperlink.click === 'function') {\r\n hyperlink.click();\r\n } else {\r\n hyperlink.target = '_blank';\r\n hyperlink.dispatchEvent(new MouseEvent('click', {\r\n view: window,\r\n bubbles: true,\r\n cancelable: true\r\n }));\r\n }\r\n\r\n URL.revokeObjectURL(hyperlink.href);\r\n}\r\n\r\n/**\r\n * from: https://github.com/cheton/is-electron/blob/master/index.js\r\n **/\r\nfunction isElectron() {\r\n // Renderer process\r\n if (typeof window !== 'undefined' && typeof window.process === 'object' && window.process.type === 'renderer') {\r\n return true;\r\n }\r\n\r\n // Main process\r\n if (typeof process !== 'undefined' && typeof process.versions === 'object' && !!process.versions.electron) {\r\n return true;\r\n }\r\n\r\n // Detect the user agent when the `nodeIntegration` option is set to true\r\n if (typeof navigator === 'object' && typeof navigator.userAgent === 'string' && navigator.userAgent.indexOf('Electron') >= 0) {\r\n return true;\r\n }\r\n\r\n return false;\r\n}\r\n\r\nfunction getTracks(stream, kind) {\r\n if (!stream || !stream.getTracks) {\r\n return [];\r\n }\r\n\r\n return stream.getTracks().filter(function(t) {\r\n return t.kind === (kind || 'audio');\r\n });\r\n}\r\n\r\nfunction setSrcObject(stream, element) {\r\n if ('srcObject' in element) {\r\n element.srcObject = stream;\r\n } else if ('mozSrcObject' in element) {\r\n element.mozSrcObject = stream;\r\n } else {\r\n element.srcObject = stream;\r\n }\r\n}\r\n\r\n/**\r\n * @param {Blob} file - File or Blob object.\r\n * @param {function} callback - Callback function.\r\n * @example\r\n * getSeekableBlob(blob or file, callback);\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\nfunction getSeekableBlob(inputBlob, callback) {\r\n // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml\r\n if (typeof EBML === 'undefined') {\r\n throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');\r\n }\r\n\r\n var reader = new EBML.Reader();\r\n var decoder = new EBML.Decoder();\r\n var tools = EBML.tools;\r\n\r\n var fileReader = new FileReader();\r\n fileReader.onload = function(e) {\r\n var ebmlElms = decoder.decode(this.result);\r\n ebmlElms.forEach(function(element) {\r\n reader.read(element);\r\n });\r\n reader.stop();\r\n var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);\r\n var body = this.result.slice(reader.metadataSize);\r\n var newBlob = new Blob([refinedMetadataBuf, body], {\r\n type: 'video/webm'\r\n });\r\n\r\n callback(newBlob);\r\n };\r\n fileReader.readAsArrayBuffer(inputBlob);\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.invokeSaveAsDialog = invokeSaveAsDialog;\r\n RecordRTC.getTracks = getTracks;\r\n RecordRTC.getSeekableBlob = getSeekableBlob;\r\n RecordRTC.bytesToSize = bytesToSize;\r\n RecordRTC.isElectron = isElectron;\r\n}\r\n\r\n// __________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129\r\n// Storage.js\r\n\r\n/**\r\n * Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. \"new AudioContext\".\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @example\r\n * Storage.AudioContext === webkitAudioContext\r\n * @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\n\r\nvar Storage = {};\r\n\r\nif (typeof AudioContext !== 'undefined') {\r\n Storage.AudioContext = AudioContext;\r\n} else if (typeof webkitAudioContext !== 'undefined') {\r\n Storage.AudioContext = webkitAudioContext;\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.Storage = Storage;\r\n}\n\r\nfunction isMediaRecorderCompatible() {\r\n if (isFirefox || isSafari || isEdge) {\r\n return true;\r\n }\r\n\r\n var nVer = navigator.appVersion;\r\n var nAgt = navigator.userAgent;\r\n var fullVersion = '' + parseFloat(navigator.appVersion);\r\n var majorVersion = parseInt(navigator.appVersion, 10);\r\n var nameOffset, verOffset, ix;\r\n\r\n if (isChrome || isOpera) {\r\n verOffset = nAgt.indexOf('Chrome');\r\n fullVersion = nAgt.substring(verOffset + 7);\r\n }\r\n\r\n // trim the fullVersion string at semicolon/space if present\r\n if ((ix = fullVersion.indexOf(';')) !== -1) {\r\n fullVersion = fullVersion.substring(0, ix);\r\n }\r\n\r\n if ((ix = fullVersion.indexOf(' ')) !== -1) {\r\n fullVersion = fullVersion.substring(0, ix);\r\n }\r\n\r\n majorVersion = parseInt('' + fullVersion, 10);\r\n\r\n if (isNaN(majorVersion)) {\r\n fullVersion = '' + parseFloat(navigator.appVersion);\r\n majorVersion = parseInt(navigator.appVersion, 10);\r\n }\r\n\r\n return majorVersion >= 49;\r\n}\n\r\n// ______________________\r\n// MediaStreamRecorder.js\r\n\r\n/**\r\n * MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox.\r\n * @summary Runs top over {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://github.com/muaz-khan|Muaz Khan}\r\n * @typedef MediaStreamRecorder\r\n * @class\r\n * @example\r\n * var config = {\r\n * mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis\r\n * audioBitsPerSecond : 256 * 8 * 1024,\r\n * videoBitsPerSecond : 256 * 8 * 1024,\r\n * bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two\r\n * checkForInactiveTracks: true,\r\n * timeSlice: 1000, // concatenate intervals based blobs\r\n * ondataavailable: function() {} // get intervals based blobs\r\n * }\r\n * var recorder = new MediaStreamRecorder(mediaStream, config);\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n *\r\n * // or\r\n * var blob = recorder.blob;\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {disableLogs:true, initCallback: function, mimeType: \"video/webm\", timeSlice: 1000}\r\n * @throws Will throw an error if first argument \"MediaStream\" is missing. Also throws error if \"MediaRecorder API\" are not supported by the browser.\r\n */\r\n\r\nfunction MediaStreamRecorder(mediaStream, config) {\r\n var self = this;\r\n\r\n if (typeof mediaStream === 'undefined') {\r\n throw 'First argument \"MediaStream\" is required.';\r\n }\r\n\r\n if (typeof MediaRecorder === 'undefined') {\r\n throw 'Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.';\r\n }\r\n\r\n config = config || {\r\n // bitsPerSecond: 256 * 8 * 1024,\r\n mimeType: 'video/webm'\r\n };\r\n\r\n if (config.type === 'audio') {\r\n if (getTracks(mediaStream, 'video').length && getTracks(mediaStream, 'audio').length) {\r\n var stream;\r\n if (!!navigator.mozGetUserMedia) {\r\n stream = new MediaStream();\r\n stream.addTrack(getTracks(mediaStream, 'audio')[0]);\r\n } else {\r\n // webkitMediaStream\r\n stream = new MediaStream(getTracks(mediaStream, 'audio'));\r\n }\r\n mediaStream = stream;\r\n }\r\n\r\n if (!config.mimeType || config.mimeType.toString().toLowerCase().indexOf('audio') === -1) {\r\n config.mimeType = isChrome ? 'audio/webm' : 'audio/ogg';\r\n }\r\n\r\n if (config.mimeType && config.mimeType.toString().toLowerCase() !== 'audio/ogg' && !!navigator.mozGetUserMedia) {\r\n // forcing better codecs on Firefox (via #166)\r\n config.mimeType = 'audio/ogg';\r\n }\r\n }\r\n\r\n var arrayOfBlobs = [];\r\n\r\n /**\r\n * This method returns array of blobs. Use only with \"timeSlice\". Its useful to preview recording anytime, without using the \"stop\" method.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * var arrayOfBlobs = recorder.getArrayOfBlobs();\r\n * @returns {Array} Returns array of recorded blobs.\r\n */\r\n this.getArrayOfBlobs = function() {\r\n return arrayOfBlobs;\r\n };\r\n\r\n /**\r\n * This method records MediaStream.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n // set defaults\r\n self.blob = null;\r\n self.clearRecordedData();\r\n self.timestamps = [];\r\n allStates = [];\r\n arrayOfBlobs = [];\r\n\r\n var recorderHints = config;\r\n\r\n if (!config.disableLogs) {\r\n console.log('Passing following config over MediaRecorder API.', recorderHints);\r\n }\r\n\r\n if (mediaRecorder) {\r\n // mandatory to make sure Firefox doesn't fails to record streams 3-4 times without reloading the page.\r\n mediaRecorder = null;\r\n }\r\n\r\n if (isChrome && !isMediaRecorderCompatible()) {\r\n // to support video-only recording on stable\r\n recorderHints = 'video/vp8';\r\n }\r\n\r\n if (typeof MediaRecorder.isTypeSupported === 'function' && recorderHints.mimeType) {\r\n if (!MediaRecorder.isTypeSupported(recorderHints.mimeType)) {\r\n if (!config.disableLogs) {\r\n console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType);\r\n }\r\n\r\n recorderHints.mimeType = config.type === 'audio' ? 'audio/webm' : 'video/webm';\r\n }\r\n }\r\n\r\n // using MediaRecorder API here\r\n try {\r\n mediaRecorder = new MediaRecorder(mediaStream, recorderHints);\r\n\r\n // reset\r\n config.mimeType = recorderHints.mimeType;\r\n } catch (e) {\r\n // chrome-based fallback\r\n mediaRecorder = new MediaRecorder(mediaStream);\r\n }\r\n\r\n // old hack?\r\n if (recorderHints.mimeType && !MediaRecorder.isTypeSupported && 'canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(recorderHints.mimeType) === false) {\r\n if (!config.disableLogs) {\r\n console.warn('MediaRecorder API seems unable to record mimeType:', recorderHints.mimeType);\r\n }\r\n }\r\n\r\n // Dispatching OnDataAvailable Handler\r\n mediaRecorder.ondataavailable = function(e) {\r\n if (e.data) {\r\n allStates.push('ondataavailable: ' + bytesToSize(e.data.size));\r\n }\r\n\r\n if (typeof config.timeSlice === 'number') {\r\n if (e.data && e.data.size) {\r\n arrayOfBlobs.push(e.data);\r\n updateTimeStamp();\r\n\r\n if (typeof config.ondataavailable === 'function') {\r\n // intervals based blobs\r\n var blob = config.getNativeBlob ? e.data : new Blob([e.data], {\r\n type: getMimeType(recorderHints)\r\n });\r\n config.ondataavailable(blob);\r\n }\r\n }\r\n return;\r\n }\r\n\r\n if (!e.data || !e.data.size || e.data.size < 100 || self.blob) {\r\n // make sure that stopRecording always getting fired\r\n // even if there is invalid data\r\n if (self.recordingCallback) {\r\n self.recordingCallback(new Blob([], {\r\n type: getMimeType(recorderHints)\r\n }));\r\n self.recordingCallback = null;\r\n }\r\n return;\r\n }\r\n\r\n self.blob = config.getNativeBlob ? e.data : new Blob([e.data], {\r\n type: getMimeType(recorderHints)\r\n });\r\n\r\n if (self.recordingCallback) {\r\n self.recordingCallback(self.blob);\r\n self.recordingCallback = null;\r\n }\r\n };\r\n\r\n mediaRecorder.onstart = function() {\r\n allStates.push('started');\r\n };\r\n\r\n mediaRecorder.onpause = function() {\r\n allStates.push('paused');\r\n };\r\n\r\n mediaRecorder.onresume = function() {\r\n allStates.push('resumed');\r\n };\r\n\r\n mediaRecorder.onstop = function() {\r\n allStates.push('stopped');\r\n };\r\n\r\n mediaRecorder.onerror = function(error) {\r\n if (!error) {\r\n return;\r\n }\r\n\r\n if (!error.name) {\r\n error.name = 'UnknownError';\r\n }\r\n\r\n allStates.push('error: ' + error);\r\n\r\n if (!config.disableLogs) {\r\n // via: https://w3c.github.io/mediacapture-record/MediaRecorder.html#exception-summary\r\n if (error.name.toString().toLowerCase().indexOf('invalidstate') !== -1) {\r\n console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.', error);\r\n } else if (error.name.toString().toLowerCase().indexOf('notsupported') !== -1) {\r\n console.error('MIME type (', recorderHints.mimeType, ') is not supported.', error);\r\n } else if (error.name.toString().toLowerCase().indexOf('security') !== -1) {\r\n console.error('MediaRecorder security error', error);\r\n }\r\n\r\n // older code below\r\n else if (error.name === 'OutOfMemory') {\r\n console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.', error);\r\n } else if (error.name === 'IllegalStreamModification') {\r\n console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.', error);\r\n } else if (error.name === 'OtherRecordingError') {\r\n console.error('Used for an fatal error other than those listed above. User agents SHOULD provide as much additional information as possible in the message attribute.', error);\r\n } else if (error.name === 'GenericError') {\r\n console.error('The UA cannot provide the codec or recording option that has been requested.', error);\r\n } else {\r\n console.error('MediaRecorder Error', error);\r\n }\r\n }\r\n\r\n (function(looper) {\r\n if (!self.manuallyStopped && mediaRecorder && mediaRecorder.state === 'inactive') {\r\n delete config.timeslice;\r\n\r\n // 10 minutes, enough?\r\n mediaRecorder.start(10 * 60 * 1000);\r\n return;\r\n }\r\n\r\n setTimeout(looper, 1000);\r\n })();\r\n\r\n if (mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') {\r\n mediaRecorder.stop();\r\n }\r\n };\r\n\r\n if (typeof config.timeSlice === 'number') {\r\n updateTimeStamp();\r\n mediaRecorder.start(config.timeSlice);\r\n } else {\r\n // default is 60 minutes; enough?\r\n // use config => {timeSlice: 1000} otherwise\r\n\r\n mediaRecorder.start(3.6e+6);\r\n }\r\n\r\n if (config.initCallback) {\r\n config.initCallback(); // old code\r\n }\r\n };\r\n\r\n /**\r\n * @property {Array} timestamps - Array of time stamps\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * console.log(recorder.timestamps);\r\n */\r\n this.timestamps = [];\r\n\r\n function updateTimeStamp() {\r\n self.timestamps.push(new Date().getTime());\r\n\r\n if (typeof config.onTimeStamp === 'function') {\r\n config.onTimeStamp(self.timestamps[self.timestamps.length - 1], self.timestamps);\r\n }\r\n }\r\n\r\n function getMimeType(secondObject) {\r\n if (mediaRecorder && mediaRecorder.mimeType) {\r\n return mediaRecorder.mimeType;\r\n }\r\n\r\n return secondObject.mimeType || 'video/webm';\r\n }\r\n\r\n /**\r\n * This method stops recording MediaStream.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n callback = callback || function() {};\r\n\r\n self.manuallyStopped = true; // used inside the mediaRecorder.onerror\r\n\r\n if (!mediaRecorder) {\r\n return;\r\n }\r\n\r\n this.recordingCallback = callback;\r\n\r\n if (mediaRecorder.state === 'recording') {\r\n mediaRecorder.stop();\r\n }\r\n\r\n if (typeof config.timeSlice === 'number') {\r\n setTimeout(function() {\r\n self.blob = new Blob(arrayOfBlobs, {\r\n type: getMimeType(config)\r\n });\r\n\r\n self.recordingCallback(self.blob);\r\n }, 100);\r\n }\r\n };\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n if (!mediaRecorder) {\r\n return;\r\n }\r\n\r\n if (mediaRecorder.state === 'recording') {\r\n mediaRecorder.pause();\r\n }\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n if (!mediaRecorder) {\r\n return;\r\n }\r\n\r\n if (mediaRecorder.state === 'paused') {\r\n mediaRecorder.resume();\r\n }\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n if (mediaRecorder && mediaRecorder.state === 'recording') {\r\n self.stop(clearRecordedDataCB);\r\n }\r\n\r\n clearRecordedDataCB();\r\n };\r\n\r\n function clearRecordedDataCB() {\r\n arrayOfBlobs = [];\r\n mediaRecorder = null;\r\n self.timestamps = [];\r\n }\r\n\r\n // Reference to \"MediaRecorder\" object\r\n var mediaRecorder;\r\n\r\n /**\r\n * Access to native MediaRecorder API\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @instance\r\n * @example\r\n * var internal = recorder.getInternalRecorder();\r\n * internal.ondataavailable = function() {}; // override\r\n * internal.stream, internal.onpause, internal.onstop, etc.\r\n * @returns {Object} Returns internal recording object.\r\n */\r\n this.getInternalRecorder = function() {\r\n return mediaRecorder;\r\n };\r\n\r\n function isMediaStreamActive() {\r\n if ('active' in mediaStream) {\r\n if (!mediaStream.active) {\r\n return false;\r\n }\r\n } else if ('ended' in mediaStream) { // old hack\r\n if (mediaStream.ended) {\r\n return false;\r\n }\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * @property {Blob} blob - Recorded data as \"Blob\" object.\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * recorder.stop(function() {\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n this.blob = null;\r\n\r\n\r\n /**\r\n * Get MediaRecorder readonly state.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * var state = recorder.getState();\r\n * @returns {String} Returns recording state.\r\n */\r\n this.getState = function() {\r\n if (!mediaRecorder) {\r\n return 'inactive';\r\n }\r\n\r\n return mediaRecorder.state || 'inactive';\r\n };\r\n\r\n // list of all recording states\r\n var allStates = [];\r\n\r\n /**\r\n * Get MediaRecorder all recording states.\r\n * @method\r\n * @memberof MediaStreamRecorder\r\n * @example\r\n * var state = recorder.getAllStates();\r\n * @returns {Array} Returns all recording states\r\n */\r\n this.getAllStates = function() {\r\n return allStates;\r\n };\r\n\r\n // if any Track within the MediaStream is muted or not enabled at any time, \r\n // the browser will only record black frames \r\n // or silence since that is the content produced by the Track\r\n // so we need to stopRecording as soon as any single track ends.\r\n if (typeof config.checkForInactiveTracks === 'undefined') {\r\n config.checkForInactiveTracks = false; // disable to minimize CPU usage\r\n }\r\n\r\n var self = this;\r\n\r\n // this method checks if media stream is stopped\r\n // or if any track is ended.\r\n (function looper() {\r\n if (!mediaRecorder || config.checkForInactiveTracks === false) {\r\n return;\r\n }\r\n\r\n if (isMediaStreamActive() === false) {\r\n if (!config.disableLogs) {\r\n console.log('MediaStream seems stopped.');\r\n }\r\n self.stop();\r\n return;\r\n }\r\n\r\n setTimeout(looper, 1000); // check every second\r\n })();\r\n\r\n // for debugging\r\n this.name = 'MediaStreamRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.MediaStreamRecorder = MediaStreamRecorder;\r\n}\r\n\r\n// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js\r\n// https://github.com/mattdiamond/Recorderjs#license-mit\r\n// ______________________\r\n// StereoAudioRecorder.js\r\n\r\n/**\r\n * StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring \"stereo\" audio-recording in chrome.\r\n * @summary JavaScript standalone object for stereo audio recording.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef StereoAudioRecorder\r\n * @class\r\n * @example\r\n * var recorder = new StereoAudioRecorder(MediaStream, {\r\n * sampleRate: 44100,\r\n * bufferSize: 4096\r\n * });\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}\r\n */\r\n\r\nfunction StereoAudioRecorder(mediaStream, config) {\r\n if (!getTracks(mediaStream, 'audio').length) {\r\n throw 'Your stream has no audio tracks.';\r\n }\r\n\r\n config = config || {};\r\n\r\n var self = this;\r\n\r\n // variables\r\n var leftchannel = [];\r\n var rightchannel = [];\r\n var recording = false;\r\n var recordingLength = 0;\r\n var jsAudioNode;\r\n\r\n var numberOfAudioChannels = 2;\r\n\r\n /**\r\n * Set sample rates such as 8K or 16K. Reference: http://stackoverflow.com/a/28977136/552182\r\n * @property {number} desiredSampRate - Desired Bits per sample * 1000\r\n * @memberof StereoAudioRecorder\r\n * @instance\r\n * @example\r\n * var recorder = StereoAudioRecorder(mediaStream, {\r\n * desiredSampRate: 16 * 1000 // bits-per-sample * 1000\r\n * });\r\n */\r\n var desiredSampRate = config.desiredSampRate;\r\n\r\n // backward compatibility\r\n if (config.leftChannel === true) {\r\n numberOfAudioChannels = 1;\r\n }\r\n\r\n if (config.numberOfAudioChannels === 1) {\r\n numberOfAudioChannels = 1;\r\n }\r\n\r\n if (!numberOfAudioChannels || numberOfAudioChannels < 1) {\r\n numberOfAudioChannels = 2;\r\n }\r\n\r\n if (!config.disableLogs) {\r\n console.log('StereoAudioRecorder is set to record number of channels: ' + numberOfAudioChannels);\r\n }\r\n\r\n // if any Track within the MediaStream is muted or not enabled at any time, \r\n // the browser will only record black frames \r\n // or silence since that is the content produced by the Track\r\n // so we need to stopRecording as soon as any single track ends.\r\n if (typeof config.checkForInactiveTracks === 'undefined') {\r\n config.checkForInactiveTracks = true;\r\n }\r\n\r\n function isMediaStreamActive() {\r\n if (config.checkForInactiveTracks === false) {\r\n // always return \"true\"\r\n return true;\r\n }\r\n\r\n if ('active' in mediaStream) {\r\n if (!mediaStream.active) {\r\n return false;\r\n }\r\n } else if ('ended' in mediaStream) { // old hack\r\n if (mediaStream.ended) {\r\n return false;\r\n }\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * This method records MediaStream.\r\n * @method\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n if (isMediaStreamActive() === false) {\r\n throw 'Please make sure MediaStream is active.';\r\n }\r\n\r\n resetVariables();\r\n\r\n isAudioProcessStarted = isPaused = false;\r\n recording = true;\r\n\r\n if (typeof config.timeSlice !== 'undefined') {\r\n looper();\r\n }\r\n };\r\n\r\n function mergeLeftRightBuffers(config, callback) {\r\n function mergeAudioBuffers(config, cb) {\r\n var numberOfAudioChannels = config.numberOfAudioChannels;\r\n\r\n // todo: \"slice(0)\" --- is it causes loop? Should be removed?\r\n var leftBuffers = config.leftBuffers.slice(0);\r\n var rightBuffers = config.rightBuffers.slice(0);\r\n var sampleRate = config.sampleRate;\r\n var internalInterleavedLength = config.internalInterleavedLength;\r\n var desiredSampRate = config.desiredSampRate;\r\n\r\n if (numberOfAudioChannels === 2) {\r\n leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);\r\n rightBuffers = mergeBuffers(rightBuffers, internalInterleavedLength);\r\n\r\n if (desiredSampRate) {\r\n leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);\r\n rightBuffers = interpolateArray(rightBuffers, desiredSampRate, sampleRate);\r\n }\r\n }\r\n\r\n if (numberOfAudioChannels === 1) {\r\n leftBuffers = mergeBuffers(leftBuffers, internalInterleavedLength);\r\n\r\n if (desiredSampRate) {\r\n leftBuffers = interpolateArray(leftBuffers, desiredSampRate, sampleRate);\r\n }\r\n }\r\n\r\n // set sample rate as desired sample rate\r\n if (desiredSampRate) {\r\n sampleRate = desiredSampRate;\r\n }\r\n\r\n // for changing the sampling rate, reference:\r\n // http://stackoverflow.com/a/28977136/552182\r\n function interpolateArray(data, newSampleRate, oldSampleRate) {\r\n var fitCount = Math.round(data.length * (newSampleRate / oldSampleRate));\r\n var newData = [];\r\n var springFactor = Number((data.length - 1) / (fitCount - 1));\r\n newData[0] = data[0];\r\n for (var i = 1; i < fitCount - 1; i++) {\r\n var tmp = i * springFactor;\r\n var before = Number(Math.floor(tmp)).toFixed();\r\n var after = Number(Math.ceil(tmp)).toFixed();\r\n var atPoint = tmp - before;\r\n newData[i] = linearInterpolate(data[before], data[after], atPoint);\r\n }\r\n newData[fitCount - 1] = data[data.length - 1];\r\n return newData;\r\n }\r\n\r\n function linearInterpolate(before, after, atPoint) {\r\n return before + (after - before) * atPoint;\r\n }\r\n\r\n function mergeBuffers(channelBuffer, rLength) {\r\n var result = new Float64Array(rLength);\r\n var offset = 0;\r\n var lng = channelBuffer.length;\r\n\r\n for (var i = 0; i < lng; i++) {\r\n var buffer = channelBuffer[i];\r\n result.set(buffer, offset);\r\n offset += buffer.length;\r\n }\r\n\r\n return result;\r\n }\r\n\r\n function interleave(leftChannel, rightChannel) {\r\n var length = leftChannel.length + rightChannel.length;\r\n\r\n var result = new Float64Array(length);\r\n\r\n var inputIndex = 0;\r\n\r\n for (var index = 0; index < length;) {\r\n result[index++] = leftChannel[inputIndex];\r\n result[index++] = rightChannel[inputIndex];\r\n inputIndex++;\r\n }\r\n return result;\r\n }\r\n\r\n function writeUTFBytes(view, offset, string) {\r\n var lng = string.length;\r\n for (var i = 0; i < lng; i++) {\r\n view.setUint8(offset + i, string.charCodeAt(i));\r\n }\r\n }\r\n\r\n // interleave both channels together\r\n var interleaved;\r\n\r\n if (numberOfAudioChannels === 2) {\r\n interleaved = interleave(leftBuffers, rightBuffers);\r\n }\r\n\r\n if (numberOfAudioChannels === 1) {\r\n interleaved = leftBuffers;\r\n }\r\n\r\n var interleavedLength = interleaved.length;\r\n\r\n // create wav file\r\n var resultingBufferLength = 44 + interleavedLength * 2;\r\n\r\n var buffer = new ArrayBuffer(resultingBufferLength);\r\n\r\n var view = new DataView(buffer);\r\n\r\n // RIFF chunk descriptor/identifier \r\n writeUTFBytes(view, 0, 'RIFF');\r\n\r\n // RIFF chunk length\r\n // changed \"44\" to \"36\" via #401\r\n view.setUint32(4, 36 + interleavedLength * 2, true);\r\n\r\n // RIFF type \r\n writeUTFBytes(view, 8, 'WAVE');\r\n\r\n // format chunk identifier \r\n // FMT sub-chunk\r\n writeUTFBytes(view, 12, 'fmt ');\r\n\r\n // format chunk length \r\n view.setUint32(16, 16, true);\r\n\r\n // sample format (raw)\r\n view.setUint16(20, 1, true);\r\n\r\n // stereo (2 channels)\r\n view.setUint16(22, numberOfAudioChannels, true);\r\n\r\n // sample rate \r\n view.setUint32(24, sampleRate, true);\r\n\r\n // byte rate (sample rate * block align)\r\n view.setUint32(28, sampleRate * numberOfAudioChannels * 2, true);\r\n\r\n // block align (channel count * bytes per sample) \r\n view.setUint16(32, numberOfAudioChannels * 2, true);\r\n\r\n // bits per sample \r\n view.setUint16(34, 16, true);\r\n\r\n // data sub-chunk\r\n // data chunk identifier \r\n writeUTFBytes(view, 36, 'data');\r\n\r\n // data chunk length \r\n view.setUint32(40, interleavedLength * 2, true);\r\n\r\n // write the PCM samples\r\n var lng = interleavedLength;\r\n var index = 44;\r\n var volume = 1;\r\n for (var i = 0; i < lng; i++) {\r\n view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);\r\n index += 2;\r\n }\r\n\r\n if (cb) {\r\n return cb({\r\n buffer: buffer,\r\n view: view\r\n });\r\n }\r\n\r\n postMessage({\r\n buffer: buffer,\r\n view: view\r\n });\r\n }\r\n\r\n if (config.noWorker) {\r\n mergeAudioBuffers(config, function(data) {\r\n callback(data.buffer, data.view);\r\n });\r\n return;\r\n }\r\n\r\n\r\n var webWorker = processInWebWorker(mergeAudioBuffers);\r\n\r\n webWorker.onmessage = function(event) {\r\n callback(event.data.buffer, event.data.view);\r\n\r\n // release memory\r\n URL.revokeObjectURL(webWorker.workerURL);\r\n\r\n // kill webworker (or Chrome will kill your page after ~25 calls)\r\n webWorker.terminate();\r\n };\r\n\r\n webWorker.postMessage(config);\r\n }\r\n\r\n function processInWebWorker(_function) {\r\n var workerURL = URL.createObjectURL(new Blob([_function.toString(),\r\n ';this.onmessage = function (eee) {' + _function.name + '(eee.data);}'\r\n ], {\r\n type: 'application/javascript'\r\n }));\r\n\r\n var worker = new Worker(workerURL);\r\n worker.workerURL = workerURL;\r\n return worker;\r\n }\r\n\r\n /**\r\n * This method stops recording MediaStream.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n callback = callback || function() {};\r\n\r\n // stop recording\r\n recording = false;\r\n\r\n mergeLeftRightBuffers({\r\n desiredSampRate: desiredSampRate,\r\n sampleRate: sampleRate,\r\n numberOfAudioChannels: numberOfAudioChannels,\r\n internalInterleavedLength: recordingLength,\r\n leftBuffers: leftchannel,\r\n rightBuffers: numberOfAudioChannels === 1 ? [] : rightchannel,\r\n noWorker: config.noWorker\r\n }, function(buffer, view) {\r\n /**\r\n * @property {Blob} blob - The recorded blob object.\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.stop(function(){\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n self.blob = new Blob([view], {\r\n type: 'audio/wav'\r\n });\r\n\r\n /**\r\n * @property {ArrayBuffer} buffer - The recorded buffer object.\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.stop(function(){\r\n * var buffer = recorder.buffer;\r\n * });\r\n */\r\n self.buffer = new ArrayBuffer(view.buffer.byteLength);\r\n\r\n /**\r\n * @property {DataView} view - The recorded data-view object.\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.stop(function(){\r\n * var view = recorder.view;\r\n * });\r\n */\r\n self.view = view;\r\n\r\n self.sampleRate = desiredSampRate || sampleRate;\r\n self.bufferSize = bufferSize;\r\n\r\n // recorded audio length\r\n self.length = recordingLength;\r\n\r\n isAudioProcessStarted = false;\r\n\r\n if (callback) {\r\n callback(self.blob);\r\n }\r\n });\r\n };\r\n\r\n if (typeof RecordRTC.Storage === 'undefined') {\r\n RecordRTC.Storage = {\r\n AudioContextConstructor: null,\r\n AudioContext: window.AudioContext || window.webkitAudioContext\r\n };\r\n }\r\n\r\n if (!RecordRTC.Storage.AudioContextConstructor || RecordRTC.Storage.AudioContextConstructor.state === 'closed') {\r\n RecordRTC.Storage.AudioContextConstructor = new RecordRTC.Storage.AudioContext();\r\n }\r\n\r\n var context = RecordRTC.Storage.AudioContextConstructor;\r\n\r\n // creates an audio node from the microphone incoming stream\r\n var audioInput = context.createMediaStreamSource(mediaStream);\r\n\r\n var legalBufferValues = [0, 256, 512, 1024, 2048, 4096, 8192, 16384];\r\n\r\n /**\r\n * From the spec: This value controls how frequently the audioprocess event is\r\n * dispatched and how many sample-frames need to be processed each call.\r\n * Lower values for buffer size will result in a lower (better) latency.\r\n * Higher values will be necessary to avoid audio breakup and glitches\r\n * The size of the buffer (in sample-frames) which needs to\r\n * be processed each time onprocessaudio is called.\r\n * Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).\r\n * @property {number} bufferSize - Buffer-size for how frequently the audioprocess event is dispatched.\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder = new StereoAudioRecorder(mediaStream, {\r\n * bufferSize: 4096\r\n * });\r\n */\r\n\r\n // \"0\" means, let chrome decide the most accurate buffer-size for current platform.\r\n var bufferSize = typeof config.bufferSize === 'undefined' ? 4096 : config.bufferSize;\r\n\r\n if (legalBufferValues.indexOf(bufferSize) === -1) {\r\n if (!config.disableLogs) {\r\n console.log('Legal values for buffer-size are ' + JSON.stringify(legalBufferValues, null, '\\t'));\r\n }\r\n }\r\n\r\n if (context.createJavaScriptNode) {\r\n jsAudioNode = context.createJavaScriptNode(bufferSize, numberOfAudioChannels, numberOfAudioChannels);\r\n } else if (context.createScriptProcessor) {\r\n jsAudioNode = context.createScriptProcessor(bufferSize, numberOfAudioChannels, numberOfAudioChannels);\r\n } else {\r\n throw 'WebAudio API has no support on this browser.';\r\n }\r\n\r\n // connect the stream to the script processor\r\n audioInput.connect(jsAudioNode);\r\n\r\n if (!config.bufferSize) {\r\n bufferSize = jsAudioNode.bufferSize; // device buffer-size\r\n }\r\n\r\n /**\r\n * The sample rate (in sample-frames per second) at which the\r\n * AudioContext handles audio. It is assumed that all AudioNodes\r\n * in the context run at this rate. In making this assumption,\r\n * sample-rate converters or \"varispeed\" processors are not supported\r\n * in real-time processing.\r\n * The sampleRate parameter describes the sample-rate of the\r\n * linear PCM audio data in the buffer in sample-frames per second.\r\n * An implementation must support sample-rates in at least\r\n * the range 22050 to 96000.\r\n * @property {number} sampleRate - Buffer-size for how frequently the audioprocess event is dispatched.\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder = new StereoAudioRecorder(mediaStream, {\r\n * sampleRate: 44100\r\n * });\r\n */\r\n var sampleRate = typeof config.sampleRate !== 'undefined' ? config.sampleRate : context.sampleRate || 44100;\r\n\r\n if (sampleRate < 22050 || sampleRate > 96000) {\r\n // Ref: http://stackoverflow.com/a/26303918/552182\r\n if (!config.disableLogs) {\r\n console.log('sample-rate must be under range 22050 and 96000.');\r\n }\r\n }\r\n\r\n if (!config.disableLogs) {\r\n if (config.desiredSampRate) {\r\n console.log('Desired sample-rate: ' + config.desiredSampRate);\r\n }\r\n }\r\n\r\n var isPaused = false;\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n isPaused = true;\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n if (isMediaStreamActive() === false) {\r\n throw 'Please make sure MediaStream is active.';\r\n }\r\n\r\n if (!recording) {\r\n if (!config.disableLogs) {\r\n console.log('Seems recording has been restarted.');\r\n }\r\n this.record();\r\n return;\r\n }\r\n\r\n isPaused = false;\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n config.checkForInactiveTracks = false;\r\n\r\n if (recording) {\r\n this.stop(clearRecordedDataCB);\r\n }\r\n\r\n clearRecordedDataCB();\r\n };\r\n\r\n function resetVariables() {\r\n leftchannel = [];\r\n rightchannel = [];\r\n recordingLength = 0;\r\n isAudioProcessStarted = false;\r\n recording = false;\r\n isPaused = false;\r\n context = null;\r\n\r\n self.leftchannel = leftchannel;\r\n self.rightchannel = rightchannel;\r\n self.numberOfAudioChannels = numberOfAudioChannels;\r\n self.desiredSampRate = desiredSampRate;\r\n self.sampleRate = sampleRate;\r\n self.recordingLength = recordingLength;\r\n\r\n intervalsBasedBuffers = {\r\n left: [],\r\n right: [],\r\n recordingLength: 0\r\n };\r\n }\r\n\r\n function clearRecordedDataCB() {\r\n if (jsAudioNode) {\r\n jsAudioNode.onaudioprocess = null;\r\n jsAudioNode.disconnect();\r\n jsAudioNode = null;\r\n }\r\n\r\n if (audioInput) {\r\n audioInput.disconnect();\r\n audioInput = null;\r\n }\r\n\r\n resetVariables();\r\n }\r\n\r\n // for debugging\r\n this.name = 'StereoAudioRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n var isAudioProcessStarted = false;\r\n\r\n function onAudioProcessDataAvailable(e) {\r\n if (isPaused) {\r\n return;\r\n }\r\n\r\n if (isMediaStreamActive() === false) {\r\n if (!config.disableLogs) {\r\n console.log('MediaStream seems stopped.');\r\n }\r\n jsAudioNode.disconnect();\r\n recording = false;\r\n }\r\n\r\n if (!recording) {\r\n if (audioInput) {\r\n audioInput.disconnect();\r\n audioInput = null;\r\n }\r\n return;\r\n }\r\n\r\n /**\r\n * This method is called on \"onaudioprocess\" event's first invocation.\r\n * @method {function} onAudioProcessStarted\r\n * @memberof StereoAudioRecorder\r\n * @example\r\n * recorder.onAudioProcessStarted: function() { };\r\n */\r\n if (!isAudioProcessStarted) {\r\n isAudioProcessStarted = true;\r\n if (config.onAudioProcessStarted) {\r\n config.onAudioProcessStarted();\r\n }\r\n\r\n if (config.initCallback) {\r\n config.initCallback();\r\n }\r\n }\r\n\r\n var left = e.inputBuffer.getChannelData(0);\r\n\r\n // we clone the samples\r\n var chLeft = new Float32Array(left);\r\n leftchannel.push(chLeft);\r\n\r\n if (numberOfAudioChannels === 2) {\r\n var right = e.inputBuffer.getChannelData(1);\r\n var chRight = new Float32Array(right);\r\n rightchannel.push(chRight);\r\n }\r\n\r\n recordingLength += bufferSize;\r\n\r\n // export raw PCM\r\n self.recordingLength = recordingLength;\r\n\r\n if (typeof config.timeSlice !== 'undefined') {\r\n intervalsBasedBuffers.recordingLength += bufferSize;\r\n intervalsBasedBuffers.left.push(chLeft);\r\n\r\n if (numberOfAudioChannels === 2) {\r\n intervalsBasedBuffers.right.push(chRight);\r\n }\r\n }\r\n }\r\n\r\n jsAudioNode.onaudioprocess = onAudioProcessDataAvailable;\r\n\r\n // to prevent self audio to be connected with speakers\r\n if (context.createMediaStreamDestination) {\r\n jsAudioNode.connect(context.createMediaStreamDestination());\r\n } else {\r\n jsAudioNode.connect(context.destination);\r\n }\r\n\r\n // export raw PCM\r\n this.leftchannel = leftchannel;\r\n this.rightchannel = rightchannel;\r\n this.numberOfAudioChannels = numberOfAudioChannels;\r\n this.desiredSampRate = desiredSampRate;\r\n this.sampleRate = sampleRate;\r\n self.recordingLength = recordingLength;\r\n\r\n // helper for intervals based blobs\r\n var intervalsBasedBuffers = {\r\n left: [],\r\n right: [],\r\n recordingLength: 0\r\n };\r\n\r\n // this looper is used to support intervals based blobs (via timeSlice+ondataavailable)\r\n function looper() {\r\n if (!recording || typeof config.ondataavailable !== 'function' || typeof config.timeSlice === 'undefined') {\r\n return;\r\n }\r\n\r\n if (intervalsBasedBuffers.left.length) {\r\n mergeLeftRightBuffers({\r\n desiredSampRate: desiredSampRate,\r\n sampleRate: sampleRate,\r\n numberOfAudioChannels: numberOfAudioChannels,\r\n internalInterleavedLength: intervalsBasedBuffers.recordingLength,\r\n leftBuffers: intervalsBasedBuffers.left,\r\n rightBuffers: numberOfAudioChannels === 1 ? [] : intervalsBasedBuffers.right\r\n }, function(buffer, view) {\r\n var blob = new Blob([view], {\r\n type: 'audio/wav'\r\n });\r\n config.ondataavailable(blob);\r\n\r\n setTimeout(looper, config.timeSlice);\r\n });\r\n\r\n intervalsBasedBuffers = {\r\n left: [],\r\n right: [],\r\n recordingLength: 0\r\n };\r\n } else {\r\n setTimeout(looper, config.timeSlice);\r\n }\r\n }\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.StereoAudioRecorder = StereoAudioRecorder;\r\n}\r\n\r\n// _________________\r\n// CanvasRecorder.js\r\n\r\n/**\r\n * CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses HTML2Canvas library and runs top over {@link Whammy}.\r\n * @summary HTML2Canvas recording into video WebM.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef CanvasRecorder\r\n * @class\r\n * @example\r\n * var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true });\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.\r\n * @param {object} config - {disableLogs:true, initCallback: function}\r\n */\r\n\r\nfunction CanvasRecorder(htmlElement, config) {\r\n if (typeof html2canvas === 'undefined') {\r\n throw 'Please link: https://www.webrtc-experiment.com/screenshot.js';\r\n }\r\n\r\n config = config || {};\r\n if (!config.frameInterval) {\r\n config.frameInterval = 10;\r\n }\r\n\r\n // via DetectRTC.js\r\n var isCanvasSupportsStreamCapturing = false;\r\n ['captureStream', 'mozCaptureStream', 'webkitCaptureStream'].forEach(function(item) {\r\n if (item in document.createElement('canvas')) {\r\n isCanvasSupportsStreamCapturing = true;\r\n }\r\n });\r\n\r\n var _isChrome = (!!window.webkitRTCPeerConnection || !!window.webkitGetUserMedia) && !!window.chrome;\r\n\r\n var chromeVersion = 50;\r\n var matchArray = navigator.userAgent.match(/Chrom(e|ium)\\/([0-9]+)\\./);\r\n if (_isChrome && matchArray && matchArray[2]) {\r\n chromeVersion = parseInt(matchArray[2], 10);\r\n }\r\n\r\n if (_isChrome && chromeVersion < 52) {\r\n isCanvasSupportsStreamCapturing = false;\r\n }\r\n\r\n if (config.useWhammyRecorder) {\r\n isCanvasSupportsStreamCapturing = false;\r\n }\r\n\r\n var globalCanvas, mediaStreamRecorder;\r\n\r\n if (isCanvasSupportsStreamCapturing) {\r\n if (!config.disableLogs) {\r\n console.log('Your browser supports both MediRecorder API and canvas.captureStream!');\r\n }\r\n\r\n if (htmlElement instanceof HTMLCanvasElement) {\r\n globalCanvas = htmlElement;\r\n } else if (htmlElement instanceof CanvasRenderingContext2D) {\r\n globalCanvas = htmlElement.canvas;\r\n } else {\r\n throw 'Please pass either HTMLCanvasElement or CanvasRenderingContext2D.';\r\n }\r\n } else if (!!navigator.mozGetUserMedia) {\r\n if (!config.disableLogs) {\r\n console.error('Canvas recording is NOT supported in Firefox.');\r\n }\r\n }\r\n\r\n var isRecording;\r\n\r\n /**\r\n * This method records Canvas.\r\n * @method\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n isRecording = true;\r\n\r\n if (isCanvasSupportsStreamCapturing && !config.useWhammyRecorder) {\r\n // CanvasCaptureMediaStream\r\n var canvasMediaStream;\r\n if ('captureStream' in globalCanvas) {\r\n canvasMediaStream = globalCanvas.captureStream(25); // 25 FPS\r\n } else if ('mozCaptureStream' in globalCanvas) {\r\n canvasMediaStream = globalCanvas.mozCaptureStream(25);\r\n } else if ('webkitCaptureStream' in globalCanvas) {\r\n canvasMediaStream = globalCanvas.webkitCaptureStream(25);\r\n }\r\n\r\n try {\r\n var mdStream = new MediaStream();\r\n mdStream.addTrack(getTracks(canvasMediaStream, 'video')[0]);\r\n canvasMediaStream = mdStream;\r\n } catch (e) {}\r\n\r\n if (!canvasMediaStream) {\r\n throw 'captureStream API are NOT available.';\r\n }\r\n\r\n // Note: Jan 18, 2016 status is that, \r\n // Firefox MediaRecorder API can't record CanvasCaptureMediaStream object.\r\n mediaStreamRecorder = new MediaStreamRecorder(canvasMediaStream, {\r\n mimeType: config.mimeType || 'video/webm'\r\n });\r\n mediaStreamRecorder.record();\r\n } else {\r\n whammy.frames = [];\r\n lastTime = new Date().getTime();\r\n drawCanvasFrame();\r\n }\r\n\r\n if (config.initCallback) {\r\n config.initCallback();\r\n }\r\n };\r\n\r\n this.getWebPImages = function(callback) {\r\n if (htmlElement.nodeName.toLowerCase() !== 'canvas') {\r\n callback();\r\n return;\r\n }\r\n\r\n var framesLength = whammy.frames.length;\r\n whammy.frames.forEach(function(frame, idx) {\r\n var framesRemaining = framesLength - idx;\r\n if (!config.disableLogs) {\r\n console.log(framesRemaining + '/' + framesLength + ' frames remaining');\r\n }\r\n\r\n if (config.onEncodingCallback) {\r\n config.onEncodingCallback(framesRemaining, framesLength);\r\n }\r\n\r\n var webp = frame.image.toDataURL('image/webp', 1);\r\n whammy.frames[idx].image = webp;\r\n });\r\n\r\n if (!config.disableLogs) {\r\n console.log('Generating WebM');\r\n }\r\n\r\n callback();\r\n };\r\n\r\n /**\r\n * This method stops recording Canvas.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n isRecording = false;\r\n\r\n var that = this;\r\n\r\n if (isCanvasSupportsStreamCapturing && mediaStreamRecorder) {\r\n mediaStreamRecorder.stop(callback);\r\n return;\r\n }\r\n\r\n this.getWebPImages(function() {\r\n /**\r\n * @property {Blob} blob - Recorded frames in video/webm blob.\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.stop(function() {\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n whammy.compile(function(blob) {\r\n if (!config.disableLogs) {\r\n console.log('Recording finished!');\r\n }\r\n\r\n that.blob = blob;\r\n\r\n if (that.blob.forEach) {\r\n that.blob = new Blob([], {\r\n type: 'video/webm'\r\n });\r\n }\r\n\r\n if (callback) {\r\n callback(that.blob);\r\n }\r\n\r\n whammy.frames = [];\r\n });\r\n });\r\n };\r\n\r\n var isPausedRecording = false;\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n isPausedRecording = true;\r\n\r\n if (mediaStreamRecorder instanceof MediaStreamRecorder) {\r\n mediaStreamRecorder.pause();\r\n return;\r\n }\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n isPausedRecording = false;\r\n\r\n if (mediaStreamRecorder instanceof MediaStreamRecorder) {\r\n mediaStreamRecorder.resume();\r\n return;\r\n }\r\n\r\n if (!isRecording) {\r\n this.record();\r\n }\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof CanvasRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n if (isRecording) {\r\n this.stop(clearRecordedDataCB);\r\n }\r\n clearRecordedDataCB();\r\n };\r\n\r\n function clearRecordedDataCB() {\r\n whammy.frames = [];\r\n isRecording = false;\r\n isPausedRecording = false;\r\n }\r\n\r\n // for debugging\r\n this.name = 'CanvasRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n function cloneCanvas() {\r\n //create a new canvas\r\n var newCanvas = document.createElement('canvas');\r\n var context = newCanvas.getContext('2d');\r\n\r\n //set dimensions\r\n newCanvas.width = htmlElement.width;\r\n newCanvas.height = htmlElement.height;\r\n\r\n //apply the old canvas to the new one\r\n context.drawImage(htmlElement, 0, 0);\r\n\r\n //return the new canvas\r\n return newCanvas;\r\n }\r\n\r\n function drawCanvasFrame() {\r\n if (isPausedRecording) {\r\n lastTime = new Date().getTime();\r\n return setTimeout(drawCanvasFrame, 500);\r\n }\r\n\r\n if (htmlElement.nodeName.toLowerCase() === 'canvas') {\r\n var duration = new Date().getTime() - lastTime;\r\n // via #206, by Jack i.e. @Seymourr\r\n lastTime = new Date().getTime();\r\n\r\n whammy.frames.push({\r\n image: cloneCanvas(),\r\n duration: duration\r\n });\r\n\r\n if (isRecording) {\r\n setTimeout(drawCanvasFrame, config.frameInterval);\r\n }\r\n return;\r\n }\r\n\r\n html2canvas(htmlElement, {\r\n grabMouse: typeof config.showMousePointer === 'undefined' || config.showMousePointer,\r\n onrendered: function(canvas) {\r\n var duration = new Date().getTime() - lastTime;\r\n if (!duration) {\r\n return setTimeout(drawCanvasFrame, config.frameInterval);\r\n }\r\n\r\n // via #206, by Jack i.e. @Seymourr\r\n lastTime = new Date().getTime();\r\n\r\n whammy.frames.push({\r\n image: canvas.toDataURL('image/webp', 1),\r\n duration: duration\r\n });\r\n\r\n if (isRecording) {\r\n setTimeout(drawCanvasFrame, config.frameInterval);\r\n }\r\n }\r\n });\r\n }\r\n\r\n var lastTime = new Date().getTime();\r\n\r\n var whammy = new Whammy.Video(100);\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.CanvasRecorder = CanvasRecorder;\r\n}\n\r\n// _________________\r\n// WhammyRecorder.js\r\n\r\n/**\r\n * WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs top over {@link Whammy}.\r\n * @summary Video recording feature in Chrome.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef WhammyRecorder\r\n * @class\r\n * @example\r\n * var recorder = new WhammyRecorder(mediaStream);\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}\r\n */\r\n\r\nfunction WhammyRecorder(mediaStream, config) {\r\n\r\n config = config || {};\r\n\r\n if (!config.frameInterval) {\r\n config.frameInterval = 10;\r\n }\r\n\r\n if (!config.disableLogs) {\r\n console.log('Using frames-interval:', config.frameInterval);\r\n }\r\n\r\n /**\r\n * This method records video.\r\n * @method\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n if (!config.width) {\r\n config.width = 320;\r\n }\r\n\r\n if (!config.height) {\r\n config.height = 240;\r\n }\r\n\r\n if (!config.video) {\r\n config.video = {\r\n width: config.width,\r\n height: config.height\r\n };\r\n }\r\n\r\n if (!config.canvas) {\r\n config.canvas = {\r\n width: config.width,\r\n height: config.height\r\n };\r\n }\r\n\r\n canvas.width = config.canvas.width || 320;\r\n canvas.height = config.canvas.height || 240;\r\n\r\n context = canvas.getContext('2d');\r\n\r\n // setting defaults\r\n if (config.video && config.video instanceof HTMLVideoElement) {\r\n video = config.video.cloneNode();\r\n\r\n if (config.initCallback) {\r\n config.initCallback();\r\n }\r\n } else {\r\n video = document.createElement('video');\r\n\r\n setSrcObject(mediaStream, video);\r\n\r\n video.onloadedmetadata = function() { // \"onloadedmetadata\" may NOT work in FF?\r\n if (config.initCallback) {\r\n config.initCallback();\r\n }\r\n };\r\n\r\n video.width = config.video.width;\r\n video.height = config.video.height;\r\n }\r\n\r\n video.muted = true;\r\n video.play();\r\n\r\n lastTime = new Date().getTime();\r\n whammy = new Whammy.Video();\r\n\r\n if (!config.disableLogs) {\r\n console.log('canvas resolutions', canvas.width, '*', canvas.height);\r\n console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);\r\n }\r\n\r\n drawFrames(config.frameInterval);\r\n };\r\n\r\n /**\r\n * Draw and push frames to Whammy\r\n * @param {integer} frameInterval - set minimum interval (in milliseconds) between each time we push a frame to Whammy\r\n */\r\n function drawFrames(frameInterval) {\r\n frameInterval = typeof frameInterval !== 'undefined' ? frameInterval : 10;\r\n\r\n var duration = new Date().getTime() - lastTime;\r\n if (!duration) {\r\n return setTimeout(drawFrames, frameInterval, frameInterval);\r\n }\r\n\r\n if (isPausedRecording) {\r\n lastTime = new Date().getTime();\r\n return setTimeout(drawFrames, 100);\r\n }\r\n\r\n // via #206, by Jack i.e. @Seymourr\r\n lastTime = new Date().getTime();\r\n\r\n if (video.paused) {\r\n // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316\r\n // Tweak for Android Chrome\r\n video.play();\r\n }\r\n\r\n context.drawImage(video, 0, 0, canvas.width, canvas.height);\r\n whammy.frames.push({\r\n duration: duration,\r\n image: canvas.toDataURL('image/webp')\r\n });\r\n\r\n if (!isStopDrawing) {\r\n setTimeout(drawFrames, frameInterval, frameInterval);\r\n }\r\n }\r\n\r\n function asyncLoop(o) {\r\n var i = -1,\r\n length = o.length;\r\n\r\n (function loop() {\r\n i++;\r\n if (i === length) {\r\n o.callback();\r\n return;\r\n }\r\n\r\n // \"setTimeout\" added by Jim McLeod\r\n setTimeout(function() {\r\n o.functionToLoop(loop, i);\r\n }, 1);\r\n })();\r\n }\r\n\r\n\r\n /**\r\n * remove black frames from the beginning to the specified frame\r\n * @param {Array} _frames - array of frames to be checked\r\n * @param {number} _framesToCheck - number of frame until check will be executed (-1 - will drop all frames until frame not matched will be found)\r\n * @param {number} _pixTolerance - 0 - very strict (only black pixel color) ; 1 - all\r\n * @param {number} _frameTolerance - 0 - very strict (only black frame color) ; 1 - all\r\n * @returns {Array} - array of frames\r\n */\r\n // pull#293 by @volodalexey\r\n function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance, callback) {\r\n var localCanvas = document.createElement('canvas');\r\n localCanvas.width = canvas.width;\r\n localCanvas.height = canvas.height;\r\n var context2d = localCanvas.getContext('2d');\r\n var resultFrames = [];\r\n\r\n var checkUntilNotBlack = _framesToCheck === -1;\r\n var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?\r\n _framesToCheck : _frames.length;\r\n var sampleColor = {\r\n r: 0,\r\n g: 0,\r\n b: 0\r\n };\r\n var maxColorDifference = Math.sqrt(\r\n Math.pow(255, 2) +\r\n Math.pow(255, 2) +\r\n Math.pow(255, 2)\r\n );\r\n var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;\r\n var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;\r\n var doNotCheckNext = false;\r\n\r\n asyncLoop({\r\n length: endCheckFrame,\r\n functionToLoop: function(loop, f) {\r\n var matchPixCount, endPixCheck, maxPixCount;\r\n\r\n var finishImage = function() {\r\n if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {\r\n // console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);\r\n } else {\r\n // console.log('frame is passed : ' + f);\r\n if (checkUntilNotBlack) {\r\n doNotCheckNext = true;\r\n }\r\n resultFrames.push(_frames[f]);\r\n }\r\n loop();\r\n };\r\n\r\n if (!doNotCheckNext) {\r\n var image = new Image();\r\n image.onload = function() {\r\n context2d.drawImage(image, 0, 0, canvas.width, canvas.height);\r\n var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);\r\n matchPixCount = 0;\r\n endPixCheck = imageData.data.length;\r\n maxPixCount = imageData.data.length / 4;\r\n\r\n for (var pix = 0; pix < endPixCheck; pix += 4) {\r\n var currentColor = {\r\n r: imageData.data[pix],\r\n g: imageData.data[pix + 1],\r\n b: imageData.data[pix + 2]\r\n };\r\n var colorDifference = Math.sqrt(\r\n Math.pow(currentColor.r - sampleColor.r, 2) +\r\n Math.pow(currentColor.g - sampleColor.g, 2) +\r\n Math.pow(currentColor.b - sampleColor.b, 2)\r\n );\r\n // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)\r\n if (colorDifference <= maxColorDifference * pixTolerance) {\r\n matchPixCount++;\r\n }\r\n }\r\n finishImage();\r\n };\r\n image.src = _frames[f].image;\r\n } else {\r\n finishImage();\r\n }\r\n },\r\n callback: function() {\r\n resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));\r\n\r\n if (resultFrames.length <= 0) {\r\n // at least one last frame should be available for next manipulation\r\n // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...\r\n resultFrames.push(_frames[_frames.length - 1]);\r\n }\r\n callback(resultFrames);\r\n }\r\n });\r\n }\r\n\r\n var isStopDrawing = false;\r\n\r\n /**\r\n * This method stops recording video.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n callback = callback || function() {};\r\n\r\n isStopDrawing = true;\r\n\r\n var _this = this;\r\n // analyse of all frames takes some time!\r\n setTimeout(function() {\r\n // e.g. dropBlackFrames(frames, 10, 1, 1) - will cut all 10 frames\r\n // e.g. dropBlackFrames(frames, 10, 0.5, 0.5) - will analyse 10 frames\r\n // e.g. dropBlackFrames(frames, 10) === dropBlackFrames(frames, 10, 0, 0) - will analyse 10 frames with strict black color\r\n dropBlackFrames(whammy.frames, -1, null, null, function(frames) {\r\n whammy.frames = frames;\r\n\r\n // to display advertisement images!\r\n if (config.advertisement && config.advertisement.length) {\r\n whammy.frames = config.advertisement.concat(whammy.frames);\r\n }\r\n\r\n /**\r\n * @property {Blob} blob - Recorded frames in video/webm blob.\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.stop(function() {\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n whammy.compile(function(blob) {\r\n _this.blob = blob;\r\n\r\n if (_this.blob.forEach) {\r\n _this.blob = new Blob([], {\r\n type: 'video/webm'\r\n });\r\n }\r\n\r\n if (callback) {\r\n callback(_this.blob);\r\n }\r\n });\r\n });\r\n }, 10);\r\n };\r\n\r\n var isPausedRecording = false;\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n isPausedRecording = true;\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n isPausedRecording = false;\r\n\r\n if (isStopDrawing) {\r\n this.record();\r\n }\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof WhammyRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n if (!isStopDrawing) {\r\n this.stop(clearRecordedDataCB);\r\n }\r\n clearRecordedDataCB();\r\n };\r\n\r\n function clearRecordedDataCB() {\r\n whammy.frames = [];\r\n isStopDrawing = true;\r\n isPausedRecording = false;\r\n }\r\n\r\n // for debugging\r\n this.name = 'WhammyRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n var canvas = document.createElement('canvas');\r\n var context = canvas.getContext('2d');\r\n\r\n var video;\r\n var lastTime;\r\n var whammy;\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.WhammyRecorder = WhammyRecorder;\r\n}\n\r\n// https://github.com/antimatter15/whammy/blob/master/LICENSE\r\n// _________\r\n// Whammy.js\r\n\r\n// todo: Firefox now supports webp for webm containers!\r\n// their MediaRecorder implementation works well!\r\n// should we provide an option to record via Whammy.js or MediaRecorder API is a better solution?\r\n\r\n/**\r\n * Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}\r\n * @summary A real time javascript webm encoder based on a canvas hack.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef Whammy\r\n * @class\r\n * @example\r\n * var recorder = new Whammy().Video(15);\r\n * recorder.add(context || canvas || dataURL);\r\n * var output = recorder.compile();\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\n\r\nvar Whammy = (function() {\r\n // a more abstract-ish API\r\n\r\n function WhammyVideo(duration) {\r\n this.frames = [];\r\n this.duration = duration || 1;\r\n this.quality = 0.8;\r\n }\r\n\r\n /**\r\n * Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.\r\n * @method\r\n * @memberof Whammy\r\n * @example\r\n * recorder = new Whammy().Video(0.8, 100);\r\n * recorder.add(canvas || context || 'image/webp');\r\n * @param {string} frame - Canvas || Context || image/webp\r\n * @param {number} duration - Stick a duration (in milliseconds)\r\n */\r\n WhammyVideo.prototype.add = function(frame, duration) {\r\n if ('canvas' in frame) { //CanvasRenderingContext2D\r\n frame = frame.canvas;\r\n }\r\n\r\n if ('toDataURL' in frame) {\r\n frame = frame.toDataURL('image/webp', this.quality);\r\n }\r\n\r\n if (!(/^data:image\\/webp;base64,/ig).test(frame)) {\r\n throw 'Input must be formatted properly as a base64 encoded DataURI of type image/webp';\r\n }\r\n this.frames.push({\r\n image: frame,\r\n duration: duration || this.duration\r\n });\r\n };\r\n\r\n function processInWebWorker(_function) {\r\n var blob = URL.createObjectURL(new Blob([_function.toString(),\r\n 'this.onmessage = function (eee) {' + _function.name + '(eee.data);}'\r\n ], {\r\n type: 'application/javascript'\r\n }));\r\n\r\n var worker = new Worker(blob);\r\n URL.revokeObjectURL(blob);\r\n return worker;\r\n }\r\n\r\n function whammyInWebWorker(frames) {\r\n function ArrayToWebM(frames) {\r\n var info = checkFrames(frames);\r\n if (!info) {\r\n return [];\r\n }\r\n\r\n var clusterMaxDuration = 30000;\r\n\r\n var EBML = [{\r\n 'id': 0x1a45dfa3, // EBML\r\n 'data': [{\r\n 'data': 1,\r\n 'id': 0x4286 // EBMLVersion\r\n }, {\r\n 'data': 1,\r\n 'id': 0x42f7 // EBMLReadVersion\r\n }, {\r\n 'data': 4,\r\n 'id': 0x42f2 // EBMLMaxIDLength\r\n }, {\r\n 'data': 8,\r\n 'id': 0x42f3 // EBMLMaxSizeLength\r\n }, {\r\n 'data': 'webm',\r\n 'id': 0x4282 // DocType\r\n }, {\r\n 'data': 2,\r\n 'id': 0x4287 // DocTypeVersion\r\n }, {\r\n 'data': 2,\r\n 'id': 0x4285 // DocTypeReadVersion\r\n }]\r\n }, {\r\n 'id': 0x18538067, // Segment\r\n 'data': [{\r\n 'id': 0x1549a966, // Info\r\n 'data': [{\r\n 'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)\r\n 'id': 0x2ad7b1 // TimecodeScale\r\n }, {\r\n 'data': 'whammy',\r\n 'id': 0x4d80 // MuxingApp\r\n }, {\r\n 'data': 'whammy',\r\n 'id': 0x5741 // WritingApp\r\n }, {\r\n 'data': doubleToString(info.duration),\r\n 'id': 0x4489 // Duration\r\n }]\r\n }, {\r\n 'id': 0x1654ae6b, // Tracks\r\n 'data': [{\r\n 'id': 0xae, // TrackEntry\r\n 'data': [{\r\n 'data': 1,\r\n 'id': 0xd7 // TrackNumber\r\n }, {\r\n 'data': 1,\r\n 'id': 0x73c5 // TrackUID\r\n }, {\r\n 'data': 0,\r\n 'id': 0x9c // FlagLacing\r\n }, {\r\n 'data': 'und',\r\n 'id': 0x22b59c // Language\r\n }, {\r\n 'data': 'V_VP8',\r\n 'id': 0x86 // CodecID\r\n }, {\r\n 'data': 'VP8',\r\n 'id': 0x258688 // CodecName\r\n }, {\r\n 'data': 1,\r\n 'id': 0x83 // TrackType\r\n }, {\r\n 'id': 0xe0, // Video\r\n 'data': [{\r\n 'data': info.width,\r\n 'id': 0xb0 // PixelWidth\r\n }, {\r\n 'data': info.height,\r\n 'id': 0xba // PixelHeight\r\n }]\r\n }]\r\n }]\r\n }]\r\n }];\r\n\r\n //Generate clusters (max duration)\r\n var frameNumber = 0;\r\n var clusterTimecode = 0;\r\n while (frameNumber < frames.length) {\r\n\r\n var clusterFrames = [];\r\n var clusterDuration = 0;\r\n do {\r\n clusterFrames.push(frames[frameNumber]);\r\n clusterDuration += frames[frameNumber].duration;\r\n frameNumber++;\r\n } while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);\r\n\r\n var clusterCounter = 0;\r\n var cluster = {\r\n 'id': 0x1f43b675, // Cluster\r\n 'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)\r\n }; //Add cluster to segment\r\n EBML[1].data.push(cluster);\r\n clusterTimecode += clusterDuration;\r\n }\r\n\r\n return generateEBML(EBML);\r\n }\r\n\r\n function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {\r\n return [{\r\n 'data': clusterTimecode,\r\n 'id': 0xe7 // Timecode\r\n }].concat(clusterFrames.map(function(webp) {\r\n var block = makeSimpleBlock({\r\n discardable: 0,\r\n frame: webp.data.slice(4),\r\n invisible: 0,\r\n keyframe: 1,\r\n lacing: 0,\r\n trackNum: 1,\r\n timecode: Math.round(clusterCounter)\r\n });\r\n clusterCounter += webp.duration;\r\n return {\r\n data: block,\r\n id: 0xa3\r\n };\r\n }));\r\n }\r\n\r\n // sums the lengths of all the frames and gets the duration\r\n\r\n function checkFrames(frames) {\r\n if (!frames[0]) {\r\n postMessage({\r\n error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'\r\n });\r\n return;\r\n }\r\n\r\n var width = frames[0].width,\r\n height = frames[0].height,\r\n duration = frames[0].duration;\r\n\r\n for (var i = 1; i < frames.length; i++) {\r\n duration += frames[i].duration;\r\n }\r\n return {\r\n duration: duration,\r\n width: width,\r\n height: height\r\n };\r\n }\r\n\r\n function numToBuffer(num) {\r\n var parts = [];\r\n while (num > 0) {\r\n parts.push(num & 0xff);\r\n num = num >> 8;\r\n }\r\n return new Uint8Array(parts.reverse());\r\n }\r\n\r\n function strToBuffer(str) {\r\n return new Uint8Array(str.split('').map(function(e) {\r\n return e.charCodeAt(0);\r\n }));\r\n }\r\n\r\n function bitsToBuffer(bits) {\r\n var data = [];\r\n var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';\r\n bits = pad + bits;\r\n for (var i = 0; i < bits.length; i += 8) {\r\n data.push(parseInt(bits.substr(i, 8), 2));\r\n }\r\n return new Uint8Array(data);\r\n }\r\n\r\n function generateEBML(json) {\r\n var ebml = [];\r\n for (var i = 0; i < json.length; i++) {\r\n var data = json[i].data;\r\n\r\n if (typeof data === 'object') {\r\n data = generateEBML(data);\r\n }\r\n\r\n if (typeof data === 'number') {\r\n data = bitsToBuffer(data.toString(2));\r\n }\r\n\r\n if (typeof data === 'string') {\r\n data = strToBuffer(data);\r\n }\r\n\r\n var len = data.size || data.byteLength || data.length;\r\n var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);\r\n var sizeToString = len.toString(2);\r\n var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;\r\n var size = (new Array(zeroes)).join('0') + '1' + padded;\r\n\r\n ebml.push(numToBuffer(json[i].id));\r\n ebml.push(bitsToBuffer(size));\r\n ebml.push(data);\r\n }\r\n\r\n return new Blob(ebml, {\r\n type: 'video/webm'\r\n });\r\n }\r\n\r\n function toBinStrOld(bits) {\r\n var data = '';\r\n var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';\r\n bits = pad + bits;\r\n for (var i = 0; i < bits.length; i += 8) {\r\n data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));\r\n }\r\n return data;\r\n }\r\n\r\n function makeSimpleBlock(data) {\r\n var flags = 0;\r\n\r\n if (data.keyframe) {\r\n flags |= 128;\r\n }\r\n\r\n if (data.invisible) {\r\n flags |= 8;\r\n }\r\n\r\n if (data.lacing) {\r\n flags |= (data.lacing << 1);\r\n }\r\n\r\n if (data.discardable) {\r\n flags |= 1;\r\n }\r\n\r\n if (data.trackNum > 127) {\r\n throw 'TrackNumber > 127 not supported';\r\n }\r\n\r\n var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {\r\n return String.fromCharCode(e);\r\n }).join('') + data.frame;\r\n\r\n return out;\r\n }\r\n\r\n function parseWebP(riff) {\r\n var VP8 = riff.RIFF[0].WEBP[0];\r\n\r\n var frameStart = VP8.indexOf('\\x9d\\x01\\x2a'); // A VP8 keyframe starts with the 0x9d012a header\r\n for (var i = 0, c = []; i < 4; i++) {\r\n c[i] = VP8.charCodeAt(frameStart + 3 + i);\r\n }\r\n\r\n var width, height, tmp;\r\n\r\n //the code below is literally copied verbatim from the bitstream spec\r\n tmp = (c[1] << 8) | c[0];\r\n width = tmp & 0x3FFF;\r\n tmp = (c[3] << 8) | c[2];\r\n height = tmp & 0x3FFF;\r\n return {\r\n width: width,\r\n height: height,\r\n data: VP8,\r\n riff: riff\r\n };\r\n }\r\n\r\n function getStrLength(string, offset) {\r\n return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {\r\n var unpadded = i.charCodeAt(0).toString(2);\r\n return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;\r\n }).join(''), 2);\r\n }\r\n\r\n function parseRIFF(string) {\r\n var offset = 0;\r\n var chunks = {};\r\n\r\n while (offset < string.length) {\r\n var id = string.substr(offset, 4);\r\n var len = getStrLength(string, offset);\r\n var data = string.substr(offset + 4 + 4, len);\r\n offset += 4 + 4 + len;\r\n chunks[id] = chunks[id] || [];\r\n\r\n if (id === 'RIFF' || id === 'LIST') {\r\n chunks[id].push(parseRIFF(data));\r\n } else {\r\n chunks[id].push(data);\r\n }\r\n }\r\n return chunks;\r\n }\r\n\r\n function doubleToString(num) {\r\n return [].slice.call(\r\n new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {\r\n return String.fromCharCode(e);\r\n }).reverse().join('');\r\n }\r\n\r\n var webm = new ArrayToWebM(frames.map(function(frame) {\r\n var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));\r\n webp.duration = frame.duration;\r\n return webp;\r\n }));\r\n\r\n postMessage(webm);\r\n }\r\n\r\n /**\r\n * Encodes frames in WebM container. It uses WebWorkinvoke to invoke 'ArrayToWebM' method.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof Whammy\r\n * @example\r\n * recorder = new Whammy().Video(0.8, 100);\r\n * recorder.compile(function(blob) {\r\n * // blob.size - blob.type\r\n * });\r\n */\r\n WhammyVideo.prototype.compile = function(callback) {\r\n var webWorker = processInWebWorker(whammyInWebWorker);\r\n\r\n webWorker.onmessage = function(event) {\r\n if (event.data.error) {\r\n console.error(event.data.error);\r\n return;\r\n }\r\n callback(event.data);\r\n };\r\n\r\n webWorker.postMessage(this.frames);\r\n };\r\n\r\n return {\r\n /**\r\n * A more abstract-ish API.\r\n * @method\r\n * @memberof Whammy\r\n * @example\r\n * recorder = new Whammy().Video(0.8, 100);\r\n * @param {?number} speed - 0.8\r\n * @param {?number} quality - 100\r\n */\r\n Video: WhammyVideo\r\n };\r\n})();\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.Whammy = Whammy;\r\n}\n\r\n// ______________ (indexed-db)\r\n// DiskStorage.js\r\n\r\n/**\r\n * DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.\r\n * @summary Writing blobs into IndexedDB.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @example\r\n * DiskStorage.Store({\r\n * audioBlob: yourAudioBlob,\r\n * videoBlob: yourVideoBlob,\r\n * gifBlob : yourGifBlob\r\n * });\r\n * DiskStorage.Fetch(function(dataURL, type) {\r\n * if(type === 'audioBlob') { }\r\n * if(type === 'videoBlob') { }\r\n * if(type === 'gifBlob') { }\r\n * });\r\n * // DiskStorage.dataStoreName = 'recordRTC';\r\n * // DiskStorage.onError = function(error) { };\r\n * @property {function} init - This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.\r\n * @property {function} Fetch - This method fetches stored blobs from IndexedDB.\r\n * @property {function} Store - This method stores blobs in IndexedDB.\r\n * @property {function} onError - This function is invoked for any known/unknown error.\r\n * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n */\r\n\r\n\r\nvar DiskStorage = {\r\n /**\r\n * This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.\r\n * @method\r\n * @memberof DiskStorage\r\n * @internal\r\n * @example\r\n * DiskStorage.init();\r\n */\r\n init: function() {\r\n var self = this;\r\n\r\n if (typeof indexedDB === 'undefined' || typeof indexedDB.open === 'undefined') {\r\n console.error('IndexedDB API are not available in this browser.');\r\n return;\r\n }\r\n\r\n var dbVersion = 1;\r\n var dbName = this.dbName || location.href.replace(/\\/|:|#|%|\\.|\\[|\\]/g, ''),\r\n db;\r\n var request = indexedDB.open(dbName, dbVersion);\r\n\r\n function createObjectStore(dataBase) {\r\n dataBase.createObjectStore(self.dataStoreName);\r\n }\r\n\r\n function putInDB() {\r\n var transaction = db.transaction([self.dataStoreName], 'readwrite');\r\n\r\n if (self.videoBlob) {\r\n transaction.objectStore(self.dataStoreName).put(self.videoBlob, 'videoBlob');\r\n }\r\n\r\n if (self.gifBlob) {\r\n transaction.objectStore(self.dataStoreName).put(self.gifBlob, 'gifBlob');\r\n }\r\n\r\n if (self.audioBlob) {\r\n transaction.objectStore(self.dataStoreName).put(self.audioBlob, 'audioBlob');\r\n }\r\n\r\n function getFromStore(portionName) {\r\n transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {\r\n if (self.callback) {\r\n self.callback(event.target.result, portionName);\r\n }\r\n };\r\n }\r\n\r\n getFromStore('audioBlob');\r\n getFromStore('videoBlob');\r\n getFromStore('gifBlob');\r\n }\r\n\r\n request.onerror = self.onError;\r\n\r\n request.onsuccess = function() {\r\n db = request.result;\r\n db.onerror = self.onError;\r\n\r\n if (db.setVersion) {\r\n if (db.version !== dbVersion) {\r\n var setVersion = db.setVersion(dbVersion);\r\n setVersion.onsuccess = function() {\r\n createObjectStore(db);\r\n putInDB();\r\n };\r\n } else {\r\n putInDB();\r\n }\r\n } else {\r\n putInDB();\r\n }\r\n };\r\n request.onupgradeneeded = function(event) {\r\n createObjectStore(event.target.result);\r\n };\r\n },\r\n /**\r\n * This method fetches stored blobs from IndexedDB.\r\n * @method\r\n * @memberof DiskStorage\r\n * @internal\r\n * @example\r\n * DiskStorage.Fetch(function(dataURL, type) {\r\n * if(type === 'audioBlob') { }\r\n * if(type === 'videoBlob') { }\r\n * if(type === 'gifBlob') { }\r\n * });\r\n */\r\n Fetch: function(callback) {\r\n this.callback = callback;\r\n this.init();\r\n\r\n return this;\r\n },\r\n /**\r\n * This method stores blobs in IndexedDB.\r\n * @method\r\n * @memberof DiskStorage\r\n * @internal\r\n * @example\r\n * DiskStorage.Store({\r\n * audioBlob: yourAudioBlob,\r\n * videoBlob: yourVideoBlob,\r\n * gifBlob : yourGifBlob\r\n * });\r\n */\r\n Store: function(config) {\r\n this.audioBlob = config.audioBlob;\r\n this.videoBlob = config.videoBlob;\r\n this.gifBlob = config.gifBlob;\r\n\r\n this.init();\r\n\r\n return this;\r\n },\r\n /**\r\n * This function is invoked for any known/unknown error.\r\n * @method\r\n * @memberof DiskStorage\r\n * @internal\r\n * @example\r\n * DiskStorage.onError = function(error){\r\n * alerot( JSON.stringify(error) );\r\n * };\r\n */\r\n onError: function(error) {\r\n console.error(JSON.stringify(error, null, '\\t'));\r\n },\r\n\r\n /**\r\n * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.\r\n * @memberof DiskStorage\r\n * @internal\r\n * @example\r\n * DiskStorage.dataStoreName = 'recordRTC';\r\n */\r\n dataStoreName: 'recordRTC',\r\n dbName: null\r\n};\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.DiskStorage = DiskStorage;\r\n}\n\r\n// ______________\r\n// GifRecorder.js\r\n\r\n/**\r\n * GifRecorder is standalone calss used by {@link RecordRTC} to record video or canvas into animated gif.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef GifRecorder\r\n * @class\r\n * @example\r\n * var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 });\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * img.src = URL.createObjectURL(blob);\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.\r\n * @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}\r\n */\r\n\r\nfunction GifRecorder(mediaStream, config) {\r\n if (typeof GIFEncoder === 'undefined') {\r\n var script = document.createElement('script');\r\n script.src = 'https://www.webrtc-experiment.com/gif-recorder.js';\r\n (document.body || document.documentElement).appendChild(script);\r\n }\r\n\r\n config = config || {};\r\n\r\n var isHTMLObject = mediaStream instanceof CanvasRenderingContext2D || mediaStream instanceof HTMLCanvasElement;\r\n\r\n /**\r\n * This method records MediaStream.\r\n * @method\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n if (typeof GIFEncoder === 'undefined') {\r\n setTimeout(self.record, 1000);\r\n return;\r\n }\r\n\r\n if (!isLoadedMetaData) {\r\n setTimeout(self.record, 1000);\r\n return;\r\n }\r\n\r\n if (!isHTMLObject) {\r\n if (!config.width) {\r\n config.width = video.offsetWidth || 320;\r\n }\r\n\r\n if (!config.height) {\r\n config.height = video.offsetHeight || 240;\r\n }\r\n\r\n if (!config.video) {\r\n config.video = {\r\n width: config.width,\r\n height: config.height\r\n };\r\n }\r\n\r\n if (!config.canvas) {\r\n config.canvas = {\r\n width: config.width,\r\n height: config.height\r\n };\r\n }\r\n\r\n canvas.width = config.canvas.width || 320;\r\n canvas.height = config.canvas.height || 240;\r\n\r\n video.width = config.video.width || 320;\r\n video.height = config.video.height || 240;\r\n }\r\n\r\n // external library to record as GIF images\r\n gifEncoder = new GIFEncoder();\r\n\r\n // void setRepeat(int iter) \r\n // Sets the number of times the set of GIF frames should be played. \r\n // Default is 1; 0 means play indefinitely.\r\n gifEncoder.setRepeat(0);\r\n\r\n // void setFrameRate(Number fps) \r\n // Sets frame rate in frames per second. \r\n // Equivalent to setDelay(1000/fps).\r\n // Using \"setDelay\" instead of \"setFrameRate\"\r\n gifEncoder.setDelay(config.frameRate || 200);\r\n\r\n // void setQuality(int quality) \r\n // Sets quality of color quantization (conversion of images to the \r\n // maximum 256 colors allowed by the GIF specification). \r\n // Lower values (minimum = 1) produce better colors, \r\n // but slow processing significantly. 10 is the default, \r\n // and produces good color mapping at reasonable speeds. \r\n // Values greater than 20 do not yield significant improvements in speed.\r\n gifEncoder.setQuality(config.quality || 10);\r\n\r\n // Boolean start() \r\n // This writes the GIF Header and returns false if it fails.\r\n gifEncoder.start();\r\n\r\n if (typeof config.onGifRecordingStarted === 'function') {\r\n config.onGifRecordingStarted();\r\n }\r\n\r\n startTime = Date.now();\r\n\r\n function drawVideoFrame(time) {\r\n if (self.clearedRecordedData === true) {\r\n return;\r\n }\r\n\r\n if (isPausedRecording) {\r\n return setTimeout(function() {\r\n drawVideoFrame(time);\r\n }, 100);\r\n }\r\n\r\n lastAnimationFrame = requestAnimationFrame(drawVideoFrame);\r\n\r\n if (typeof lastFrameTime === undefined) {\r\n lastFrameTime = time;\r\n }\r\n\r\n // ~10 fps\r\n if (time - lastFrameTime < 90) {\r\n return;\r\n }\r\n\r\n if (!isHTMLObject && video.paused) {\r\n // via: https://github.com/muaz-khan/WebRTC-Experiment/pull/316\r\n // Tweak for Android Chrome\r\n video.play();\r\n }\r\n\r\n if (!isHTMLObject) {\r\n context.drawImage(video, 0, 0, canvas.width, canvas.height);\r\n }\r\n\r\n if (config.onGifPreview) {\r\n config.onGifPreview(canvas.toDataURL('image/png'));\r\n }\r\n\r\n gifEncoder.addFrame(context);\r\n lastFrameTime = time;\r\n }\r\n\r\n lastAnimationFrame = requestAnimationFrame(drawVideoFrame);\r\n\r\n if (config.initCallback) {\r\n config.initCallback();\r\n }\r\n };\r\n\r\n /**\r\n * This method stops recording MediaStream.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * img.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n callback = callback || function() {};\r\n\r\n if (lastAnimationFrame) {\r\n cancelAnimationFrame(lastAnimationFrame);\r\n }\r\n\r\n endTime = Date.now();\r\n\r\n /**\r\n * @property {Blob} blob - The recorded blob object.\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.stop(function(){\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n this.blob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {\r\n type: 'image/gif'\r\n });\r\n\r\n callback(this.blob);\r\n\r\n // bug: find a way to clear old recorded blobs\r\n gifEncoder.stream().bin = [];\r\n };\r\n\r\n var isPausedRecording = false;\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n isPausedRecording = true;\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n isPausedRecording = false;\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof GifRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n self.clearedRecordedData = true;\r\n clearRecordedDataCB();\r\n };\r\n\r\n function clearRecordedDataCB() {\r\n if (gifEncoder) {\r\n gifEncoder.stream().bin = [];\r\n }\r\n }\r\n\r\n // for debugging\r\n this.name = 'GifRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n var canvas = document.createElement('canvas');\r\n var context = canvas.getContext('2d');\r\n\r\n if (isHTMLObject) {\r\n if (mediaStream instanceof CanvasRenderingContext2D) {\r\n context = mediaStream;\r\n canvas = context.canvas;\r\n } else if (mediaStream instanceof HTMLCanvasElement) {\r\n context = mediaStream.getContext('2d');\r\n canvas = mediaStream;\r\n }\r\n }\r\n\r\n var isLoadedMetaData = true;\r\n\r\n if (!isHTMLObject) {\r\n var video = document.createElement('video');\r\n video.muted = true;\r\n video.autoplay = true;\r\n video.playsInline = true;\r\n\r\n isLoadedMetaData = false;\r\n video.onloadedmetadata = function() {\r\n isLoadedMetaData = true;\r\n };\r\n\r\n setSrcObject(mediaStream, video);\r\n\r\n video.play();\r\n }\r\n\r\n var lastAnimationFrame = null;\r\n var startTime, endTime, lastFrameTime;\r\n\r\n var gifEncoder;\r\n\r\n var self = this;\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.GifRecorder = GifRecorder;\r\n}\n\r\n// Last time updated: 2019-06-21 4:09:42 AM UTC\r\n\r\n// ________________________\r\n// MultiStreamsMixer v1.2.2\r\n\r\n// Open-Sourced: https://github.com/muaz-khan/MultiStreamsMixer\r\n\r\n// --------------------------------------------------\r\n// Muaz Khan - www.MuazKhan.com\r\n// MIT License - www.WebRTC-Experiment.com/licence\r\n// --------------------------------------------------\r\n\r\nfunction MultiStreamsMixer(arrayOfMediaStreams, elementClass) {\r\n\r\n var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45';\r\n\r\n (function(that) {\r\n if (typeof RecordRTC !== 'undefined') {\r\n return;\r\n }\r\n\r\n if (!that) {\r\n return;\r\n }\r\n\r\n if (typeof window !== 'undefined') {\r\n return;\r\n }\r\n\r\n if (typeof global === 'undefined') {\r\n return;\r\n }\r\n\r\n global.navigator = {\r\n userAgent: browserFakeUserAgent,\r\n getUserMedia: function() {}\r\n };\r\n\r\n if (!global.console) {\r\n global.console = {};\r\n }\r\n\r\n if (typeof global.console.log === 'undefined' || typeof global.console.error === 'undefined') {\r\n global.console.error = global.console.log = global.console.log || function() {\r\n console.log(arguments);\r\n };\r\n }\r\n\r\n if (typeof document === 'undefined') {\r\n /*global document:true */\r\n that.document = {\r\n documentElement: {\r\n appendChild: function() {\r\n return '';\r\n }\r\n }\r\n };\r\n\r\n document.createElement = document.captureStream = document.mozCaptureStream = function() {\r\n var obj = {\r\n getContext: function() {\r\n return obj;\r\n },\r\n play: function() {},\r\n pause: function() {},\r\n drawImage: function() {},\r\n toDataURL: function() {\r\n return '';\r\n },\r\n style: {}\r\n };\r\n return obj;\r\n };\r\n\r\n that.HTMLVideoElement = function() {};\r\n }\r\n\r\n if (typeof location === 'undefined') {\r\n /*global location:true */\r\n that.location = {\r\n protocol: 'file:',\r\n href: '',\r\n hash: ''\r\n };\r\n }\r\n\r\n if (typeof screen === 'undefined') {\r\n /*global screen:true */\r\n that.screen = {\r\n width: 0,\r\n height: 0\r\n };\r\n }\r\n\r\n if (typeof URL === 'undefined') {\r\n /*global screen:true */\r\n that.URL = {\r\n createObjectURL: function() {\r\n return '';\r\n },\r\n revokeObjectURL: function() {\r\n return '';\r\n }\r\n };\r\n }\r\n\r\n /*global window:true */\r\n that.window = global;\r\n })(typeof global !== 'undefined' ? global : null);\r\n\r\n // requires: chrome://flags/#enable-experimental-web-platform-features\r\n\r\n elementClass = elementClass || 'multi-streams-mixer';\r\n\r\n var videos = [];\r\n var isStopDrawingFrames = false;\r\n\r\n var canvas = document.createElement('canvas');\r\n var context = canvas.getContext('2d');\r\n canvas.style.opacity = 0;\r\n canvas.style.position = 'absolute';\r\n canvas.style.zIndex = -1;\r\n canvas.style.top = '-1000em';\r\n canvas.style.left = '-1000em';\r\n canvas.className = elementClass;\r\n (document.body || document.documentElement).appendChild(canvas);\r\n\r\n this.disableLogs = false;\r\n this.frameInterval = 10;\r\n\r\n this.width = 360;\r\n this.height = 240;\r\n\r\n // use gain node to prevent echo\r\n this.useGainNode = true;\r\n\r\n var self = this;\r\n\r\n // _____________________________\r\n // Cross-Browser-Declarations.js\r\n\r\n // WebAudio API representer\r\n var AudioContext = window.AudioContext;\r\n\r\n if (typeof AudioContext === 'undefined') {\r\n if (typeof webkitAudioContext !== 'undefined') {\r\n /*global AudioContext:true */\r\n AudioContext = webkitAudioContext;\r\n }\r\n\r\n if (typeof mozAudioContext !== 'undefined') {\r\n /*global AudioContext:true */\r\n AudioContext = mozAudioContext;\r\n }\r\n }\r\n\r\n /*jshint -W079 */\r\n var URL = window.URL;\r\n\r\n if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') {\r\n /*global URL:true */\r\n URL = webkitURL;\r\n }\r\n\r\n if (typeof navigator !== 'undefined' && typeof navigator.getUserMedia === 'undefined') { // maybe window.navigator?\r\n if (typeof navigator.webkitGetUserMedia !== 'undefined') {\r\n navigator.getUserMedia = navigator.webkitGetUserMedia;\r\n }\r\n\r\n if (typeof navigator.mozGetUserMedia !== 'undefined') {\r\n navigator.getUserMedia = navigator.mozGetUserMedia;\r\n }\r\n }\r\n\r\n var MediaStream = window.MediaStream;\r\n\r\n if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {\r\n MediaStream = webkitMediaStream;\r\n }\r\n\r\n /*global MediaStream:true */\r\n if (typeof MediaStream !== 'undefined') {\r\n // override \"stop\" method for all browsers\r\n if (typeof MediaStream.prototype.stop === 'undefined') {\r\n MediaStream.prototype.stop = function() {\r\n this.getTracks().forEach(function(track) {\r\n track.stop();\r\n });\r\n };\r\n }\r\n }\r\n\r\n var Storage = {};\r\n\r\n if (typeof AudioContext !== 'undefined') {\r\n Storage.AudioContext = AudioContext;\r\n } else if (typeof webkitAudioContext !== 'undefined') {\r\n Storage.AudioContext = webkitAudioContext;\r\n }\r\n\r\n function setSrcObject(stream, element) {\r\n if ('srcObject' in element) {\r\n element.srcObject = stream;\r\n } else if ('mozSrcObject' in element) {\r\n element.mozSrcObject = stream;\r\n } else {\r\n element.srcObject = stream;\r\n }\r\n }\r\n\r\n this.startDrawingFrames = function() {\r\n drawVideosToCanvas();\r\n };\r\n\r\n function drawVideosToCanvas() {\r\n if (isStopDrawingFrames) {\r\n return;\r\n }\r\n\r\n var videosLength = videos.length;\r\n\r\n var fullcanvas = false;\r\n var remaining = [];\r\n videos.forEach(function(video) {\r\n if (!video.stream) {\r\n video.stream = {};\r\n }\r\n\r\n if (video.stream.fullcanvas) {\r\n fullcanvas = video;\r\n } else {\r\n // todo: video.stream.active or video.stream.live to fix blank frames issues?\r\n remaining.push(video);\r\n }\r\n });\r\n\r\n if (fullcanvas) {\r\n canvas.width = fullcanvas.stream.width;\r\n canvas.height = fullcanvas.stream.height;\r\n } else if (remaining.length) {\r\n canvas.width = videosLength > 1 ? remaining[0].width * 2 : remaining[0].width;\r\n\r\n var height = 1;\r\n if (videosLength === 3 || videosLength === 4) {\r\n height = 2;\r\n }\r\n if (videosLength === 5 || videosLength === 6) {\r\n height = 3;\r\n }\r\n if (videosLength === 7 || videosLength === 8) {\r\n height = 4;\r\n }\r\n if (videosLength === 9 || videosLength === 10) {\r\n height = 5;\r\n }\r\n canvas.height = remaining[0].height * height;\r\n } else {\r\n canvas.width = self.width || 360;\r\n canvas.height = self.height || 240;\r\n }\r\n\r\n if (fullcanvas && fullcanvas instanceof HTMLVideoElement) {\r\n drawImage(fullcanvas);\r\n }\r\n\r\n remaining.forEach(function(video, idx) {\r\n drawImage(video, idx);\r\n });\r\n\r\n setTimeout(drawVideosToCanvas, self.frameInterval);\r\n }\r\n\r\n function drawImage(video, idx) {\r\n if (isStopDrawingFrames) {\r\n return;\r\n }\r\n\r\n var x = 0;\r\n var y = 0;\r\n var width = video.width;\r\n var height = video.height;\r\n\r\n if (idx === 1) {\r\n x = video.width;\r\n }\r\n\r\n if (idx === 2) {\r\n y = video.height;\r\n }\r\n\r\n if (idx === 3) {\r\n x = video.width;\r\n y = video.height;\r\n }\r\n\r\n if (idx === 4) {\r\n y = video.height * 2;\r\n }\r\n\r\n if (idx === 5) {\r\n x = video.width;\r\n y = video.height * 2;\r\n }\r\n\r\n if (idx === 6) {\r\n y = video.height * 3;\r\n }\r\n\r\n if (idx === 7) {\r\n x = video.width;\r\n y = video.height * 3;\r\n }\r\n\r\n if (typeof video.stream.left !== 'undefined') {\r\n x = video.stream.left;\r\n }\r\n\r\n if (typeof video.stream.top !== 'undefined') {\r\n y = video.stream.top;\r\n }\r\n\r\n if (typeof video.stream.width !== 'undefined') {\r\n width = video.stream.width;\r\n }\r\n\r\n if (typeof video.stream.height !== 'undefined') {\r\n height = video.stream.height;\r\n }\r\n\r\n context.drawImage(video, x, y, width, height);\r\n\r\n if (typeof video.stream.onRender === 'function') {\r\n video.stream.onRender(context, x, y, width, height, idx);\r\n }\r\n }\r\n\r\n function getMixedStream() {\r\n isStopDrawingFrames = false;\r\n var mixedVideoStream = getMixedVideoStream();\r\n\r\n var mixedAudioStream = getMixedAudioStream();\r\n if (mixedAudioStream) {\r\n mixedAudioStream.getTracks().filter(function(t) {\r\n return t.kind === 'audio';\r\n }).forEach(function(track) {\r\n mixedVideoStream.addTrack(track);\r\n });\r\n }\r\n\r\n var fullcanvas;\r\n arrayOfMediaStreams.forEach(function(stream) {\r\n if (stream.fullcanvas) {\r\n fullcanvas = true;\r\n }\r\n });\r\n\r\n // mixedVideoStream.prototype.appendStreams = appendStreams;\r\n // mixedVideoStream.prototype.resetVideoStreams = resetVideoStreams;\r\n // mixedVideoStream.prototype.clearRecordedData = clearRecordedData;\r\n\r\n return mixedVideoStream;\r\n }\r\n\r\n function getMixedVideoStream() {\r\n resetVideoStreams();\r\n\r\n var capturedStream;\r\n\r\n if ('captureStream' in canvas) {\r\n capturedStream = canvas.captureStream();\r\n } else if ('mozCaptureStream' in canvas) {\r\n capturedStream = canvas.mozCaptureStream();\r\n } else if (!self.disableLogs) {\r\n console.error('Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features');\r\n }\r\n\r\n var videoStream = new MediaStream();\r\n\r\n capturedStream.getTracks().filter(function(t) {\r\n return t.kind === 'video';\r\n }).forEach(function(track) {\r\n videoStream.addTrack(track);\r\n });\r\n\r\n canvas.stream = videoStream;\r\n\r\n return videoStream;\r\n }\r\n\r\n function getMixedAudioStream() {\r\n // via: @pehrsons\r\n if (!Storage.AudioContextConstructor) {\r\n Storage.AudioContextConstructor = new Storage.AudioContext();\r\n }\r\n\r\n self.audioContext = Storage.AudioContextConstructor;\r\n\r\n self.audioSources = [];\r\n\r\n if (self.useGainNode === true) {\r\n self.gainNode = self.audioContext.createGain();\r\n self.gainNode.connect(self.audioContext.destination);\r\n self.gainNode.gain.value = 0; // don't hear self\r\n }\r\n\r\n var audioTracksLength = 0;\r\n arrayOfMediaStreams.forEach(function(stream) {\r\n if (!stream.getTracks().filter(function(t) {\r\n return t.kind === 'audio';\r\n }).length) {\r\n return;\r\n }\r\n\r\n audioTracksLength++;\r\n\r\n var audioSource = self.audioContext.createMediaStreamSource(stream);\r\n\r\n if (self.useGainNode === true) {\r\n audioSource.connect(self.gainNode);\r\n }\r\n\r\n self.audioSources.push(audioSource);\r\n });\r\n\r\n if (!audioTracksLength) {\r\n // because \"self.audioContext\" is not initialized\r\n // that's why we've to ignore rest of the code\r\n return;\r\n }\r\n\r\n self.audioDestination = self.audioContext.createMediaStreamDestination();\r\n self.audioSources.forEach(function(audioSource) {\r\n audioSource.connect(self.audioDestination);\r\n });\r\n return self.audioDestination.stream;\r\n }\r\n\r\n function getVideo(stream) {\r\n var video = document.createElement('video');\r\n\r\n setSrcObject(stream, video);\r\n\r\n video.className = elementClass;\r\n\r\n video.muted = true;\r\n video.volume = 0;\r\n\r\n video.width = stream.width || self.width || 360;\r\n video.height = stream.height || self.height || 240;\r\n\r\n video.play();\r\n\r\n return video;\r\n }\r\n\r\n this.appendStreams = function(streams) {\r\n if (!streams) {\r\n throw 'First parameter is required.';\r\n }\r\n\r\n if (!(streams instanceof Array)) {\r\n streams = [streams];\r\n }\r\n\r\n streams.forEach(function(stream) {\r\n var newStream = new MediaStream();\r\n\r\n if (stream.getTracks().filter(function(t) {\r\n return t.kind === 'video';\r\n }).length) {\r\n var video = getVideo(stream);\r\n video.stream = stream;\r\n videos.push(video);\r\n\r\n newStream.addTrack(stream.getTracks().filter(function(t) {\r\n return t.kind === 'video';\r\n })[0]);\r\n }\r\n\r\n if (stream.getTracks().filter(function(t) {\r\n return t.kind === 'audio';\r\n }).length) {\r\n var audioSource = self.audioContext.createMediaStreamSource(stream);\r\n self.audioDestination = self.audioContext.createMediaStreamDestination();\r\n audioSource.connect(self.audioDestination);\r\n\r\n newStream.addTrack(self.audioDestination.stream.getTracks().filter(function(t) {\r\n return t.kind === 'audio';\r\n })[0]);\r\n }\r\n\r\n arrayOfMediaStreams.push(newStream);\r\n });\r\n };\r\n\r\n this.releaseStreams = function() {\r\n videos = [];\r\n isStopDrawingFrames = true;\r\n\r\n if (self.gainNode) {\r\n self.gainNode.disconnect();\r\n self.gainNode = null;\r\n }\r\n\r\n if (self.audioSources.length) {\r\n self.audioSources.forEach(function(source) {\r\n source.disconnect();\r\n });\r\n self.audioSources = [];\r\n }\r\n\r\n if (self.audioDestination) {\r\n self.audioDestination.disconnect();\r\n self.audioDestination = null;\r\n }\r\n\r\n if (self.audioContext) {\r\n self.audioContext.close();\r\n }\r\n\r\n self.audioContext = null;\r\n\r\n context.clearRect(0, 0, canvas.width, canvas.height);\r\n\r\n if (canvas.stream) {\r\n canvas.stream.stop();\r\n canvas.stream = null;\r\n }\r\n };\r\n\r\n this.resetVideoStreams = function(streams) {\r\n if (streams && !(streams instanceof Array)) {\r\n streams = [streams];\r\n }\r\n\r\n resetVideoStreams(streams);\r\n };\r\n\r\n function resetVideoStreams(streams) {\r\n videos = [];\r\n streams = streams || arrayOfMediaStreams;\r\n\r\n // via: @adrian-ber\r\n streams.forEach(function(stream) {\r\n if (!stream.getTracks().filter(function(t) {\r\n return t.kind === 'video';\r\n }).length) {\r\n return;\r\n }\r\n\r\n var video = getVideo(stream);\r\n video.stream = stream;\r\n videos.push(video);\r\n });\r\n }\r\n\r\n // for debugging\r\n this.name = 'MultiStreamsMixer';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n this.getMixedStream = getMixedStream;\r\n\r\n}\r\n\r\nif (typeof RecordRTC === 'undefined') {\r\n if (typeof module !== 'undefined' /* && !!module.exports*/ ) {\r\n module.exports = MultiStreamsMixer;\r\n }\r\n\r\n if (typeof define === 'function' && define.amd) {\r\n define('MultiStreamsMixer', [], function() {\r\n return MultiStreamsMixer;\r\n });\r\n }\r\n}\n\r\n// ______________________\r\n// MultiStreamRecorder.js\r\n\r\n/*\r\n * Video conference recording, using captureStream API along with WebAudio and Canvas2D API.\r\n */\r\n\r\n/**\r\n * MultiStreamRecorder can record multiple videos in single container.\r\n * @summary Multi-videos recorder.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef MultiStreamRecorder\r\n * @class\r\n * @example\r\n * var options = {\r\n * mimeType: 'video/webm'\r\n * }\r\n * var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n *\r\n * // or\r\n * var blob = recorder.blob;\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStreams} mediaStreams - Array of MediaStreams.\r\n * @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: \"video/webm\"}\r\n */\r\n\r\nfunction MultiStreamRecorder(arrayOfMediaStreams, options) {\r\n arrayOfMediaStreams = arrayOfMediaStreams || [];\r\n var self = this;\r\n\r\n var mixer;\r\n var mediaRecorder;\r\n\r\n options = options || {\r\n elementClass: 'multi-streams-mixer',\r\n mimeType: 'video/webm',\r\n video: {\r\n width: 360,\r\n height: 240\r\n }\r\n };\r\n\r\n if (!options.frameInterval) {\r\n options.frameInterval = 10;\r\n }\r\n\r\n if (!options.video) {\r\n options.video = {};\r\n }\r\n\r\n if (!options.video.width) {\r\n options.video.width = 360;\r\n }\r\n\r\n if (!options.video.height) {\r\n options.video.height = 240;\r\n }\r\n\r\n /**\r\n * This method records all MediaStreams.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n // github/muaz-khan/MultiStreamsMixer\r\n mixer = new MultiStreamsMixer(arrayOfMediaStreams, options.elementClass || 'multi-streams-mixer');\r\n\r\n if (getAllVideoTracks().length) {\r\n mixer.frameInterval = options.frameInterval || 10;\r\n mixer.width = options.video.width || 360;\r\n mixer.height = options.video.height || 240;\r\n mixer.startDrawingFrames();\r\n }\r\n\r\n if (options.previewStream && typeof options.previewStream === 'function') {\r\n options.previewStream(mixer.getMixedStream());\r\n }\r\n\r\n // record using MediaRecorder API\r\n mediaRecorder = new MediaStreamRecorder(mixer.getMixedStream(), options);\r\n mediaRecorder.record();\r\n };\r\n\r\n function getAllVideoTracks() {\r\n var tracks = [];\r\n arrayOfMediaStreams.forEach(function(stream) {\r\n getTracks(stream, 'video').forEach(function(track) {\r\n tracks.push(track);\r\n });\r\n });\r\n return tracks;\r\n }\r\n\r\n /**\r\n * This method stops recording MediaStream.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n if (!mediaRecorder) {\r\n return;\r\n }\r\n\r\n mediaRecorder.stop(function(blob) {\r\n self.blob = blob;\r\n\r\n callback(blob);\r\n\r\n self.clearRecordedData();\r\n });\r\n };\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n if (mediaRecorder) {\r\n mediaRecorder.pause();\r\n }\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n if (mediaRecorder) {\r\n mediaRecorder.resume();\r\n }\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n if (mediaRecorder) {\r\n mediaRecorder.clearRecordedData();\r\n mediaRecorder = null;\r\n }\r\n\r\n if (mixer) {\r\n mixer.releaseStreams();\r\n mixer = null;\r\n }\r\n };\r\n\r\n /**\r\n * Add extra media-streams to existing recordings.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @param {MediaStreams} mediaStreams - Array of MediaStreams\r\n * @example\r\n * recorder.addStreams([newAudioStream, newVideoStream]);\r\n */\r\n this.addStreams = function(streams) {\r\n if (!streams) {\r\n throw 'First parameter is required.';\r\n }\r\n\r\n if (!(streams instanceof Array)) {\r\n streams = [streams];\r\n }\r\n\r\n arrayOfMediaStreams.concat(streams);\r\n\r\n if (!mediaRecorder || !mixer) {\r\n return;\r\n }\r\n\r\n mixer.appendStreams(streams);\r\n\r\n if (options.previewStream && typeof options.previewStream === 'function') {\r\n options.previewStream(mixer.getMixedStream());\r\n }\r\n };\r\n\r\n /**\r\n * Reset videos during live recording. Replace old videos e.g. replace cameras with full-screen.\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @param {MediaStreams} mediaStreams - Array of MediaStreams\r\n * @example\r\n * recorder.resetVideoStreams([newVideo1, newVideo2]);\r\n */\r\n this.resetVideoStreams = function(streams) {\r\n if (!mixer) {\r\n return;\r\n }\r\n\r\n if (streams && !(streams instanceof Array)) {\r\n streams = [streams];\r\n }\r\n\r\n mixer.resetVideoStreams(streams);\r\n };\r\n\r\n /**\r\n * Returns MultiStreamsMixer\r\n * @method\r\n * @memberof MultiStreamRecorder\r\n * @example\r\n * let mixer = recorder.getMixer();\r\n * mixer.appendStreams([newStream]);\r\n */\r\n this.getMixer = function() {\r\n return mixer;\r\n };\r\n\r\n // for debugging\r\n this.name = 'MultiStreamRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.MultiStreamRecorder = MultiStreamRecorder;\r\n}\n\r\n// _____________________\r\n// RecordRTC.promises.js\r\n\r\n/**\r\n * RecordRTCPromisesHandler adds promises support in {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here}\r\n * @summary Promises for {@link RecordRTC}\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef RecordRTCPromisesHandler\r\n * @class\r\n * @example\r\n * var recorder = new RecordRTCPromisesHandler(mediaStream, options);\r\n * recorder.startRecording()\r\n * .then(successCB)\r\n * .catch(errorCB);\r\n * // Note: You can access all RecordRTC API using \"recorder.recordRTC\" e.g. \r\n * recorder.recordRTC.onStateChanged = function(state) {};\r\n * recorder.recordRTC.setRecordingDuration(5000);\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.\r\n * @param {object} config - {type:\"video\", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}\r\n * @throws Will throw an error if \"new\" keyword is not used to initiate \"RecordRTCPromisesHandler\". Also throws error if first argument \"MediaStream\" is missing.\r\n * @requires {@link RecordRTC}\r\n */\r\n\r\nfunction RecordRTCPromisesHandler(mediaStream, options) {\r\n if (!this) {\r\n throw 'Use \"new RecordRTCPromisesHandler()\"';\r\n }\r\n\r\n if (typeof mediaStream === 'undefined') {\r\n throw 'First argument \"MediaStream\" is required.';\r\n }\r\n\r\n var self = this;\r\n\r\n /**\r\n * @property {Blob} blob - Access/reach the native {@link RecordRTC} object.\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * let internal = recorder.recordRTC.getInternalRecorder();\r\n * alert(internal instanceof MediaStreamRecorder);\r\n * recorder.recordRTC.onStateChanged = function(state) {};\r\n */\r\n self.recordRTC = new RecordRTC(mediaStream, options);\r\n\r\n /**\r\n * This method records MediaStream.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.startRecording()\r\n * .then(successCB)\r\n * .catch(errorCB);\r\n */\r\n this.startRecording = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n self.recordRTC.startRecording();\r\n resolve();\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method stops the recording.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.stopRecording().then(function() {\r\n * var blob = recorder.getBlob();\r\n * }).catch(errorCB);\r\n */\r\n this.stopRecording = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n self.recordRTC.stopRecording(function(url) {\r\n self.blob = self.recordRTC.getBlob();\r\n\r\n if (!self.blob || !self.blob.size) {\r\n reject('Empty blob.', self.blob);\r\n return;\r\n }\r\n\r\n resolve(url);\r\n });\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method pauses the recording. You can resume recording using \"resumeRecording\" method.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.pauseRecording()\r\n * .then(successCB)\r\n * .catch(errorCB);\r\n */\r\n this.pauseRecording = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n self.recordRTC.pauseRecording();\r\n resolve();\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method resumes the recording.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.resumeRecording()\r\n * .then(successCB)\r\n * .catch(errorCB);\r\n */\r\n this.resumeRecording = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n self.recordRTC.resumeRecording();\r\n resolve();\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method returns data-url for the recorded blob.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.stopRecording().then(function() {\r\n * recorder.getDataURL().then(function(dataURL) {\r\n * window.open(dataURL);\r\n * }).catch(errorCB);;\r\n * }).catch(errorCB);\r\n */\r\n this.getDataURL = function(callback) {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n self.recordRTC.getDataURL(function(dataURL) {\r\n resolve(dataURL);\r\n });\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method returns the recorded blob.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.stopRecording().then(function() {\r\n * recorder.getBlob().then(function(blob) {})\r\n * }).catch(errorCB);\r\n */\r\n this.getBlob = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n resolve(self.recordRTC.getBlob());\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method returns the internal recording object.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * let internalRecorder = await recorder.getInternalRecorder();\r\n * if(internalRecorder instanceof MultiStreamRecorder) {\r\n * internalRecorder.addStreams([newAudioStream]);\r\n * internalRecorder.resetVideoStreams([screenStream]);\r\n * }\r\n * @returns {Object} \r\n */\r\n this.getInternalRecorder = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n resolve(self.recordRTC.getInternalRecorder());\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * This method resets the recorder. So that you can reuse single recorder instance many times.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * await recorder.reset();\r\n * recorder.startRecording(); // record again\r\n */\r\n this.reset = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n resolve(self.recordRTC.reset());\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * Destroy RecordRTC instance. Clear all recorders and objects.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * recorder.destroy().then(successCB).catch(errorCB);\r\n */\r\n this.destroy = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n resolve(self.recordRTC.destroy());\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * Get recorder's readonly state.\r\n * @method\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * let state = await recorder.getState();\r\n * // or\r\n * recorder.getState().then(state => { console.log(state); })\r\n * @returns {String} Returns recording state.\r\n */\r\n this.getState = function() {\r\n return new Promise(function(resolve, reject) {\r\n try {\r\n resolve(self.recordRTC.getState());\r\n } catch (e) {\r\n reject(e);\r\n }\r\n });\r\n };\r\n\r\n /**\r\n * @property {Blob} blob - Recorded data as \"Blob\" object.\r\n * @memberof RecordRTCPromisesHandler\r\n * @example\r\n * await recorder.stopRecording();\r\n * let blob = recorder.getBlob(); // or \"recorder.recordRTC.blob\"\r\n * invokeSaveAsDialog(blob);\r\n */\r\n this.blob = null;\r\n\r\n /**\r\n * RecordRTC version number\r\n * @property {String} version - Release version number.\r\n * @memberof RecordRTCPromisesHandler\r\n * @static\r\n * @readonly\r\n * @example\r\n * alert(recorder.version);\r\n */\r\n this.version = '5.6.2';\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.RecordRTCPromisesHandler = RecordRTCPromisesHandler;\r\n}\n\r\n// ______________________\r\n// WebAssemblyRecorder.js\r\n\r\n/**\r\n * WebAssemblyRecorder lets you create webm videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a webm video with the given framerate and quality. This makes it compatible out-of-the-box with ImageData from a CANVAS. With realtime mode you can also use webm-wasm for streaming webm videos.\r\n * @summary Video recording feature in Chrome, Firefox and maybe Edge.\r\n * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}\r\n * @author {@link https://MuazKhan.com|Muaz Khan}\r\n * @typedef WebAssemblyRecorder\r\n * @class\r\n * @example\r\n * var recorder = new WebAssemblyRecorder(mediaStream);\r\n * recorder.record();\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}\r\n * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.\r\n * @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024, realtime: true}\r\n */\r\nfunction WebAssemblyRecorder(stream, config) {\r\n // based on: github.com/GoogleChromeLabs/webm-wasm\r\n\r\n if (typeof ReadableStream === 'undefined' || typeof WritableStream === 'undefined') {\r\n // because it fixes readable/writable streams issues\r\n console.error('Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js');\r\n }\r\n\r\n config = config || {};\r\n\r\n config.width = config.width || 640;\r\n config.height = config.height || 480;\r\n config.frameRate = config.frameRate || 30;\r\n config.bitrate = config.bitrate || 1200;\r\n config.realtime = config.realtime || true;\r\n\r\n function createBufferURL(buffer, type) {\r\n return URL.createObjectURL(new Blob([buffer], {\r\n type: type || ''\r\n }));\r\n }\r\n\r\n var finished;\r\n\r\n function cameraStream() {\r\n return new ReadableStream({\r\n start: function(controller) {\r\n var cvs = document.createElement('canvas');\r\n var video = document.createElement('video');\r\n var first = true;\r\n video.srcObject = stream;\r\n video.muted = true;\r\n video.height = config.height;\r\n video.width = config.width;\r\n video.volume = 0;\r\n video.onplaying = function() {\r\n cvs.width = config.width;\r\n cvs.height = config.height;\r\n var ctx = cvs.getContext('2d');\r\n var frameTimeout = 1000 / config.frameRate;\r\n var cameraTimer = setInterval(function f() {\r\n if (finished) {\r\n clearInterval(cameraTimer);\r\n controller.close();\r\n }\r\n\r\n if (first) {\r\n first = false;\r\n if (config.onVideoProcessStarted) {\r\n config.onVideoProcessStarted();\r\n }\r\n }\r\n\r\n ctx.drawImage(video, 0, 0);\r\n if (controller._controlledReadableStream.state !== 'closed') {\r\n try {\r\n controller.enqueue(\r\n ctx.getImageData(0, 0, config.width, config.height)\r\n );\r\n } catch (e) {}\r\n }\r\n }, frameTimeout);\r\n };\r\n video.play();\r\n }\r\n });\r\n }\r\n\r\n var worker;\r\n\r\n function startRecording(stream, buffer) {\r\n if (!config.workerPath && !buffer) {\r\n finished = false;\r\n\r\n // is it safe to use @latest ?\r\n\r\n fetch(\r\n 'https://unpkg.com/webm-wasm@latest/dist/webm-worker.js'\r\n ).then(function(r) {\r\n r.arrayBuffer().then(function(buffer) {\r\n startRecording(stream, buffer);\r\n });\r\n });\r\n return;\r\n }\r\n\r\n if (!config.workerPath && buffer instanceof ArrayBuffer) {\r\n var blob = new Blob([buffer], {\r\n type: 'text/javascript'\r\n });\r\n config.workerPath = URL.createObjectURL(blob);\r\n }\r\n\r\n if (!config.workerPath) {\r\n console.error('workerPath parameter is missing.');\r\n }\r\n\r\n worker = new Worker(config.workerPath);\r\n\r\n worker.postMessage(config.webAssemblyPath || 'https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm');\r\n worker.addEventListener('message', function(event) {\r\n if (event.data === 'READY') {\r\n worker.postMessage({\r\n width: config.width,\r\n height: config.height,\r\n bitrate: config.bitrate || 1200,\r\n timebaseDen: config.frameRate || 30,\r\n realtime: config.realtime\r\n });\r\n\r\n cameraStream().pipeTo(new WritableStream({\r\n write: function(image) {\r\n if (finished) {\r\n console.error('Got image, but recorder is finished!');\r\n return;\r\n }\r\n\r\n worker.postMessage(image.data.buffer, [image.data.buffer]);\r\n }\r\n }));\r\n } else if (!!event.data) {\r\n if (!isPaused) {\r\n arrayOfBuffers.push(event.data);\r\n }\r\n }\r\n });\r\n }\r\n\r\n /**\r\n * This method records video.\r\n * @method\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.record();\r\n */\r\n this.record = function() {\r\n arrayOfBuffers = [];\r\n isPaused = false;\r\n this.blob = null;\r\n startRecording(stream);\r\n\r\n if (typeof config.initCallback === 'function') {\r\n config.initCallback();\r\n }\r\n };\r\n\r\n var isPaused;\r\n\r\n /**\r\n * This method pauses the recording process.\r\n * @method\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.pause();\r\n */\r\n this.pause = function() {\r\n isPaused = true;\r\n };\r\n\r\n /**\r\n * This method resumes the recording process.\r\n * @method\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.resume();\r\n */\r\n this.resume = function() {\r\n isPaused = false;\r\n };\r\n\r\n function terminate(callback) {\r\n if (!worker) {\r\n if (callback) {\r\n callback();\r\n }\r\n\r\n return;\r\n }\r\n\r\n // Wait for null event data to indicate that the encoding is complete\r\n worker.addEventListener('message', function(event) {\r\n if (event.data === null) {\r\n worker.terminate();\r\n worker = null;\r\n\r\n if (callback) {\r\n callback();\r\n }\r\n }\r\n });\r\n\r\n worker.postMessage(null);\r\n }\r\n\r\n var arrayOfBuffers = [];\r\n\r\n /**\r\n * This method stops recording video.\r\n * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.\r\n * @method\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.stop(function(blob) {\r\n * video.src = URL.createObjectURL(blob);\r\n * });\r\n */\r\n this.stop = function(callback) {\r\n finished = true;\r\n\r\n var recorder = this;\r\n\r\n terminate(function() {\r\n recorder.blob = new Blob(arrayOfBuffers, {\r\n type: 'video/webm'\r\n });\r\n\r\n callback(recorder.blob);\r\n });\r\n };\r\n\r\n // for debugging\r\n this.name = 'WebAssemblyRecorder';\r\n this.toString = function() {\r\n return this.name;\r\n };\r\n\r\n /**\r\n * This method resets currently recorded data.\r\n * @method\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.clearRecordedData();\r\n */\r\n this.clearRecordedData = function() {\r\n arrayOfBuffers = [];\r\n isPaused = false;\r\n this.blob = null;\r\n\r\n // todo: if recording-ON then STOP it first\r\n };\r\n\r\n /**\r\n * @property {Blob} blob - The recorded blob object.\r\n * @memberof WebAssemblyRecorder\r\n * @example\r\n * recorder.stop(function(){\r\n * var blob = recorder.blob;\r\n * });\r\n */\r\n this.blob = null;\r\n}\r\n\r\nif (typeof RecordRTC !== 'undefined') {\r\n RecordRTC.WebAssemblyRecorder = WebAssemblyRecorder;\r\n}\n","/**\n * @license React\n * scheduler.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';function f(a,b){var c=a.length;a.push(b);a:for(;0a||125d?(a.sortIndex=c,f(t,a),null===h(r)&&a===h(t)&&(B?(E(L),L=-1):B=!0,K(H,c-d))):(a.sortIndex=e,f(r,a),A||z||(A=!0,I(J)));return a};\nexports.unstable_shouldYield=M;exports.unstable_wrapCallback=function(a){var b=y;return function(){var c=y;y=b;try{return a.apply(this,arguments)}finally{y=c}}};\n","'use strict';\n\nif (process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/scheduler.production.min.js');\n} else {\n module.exports = require('./cjs/scheduler.development.js');\n}\n","function _interopRequireDefault(obj) {\n return obj && obj.__esModule ? obj : {\n \"default\": obj\n };\n}\nmodule.exports = _interopRequireDefault, module.exports.__esModule = true, module.exports[\"default\"] = module.exports;","export default function _arrayLikeToArray(arr, len) {\n if (len == null || len > arr.length) len = arr.length;\n for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];\n return arr2;\n}","import toPropertyKey from \"./toPropertyKey.js\";\nexport default function _defineProperty(obj, key, value) {\n key = toPropertyKey(key);\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n return obj;\n}","export default function _extends() {\n _extends = Object.assign ? Object.assign.bind() : function (target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i];\n for (var key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n target[key] = source[key];\n }\n }\n }\n return target;\n };\n return _extends.apply(this, arguments);\n}","export default function _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n return target;\n}","import arrayWithHoles from \"./arrayWithHoles.js\";\nimport iterableToArrayLimit from \"./iterableToArrayLimit.js\";\nimport unsupportedIterableToArray from \"./unsupportedIterableToArray.js\";\nimport nonIterableRest from \"./nonIterableRest.js\";\nexport default function _slicedToArray(arr, i) {\n return arrayWithHoles(arr) || iterableToArrayLimit(arr, i) || unsupportedIterableToArray(arr, i) || nonIterableRest();\n}","export default function _arrayWithHoles(arr) {\n if (Array.isArray(arr)) return arr;\n}","export default function _iterableToArrayLimit(arr, i) {\n var _i = null == arr ? null : \"undefined\" != typeof Symbol && arr[Symbol.iterator] || arr[\"@@iterator\"];\n if (null != _i) {\n var _s,\n _e,\n _x,\n _r,\n _arr = [],\n _n = !0,\n _d = !1;\n try {\n if (_x = (_i = _i.call(arr)).next, 0 === i) {\n if (Object(_i) !== _i) return;\n _n = !1;\n } else for (; !(_n = (_s = _x.call(_i)).done) && (_arr.push(_s.value), _arr.length !== i); _n = !0);\n } catch (err) {\n _d = !0, _e = err;\n } finally {\n try {\n if (!_n && null != _i[\"return\"] && (_r = _i[\"return\"](), Object(_r) !== _r)) return;\n } finally {\n if (_d) throw _e;\n }\n }\n return _arr;\n }\n}","export default function _nonIterableRest() {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\");\n}","import arrayWithoutHoles from \"./arrayWithoutHoles.js\";\nimport iterableToArray from \"./iterableToArray.js\";\nimport unsupportedIterableToArray from \"./unsupportedIterableToArray.js\";\nimport nonIterableSpread from \"./nonIterableSpread.js\";\nexport default function _toConsumableArray(arr) {\n return arrayWithoutHoles(arr) || iterableToArray(arr) || unsupportedIterableToArray(arr) || nonIterableSpread();\n}","import arrayLikeToArray from \"./arrayLikeToArray.js\";\nexport default function _arrayWithoutHoles(arr) {\n if (Array.isArray(arr)) return arrayLikeToArray(arr);\n}","export default function _iterableToArray(iter) {\n if (typeof Symbol !== \"undefined\" && iter[Symbol.iterator] != null || iter[\"@@iterator\"] != null) return Array.from(iter);\n}","export default function _nonIterableSpread() {\n throw new TypeError(\"Invalid attempt to spread non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\");\n}","import _typeof from \"./typeof.js\";\nimport toPrimitive from \"./toPrimitive.js\";\nexport default function _toPropertyKey(arg) {\n var key = toPrimitive(arg, \"string\");\n return _typeof(key) === \"symbol\" ? key : String(key);\n}","import _typeof from \"./typeof.js\";\nexport default function _toPrimitive(input, hint) {\n if (_typeof(input) !== \"object\" || input === null) return input;\n var prim = input[Symbol.toPrimitive];\n if (prim !== undefined) {\n var res = prim.call(input, hint || \"default\");\n if (_typeof(res) !== \"object\") return res;\n throw new TypeError(\"@@toPrimitive must return a primitive value.\");\n }\n return (hint === \"string\" ? String : Number)(input);\n}","export default function _typeof(obj) {\n \"@babel/helpers - typeof\";\n\n return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? function (obj) {\n return typeof obj;\n } : function (obj) {\n return obj && \"function\" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj;\n }, _typeof(obj);\n}","import arrayLikeToArray from \"./arrayLikeToArray.js\";\nexport default function _unsupportedIterableToArray(o, minLen) {\n if (!o) return;\n if (typeof o === \"string\") return arrayLikeToArray(o, minLen);\n var n = Object.prototype.toString.call(o).slice(8, -1);\n if (n === \"Object\" && o.constructor) n = o.constructor.name;\n if (n === \"Map\" || n === \"Set\") return Array.from(o);\n if (n === \"Arguments\" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return arrayLikeToArray(o, minLen);\n}","function r(e){var t,f,n=\"\";if(\"string\"==typeof e||\"number\"==typeof e)n+=e;else if(\"object\"==typeof e)if(Array.isArray(e))for(t=0;t